diff --git a/.github/DISCUSSION_TEMPLATE/bugs.yml b/.github/DISCUSSION_TEMPLATE/bugs.yml index 711b85c88334..59d0ee088365 100644 --- a/.github/DISCUSSION_TEMPLATE/bugs.yml +++ b/.github/DISCUSSION_TEMPLATE/bugs.yml @@ -116,7 +116,7 @@ body: label: Checklist description: Have you tried the following? options: - - label: Run `trivy image --reset` + - label: Run `trivy clean --all` - label: Read [the troubleshooting](https://aquasecurity.github.io/trivy/latest/docs/references/troubleshooting/) - type: markdown attributes: diff --git a/.github/actions/trivy-triage/helpers.js b/.github/actions/trivy-triage/helpers.js index 121d5b38ffaa..3b477dfad5a6 100644 --- a/.github/actions/trivy-triage/helpers.js +++ b/.github/actions/trivy-triage/helpers.js @@ -5,7 +5,8 @@ module.exports = { const category = discussion.category.name; const body = discussion.body; if (category !== "Ideas") { - consolt.log("skipping discussion with category ${category} and body ${body}"); + console.log(`skipping discussion with category ${category} and body ${body}`); + return []; } const scannerPattern = /### Scanner\n\n(.+)/; const scannerFound = body.match(scannerPattern); diff --git a/.github/actions/trivy-triage/helpers.test.js b/.github/actions/trivy-triage/helpers.test.js index 3ef2ef810124..7db708bcfd3f 100644 --- a/.github/actions/trivy-triage/helpers.test.js +++ b/.github/actions/trivy-triage/helpers.test.js @@ -73,5 +73,15 @@ describe('trivy-triage', async function() { assert(!labels.includes('FilesystemLabel')); assert(!labels.includes('MisconfigurationLabel')); }); + it('process only relevant categories', async function() { + const discussion = { + body: 'hello world', + category: { + name: 'Announcements' + } + }; + const labels = detectDiscussionLabels(discussion, configDiscussionLabels); + assert(labels.length === 0); + }); }); }); diff --git a/.github/workflows/semantic-pr.yaml b/.github/workflows/semantic-pr.yaml index b136c9ae5ca9..4005520f1949 100644 --- a/.github/workflows/semantic-pr.yaml +++ b/.github/workflows/semantic-pr.yaml @@ -29,7 +29,6 @@ jobs: chore revert release - BREAKING scopes: | vuln diff --git a/.golangci.yaml b/.golangci.yaml index 83dae8d06e61..95c66cc69f73 100644 --- a/.golangci.yaml +++ b/.golangci.yaml @@ -1,4 +1,14 @@ linters-settings: + depguard: + rules: + main: + list-mode: lax + deny: + # Cannot use gomodguard, which examines go.mod, as "golang.org/x/exp/slices" is not a module and doesn't appear in go.mod. + - pkg: "golang.org/x/exp/slices" + desc: "Use 'slices' instead" + - pkg: "golang.org/x/exp/maps" + desc: "Use 'maps' or 'github.com/samber/lo' instead" dupl: threshold: 100 errcheck: @@ -74,13 +84,11 @@ linters-settings: ignore-generated-header: true testifylint: enable-all: true - disable: - - float-compare - linters: disable-all: true enable: - bodyclose + - depguard - gci - goconst - gocritic diff --git a/docs/community/contribute/pr.md b/docs/community/contribute/pr.md index 0f7cc70ec22e..0324bb6f8059 100644 --- a/docs/community/contribute/pr.md +++ b/docs/community/contribute/pr.md @@ -185,12 +185,20 @@ others: The `` can be empty (e.g. if the change is a global or difficult to assign to a single component), in which case the parentheses are omitted. +**Breaking changes** + +A PR, introducing a breaking API change, needs to append a `!` after the type/scope. 
+ ### Example titles ``` feat(alma): add support for AlmaLinux ``` +``` +feat(vuln)!: delete the existing CLI flag +``` + ``` fix(oracle): handle advisories with ksplice versions ``` diff --git a/docs/docs/compliance/compliance.md b/docs/docs/compliance/compliance.md index 2301fc3fb279..5ff9c6ac6652 100644 --- a/docs/docs/compliance/compliance.md +++ b/docs/docs/compliance/compliance.md @@ -35,9 +35,231 @@ to specify a built-in compliance report, select it by ID like `trivy --complianc For the list of built-in compliance reports, please see the relevant section: - [Docker compliance](../target/container_image.md#compliance) -- [Kubernetes compliance](../target/kubernetes.md#compliance) +- [Kubernetes compliance](../target/kubernetes.md#compliance) - [AWS compliance](../target/aws.md#compliance) +## Contribute a Built-in Compliance Report + +### Define a Compliance spec, based on CIS benchmark or other specs + +Here is an example for CIS compliance report: + +```yaml +--- +spec: + id: k8s-cis-1.23 + title: CIS Kubernetes Benchmarks v1.23 + description: CIS Kubernetes Benchmarks + platform: k8s + type: cis + version: '1.23' + relatedResources: + - https://www.cisecurity.org/benchmark/kubernetes + controls: + - id: 1.1.1 + name: Ensure that the API server pod specification file permissions are set to + 600 or more restrictive + description: Ensure that the API server pod specification file has permissions + of 600 or more restrictive + checks: + - id: AVD-KCV-0073 + commands: + - id: CMD-0001 + severity: HIGH + +``` + +### Compliance ID + +ID field is the name used to execute the compliance scan via trivy +example: + +```sh +trivy k8s --compliance k8s-cis-1.23 +``` + +ID naming convention: {platform}-{type}-{version} + +### Compliance Platform + +The platform field specifies the type of platform on which to run this compliance report. +Supported platforms: + +- k8s (native kubernetes cluster) +- eks (elastic kubernetes service) +- aks (azure kubernetes service) +- gke (google kubernetes engine) +- rke2 (rancher kubernetes engine v2) +- ocp (OpenShift Container Platform) +- docker (docker engine) +- aws (amazon web services) + +### Compliance Type + +The type field specifies the kind compliance report. + +- cis (Center for Internet Security) +- nsa (National Security Agency) +- pss (Pod Security Standards) + +### Compliance Version + +The version field specifies the version of the compliance report. + +- 1.23 + +### Compliance Check ID + +Specify the check ID that needs to be evaluated based on the information collected from the command data output to assess the control. + +Example of how to define check data under [checks folder](https://github.com/aquasecurity/trivy-checks/tree/main/checks): + +```sh +# METADATA +# title: "Ensure that the --kubeconfig kubelet.conf file permissions are set to 600 or more restrictive" +# description: "Ensure that the kubelet.conf file has permissions of 600 or more restrictive." +# scope: package +# schemas: +# - input: schema["kubernetes"] +# related_resources: +# - https://www.cisecurity.org/benchmark/kubernetes +# custom: +# id: KCV0073 +# avd_id: AVD-KCV-0073 +# severity: HIGH +# short_code: ensure-kubelet.conf-file-permissions-600-or-more-restrictive. 
+# recommended_action: "Change the kubelet.conf file permissions to 600 or more restrictive if exist"
+# input:
+#   selector:
+#   - type: kubernetes
+package builtin.kubernetes.KCV0073
+
+import data.lib.kubernetes
+
+types := ["master", "worker"]
+
+validate_kubelet_file_permission(sp) := {"kubeletConfFilePermissions": violation} {
+    sp.kind == "NodeInfo"
+    sp.type == types[_]
+    violation := {permission | permission = sp.info.kubeletConfFilePermissions.values[_]; permission > 600}
+    count(violation) > 0
+}
+
+deny[res] {
+    output := validate_kubelet_file_permission(input)
+    msg := "Ensure that the --kubeconfig kubelet.conf file permissions are set to 600 or more restrictive"
+    res := result.new(msg, output)
+}
+```
+
+### Compliance Command ID
+
+***Note:*** This field is not mandatory; it is only relevant to Kubernetes compliance reports when the node-collector is in use.
+
+Specify the command ID (#ref) that needs to be executed to collect the information required to evaluate the control.
+
+Example of how to define command data under the [commands folder](https://github.com/aquasecurity/trivy-checks/tree/main/commands):
+
+```yaml
+---
+- id: CMD-0001
+  key: kubeletConfFilePermissions
+  title: kubelet.conf file permissions
+  nodeType: worker
+  audit: stat -c %a $kubelet.kubeconfig
+  platfroms:
+  - k8s
+  - aks
+```
+
+#### Command ID
+
+Find the next command ID by running the following command in the [trivy-checks project](https://github.com/aquasecurity/trivy-checks):
+
+```sh
+make command-id
+```
+
+#### Command Key
+
+- Re-use an existing key or specify a new one (make sure the key name has no spaces)
+
+Note: The key value should match the key name evaluated by the Rego check.
+
+### Command Title
+
+Represents the purpose of the command.
+
+### Command NodeType
+
+Specify the node type on which the command is supposed to run.
+
+- worker
+- master
+
+### Command Audit
+
+Specify the shell command to be executed. Please make sure to add error suppression (`2>/dev/null`).
+
+### Command Platforms
+
+The list of platforms that support this command. Names should be taken from the [Platforms](#compliance-platform) list.
+
+### Command Config Files
+
+The commands use a configuration file that helps obtain the paths to binaries and configuration files based on different platforms (e.g., Rancher, native Kubernetes, etc.).
+
+For example:
+
+```yaml
+kubelet:
+  bins:
+    - kubelet
+    - hyperkube kubelet
+  confs:
+    - /etc/kubernetes/kubelet-config.yaml
+    - /var/lib/kubelet/config.yaml
+```
+
+### Commands Files Location
+
+Check files are currently located at `https://github.com/aquasecurity/trivy-checks/tree/main/checks`.
+
+Command files are located at `https://github.com/aquasecurity/trivy-checks/tree/main/commands`.
+
+Note: command config files are also located under `https://github.com/aquasecurity/trivy-checks/tree/main/commands`.
+
+### Node-collector output
+
+The node-collector reads and executes each command, and incorporates the output into the NodeInfo resource.
+
+Example:
+
+```json
+{
+  "apiVersion": "v1",
+  "kind": "NodeInfo",
+  "metadata": {
+    "creationTimestamp": "2023-01-04T11:37:11+02:00"
+  },
+  "type": "master",
+  "info": {
+    "adminConfFileOwnership": {
+      "values": [
+        "root:root"
+      ]
+    },
+    "adminConfFilePermissions": {
+      "values": [
+        600
+      ]
+    }
+    ...
+  }
+}
+```
+
 ## Custom compliance
 
 You can create your own custom compliance report.
A compliance report is a simple YAML document in the following format: diff --git a/docs/docs/configuration/cache.md b/docs/docs/configuration/cache.md index ff3a373c22ce..8817a2adb3ea 100644 --- a/docs/docs/configuration/cache.md +++ b/docs/docs/configuration/cache.md @@ -9,52 +9,81 @@ The cache directory includes The cache option is common to all scanners. ## Clear Caches -The `--clear-cache` option removes caches. +`trivy clean` subcommand removes caches. -**The scan is not performed.** - -``` -$ trivy image --clear-cache +```bash +$ trivy clean --scan-cache ```
Result ``` -2019-11-15T15:13:26.209+0200 INFO Reopening vulnerability DB -2019-11-15T15:13:26.209+0200 INFO Removing image caches... +2024-06-21T21:58:21+04:00 INFO Removing scan cache... ```
+If you want to delete cached vulnerability databases, use `--vuln-db`. +You can also delete all caches with `--all`. +See `trivy clean --help` for details. + ## Cache Directory Specify where the cache is stored with `--cache-dir`. -``` +```bash $ trivy --cache-dir /tmp/trivy/ image python:3.4-alpine3.9 ``` -## Cache Backend +## Scan Cache Backend !!! warning "EXPERIMENTAL" This feature might change without preserving backwards compatibility. -Trivy supports local filesystem and Redis as the cache backend. This option is useful especially for client/server mode. +Trivy utilizes a scan cache to store analysis results, such as package lists. +It supports three types of backends for this cache: -Two options: - -- `fs` - - the cache path can be specified by `--cache-dir` -- `redis://` +- Local File System (`fs`) + - The cache path can be specified by `--cache-dir` +- Memory (`memory`) +- Redis (`redis://`) - `redis://[HOST]:[PORT]` - TTL can be configured via `--cache-ttl` +### Local File System +The local file system backend is the default choice for container and VM image scans. +When scanning container images, it stores analysis results on a per-layer basis, using layer IDs as keys. +This approach enables faster scans of the same container image or different images that share layers. + +!!! note + Internally, this backend uses [BoltDB][boltdb], which has an important limitation: only one process can access the cache at a time. + Subsequent processes attempting to access the cache will be locked. + For more details on this limitation, refer to the [troubleshooting guide][parallel-run]. + +### Memory +The memory backend stores analysis results in memory, which means the cache is discarded when the process ends. +This makes it useful in scenarios where caching is not required or desired. +It serves as the default for repository, filesystem and SBOM scans and can also be employed for container image scans when caching is unnecessary. + +To use the memory backend for a container image scan, you can use the following command: + +```bash +$ trivy image debian:11 --cache-backend memory ``` + +### Redis + +The Redis backend is particularly useful when you need to share the cache across multiple Trivy instances. +You can set up Trivy to use a Redis backend with a command like this: + +```bash $ trivy server --cache-backend redis://localhost:6379 ``` +This approach allows for centralized caching, which can be beneficial in distributed or high-concurrency environments. + If you want to use TLS with Redis, you can enable it by specifying the `--redis-tls` flag. 
-```shell +```bash $ trivy server --cache-backend redis://localhost:6379 --redis-tls ``` @@ -71,6 +100,8 @@ $ trivy server --cache-backend redis://localhost:6379 \ [trivy-db]: ./db.md#vulnerability-database [trivy-java-db]: ./db.md#java-index-database [misconf-checks]: ../scanner/misconfiguration/check/builtin.md +[boltdb]: https://github.com/etcd-io/bbolt +[parallel-run]: https://aquasecurity.github.io/trivy/v0.52/docs/references/troubleshooting/#running-in-parallel-takes-same-time-as-series-run [^1]: Downloaded when scanning for vulnerabilities [^2]: Downloaded when scanning `jar/war/par/ear` files diff --git a/docs/docs/configuration/db.md b/docs/docs/configuration/db.md index 1479a79183a1..f6525fb61568 100644 --- a/docs/docs/configuration/db.md +++ b/docs/docs/configuration/db.md @@ -78,8 +78,10 @@ $ trivy image --java-db-repository registry.gitlab.com/gitlab-org/security-produ `java-db-registry:latest` => `java-db-registry:latest`, but `java-db-registry` => `java-db-registry:1`. ## Remove DBs -The `--reset` flag removes all caches and databases. +"trivy clean" command removes caches and databases. ``` -$ trivy image --reset +$ trivy clean --vuln-db --java-db +2024-06-24T11:42:31+06:00 INFO Removing vulnerability database... +2024-06-24T11:42:31+06:00 INFO Removing Java database... ``` \ No newline at end of file diff --git a/docs/docs/configuration/reporting.md b/docs/docs/configuration/reporting.md index b8b61d34a346..17bf0b864283 100644 --- a/docs/docs/configuration/reporting.md +++ b/docs/docs/configuration/reporting.md @@ -64,6 +64,7 @@ The following languages are currently supported: | PHP | [composer.lock][composer-lock] | | Java | [pom.xml][pom-xml] | | | [*gradle.lockfile][gradle-lockfile] | +| | [*.sbt.lock][sbt-lockfile] | | Dart | [pubspec.lock][pubspec-lock] | This tree is the reverse of the dependency graph. @@ -447,5 +448,6 @@ $ trivy convert --format table --severity CRITICAL result.json [composer-lock]: ../coverage/language/php.md#composer [pom-xml]: ../coverage/language/java.md#pomxml [gradle-lockfile]: ../coverage/language/java.md#gradlelock +[sbt-lockfile]: ../coverage/language/java.md#sbt [pubspec-lock]: ../coverage/language/dart.md#dart [cargo-binaries]: ../coverage/language/rust.md#binaries \ No newline at end of file diff --git a/docs/docs/coverage/language/c.md b/docs/docs/coverage/language/c.md index 276340a806bd..b156ccc85510 100644 --- a/docs/docs/coverage/language/c.md +++ b/docs/docs/coverage/language/c.md @@ -23,10 +23,11 @@ In order to detect dependencies, Trivy searches for `conan.lock`[^1]. ### Licenses The Conan lock file doesn't contain any license information. -To obtain licenses we parse the `conanfile.py` files from the [conan cache directory][conan-cache-dir]. +To obtain licenses we parse the `conanfile.py` files from the [conan v1 cache directory][conan-v1-cache-dir] and [conan v2 cache directory][conan-v2-cache-dir]. To correctly detection licenses, ensure that the cache directory contains all dependencies used. -[conan-cache-dir]: https://docs.conan.io/1/mastering/custom_cache.html +[conan-v1-cache-dir]: https://docs.conan.io/1/mastering/custom_cache.html +[conan-v2-cache-dir]: https://docs.conan.io/2/reference/environment.html#conan-home [dependency-graph]: ../../configuration/reporting.md#show-origins-of-vulnerable-dependencies [^1]: The local cache should contain the dependencies used. See [licenses](#licenses). 
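As a hedged illustration of the Conan license detection described above, here is a minimal workflow sketch; the project layout and the use of Conan v2 are assumptions, not part of this patch:

```bash
# Populate the local Conan cache so conanfile.py metadata (including licenses) is available,
# then scan the project directory that contains conan.lock
conan install . --build=missing
trivy fs --scanners vuln,license .
```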
diff --git a/docs/docs/coverage/language/golang.md b/docs/docs/coverage/language/golang.md index 3d57edade7ec..6b3646329318 100644 --- a/docs/docs/coverage/language/golang.md +++ b/docs/docs/coverage/language/golang.md @@ -66,7 +66,7 @@ such as `go mod download`, `go mod tidy`, etc. Trivy traverses `$GOPATH/pkg/mod` and collects those extra information. ### Go binaries -Trivy scans binaries built by Go. +Trivy scans binaries built by Go, which include [module information](https://tip.golang.org/doc/go1.18#go-version). If there is a Go binary in your container image, Trivy automatically finds and scans it. Also, you can scan your local binaries. diff --git a/docs/docs/coverage/language/index.md b/docs/docs/coverage/language/index.md index eb694bbcc228..df8203f93691 100644 --- a/docs/docs/coverage/language/index.md +++ b/docs/docs/coverage/language/index.md @@ -26,7 +26,8 @@ On the other hand, when the target is a post-build artifact, like a container im | | egg package[^1] | ✅ | ✅ | - | - | | | wheel package[^2] | ✅ | ✅ | - | - | | | conda package[^3] | ✅ | ✅ | - | - | -| [PHP](php.md) | composer.lock | ✅ | ✅ | ✅ | ✅ | +| [PHP](php.md) | composer.lock | - | - | ✅ | ✅ | +| | installed.json | ✅ | ✅ | - | - | | [Node.js](nodejs.md) | package-lock.json | - | - | ✅ | ✅ | | | yarn.lock | - | - | ✅ | ✅ | | | pnpm-lock.yaml | - | - | ✅ | ✅ | @@ -38,6 +39,7 @@ On the other hand, when the target is a post-build artifact, like a container im | [Java](java.md) | JAR/WAR/PAR/EAR[^4] | ✅ | ✅ | - | - | | | pom.xml | - | - | ✅ | ✅ | | | *gradle.lockfile | - | - | ✅ | ✅ | +| | *.sbt.lock | - | - | ✅ | ✅ | | [Go](golang.md) | Binaries built by Go | ✅ | ✅ | - | - | | | go.mod | - | - | ✅ | ✅ | | [Rust](rust.md) | Cargo.lock | ✅ | ✅ | ✅ | ✅ | diff --git a/docs/docs/coverage/language/java.md b/docs/docs/coverage/language/java.md index 87db939ea288..bb90366c1772 100644 --- a/docs/docs/coverage/language/java.md +++ b/docs/docs/coverage/language/java.md @@ -1,5 +1,5 @@ # Java -Trivy supports three types of Java scanning: `JAR/WAR/PAR/EAR`, `pom.xml` and `*gradle.lockfile` files. +Trivy supports four types of Java scanning: `JAR/WAR/PAR/EAR`, `pom.xml`, `*gradle.lockfile` and `*.sbt.lock` files. Each artifact supports the following scanners: @@ -8,6 +8,7 @@ Each artifact supports the following scanners: | JAR/WAR/PAR/EAR | ✓ | ✓ | - | | pom.xml | ✓ | ✓ | ✓ | | *gradle.lockfile | ✓ | ✓ | ✓ | +| *.sbt.lock | ✓ | ✓ | - | The following table provides an outline of the features Trivy offers. @@ -16,6 +17,7 @@ The following table provides an outline of the features Trivy offers. | JAR/WAR/PAR/EAR | Trivy Java DB | Include | - | - | | pom.xml | Maven repository [^1] | Exclude | ✓ | ✓[^7] | | *gradle.lockfile | - | Exclude | ✓ | ✓ | +| *.sbt.lock | - | Exclude | - | ✓ | These may be enabled or disabled depending on the target. See [here](./index.md) for the detail. @@ -94,6 +96,15 @@ Trity also can detect licenses for dependencies. Make sure that you have cache[^8] directory to find licenses from `*.pom` dependency files. + +## SBT + +`build.sbt.lock` files only contain information about used dependencies. This requires a lockfile generated using the +[sbt-dependency-lock][sbt-dependency-lock] plugin. + +!!!note + All necessary files are checked locally. SBT file scanning doesn't require internet access. + [^1]: Uses maven repository to get information about dependencies. Internet access required. 
[^2]: It means `*.jar`, `*.war`, `*.par` and `*.ear` file [^3]: `ArtifactID`, `GroupID` and `Version` @@ -106,4 +117,5 @@ Make sure that you have cache[^8] directory to find licenses from `*.pom` depend [dependency-graph]: ../../configuration/reporting.md#show-origins-of-vulnerable-dependencies [maven-invoker-plugin]: https://maven.apache.org/plugins/maven-invoker-plugin/usage.html [maven-central]: https://repo.maven.apache.org/maven2/ -[maven-pom-repos]: https://maven.apache.org/settings.html#repositories \ No newline at end of file +[maven-pom-repos]: https://maven.apache.org/settings.html#repositories +[sbt-dependency-lock]: https://stringbean.github.io/sbt-dependency-lock diff --git a/docs/docs/coverage/language/php.md b/docs/docs/coverage/language/php.md index 6fa138c35290..9fe38bf4990d 100644 --- a/docs/docs/coverage/language/php.md +++ b/docs/docs/coverage/language/php.md @@ -4,23 +4,27 @@ Trivy supports [Composer][composer], which is a tool for dependency management i The following scanners are supported. -| Package manager | SBOM | Vulnerability | License | -| --------------- | :---: | :-----------: | :-----: | -| Composer | ✓ | ✓ | ✓ | +| Package manager | SBOM | Vulnerability | License | +|-----------------|:----:|:-------------:|:-------:| +| Composer | ✓ | ✓ | ✓ | The following table provides an outline of the features Trivy offers. -| Package manager | File | Transitive dependencies | Dev dependencies | [Dependency graph][dependency-graph] | Position | -|-----------------|---------------|:-----------------------:|:----------------:|:------------------------------------:|:--------:| -| Composer | composer.lock | ✓ | Excluded | ✓ | ✓ | +| Package manager | File | Transitive dependencies | Dev dependencies | [Dependency graph][dependency-graph] | Position | +|-----------------|----------------|:-----------------------:|:----------------:|:------------------------------------:|:--------:| +| Composer | composer.lock | ✓ | Excluded | ✓ | ✓ | +| Composer | installed.json | ✓ | Excluded | - | ✓ | -## Composer +## composer.lock In order to detect dependencies, Trivy searches for `composer.lock`. Trivy also supports dependency trees; however, to display an accurate tree, it needs to know whether each package is a direct dependency of the project. Since this information is not included in `composer.lock`, Trivy parses `composer.json`, which should be located next to `composer.lock`. If you want to see the dependency tree, please ensure that `composer.json` is present. +## installed.json +Trivy also supports dependency detection for `installed.json` files. By default, you can find this file at `path_to_app/vendor/composer/installed.json`. 
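As a hedged illustration of the `installed.json` support described above (the application path is hypothetical; per the coverage table, this file is detected in post-build targets such as container images and root filesystems):

```bash
# Scan a built PHP application root; Trivy picks up vendor/composer/installed.json
trivy rootfs ./path_to_app
```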
+ [composer]: https://getcomposer.org/ [dependency-graph]: ../../configuration/reporting.md#show-origins-of-vulnerable-dependencies \ No newline at end of file diff --git a/docs/docs/references/configuration/cli/trivy.md b/docs/docs/references/configuration/cli/trivy.md index f3c543a210f9..2992bc0faa9b 100644 --- a/docs/docs/references/configuration/cli/trivy.md +++ b/docs/docs/references/configuration/cli/trivy.md @@ -43,7 +43,7 @@ trivy [global flags] command [flags] target ### SEE ALSO -* [trivy aws](trivy_aws.md) - [EXPERIMENTAL] Scan AWS account +* [trivy clean](trivy_clean.md) - Remove cached files * [trivy config](trivy_config.md) - Scan config files for misconfigurations * [trivy convert](trivy_convert.md) - Convert Trivy JSON report into a different format * [trivy filesystem](trivy_filesystem.md) - Scan local filesystem diff --git a/docs/docs/references/configuration/cli/trivy_aws.md b/docs/docs/references/configuration/cli/trivy_aws.md deleted file mode 100644 index fad5d106bc16..000000000000 --- a/docs/docs/references/configuration/cli/trivy_aws.md +++ /dev/null @@ -1,127 +0,0 @@ -## trivy aws - -[EXPERIMENTAL] Scan AWS account - -### Synopsis - -Scan an AWS account for misconfigurations. Trivy uses the same authentication methods as the AWS CLI. See https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-configure.html - -The following services are supported: - -- accessanalyzer -- api-gateway -- athena -- cloudfront -- cloudtrail -- cloudwatch -- codebuild -- documentdb -- dynamodb -- ec2 -- ecr -- ecs -- efs -- eks -- elasticache -- elasticsearch -- elb -- emr -- iam -- kinesis -- kms -- lambda -- mq -- msk -- neptune -- rds -- redshift -- s3 -- sns -- sqs -- ssm -- workspaces - - -``` -trivy aws [flags] -``` - -### Examples - -``` - # basic scanning - $ trivy aws --region us-east-1 - - # limit scan to a single service: - $ trivy aws --region us-east-1 --service s3 - - # limit scan to multiple services: - $ trivy aws --region us-east-1 --service s3 --service ec2 - - # force refresh of cache for fresh results - $ trivy aws --region us-east-1 --update-cache - -``` - -### Options - -``` - --account string The AWS account to scan. It's useful to specify this when reviewing cached results for multiple accounts. - --arn string The AWS ARN to show results for. Useful to filter results once a scan is cached. - --cf-params strings specify paths to override the CloudFormation parameters files - --check-namespaces strings Rego namespaces - --checks-bundle-repository string OCI registry URL to retrieve checks bundle from (default "ghcr.io/aquasecurity/trivy-checks:0") - --compliance string compliance report to generate (aws-cis-1.2,aws-cis-1.4) - --config-check strings specify the paths to the Rego check files or to the directories containing them, applying config files - --config-data strings specify paths from which data for the Rego checks will be recursively loaded - --dependency-tree [EXPERIMENTAL] show dependency origin tree of vulnerable packages - --endpoint string AWS Endpoint override - --exit-code int specify exit code when any security issues are found - -f, --format string format (table,json,template,sarif,cyclonedx,spdx,spdx-json,github,cosign-vuln) (default "table") - --helm-api-versions strings Available API versions used for Capabilities.APIVersions. This flag is the same as the api-versions flag of the helm template command. 
(can specify multiple or separate values with commas: policy/v1/PodDisruptionBudget,apps/v1/Deployment) - --helm-kube-version string Kubernetes version used for Capabilities.KubeVersion. This flag is the same as the kube-version flag of the helm template command. - --helm-set strings specify Helm values on the command line (can specify multiple or separate values with commas: key1=val1,key2=val2) - --helm-set-file strings specify Helm values from respective files specified via the command line (can specify multiple or separate values with commas: key1=path1,key2=path2) - --helm-set-string strings specify Helm string values on the command line (can specify multiple or separate values with commas: key1=val1,key2=val2) - --helm-values strings specify paths to override the Helm values.yaml files - -h, --help help for aws - --ignore-policy string specify the Rego file path to evaluate each vulnerability - --ignorefile string specify .trivyignore file (default ".trivyignore") - --include-deprecated-checks include deprecated checks - --include-non-failures include successes and exceptions, available with '--scanners misconfig' - --list-all-pkgs output all packages in the JSON report regardless of vulnerability - --max-cache-age duration The maximum age of the cloud cache. Cached data will be required from the cloud provider if it is older than this. (default 24h0m0s) - --misconfig-scanners strings comma-separated list of misconfig scanners to use for misconfiguration scanning (default [azure-arm,cloudformation,dockerfile,helm,kubernetes,terraform,terraformplan-json,terraformplan-snapshot]) - -o, --output string output file name - --output-plugin-arg string [EXPERIMENTAL] output plugin arguments - --region string AWS Region to scan - --report string specify a report format for the output (all,summary) (default "all") - --reset-checks-bundle remove checks bundle - --service strings Only scan AWS Service(s) specified with this flag. Can specify multiple services using --service A --service B etc. - -s, --severity strings severities of security issues to be displayed (UNKNOWN,LOW,MEDIUM,HIGH,CRITICAL) (default [UNKNOWN,LOW,MEDIUM,HIGH,CRITICAL]) - --skip-check-update skip fetching rego check updates - --skip-service strings Skip selected AWS Service(s) specified with this flag. Can specify multiple services using --skip-service A --skip-service B etc. - -t, --template string output template - --tf-exclude-downloaded-modules exclude misconfigurations for downloaded terraform modules - --tf-vars strings specify paths to override the Terraform tfvars files - --trace enable more verbose trace output for custom queries - --update-cache Update the cache for the applicable cloud provider instead of using cached results. 
-``` - -### Options inherited from parent commands - -``` - --cache-dir string cache directory (default "/path/to/cache") - -c, --config string config path (default "trivy.yaml") - -d, --debug debug mode - --generate-default-config write the default config to trivy-default.yaml - --insecure allow insecure server connections - -q, --quiet suppress progress bar and log output - --timeout duration timeout (default 5m0s) - -v, --version show version -``` - -### SEE ALSO - -* [trivy](trivy.md) - Unified security scanner - diff --git a/docs/docs/references/configuration/cli/trivy_clean.md b/docs/docs/references/configuration/cli/trivy_clean.md new file mode 100644 index 000000000000..7a997bf7b581 --- /dev/null +++ b/docs/docs/references/configuration/cli/trivy_clean.md @@ -0,0 +1,50 @@ +## trivy clean + +Remove cached files + +``` +trivy clean [flags] +``` + +### Examples + +``` + # Remove all caches + $ trivy clean --all + + # Remove scan cache + $ trivy clean --scan-cache + + # Remove vulnerability database + $ trivy clean --vuln-db + +``` + +### Options + +``` + -a, --all remove all caches + --checks-bundle remove checks bundle + -h, --help help for clean + --java-db remove Java database + --scan-cache remove scan cache (container and VM image analysis results) + --vuln-db remove vulnerability database +``` + +### Options inherited from parent commands + +``` + --cache-dir string cache directory (default "/path/to/cache") + -c, --config string config path (default "trivy.yaml") + -d, --debug debug mode + --generate-default-config write the default config to trivy-default.yaml + --insecure allow insecure server connections + -q, --quiet suppress progress bar and log output + --timeout duration timeout (default 5m0s) + -v, --version show version +``` + +### SEE ALSO + +* [trivy](trivy.md) - Unified security scanner + diff --git a/docs/docs/references/configuration/cli/trivy_config.md b/docs/docs/references/configuration/cli/trivy_config.md index 993570f1587b..0176c09ea58f 100644 --- a/docs/docs/references/configuration/cli/trivy_config.md +++ b/docs/docs/references/configuration/cli/trivy_config.md @@ -9,12 +9,11 @@ trivy config [flags] DIR ### Options ``` - --cache-backend string cache backend (e.g. redis://localhost:6379) (default "fs") + --cache-backend string [EXPERIMENTAL] cache backend (e.g. 
redis://localhost:6379) (default "memory") --cache-ttl duration cache TTL when using redis as cache backend --cf-params strings specify paths to override the CloudFormation parameters files --check-namespaces strings Rego namespaces --checks-bundle-repository string OCI registry URL to retrieve checks bundle from (default "ghcr.io/aquasecurity/trivy-checks:0") - --clear-cache clear image caches without scanning --compliance string compliance report to generate --config-check strings specify the paths to the Rego check files or to the directories containing them, applying config files --config-data strings specify paths from which data for the Rego checks will be recursively loaded @@ -45,7 +44,6 @@ trivy config [flags] DIR --redis-tls enable redis TLS with public certificates, if using redis as cache backend --registry-token string registry token --report string specify a compliance report format for the output (all,summary) (default "all") - --reset-checks-bundle remove checks bundle -s, --severity strings severities of security issues to be displayed (UNKNOWN,LOW,MEDIUM,HIGH,CRITICAL) (default [UNKNOWN,LOW,MEDIUM,HIGH,CRITICAL]) --skip-check-update skip fetching rego check updates --skip-dirs strings specify the directories or glob patterns to skip diff --git a/docs/docs/references/configuration/cli/trivy_filesystem.md b/docs/docs/references/configuration/cli/trivy_filesystem.md index 4d90f4e87e63..ae88ed8ca83b 100644 --- a/docs/docs/references/configuration/cli/trivy_filesystem.md +++ b/docs/docs/references/configuration/cli/trivy_filesystem.md @@ -19,12 +19,11 @@ trivy filesystem [flags] PATH ### Options ``` - --cache-backend string cache backend (e.g. redis://localhost:6379) (default "fs") + --cache-backend string [EXPERIMENTAL] cache backend (e.g. 
redis://localhost:6379) (default "memory") --cache-ttl duration cache TTL when using redis as cache backend --cf-params strings specify paths to override the CloudFormation parameters files --check-namespaces strings Rego namespaces --checks-bundle-repository string OCI registry URL to retrieve checks bundle from (default "ghcr.io/aquasecurity/trivy-checks:0") - --clear-cache clear image caches without scanning --compliance string compliance report to generate --config-check strings specify the paths to the Rego check files or to the directories containing them, applying config files --config-data strings specify paths from which data for the Rego checks will be recursively loaded @@ -71,8 +70,6 @@ trivy filesystem [flags] PATH --registry-token string registry token --rekor-url string [EXPERIMENTAL] address of rekor STL server (default "https://rekor.sigstore.dev") --report string specify a compliance report format for the output (all,summary) (default "all") - --reset remove all caches and database - --reset-checks-bundle remove checks bundle --sbom-sources strings [EXPERIMENTAL] try to retrieve SBOM from the specified sources (oci,rekor) --scanners strings comma-separated list of what security issues to detect (vuln,misconfig,secret,license) (default [vuln,secret]) --secret-config string specify a path to config file for secret scanning (default "trivy-secret.yaml") diff --git a/docs/docs/references/configuration/cli/trivy_image.md b/docs/docs/references/configuration/cli/trivy_image.md index c61c6b648d7c..8c3fe309f929 100644 --- a/docs/docs/references/configuration/cli/trivy_image.md +++ b/docs/docs/references/configuration/cli/trivy_image.md @@ -34,12 +34,11 @@ trivy image [flags] IMAGE_NAME ### Options ``` - --cache-backend string cache backend (e.g. redis://localhost:6379) (default "fs") + --cache-backend string [EXPERIMENTAL] cache backend (e.g. redis://localhost:6379) (default "fs") --cache-ttl duration cache TTL when using redis as cache backend --check-namespaces strings Rego namespaces --checks-bundle-repository string OCI registry URL to retrieve checks bundle from (default "ghcr.io/aquasecurity/trivy-checks:0") - --clear-cache clear image caches without scanning - --compliance string compliance report to generate (docker-cis) + --compliance string compliance report to generate (docker-cis-1.6.0) --config-check strings specify the paths to the Rego check files or to the directories containing them, applying config files --config-data strings specify paths from which data for the Rego checks will be recursively loaded --custom-headers strings custom headers in client mode @@ -92,8 +91,6 @@ trivy image [flags] IMAGE_NAME --rekor-url string [EXPERIMENTAL] address of rekor STL server (default "https://rekor.sigstore.dev") --removed-pkgs detect vulnerabilities of removed packages (only for Alpine) --report string specify a format for the compliance report. 
(all,summary) (default "summary") - --reset remove all caches and database - --reset-checks-bundle remove checks bundle --sbom-sources strings [EXPERIMENTAL] try to retrieve SBOM from the specified sources (oci,rekor) --scanners strings comma-separated list of what security issues to detect (vuln,misconfig,secret,license) (default [vuln,secret]) --secret-config string specify a path to config file for secret scanning (default "trivy-secret.yaml") diff --git a/docs/docs/references/configuration/cli/trivy_kubernetes.md b/docs/docs/references/configuration/cli/trivy_kubernetes.md index 54dc2db07f75..201eee466765 100644 --- a/docs/docs/references/configuration/cli/trivy_kubernetes.md +++ b/docs/docs/references/configuration/cli/trivy_kubernetes.md @@ -30,12 +30,11 @@ trivy kubernetes [flags] [CONTEXT] ``` --burst int specify the maximum burst for throttle (default 10) - --cache-backend string cache backend (e.g. redis://localhost:6379) (default "fs") + --cache-backend string [EXPERIMENTAL] cache backend (e.g. redis://localhost:6379) (default "fs") --cache-ttl duration cache TTL when using redis as cache backend --check-namespaces strings Rego namespaces --checks-bundle-repository string OCI registry URL to retrieve checks bundle from (default "ghcr.io/aquasecurity/trivy-checks:0") - --clear-cache clear image caches without scanning - --compliance string compliance report to generate (k8s-nsa,k8s-cis,k8s-pss-baseline,k8s-pss-restricted) + --compliance string compliance report to generate (k8s-nsa-1.0,k8s-cis-1.23,eks-cis-1.4,rke2-cis-1.24,k8s-pss-baseline-0.1,k8s-pss-restricted-0.1) --config-check strings specify the paths to the Rego check files or to the directories containing them, applying config files --config-data strings specify paths from which data for the Rego checks will be recursively loaded --db-repository string OCI repository to retrieve trivy-db from (default "ghcr.io/aquasecurity/trivy-db:2") @@ -72,7 +71,7 @@ trivy kubernetes [flags] [CONTEXT] --list-all-pkgs output all packages in the JSON report regardless of vulnerability --misconfig-scanners strings comma-separated list of misconfig scanners to use for misconfiguration scanning (default [azure-arm,cloudformation,dockerfile,helm,kubernetes,terraform,terraformplan-json,terraformplan-snapshot]) --no-progress suppress progress bar - --node-collector-imageref string indicate the image reference for the node-collector scan job (default "ghcr.io/aquasecurity/node-collector:0.2.1") + --node-collector-imageref string indicate the image reference for the node-collector scan job (default "ghcr.io/aquasecurity/node-collector:0.3.1") --node-collector-namespace string specify the namespace in which the node-collector job should be deployed (default "trivy-temp") --offline-scan do not issue API requests to identify dependencies -o, --output string output file name @@ -87,8 +86,6 @@ trivy kubernetes [flags] [CONTEXT] --registry-token string registry token --rekor-url string [EXPERIMENTAL] address of rekor STL server (default "https://rekor.sigstore.dev") --report string specify a report format for the output (all,summary) (default "all") - --reset remove all caches and database - --reset-checks-bundle remove checks bundle --sbom-sources strings [EXPERIMENTAL] try to retrieve SBOM from the specified sources (oci,rekor) --scanners strings comma-separated list of what security issues to detect (vuln,misconfig,secret,rbac) (default [vuln,misconfig,secret,rbac]) --secret-config string specify a path to config file for secret scanning (default 
"trivy-secret.yaml") diff --git a/docs/docs/references/configuration/cli/trivy_repository.md b/docs/docs/references/configuration/cli/trivy_repository.md index 4f73c0d4e13d..cf85082bee7f 100644 --- a/docs/docs/references/configuration/cli/trivy_repository.md +++ b/docs/docs/references/configuration/cli/trivy_repository.md @@ -19,12 +19,11 @@ trivy repository [flags] (REPO_PATH | REPO_URL) ``` --branch string pass the branch name to be scanned - --cache-backend string cache backend (e.g. redis://localhost:6379) (default "fs") + --cache-backend string [EXPERIMENTAL] cache backend (e.g. redis://localhost:6379) (default "memory") --cache-ttl duration cache TTL when using redis as cache backend --cf-params strings specify paths to override the CloudFormation parameters files --check-namespaces strings Rego namespaces --checks-bundle-repository string OCI registry URL to retrieve checks bundle from (default "ghcr.io/aquasecurity/trivy-checks:0") - --clear-cache clear image caches without scanning --commit string pass the commit hash to be scanned --config-check strings specify the paths to the Rego check files or to the directories containing them, applying config files --config-data strings specify paths from which data for the Rego checks will be recursively loaded @@ -70,8 +69,6 @@ trivy repository [flags] (REPO_PATH | REPO_URL) --redis-tls enable redis TLS with public certificates, if using redis as cache backend --registry-token string registry token --rekor-url string [EXPERIMENTAL] address of rekor STL server (default "https://rekor.sigstore.dev") - --reset remove all caches and database - --reset-checks-bundle remove checks bundle --sbom-sources strings [EXPERIMENTAL] try to retrieve SBOM from the specified sources (oci,rekor) --scanners strings comma-separated list of what security issues to detect (vuln,misconfig,secret,license) (default [vuln,secret]) --secret-config string specify a path to config file for secret scanning (default "trivy-secret.yaml") diff --git a/docs/docs/references/configuration/cli/trivy_rootfs.md b/docs/docs/references/configuration/cli/trivy_rootfs.md index 6f264b5c2362..6ab7705ff633 100644 --- a/docs/docs/references/configuration/cli/trivy_rootfs.md +++ b/docs/docs/references/configuration/cli/trivy_rootfs.md @@ -22,12 +22,11 @@ trivy rootfs [flags] ROOTDIR ### Options ``` - --cache-backend string cache backend (e.g. redis://localhost:6379) (default "fs") + --cache-backend string [EXPERIMENTAL] cache backend (e.g. 
redis://localhost:6379) (default "memory") --cache-ttl duration cache TTL when using redis as cache backend --cf-params strings specify paths to override the CloudFormation parameters files --check-namespaces strings Rego namespaces --checks-bundle-repository string OCI registry URL to retrieve checks bundle from (default "ghcr.io/aquasecurity/trivy-checks:0") - --clear-cache clear image caches without scanning --config-check strings specify the paths to the Rego check files or to the directories containing them, applying config files --config-data strings specify paths from which data for the Rego checks will be recursively loaded --custom-headers strings custom headers in client mode @@ -72,8 +71,6 @@ trivy rootfs [flags] ROOTDIR --redis-tls enable redis TLS with public certificates, if using redis as cache backend --registry-token string registry token --rekor-url string [EXPERIMENTAL] address of rekor STL server (default "https://rekor.sigstore.dev") - --reset remove all caches and database - --reset-checks-bundle remove checks bundle --sbom-sources strings [EXPERIMENTAL] try to retrieve SBOM from the specified sources (oci,rekor) --scanners strings comma-separated list of what security issues to detect (vuln,misconfig,secret,license) (default [vuln,secret]) --secret-config string specify a path to config file for secret scanning (default "trivy-secret.yaml") diff --git a/docs/docs/references/configuration/cli/trivy_sbom.md b/docs/docs/references/configuration/cli/trivy_sbom.md index d5c4030d7281..2adbd12a253a 100644 --- a/docs/docs/references/configuration/cli/trivy_sbom.md +++ b/docs/docs/references/configuration/cli/trivy_sbom.md @@ -20,9 +20,8 @@ trivy sbom [flags] SBOM_PATH ### Options ``` - --cache-backend string cache backend (e.g. redis://localhost:6379) (default "fs") + --cache-backend string [EXPERIMENTAL] cache backend (e.g. redis://localhost:6379) (default "memory") --cache-ttl duration cache TTL when using redis as cache backend - --clear-cache clear image caches without scanning --compliance string compliance report to generate --custom-headers strings custom headers in client mode --db-repository string OCI repository to retrieve trivy-db from (default "ghcr.io/aquasecurity/trivy-db:2") @@ -49,7 +48,6 @@ trivy sbom [flags] SBOM_PATH --redis-key string redis key file location, if using redis as cache backend --redis-tls enable redis TLS with public certificates, if using redis as cache backend --rekor-url string [EXPERIMENTAL] address of rekor STL server (default "https://rekor.sigstore.dev") - --reset remove all caches and database --sbom-sources strings [EXPERIMENTAL] try to retrieve SBOM from the specified sources (oci,rekor) --scanners strings comma-separated list of what security issues to detect (vuln,license) (default [vuln]) --server string server address in client mode diff --git a/docs/docs/references/configuration/cli/trivy_server.md b/docs/docs/references/configuration/cli/trivy_server.md index d888034c34bf..4291496e34f1 100644 --- a/docs/docs/references/configuration/cli/trivy_server.md +++ b/docs/docs/references/configuration/cli/trivy_server.md @@ -20,9 +20,8 @@ trivy server [flags] ### Options ``` - --cache-backend string cache backend (e.g. redis://localhost:6379) (default "fs") + --cache-backend string [EXPERIMENTAL] cache backend (e.g. 
redis://localhost:6379) (default "fs") --cache-ttl duration cache TTL when using redis as cache backend - --clear-cache clear image caches without scanning --db-repository string OCI repository to retrieve trivy-db from (default "ghcr.io/aquasecurity/trivy-db:2") --download-db-only download/update vulnerability database but don't run a scan --enable-modules strings [EXPERIMENTAL] module names to enable @@ -36,7 +35,6 @@ trivy server [flags] --redis-key string redis key file location, if using redis as cache backend --redis-tls enable redis TLS with public certificates, if using redis as cache backend --registry-token string registry token - --reset remove all caches and database --skip-db-update skip updating vulnerability database --token string for authentication in client/server mode --token-header string specify a header name for token in client/server mode (default "Trivy-Token") diff --git a/docs/docs/references/configuration/cli/trivy_vm.md b/docs/docs/references/configuration/cli/trivy_vm.md index 51b7ad43cf38..5ad96c87b0df 100644 --- a/docs/docs/references/configuration/cli/trivy_vm.md +++ b/docs/docs/references/configuration/cli/trivy_vm.md @@ -21,10 +21,9 @@ trivy vm [flags] VM_IMAGE ``` --aws-region string AWS region to scan - --cache-backend string cache backend (e.g. redis://localhost:6379) (default "fs") + --cache-backend string [EXPERIMENTAL] cache backend (e.g. redis://localhost:6379) (default "fs") --cache-ttl duration cache TTL when using redis as cache backend --checks-bundle-repository string OCI registry URL to retrieve checks bundle from (default "ghcr.io/aquasecurity/trivy-checks:0") - --clear-cache clear image caches without scanning --compliance string compliance report to generate --custom-headers strings custom headers in client mode --db-repository string OCI repository to retrieve trivy-db from (default "ghcr.io/aquasecurity/trivy-db:2") @@ -62,8 +61,6 @@ trivy vm [flags] VM_IMAGE --redis-key string redis key file location, if using redis as cache backend --redis-tls enable redis TLS with public certificates, if using redis as cache backend --rekor-url string [EXPERIMENTAL] address of rekor STL server (default "https://rekor.sigstore.dev") - --reset remove all caches and database - --reset-checks-bundle remove checks bundle --sbom-sources strings [EXPERIMENTAL] try to retrieve SBOM from the specified sources (oci,rekor) --scanners strings comma-separated list of what security issues to detect (vuln,misconfig,secret,license) (default [vuln,secret]) --secret-config string specify a path to config file for secret scanning (default "trivy-secret.yaml") diff --git a/docs/docs/references/troubleshooting.md b/docs/docs/references/troubleshooting.md index 7d2c3258aa2c..d271882c5ecb 100644 --- a/docs/docs/references/troubleshooting.md +++ b/docs/docs/references/troubleshooting.md @@ -264,10 +264,10 @@ $ brew install aquasecurity/trivy/trivy ## Others ### Unknown error -Try again with `--reset` option: +Try again after running `trivy clean --all`: ``` -$ trivy image --reset +$ trivy clean --all ``` [air-gapped]: ../advanced/air-gap.md diff --git a/docs/docs/scanner/vulnerability.md b/docs/docs/scanner/vulnerability.md index ee76a8e6844c..55403dda2207 100644 --- a/docs/docs/scanner/vulnerability.md +++ b/docs/docs/scanner/vulnerability.md @@ -1,13 +1,12 @@ # Vulnerability Scanning -Trivy detects known vulnerabilities according to the versions of installed packages. +Trivy detects known vulnerabilities in software components that it finds in the scan target. 
-The following packages are supported. +The following are supported: - [OS packages](#os-packages) - [Language-specific packages](#language-specific-packages) -- [Kubernetes components (control plane, node and addons)](#kubernetes) - -Trivy also detects known vulnerabilities in Kubernetes components using KBOM (Kubernetes bill of Material) scanning. To learn more, see the [documentation for Kubernetes scanning](../target/kubernetes.md#KBOM). +- [Non-packaged software](#non-packaged-software) +- [Kubernetes components](#kubernetes) ## OS Packages Trivy is capable of automatically detecting installed OS packages when scanning container images, VM images and running hosts. @@ -66,7 +65,44 @@ If the data source does not provide a severity, the severity is determined based | 7.0-8.9 | High | | 9.0-10.0 | Critical | -If the CVSS score is also not provided, it falls back to [NVD][nvd], and if NVD does not have severity, it will be UNKNOWN. +If the CVSS score is also not provided, it falls back to [NVD][nvd]. + +NVD and some vendors may delay severity analysis, while other vendors, such as Red Hat, are able to quickly evaluate and announce the severity of vulnerabilities. +To avoid marking too many vulnerabilities as "UNKNOWN" severity, Trivy uses severity ratings from other vendors when the NVD information is not yet available. +The order of preference for vendor severity data can be found [here](https://github.com/aquasecurity/trivy-db/blob/79d0fbd1e246f3c77eef4b9826fe4bf65940b221/pkg/vulnsrc/vulnerability/vulnerability.go#L17-L19). + +You can reference `SeveritySource` in the [JSON reporting format](../configuration/reporting.md#json) to see from where the severity is taken for a given vulnerability. + +```shell +"SeveritySource": "debian", +``` + + +In addition, you can see all the vendor severity ratings. + +```json +"VendorSeverity": { + "amazon": 2, + "cbl-mariner": 4, + "ghsa": 4, + "nvd": 4, + "photon": 4, + "redhat": 2, + "ubuntu": 2 +} +``` + +Here is the severity mapping in Trivy: + +| Number | Severity | +|:------:|----------| +| 0 | Unknown | +| 1 | Low | +| 2 | Medium | +| 3 | High | +| 4 | Critical | + +If no vendor has a severity, the `UNKNOWN` severity will be used. ### Unfixed Vulnerabilities The unfixed/unfixable vulnerabilities mean that the patch has not yet been provided on their distribution. @@ -101,9 +137,18 @@ See [here](../coverage/language/index.md#supported-languages) for the supported [^1]: Intentional delay between vulnerability disclosure and registration in the DB +## Non-packaged software + +If you have software that is not managed by a package manager, Trivy can still detect vulnerabilities in it in some cases: + +- [Using SBOM from Sigstore Rekor](../supply-chain/attestation/rekor/#non-packaged-binaries) +- [Go Binaries with embedded module information](../coverage/language/golang/#go-binaries) +- [Rust Binaries with embedded information](../coverage/language/rust/#binaries) +- [SBOM embedded in container images](../supply-chain/container-image/#sbom-embedded-in-container-images) + ## Kubernetes -Trivy can detect vulnerabilities in Kubernetes clusters and components. +Trivy can detect vulnerabilities in Kubernetes clusters and components by scanning a Kubernetes Cluster, or a KBOM (Kubernetes bill of Material). To learn more, see the [documentation for Kubernetes scanning](../target/kubernetes.md). 
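A minimal sketch of the cluster scan mentioned above, assuming a configured kubectl context; the flags shown are only those already documented for the Kubernetes target:

```bash
# Scan the current Kubernetes cluster context and print a summary report
trivy k8s --report summary
```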
### Data Sources diff --git a/docs/docs/supply-chain/sbom.md b/docs/docs/supply-chain/sbom.md index cb3a68c9d8f3..ed57195b3550 100644 --- a/docs/docs/supply-chain/sbom.md +++ b/docs/docs/supply-chain/sbom.md @@ -731,17 +731,20 @@ $ cat result.spdx.json | jq . ## Scanning -Trivy can take SBOM documents as input for scanning. + +### SBOM as Target +Trivy can take SBOM documents as input for scanning, e.g `trivy sbom ./sbom.spdx`. See [here](../target/sbom.md) for more details. -Also, Trivy searches for SBOM files in container images. +### SBOM Detection inside Targets +Trivy searches for SBOM files in container images with the following extensions: +- `.spdx` +- `.spdx.json` +- `.cdx` +- `.cdx.json` -```bash -$ trivy image bitnami/elasticsearch:8.7.1 -``` +In addition, Trivy automatically detects SBOM files in [Bitnami images](https://github.com/bitnami/containers), [see here](../coverage/os/bitnami.md) for more details. -For example, [Bitnami images](https://github.com/bitnami/containers) contain SBOM files in `/opt/bitnami` directory. -Trivy automatically detects the SBOM files and uses them for scanning. It is enabled in the following targets. | Target | Enabled | @@ -755,6 +758,9 @@ It is enabled in the following targets. | AWS | | | SBOM | | +### SBOM Discovery for Container Images + +When scanning container images, Trivy can discover SBOM for those images. [See here](../target/container_image.md) for more details. [spdx]: https://spdx.dev/wp-content/uploads/sites/41/2020/08/SPDX-specification-2-2.pdf diff --git a/docs/docs/target/container_image.md b/docs/docs/target/container_image.md index 948cf9678283..94acc954a743 100644 --- a/docs/docs/target/container_image.md +++ b/docs/docs/target/container_image.md @@ -436,14 +436,14 @@ The following reports are available out of the box: | Compliance | Version | Name for command | More info | |----------------------------------------|---------|------------------|---------------------------------------------------------------------------------------------| -| CIS Docker Community Edition Benchmark | 1.1.0 | `docker-cis` | [Link](https://www.aquasec.com/cloud-native-academy/docker-container/docker-cis-benchmark/) | +| CIS Docker Community Edition Benchmark | 1.1.0 | `docker-cis-1.6.0` | [Link](https://www.aquasec.com/cloud-native-academy/docker-container/docker-cis-benchmark/) | ### Examples Scan a container image configuration and generate a compliance summary report: ``` -$ trivy image --compliance docker-cis [YOUR_IMAGE_NAME] +trivy image --compliance docker-cis-1.6.0 [YOUR_IMAGE_NAME] ``` !!! 
note diff --git a/docs/docs/target/kubernetes.md b/docs/docs/target/kubernetes.md index a92057dc5341..10253aff4c72 100644 --- a/docs/docs/target/kubernetes.md +++ b/docs/docs/target/kubernetes.md @@ -355,12 +355,14 @@ For an overview of Trivy's Compliance feature, including working with custom com The following reports are available out of the box: -| Compliance | Name for command | More info | -|----------------------------------------------|----------------------|---------------------------------------------------------------------------------------------------------------------| -| NSA, CISA Kubernetes Hardening Guidance v1.2 | `k8s-nsa` | [Link](https://media.defense.gov/2022/Aug/29/2003066362/-1/-1/0/CTR_KUBERNETES_HARDENING_GUIDANCE_1.2_20220829.PDF) | -| CIS Benchmark for Kubernetes v1.23 | `k8s-cis` | [Link](https://www.cisecurity.org/benchmark/kubernetes) | -| Pod Security Standards, Baseline | `k8s-pss-baseline` | [Link](https://kubernetes.io/docs/concepts/security/pod-security-standards/#baseline) | -| Pod Security Standards, Restricted | `k8s-pss-restricted` | [Link](https://kubernetes.io/docs/concepts/security/pod-security-standards/#restricted) | +| Compliance | Name for command | More info | +|----------------------------------------------|--------------------------|---------------------------------------------------------------------------------------------------------------------| +| NSA, CISA Kubernetes Hardening Guidance v1.0 | `k8s-nsa-1.0` | [Link](https://media.defense.gov/2022/Aug/29/2003066362/-1/-1/0/CTR_KUBERNETES_HARDENING_GUIDANCE_1.2_20220829.PDF) | +| CIS Benchmark for Kubernetes v1.23 | `k8s-cis-1.23` | [Link](https://www.cisecurity.org/benchmark/kubernetes) | +| CIS Benchmark for RKE2 v1.24 | `rke2-cis-1.24` | [Link](https://www.cisecurity.org/benchmark/kubernetes) | +| CIS Benchmark for EKS v1.4 | `eks-cis-1.4` | [Link](https://www.cisecurity.org/benchmark/kubernetes) | +| Pod Security Standards, Baseline | `k8s-pss-baseline-0.1` | [Link](https://kubernetes.io/docs/concepts/security/pod-security-standards/#baseline) | +| Pod Security Standards, Restricted | `k8s-pss-restricted-0.1` | [Link](https://kubernetes.io/docs/concepts/security/pod-security-standards/#restricted) | Examples: @@ -376,7 +378,7 @@ Get the detailed report for checks: ``` -trivy k8s --compliance=k8s-cis --report all +trivy k8s --compliance=k8s-cis-1.23 --report all ``` @@ -384,7 +386,7 @@ Get summary report in JSON format: ``` -trivy k8s --compliance=k8s-cis --report summary --format json +trivy k8s --compliance=k8s-cis-1.23 --report summary --format json ``` @@ -392,7 +394,7 @@ Get detailed report in JSON format: ``` -trivy k8s --compliance=k8s-cis --report all --format json +trivy k8s --compliance=k8s-cis-1.23 --report all --format json ``` diff --git a/docs/ecosystem/prod.md b/docs/ecosystem/prod.md index 8f9b8fb71be0..4d037ebdc78d 100644 --- a/docs/ecosystem/prod.md +++ b/docs/ecosystem/prod.md @@ -29,3 +29,11 @@ You can use Kyverno to ensure and enforce that deployed workloads' images are sc Trivy is integrated into Zora as a vulnerability scanner plugin. 👉 Get it at: + +## Helmper (Community) + +[Helmper](https://christoffernissen.github.io/helmper/) is a go program that reads Helm Charts from remote OCI registries and pushes the Helm Charts and the Helm Charts container images to your OCI registries with optional OS level vulnerability patching + +Trivy is integrated into Helmper as a vulnerability scanner in combination with Copacetic to fix detected vulnerabilities. 
+
+👉 Get it at:
diff --git a/docs/getting-started/installation.md b/docs/getting-started/installation.md
index 51ea9e207214..61f6e42871bb 100644
--- a/docs/getting-started/installation.md
+++ b/docs/getting-started/installation.md
@@ -61,7 +61,7 @@ brew install trivy
 Arch Linux Package Repository.
 
 ```bash
-pacman -S trivy
+sudo pacman -S trivy
 ```
 
 References:
@@ -163,17 +163,17 @@ The plugin used by both tools is developped [here](https://github.com/zufardhiya
 
 ### Download Binary
 
-1. Download the file for your operating system/architecture from [GitHub Release assets](https://github.com/aquasecurity/trivy/releases/tag/{{ git.tag }}) (`curl -LO https://url.to/trivy.tar.gz`).
+1. Download the file for your operating system/architecture from [GitHub Release assets](https://github.com/aquasecurity/trivy/releases/tag/{{ git.tag }}).
 2. Unpack the downloaded archive (`tar -xzf ./trivy.tar.gz`).
-3. Put the binary somewhere in your `$PATH` (e.g `mv ./trivy /usr/local/bin/`).
-4. Make sure the binary has execution bit turned on (`chmod +x ./trivy`).
+3. Make sure the binary has the execution bit turned on (`chmod +x ./trivy`).
+4. Put the binary somewhere in your `$PATH` (e.g. `sudo mv ./trivy /usr/local/bin/`).
 
 ### Install Script
 
 The process above can be automated by the following script:
 
 ```bash
-curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sh -s -- -b /usr/local/bin {{ git.tag }}
+curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sudo sh -s -- -b /usr/local/bin {{ git.tag }}
 ```
 
 ### Install from source
diff --git a/docs/tutorials/integrations/gitlab-ci.md b/docs/tutorials/integrations/gitlab-ci.md
index dbfe46d1ca4d..8b4e8c34e7bb 100644
--- a/docs/tutorials/integrations/gitlab-ci.md
+++ b/docs/tutorials/integrations/gitlab-ci.md
@@ -85,8 +85,6 @@ container_scanning:
     FULL_IMAGE_NAME: $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG
   script:
     - trivy --version
-    # cache cleanup is needed when scanning images with the same tags, it does not remove the database
-    - time trivy image --clear-cache
     # update vulnerabilities db
     - time trivy image --download-db-only
     # Builds report and puts it in the default workdir $CI_PROJECT_DIR, so `artifacts:` can take it from there
diff --git a/go.mod b/go.mod
index 97bc846086a5..e1607b6b8b39 100644
--- a/go.mod
+++ b/go.mod
@@ -2,7 +2,7 @@ module github.com/aquasecurity/trivy
 
 go 1.22.0
 
-toolchain go1.22.2
+toolchain go1.22.4
 
 require (
 	github.com/Azure/azure-sdk-for-go v68.0.0+incompatible
@@ -22,23 +22,20 @@
 	github.com/aquasecurity/go-npm-version v0.0.0-20201110091526-0b796d180798
 	github.com/aquasecurity/go-pep440-version v0.0.0-20210121094942-22b2f8951d46
 	github.com/aquasecurity/go-version v0.0.0-20240603093900-cf8a8d29271d
-	github.com/aquasecurity/loading v0.0.5
 	github.com/aquasecurity/table v1.8.0
 	github.com/aquasecurity/testdocker v0.0.0-20240613070307-2c3868d658ac
 	github.com/aquasecurity/tml v0.6.1
-	github.com/aquasecurity/trivy-aws v0.9.1-0.20240607040622-8a7f09cd891f
-	github.com/aquasecurity/trivy-checks v0.11.0
+	github.com/aquasecurity/trivy-checks v0.13.0
 	github.com/aquasecurity/trivy-db v0.0.0-20231005141211-4fc651f7ac8d
 	github.com/aquasecurity/trivy-java-db v0.0.0-20240109071736-184bd7481d48
-	github.com/aquasecurity/trivy-kubernetes v0.6.7-0.20240516051533-4c5a4aad13b7
+	github.com/aquasecurity/trivy-kubernetes
v0.6.7-0.20240627095026-cf9d48837f6d github.com/aws/aws-sdk-go-v2 v1.27.2 github.com/aws/aws-sdk-go-v2/config v1.27.18 github.com/aws/aws-sdk-go-v2/credentials v1.17.18 - github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.16.24 github.com/aws/aws-sdk-go-v2/service/ec2 v1.163.1 github.com/aws/aws-sdk-go-v2/service/ecr v1.28.5 github.com/aws/aws-sdk-go-v2/service/s3 v1.55.1 - github.com/aws/aws-sdk-go-v2/service/sts v1.28.12 + github.com/aws/aws-sdk-go-v2/service/sts v1.28.12 // indirect github.com/aws/smithy-go v1.20.2 github.com/bitnami/go-version v0.0.0-20231130084017-bb00604d650c github.com/bmatcuk/doublestar/v4 v4.6.1 @@ -54,11 +51,11 @@ require ( github.com/go-openapi/strfmt v0.23.0 github.com/go-redis/redis/v8 v8.11.5 github.com/golang-jwt/jwt/v5 v5.2.1 - github.com/google/go-containerregistry v0.19.1 + github.com/google/go-containerregistry v0.19.2 github.com/google/licenseclassifier/v2 v2.0.0 github.com/google/uuid v1.6.0 github.com/google/wire v0.6.0 - github.com/hashicorp/go-getter v1.7.4 + github.com/hashicorp/go-getter v1.7.5 github.com/hashicorp/go-multierror v1.1.1 github.com/hashicorp/go-retryablehttp v0.7.7 github.com/hashicorp/go-uuid v1.0.3 @@ -119,7 +116,7 @@ require ( github.com/zclconf/go-cty-yaml v1.0.3 go.etcd.io/bbolt v1.3.10 golang.org/x/crypto v0.24.0 - golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa + golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa // indirect golang.org/x/mod v0.17.0 golang.org/x/net v0.26.0 golang.org/x/sync v0.7.0 @@ -129,7 +126,7 @@ require ( google.golang.org/protobuf v1.34.1 gopkg.in/yaml.v3 v3.0.1 helm.sh/helm/v3 v3.15.1 - k8s.io/api v0.30.1 + k8s.io/api v0.30.2 k8s.io/utils v0.0.0-20231127182322-b307cd553661 modernc.org/sqlite v1.30.0 sigs.k8s.io/yaml v1.4.0 @@ -170,52 +167,16 @@ require ( github.com/antchfx/xpath v1.3.0 // indirect github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect - github.com/aws/aws-sdk-go v1.53.0 // indirect - github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.2 // indirect + github.com/aws/aws-sdk-go v1.54.6 // indirect github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.5 // indirect github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.9 // indirect github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.9 // indirect github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 // indirect - github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.9 // indirect - github.com/aws/aws-sdk-go-v2/service/accessanalyzer v1.26.7 // indirect - github.com/aws/aws-sdk-go-v2/service/apigateway v1.21.6 // indirect - github.com/aws/aws-sdk-go-v2/service/apigatewayv2 v1.18.6 // indirect - github.com/aws/aws-sdk-go-v2/service/athena v1.37.3 // indirect - github.com/aws/aws-sdk-go-v2/service/cloudfront v1.36.4 // indirect - github.com/aws/aws-sdk-go-v2/service/cloudtrail v1.35.6 // indirect - github.com/aws/aws-sdk-go-v2/service/cloudwatch v1.32.2 // indirect - github.com/aws/aws-sdk-go-v2/service/cloudwatchlogs v1.30.1 // indirect - github.com/aws/aws-sdk-go-v2/service/codebuild v1.26.5 // indirect - github.com/aws/aws-sdk-go-v2/service/docdb v1.34.4 // indirect - 
github.com/aws/aws-sdk-go-v2/service/dynamodb v1.26.8 // indirect github.com/aws/aws-sdk-go-v2/service/ebs v1.21.7 // indirect - github.com/aws/aws-sdk-go-v2/service/ecs v1.35.6 // indirect - github.com/aws/aws-sdk-go-v2/service/efs v1.28.1 // indirect - github.com/aws/aws-sdk-go-v2/service/eks v1.41.0 // indirect - github.com/aws/aws-sdk-go-v2/service/elasticache v1.34.6 // indirect - github.com/aws/aws-sdk-go-v2/service/elasticloadbalancingv2 v1.26.6 // indirect - github.com/aws/aws-sdk-go-v2/service/elasticsearchservice v1.25.0 // indirect - github.com/aws/aws-sdk-go-v2/service/emr v1.36.0 // indirect - github.com/aws/aws-sdk-go-v2/service/iam v1.28.7 // indirect github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.11 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/endpoint-discovery v1.8.11 // indirect github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.11 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.9 // indirect - github.com/aws/aws-sdk-go-v2/service/kafka v1.28.5 // indirect - github.com/aws/aws-sdk-go-v2/service/kinesis v1.24.6 // indirect - github.com/aws/aws-sdk-go-v2/service/kms v1.32.1 // indirect - github.com/aws/aws-sdk-go-v2/service/lambda v1.49.6 // indirect - github.com/aws/aws-sdk-go-v2/service/mq v1.20.6 // indirect - github.com/aws/aws-sdk-go-v2/service/neptune v1.28.1 // indirect - github.com/aws/aws-sdk-go-v2/service/rds v1.66.1 // indirect - github.com/aws/aws-sdk-go-v2/service/redshift v1.39.7 // indirect - github.com/aws/aws-sdk-go-v2/service/secretsmanager v1.26.0 // indirect - github.com/aws/aws-sdk-go-v2/service/sns v1.26.6 // indirect - github.com/aws/aws-sdk-go-v2/service/sqs v1.29.6 // indirect github.com/aws/aws-sdk-go-v2/service/sso v1.20.11 // indirect github.com/aws/aws-sdk-go-v2/service/ssooidc v1.24.5 // indirect - github.com/aws/aws-sdk-go-v2/service/workspaces v1.38.1 // indirect github.com/beorn7/perks v1.0.1 // indirect github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d // indirect github.com/briandowns/spinner v1.23.0 // indirect @@ -244,6 +205,7 @@ require ( github.com/docker/go-metrics v0.0.1 // indirect github.com/docker/go-units v0.5.0 // indirect github.com/docker/libtrust v0.0.0-20160708172513-aabc10ec26b7 // indirect + github.com/dsnet/compress v0.0.1 // indirect github.com/dustin/go-humanize v1.0.1 // indirect github.com/emicklei/go-restful/v3 v3.11.0 // indirect github.com/emirpasic/gods v1.18.1 // indirect @@ -287,6 +249,7 @@ require ( github.com/gorilla/websocket v1.5.0 // indirect github.com/gosuri/uitable v0.0.4 // indirect github.com/gregjones/httpcache v0.0.0-20190611155906-901d90724c79 // indirect + github.com/grpc-ecosystem/grpc-gateway/v2 v2.20.0 // indirect github.com/hashicorp/errwrap v1.1.0 // indirect github.com/hashicorp/go-cleanhttp v0.5.2 // indirect github.com/hashicorp/go-safetemp v1.0.0 // indirect @@ -383,10 +346,10 @@ require ( go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 // indirect go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.52.0 // indirect 
go.opentelemetry.io/otel v1.27.0 // indirect + go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.27.0 // indirect go.opentelemetry.io/otel/metric v1.27.0 // indirect go.opentelemetry.io/otel/sdk v1.27.0 // indirect go.opentelemetry.io/otel/trace v1.27.0 // indirect - go.opentelemetry.io/proto/otlp v1.2.0 // indirect go.starlark.net v0.0.0-20230525235612-a134d8f9ddca // indirect go.uber.org/multierr v1.11.0 // indirect go.uber.org/zap v1.27.0 // indirect @@ -405,14 +368,14 @@ require ( gopkg.in/warnings.v0 v0.1.2 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect k8s.io/apiextensions-apiserver v0.30.0 // indirect - k8s.io/apimachinery v0.30.1 // indirect + k8s.io/apimachinery v0.30.2 // indirect k8s.io/apiserver v0.30.0 // indirect - k8s.io/cli-runtime v0.30.0 // indirect - k8s.io/client-go v0.30.0 // indirect - k8s.io/component-base v0.30.0 // indirect + k8s.io/cli-runtime v0.30.2 // indirect + k8s.io/client-go v0.30.2 // indirect + k8s.io/component-base v0.30.1 // indirect k8s.io/klog/v2 v2.120.1 // indirect k8s.io/kube-openapi v0.0.0-20240228011516-70dd3763d340 // indirect - k8s.io/kubectl v0.30.0 // indirect + k8s.io/kubectl v0.30.1 // indirect modernc.org/gc/v3 v3.0.0-20240107210532-573471604cb6 // indirect modernc.org/libc v1.50.9 // indirect modernc.org/mathutil v1.6.0 // indirect diff --git a/go.sum b/go.sum index d47b409f6090..d5318916faeb 100644 --- a/go.sum +++ b/go.sum @@ -755,8 +755,6 @@ github.com/aquasecurity/bolt-fixtures v0.0.0-20200903104109-d34e7f983986 h1:2a30 github.com/aquasecurity/bolt-fixtures v0.0.0-20200903104109-d34e7f983986/go.mod h1:NT+jyeCzXk6vXR5MTkdn4z64TgGfE5HMLC8qfj5unl8= github.com/aquasecurity/go-gem-version v0.0.0-20201115065557-8eed6fe000ce h1:QgBRgJvtEOBtUXilDb1MLi1p1MWoyFDXAu5DEUl5nwM= github.com/aquasecurity/go-gem-version v0.0.0-20201115065557-8eed6fe000ce/go.mod h1:HXgVzOPvXhVGLJs4ZKO817idqr/xhwsTcj17CLYY74s= -github.com/aquasecurity/go-mock-aws v0.0.0-20240523055201-a4152219967f h1:NRq3oUfkheKgoYPjNUApUtClKaBRcc6KzdcBHqZPrAM= -github.com/aquasecurity/go-mock-aws v0.0.0-20240523055201-a4152219967f/go.mod h1:95xczqqItx1yPSrYG2SQM2gi2lqoYG9i3pLsYKSTpgI= github.com/aquasecurity/go-npm-version v0.0.0-20201110091526-0b796d180798 h1:eveqE9ivrt30CJ7dOajOfBavhZ4zPqHcZe/4tKp0alc= github.com/aquasecurity/go-npm-version v0.0.0-20201110091526-0b796d180798/go.mod h1:hxbJZtKlO4P8sZ9nztizR6XLoE33O+BkPmuYQ4ACyz0= github.com/aquasecurity/go-pep440-version v0.0.0-20210121094942-22b2f8951d46 h1:vmXNl+HDfqqXgr0uY1UgK1GAhps8nbAAtqHNBcgyf+4= @@ -765,24 +763,20 @@ github.com/aquasecurity/go-version v0.0.0-20201107203531-5e48ac5d022a/go.mod h1: github.com/aquasecurity/go-version v0.0.0-20210121072130-637058cfe492/go.mod h1:9Beu8XsUNNfzml7WBf3QmyPToP1wm1Gj/Vc5UJKqTzU= github.com/aquasecurity/go-version v0.0.0-20240603093900-cf8a8d29271d h1:4zour5Sh9chOg+IqIinIcJ3qtr3cIf8FdFY6aArlXBw= github.com/aquasecurity/go-version v0.0.0-20240603093900-cf8a8d29271d/go.mod h1:1cPOp4BaQZ1G2F5fnw4dFz6pkOyXJI9KTuak8ghIl3U= -github.com/aquasecurity/loading v0.0.5 h1:2iq02sPSSMU+ULFPmk0v0lXnK/eZ2e0dRAj/Dl5TvuM= -github.com/aquasecurity/loading v0.0.5/go.mod h1:NSHeeq1JTDTFuXAe87q4yQ2DX57pXiaQMqq8Zm9HCJA= github.com/aquasecurity/table v1.8.0 h1:9ntpSwrUfjrM6/YviArlx/ZBGd6ix8W+MtojQcM7tv0= github.com/aquasecurity/table v1.8.0/go.mod h1:eqOmvjjB7AhXFgFqpJUEE/ietg7RrMSJZXyTN8E/wZw= github.com/aquasecurity/testdocker v0.0.0-20240613070307-2c3868d658ac 
h1:dy7xjLOAAeCNycqJ3kws4vDFGm8WdeCovkHXf2um5uA= github.com/aquasecurity/testdocker v0.0.0-20240613070307-2c3868d658ac/go.mod h1:nyavBQqxtIkQh99lQE1ssup3i2uIq1+giL7tOSHapYk= github.com/aquasecurity/tml v0.6.1 h1:y2ZlGSfrhnn7t4ZJ/0rotuH+v5Jgv6BDDO5jB6A9gwo= github.com/aquasecurity/tml v0.6.1/go.mod h1:OnYMWY5lvI9ejU7yH9LCberWaaTBW7hBFsITiIMY2yY= -github.com/aquasecurity/trivy-aws v0.9.1-0.20240607040622-8a7f09cd891f h1:LS8Xb8Lb0mosGay+hk7hkt8jVc+L8msTdjJCU+ICcS8= -github.com/aquasecurity/trivy-aws v0.9.1-0.20240607040622-8a7f09cd891f/go.mod h1:pfwElhU8kilUmgib1xBw91ZBPJya6EZ1unwvqC0ijh4= -github.com/aquasecurity/trivy-checks v0.11.0 h1:hS5gSQyuyIITrY/kCY2AWQMUSwXLpdtbHDPaCs6eSaI= -github.com/aquasecurity/trivy-checks v0.11.0/go.mod h1:IAK3eHcKNxIHo/ckxKoHsXmEpUG45/38grW5bBjL9lw= +github.com/aquasecurity/trivy-checks v0.13.0 h1:na6PTdY4U0uK/fjz3HNRYBxvYSJ8vgTb57a5T8Y5t9w= +github.com/aquasecurity/trivy-checks v0.13.0/go.mod h1:Xec/SMVGV66I7RgUqOX9MEr+YxBqHXDVLTYmpspPi3E= github.com/aquasecurity/trivy-db v0.0.0-20231005141211-4fc651f7ac8d h1:fjI9mkoTUAkbGqpzt9nJsO24RAdfG+ZSiLFj0G2jO8c= github.com/aquasecurity/trivy-db v0.0.0-20231005141211-4fc651f7ac8d/go.mod h1:cj9/QmD9N3OZnKQMp+/DvdV+ym3HyIkd4e+F0ZM3ZGs= github.com/aquasecurity/trivy-java-db v0.0.0-20240109071736-184bd7481d48 h1:JVgBIuIYbwG+ekC5lUHUpGJboPYiCcxiz06RCtz8neI= github.com/aquasecurity/trivy-java-db v0.0.0-20240109071736-184bd7481d48/go.mod h1:Ldya37FLi0e/5Cjq2T5Bty7cFkzUDwTcPeQua+2M8i8= -github.com/aquasecurity/trivy-kubernetes v0.6.7-0.20240516051533-4c5a4aad13b7 h1:bLmh/xuC/7abvt9S/xnODTQRu8fW6BhFHS6Cmbn0RNU= -github.com/aquasecurity/trivy-kubernetes v0.6.7-0.20240516051533-4c5a4aad13b7/go.mod h1:HSpAJE8Y5Cjjg0Aw/0lqd3vMihN/FxBEj/f/7yDi/Uc= +github.com/aquasecurity/trivy-kubernetes v0.6.7-0.20240627095026-cf9d48837f6d h1:z5Ug+gqNjgHzCo7rmv6wKTmyJ8E3bAVEU2AASo3740s= +github.com/aquasecurity/trivy-kubernetes v0.6.7-0.20240627095026-cf9d48837f6d/go.mod h1:HOhrqoyIeTxpwnKr1EyWtQ+rt2XahV8b0UDBrRpSfEQ= github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig+0+Ap1h4unLjW6YQJpKZVmUzxsD4E/Q= github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE= github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= @@ -793,114 +787,40 @@ github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3d github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= github.com/aws/aws-sdk-go v1.15.11/go.mod h1:mFuSZ37Z9YOHbQEwBWztmVzqXrEkub65tZoCYDt7FT0= github.com/aws/aws-sdk-go v1.44.122/go.mod h1:y4AeaBuwd2Lk+GepC1E9v0qOiTws0MIWAX4oIKwKHZo= -github.com/aws/aws-sdk-go v1.53.0 h1:MMo1x1ggPPxDfHMXJnQudTbGXYlD4UigUAud1DJxPVo= -github.com/aws/aws-sdk-go v1.53.0/go.mod h1:LF8svs817+Nz+DmiMQKTO3ubZ/6IaTpq3TjupRn3Eqk= +github.com/aws/aws-sdk-go v1.54.6 h1:HEYUib3yTt8E6vxjMWM3yAq5b+qjj/6aKA62mkgux9g= +github.com/aws/aws-sdk-go v1.54.6/go.mod h1:eRwEWoyTWFMVYVQzKMNHWP5/RV4xIUGMQfXQHfHkpNU= github.com/aws/aws-sdk-go-v2 v1.27.2 h1:pLsTXqX93rimAOZG2FIYraDQstZaaGVVN4tNw65v0h8= github.com/aws/aws-sdk-go-v2 v1.27.2/go.mod h1:ffIFB97e2yNsv4aTSGkqtHnppsIJzw7G7BReUZ3jCXM= 
-github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.2 h1:x6xsQXGSmW6frevwDA+vi/wqhp1ct18mVXYN08/93to= -github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.2/go.mod h1:lPprDr1e6cJdyYeGXnRaJoP4Md+cDBvi2eOj00BlGmg= github.com/aws/aws-sdk-go-v2/config v1.27.18 h1:wFvAnwOKKe7QAyIxziwSKjmer9JBMH1vzIL6W+fYuKk= github.com/aws/aws-sdk-go-v2/config v1.27.18/go.mod h1:0xz6cgdX55+kmppvPm2IaKzIXOheGJhAufacPJaXZ7c= github.com/aws/aws-sdk-go-v2/credentials v1.17.18 h1:D/ALDWqK4JdY3OFgA2thcPO1c9aYTT5STS/CvnkqY1c= github.com/aws/aws-sdk-go-v2/credentials v1.17.18/go.mod h1:JuitCWq+F5QGUrmMPsk945rop6bB57jdscu+Glozdnc= github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.5 h1:dDgptDO9dxeFkXy+tEgVkzSClHZje/6JkPW5aZyEvrQ= github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.5/go.mod h1:gjvE2KBUgUQhcv89jqxrIxH9GaKs1JbZzWejj/DaHGA= -github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.16.24 h1:FzNwpVTZDCvm597Ty6mGYvxTolyC1oup0waaKntZI4E= -github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.16.24/go.mod h1:wM9NElT/Wn6n3CT1eyVcXtfCy8lSVjjQXfdawQbSShc= github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.9 h1:cy8ahBJuhtM8GTTSyOkfy6WVPV1IE+SS5/wfXUYuulw= github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.9/go.mod h1:CZBXGLaJnEZI6EVNcPd7a6B5IC5cA/GkRWtu9fp3S6Y= github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.9 h1:A4SYk07ef04+vxZToz9LWvAXl9LW0NClpPpMsi31cz0= github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.9/go.mod h1:5jJcHuwDagxN+ErjQ3PU3ocf6Ylc/p9x+BLO/+X4iXw= github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 h1:hT8rVHwugYE2lEfdFE0QWVo81lF7jMrYJVDWI+f+VxU= github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0/go.mod h1:8tu/lYfQfFe6IGnaOdrpVgEL2IrrDOf6/m9RQum4NkY= -github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.9 h1:vHyZxoLVOgrI8GqX7OMHLXp4YYoxeEsrjweXKpye+ds= -github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.9/go.mod h1:z9VXZsWA2BvZNH1dT0ToUYwMu/CR9Skkj/TBX+mceZw= -github.com/aws/aws-sdk-go-v2/service/accessanalyzer v1.26.7 h1:rLdKcienXrk+JFX1+DZg160ebG8lIF2nFvnEZL7dnII= -github.com/aws/aws-sdk-go-v2/service/accessanalyzer v1.26.7/go.mod h1:cwqaWBOZXu8pqEE1ZC4Sw2ycZLjwKrRP5tOAJFgCbYc= -github.com/aws/aws-sdk-go-v2/service/apigateway v1.21.6 h1:ePPaOVn92r5n8Neecdpy93hDmR0PBH6H6b7VQCE5vKE= -github.com/aws/aws-sdk-go-v2/service/apigateway v1.21.6/go.mod h1:P/zwE9uiC6eK/kL3CS60lxTTVC2zAvaS4iW31io41V4= -github.com/aws/aws-sdk-go-v2/service/apigatewayv2 v1.18.6 h1:bCdxKjM8DpkNJXnOLVx+Hnav0eM4yJK8kof56VvIjMc= -github.com/aws/aws-sdk-go-v2/service/apigatewayv2 v1.18.6/go.mod h1:zQ6tOYz7oGI7MbLRDBXfo63puDoTroVcVNXWfmRDA1E= -github.com/aws/aws-sdk-go-v2/service/athena v1.37.3 h1:qNLkDi/rOaauOuh33a4MNZjyfxvwIgC5qsDiHPvjDk0= -github.com/aws/aws-sdk-go-v2/service/athena v1.37.3/go.mod h1:MlpC6swcjh1Il80u6XoeY2BTHIZRZWvoXOfaq3rfh8I= -github.com/aws/aws-sdk-go-v2/service/cloudfront v1.36.4 h1:8qjQzwztUVdFJi/wrhPXxRgSbyAKDsnJuduHaw+yP30= -github.com/aws/aws-sdk-go-v2/service/cloudfront v1.36.4/go.mod h1:lHdM6itntBCcjvqxEHDoHkXRicwgY9aoPRptXuMdbgk= -github.com/aws/aws-sdk-go-v2/service/cloudtrail v1.35.6 h1:Yc+avPLGARzp4A9Oi9VRxvlcGqI+0MYIg4tPSupKv2U= -github.com/aws/aws-sdk-go-v2/service/cloudtrail v1.35.6/go.mod h1:zrqdG1b+4AGoTwTMVFzvzY7ARB3GPo4gKRuK8WPEo8w= -github.com/aws/aws-sdk-go-v2/service/cloudwatch v1.32.2 
h1:vQfCIHSDouEvbE4EuDrlCGKcrtABEqF3cMt61nGEV4g= -github.com/aws/aws-sdk-go-v2/service/cloudwatch v1.32.2/go.mod h1:3ToKMEhVj+Q+HzZ8Hqin6LdAKtsi3zVXVNUPpQMd+Xk= -github.com/aws/aws-sdk-go-v2/service/cloudwatchlogs v1.30.1 h1:ZMgx58Tqyr8kTSR9zLzX+W933ujDYleOtFedvn0xHg8= -github.com/aws/aws-sdk-go-v2/service/cloudwatchlogs v1.30.1/go.mod h1:4Oeb7n2r/ApBIHphQkprve380p/RpPWBotumd44EDGg= -github.com/aws/aws-sdk-go-v2/service/codebuild v1.26.5 h1:EPnlDd4V2EXywlOPAw/pMUW4PHUgSulKm4zXFU6bixE= -github.com/aws/aws-sdk-go-v2/service/codebuild v1.26.5/go.mod h1:G2JUWf01sbb5/A8qGcM4dqy4nbl4y4IGWmaCDWAvA2Y= -github.com/aws/aws-sdk-go-v2/service/docdb v1.34.4 h1:0hvzmeEwiNthBmi2mpTnZgqFCKUxKoLWaQYzulEnqk4= -github.com/aws/aws-sdk-go-v2/service/docdb v1.34.4/go.mod h1:KSNSbXXGchzkLYCDwq9H9ZfPs2zn0SIVgs7LXsfPlRQ= -github.com/aws/aws-sdk-go-v2/service/dynamodb v1.26.8 h1:XKO0BswTDeZMLDBd/b5pCEZGttNXrzRUVtFvp2Ak/Vo= -github.com/aws/aws-sdk-go-v2/service/dynamodb v1.26.8/go.mod h1:N5tqZcYMM0N1PN7UQYJNWuGyO886OfnMhf/3MAbqMcI= github.com/aws/aws-sdk-go-v2/service/ebs v1.21.7 h1:CRzzXjmgx9p362yO39D6hbZULdMI23gaKqSxijJCXHM= github.com/aws/aws-sdk-go-v2/service/ebs v1.21.7/go.mod h1:wnsHqpi3RgDwklS5SPHUgjcUUpontGPKJ+GJYOdV7pY= github.com/aws/aws-sdk-go-v2/service/ec2 v1.163.1 h1:0RiDkJO1veM6/FQ+GJcGiIhZgPwXlscX29B0zFE4Ulo= github.com/aws/aws-sdk-go-v2/service/ec2 v1.163.1/go.mod h1:gYk1NtyvkH1SxPcndDtfro3lwbiE5t0tW4eRki5YnOQ= github.com/aws/aws-sdk-go-v2/service/ecr v1.28.5 h1:dvvTFXpWSv9+8lTNPl1EPNZL6BCUV6MgVckEMvXaOgk= github.com/aws/aws-sdk-go-v2/service/ecr v1.28.5/go.mod h1:Ogt6AOZ/sPBlJZpVFJgOK+jGGREuo8DMjNg+O/7gpjI= -github.com/aws/aws-sdk-go-v2/service/ecs v1.35.6 h1:Sc2mLjyA1R8z2l705AN7Wr7QOlnUxVnGPJeDIVyUSrs= -github.com/aws/aws-sdk-go-v2/service/ecs v1.35.6/go.mod h1:LzHcyOEvaLjbc5e+fP/KmPWBr+h/Ef+EHvnf1Pzo368= -github.com/aws/aws-sdk-go-v2/service/efs v1.28.1 h1:dKtJBzCIew4/VDsYgrx6v140cIpQVoe93kCNniYATtE= -github.com/aws/aws-sdk-go-v2/service/efs v1.28.1/go.mod h1:ha+/WvylFi6dkfF2xfPekJWCNLGuD5PWIFrRRMz3psc= -github.com/aws/aws-sdk-go-v2/service/eks v1.41.0 h1:/bitqsA6wgIS2vgjtHJi1JG3SOTbobs1mCdeJBLOacY= -github.com/aws/aws-sdk-go-v2/service/eks v1.41.0/go.mod h1:GFqWNwDLyuSevADun69Dg5aurANpv8KNrz2vxYPEqmw= -github.com/aws/aws-sdk-go-v2/service/elasticache v1.34.6 h1:Y/5eE9Sc+OBID9pZ4EVFzyQviv1d1RbqB17HRur9ySg= -github.com/aws/aws-sdk-go-v2/service/elasticache v1.34.6/go.mod h1:iPx2i26hgUULkNh1Jk4QzYzzQKd2nXl/rD9Fm5hQ2uk= -github.com/aws/aws-sdk-go-v2/service/elasticloadbalancingv2 v1.26.6 h1:twI2uRmpbm0KBog3Ay61IqOtNp6+QxKfSA78zftME/o= -github.com/aws/aws-sdk-go-v2/service/elasticloadbalancingv2 v1.26.6/go.mod h1:Tpt4kC8x1HfYuh2rG/6yXZrxjABETERrUl9IdA/IS98= -github.com/aws/aws-sdk-go-v2/service/elasticsearchservice v1.25.0 h1:LPEsYRsC6r3edPHO8KlZJNW0xxyfLHMXJ466MdHuBbQ= -github.com/aws/aws-sdk-go-v2/service/elasticsearchservice v1.25.0/go.mod h1:CAXUsQvYQVzsXO36npqK3aUlxx2xMSM1Dun3O9jnaEE= -github.com/aws/aws-sdk-go-v2/service/emr v1.36.0 h1:FdeZ7AYOvyL09KH250Ncz4LF4SB1Vo9l7KZzn/LIrgQ= -github.com/aws/aws-sdk-go-v2/service/emr v1.36.0/go.mod h1:Drh6y2qLaw/wnDKTIcdqM2m358MIRXsZ2Bj2tjhVLq0= -github.com/aws/aws-sdk-go-v2/service/iam v1.28.7 h1:FKPRDYZOO0Eur19vWUL1B40Op0j89KQj3kARjrszMK8= -github.com/aws/aws-sdk-go-v2/service/iam v1.28.7/go.mod 
h1:YzMYyQ7S4twfYzLjwP24G1RAxypozVZeNaG1r2jxRms= github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2 h1:Ji0DY1xUsUr3I8cHps0G+XM3WWU16lP6yG8qu1GAZAs= github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2/go.mod h1:5CsjAbs3NlGQyZNFACh+zztPDI7fU6eW9QsxjfnuBKg= -github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.11 h1:4vt9Sspk59EZyHCAEMaktHKiq0C09noRTQorXD/qV+s= -github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.11/go.mod h1:5jHR79Tv+Ccq6rwYh+W7Nptmw++WiFafMfR42XhwNl8= -github.com/aws/aws-sdk-go-v2/service/internal/endpoint-discovery v1.8.11 h1:e9AVb17H4x5FTE5KWIP5M1Du+9M86pS+Hw0lBUdN8EY= -github.com/aws/aws-sdk-go-v2/service/internal/endpoint-discovery v1.8.11/go.mod h1:B90ZQJa36xo0ph9HsoteI1+r8owgQH/U1QNfqZQkj1Q= github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.11 h1:o4T+fKxA3gTMcluBNZZXE9DNaMkJuUL1O3mffCUjoJo= github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.11/go.mod h1:84oZdJ+VjuJKs9v1UTC9NaodRZRseOXCTgku+vQJWR8= -github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.9 h1:TE2i0A9ErH1YfRSvXfCr2SQwfnqsoJT9nPQ9kj0lkxM= -github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.9/go.mod h1:9TzXX3MehQNGPwCZ3ka4CpwQsoAMWSF48/b+De9rfVM= -github.com/aws/aws-sdk-go-v2/service/kafka v1.28.5 h1:yCkyZDGahaCaAkdpVx8Te05t6eW2FarBLunVC8S23nU= -github.com/aws/aws-sdk-go-v2/service/kafka v1.28.5/go.mod h1:/KmX+vXMPJGAB56reo95tnsXa6QPNx6qli4L1AmYb7E= -github.com/aws/aws-sdk-go-v2/service/kinesis v1.24.6 h1:FO/aIHk86VePDUh/3Q/A5pnvu45miO1GZB8rIq2BUlA= -github.com/aws/aws-sdk-go-v2/service/kinesis v1.24.6/go.mod h1:Sj7qc+P/GOGOPMDn8+B7Cs+WPq1Gk+R6CXRXVhZtWcA= -github.com/aws/aws-sdk-go-v2/service/kms v1.32.1 h1:FARrQLRQXpCFYylIUVF1dRij6YbPCmtwudq9NBk4kFc= -github.com/aws/aws-sdk-go-v2/service/kms v1.32.1/go.mod h1:8lETO9lelSG2B6KMXFh2OwPPqGV6WQM3RqLAEjP1xaU= -github.com/aws/aws-sdk-go-v2/service/lambda v1.49.6 h1:w8lI9zlVwRTL9f4KB9fRThddhRivv+EQQzv2nU8JDQo= -github.com/aws/aws-sdk-go-v2/service/lambda v1.49.6/go.mod h1:0V5z1X/8NA9eQ5cZSz5ZaHU8xA/hId2ZAlsHeO7Jrdk= -github.com/aws/aws-sdk-go-v2/service/mq v1.20.6 h1:n86T5yw0kS6a5nbpkEpDzLPCBXXb35lx3iDkmQWlizA= -github.com/aws/aws-sdk-go-v2/service/mq v1.20.6/go.mod h1:phfKOOpMQhlBv2KE8gF17P82zLcSedA9b7fMSGTLBdQ= -github.com/aws/aws-sdk-go-v2/service/neptune v1.28.1 h1:e+DGEARs5GfHuzDwztENiomdLa0sjs55ub27juoFdt0= -github.com/aws/aws-sdk-go-v2/service/neptune v1.28.1/go.mod h1:jHUFaho5cVpplTDO6bctuLbvnm8F+Xd27RGIJvVTlYI= -github.com/aws/aws-sdk-go-v2/service/rds v1.66.1 h1:TafjIpDW/+l7s+f3EIONaFsNvNfwVH21NkWYrE0hbEE= -github.com/aws/aws-sdk-go-v2/service/rds v1.66.1/go.mod h1:MYzRMSdY70kcS8AFg0aHmk/xj6VAe0UfaCCoLrBWPow= -github.com/aws/aws-sdk-go-v2/service/redshift v1.39.7 h1:k4WaqQ7LHSGrSftCRXTRLv7WaozXu+fZ1jdisQSR2eU= -github.com/aws/aws-sdk-go-v2/service/redshift v1.39.7/go.mod h1:8hU0Ax6q6QA+jrMcWTE0A4YH594MQoWP3EzGO3GH5Dw= github.com/aws/aws-sdk-go-v2/service/s3 v1.55.1 h1:UAxBuh0/8sFJk1qOkvOKewP5sWeWaTPDknbQz0ZkDm0= github.com/aws/aws-sdk-go-v2/service/s3 v1.55.1/go.mod h1:hWjsYGjVuqCgfoveVcVFPXIWgz0aByzwaxKlN1StKcM= -github.com/aws/aws-sdk-go-v2/service/secretsmanager v1.26.0 h1:dPCRgAL4WD9tSMaDglRNGOiAtSTjkwNiUW5GDpWFfHA= -github.com/aws/aws-sdk-go-v2/service/secretsmanager v1.26.0/go.mod 
h1:4Ae1NCLK6ghmjzd45Tc33GgCKhUWD2ORAlULtMO1Cbs= -github.com/aws/aws-sdk-go-v2/service/sns v1.26.6 h1:w2YwF8889ardGU3Y0qZbJ4Zzh+Q/QqKZ4kwkK7JFvnI= -github.com/aws/aws-sdk-go-v2/service/sns v1.26.6/go.mod h1:IrcbquqMupzndZ20BXxDxjM7XenTRhbwBOetk4+Z5oc= -github.com/aws/aws-sdk-go-v2/service/sqs v1.29.6 h1:UdbDTllc7cmusTTMy1dcTrYKRl4utDEsmKh9ZjvhJCc= -github.com/aws/aws-sdk-go-v2/service/sqs v1.29.6/go.mod h1:mCUv04gd/7g+/HNzDB4X6dzJuygji0ckvB3Lg/TdG5Y= github.com/aws/aws-sdk-go-v2/service/sso v1.20.11 h1:gEYM2GSpr4YNWc6hCd5nod4+d4kd9vWIAWrmGuLdlMw= github.com/aws/aws-sdk-go-v2/service/sso v1.20.11/go.mod h1:gVvwPdPNYehHSP9Rs7q27U1EU+3Or2ZpXvzAYJNh63w= github.com/aws/aws-sdk-go-v2/service/ssooidc v1.24.5 h1:iXjh3uaH3vsVcnyZX7MqCoCfcyxIrVE9iOQruRaWPrQ= github.com/aws/aws-sdk-go-v2/service/ssooidc v1.24.5/go.mod h1:5ZXesEuy/QcO0WUnt+4sDkxhdXRHTu2yG0uCSH8B6os= github.com/aws/aws-sdk-go-v2/service/sts v1.28.12 h1:M/1u4HBpwLuMtjlxuI2y6HoVLzF5e2mfxHCg7ZVMYmk= github.com/aws/aws-sdk-go-v2/service/sts v1.28.12/go.mod h1:kcfd+eTdEi/40FIbLq4Hif3XMXnl5b/+t/KTfLt9xIk= -github.com/aws/aws-sdk-go-v2/service/workspaces v1.38.1 h1:pqxn3fcZDgWmo8GMUjlxVBdakcGo0AeUb7mjX33pJIQ= -github.com/aws/aws-sdk-go-v2/service/workspaces v1.38.1/go.mod h1:kP5rUlnqfno/obflnKX4KMBWkoVHLDI8oCka9U0opRo= github.com/aws/smithy-go v1.20.2 h1:tbp628ireGtzcHDDmLT/6ADHidqnwgF57XOXZe6tp4Q= github.com/aws/smithy-go v1.20.2/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E= github.com/beorn7/perks v0.0.0-20160804104726-4c0e84591b9a/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= @@ -1171,6 +1091,9 @@ github.com/docker/libtrust v0.0.0-20160708172513-aabc10ec26b7 h1:UhxFibDNY/bfvqU github.com/docker/libtrust v0.0.0-20160708172513-aabc10ec26b7/go.mod h1:cyGadeNEkKy96OOhEzfZl+yxihPEzKnqJwvfuSUqbZE= github.com/docker/spdystream v0.0.0-20160310174837-449fdfce4d96/go.mod h1:Qh8CwZgvJUkLughtfhJv5dyTYa91l1fOUCrgjqmcifM= github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE= +github.com/dsnet/compress v0.0.1 h1:PlZu0n3Tuv04TzpfPbrnI0HW/YwodEXDS+oPKahKF0Q= +github.com/dsnet/compress v0.0.1/go.mod h1:Aw8dCMJ7RioblQeTqt88akK31OvO8Dhf5JflhBbQEHo= +github.com/dsnet/golib v0.0.0-20171103203638-1ea166775780/go.mod h1:Lj+Z9rebOhdfkVLjJ8T6VcRQv3SXugXy999NBtR9aFY= github.com/dustin/go-humanize v0.0.0-20171111073723-bb3d318650d4/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= @@ -1426,8 +1349,9 @@ github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeN github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/google/go-containerregistry v0.19.1 h1:yMQ62Al6/V0Z7CqIrrS1iYoA5/oQCm88DeNujc7C1KY= github.com/google/go-containerregistry v0.19.1/go.mod h1:YCMFNQeeXeLF+dnhhWkqDItx/JSkH01j1Kis4PsjzFI= +github.com/google/go-containerregistry v0.19.2 h1:TannFKE1QSajsP6hPWb5oJNgKe1IKjHukIKDUmvsV6w= 
+github.com/google/go-containerregistry v0.19.2/go.mod h1:YCMFNQeeXeLF+dnhhWkqDItx/JSkH01j1Kis4PsjzFI= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/gofuzz v1.1.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= @@ -1521,7 +1445,6 @@ github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= -github.com/grpc-ecosystem/grpc-gateway v1.16.0 h1:gmcG1KaJ57LophUzW0Hy8NmPhnMZb4M0+kPpLofRdBo= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= github.com/grpc-ecosystem/grpc-gateway/v2 v2.7.0/go.mod h1:hgWBS7lorOAVIJEQMi4ZsPv9hVvWI6+ch50m39Pf2Ks= github.com/grpc-ecosystem/grpc-gateway/v2 v2.11.3/go.mod h1:o//XUCC/F+yRGJoPO/VU0GSB0f8Nhgmxx0VIRUvaC0w= @@ -1533,8 +1456,8 @@ github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= -github.com/hashicorp/go-getter v1.7.4 h1:3yQjWuxICvSpYwqSayAdKRFcvBl1y/vogCxczWSmix0= -github.com/hashicorp/go-getter v1.7.4/go.mod h1:W7TalhMmbPmsSMdNjD0ZskARur/9GJ17cfHTRtXV744= +github.com/hashicorp/go-getter v1.7.5 h1:dT58k9hQ/vbxNMwoI5+xFYAJuv6152UNvdHokfI5wE4= +github.com/hashicorp/go-getter v1.7.5/go.mod h1:W7TalhMmbPmsSMdNjD0ZskARur/9GJ17cfHTRtXV744= github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k= github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= github.com/hashicorp/go-multierror v0.0.0-20161216184304-ed905158d874/go.mod h1:JMRHfdO9jKNzS/+BTlxCjKNQHg/jZAft8U7LloJvN7I= @@ -1619,6 +1542,7 @@ github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQL github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/klauspost/asmfmt v1.3.2/go.mod h1:AG8TuvYojzulgDAMCnYn50l/5QV3Bs/tp6j0HLHbNSE= +github.com/klauspost/compress v1.4.1/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= github.com/klauspost/compress v1.11.3/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/compress v1.11.13/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/compress v1.15.9/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU= @@ -1627,6 +1551,7 @@ github.com/klauspost/compress v1.16.0/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQs github.com/klauspost/compress v1.16.5/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= github.com/klauspost/compress v1.17.7 h1:ehO88t2UGzQK66LMdE8tibEd1ErmzZjNEqWkjLAKQQg= github.com/klauspost/compress 
v1.17.7/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= +github.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= github.com/knqyf263/go-apk-version v0.0.0-20200609155635-041fdbb8563f h1:GvCU5GXhHq+7LeOzx/haG7HSIZokl3/0GkoUFzsRJjg= github.com/knqyf263/go-apk-version v0.0.0-20200609155635-041fdbb8563f/go.mod h1:q59u9px8b7UTj0nIjEjvmTWekazka6xIt6Uogz5Dm+8= @@ -2102,6 +2027,7 @@ github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1 github.com/twitchtv/twirp v8.1.3+incompatible h1:+F4TdErPgSUbMZMwp13Q/KgDVuI7HJXP61mNV3/7iuU= github.com/twitchtv/twirp v8.1.3+incompatible/go.mod h1:RRJoFSAmTEh2weEqWtpPE3vFK5YBhA6bqp2l1kfCC5A= github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc= +github.com/ulikunitz/xz v0.5.6/go.mod h1:2bypXElzHzzJZwzH67Y6wb67pO62Rzfn7BSiF4ABRW8= github.com/ulikunitz/xz v0.5.10/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= github.com/ulikunitz/xz v0.5.11 h1:kpFauv27b6ynzBNT/Xy+1k+fK4WswhN/6PN5WhFAGw8= github.com/ulikunitz/xz v0.5.11/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= @@ -3060,32 +2986,32 @@ honnef.co/go/tools v0.1.3/go.mod h1:NgwopIslSNH47DimFoV78dnkksY2EFtX0ajyb3K/las= k8s.io/api v0.20.1/go.mod h1:KqwcCVogGxQY3nBlRpwt+wpAMF/KjaCc7RpywacvqUo= k8s.io/api v0.20.4/go.mod h1:++lNL1AJMkDymriNniQsWRkMDzRaX2Y/POTUi8yvqYQ= k8s.io/api v0.20.6/go.mod h1:X9e8Qag6JV/bL5G6bU8sdVRltWKmdHsFUGS3eVndqE8= -k8s.io/api v0.30.1 h1:kCm/6mADMdbAxmIh0LBjS54nQBE+U4KmbCfIkF5CpJY= -k8s.io/api v0.30.1/go.mod h1:ddbN2C0+0DIiPntan/bye3SW3PdwLa11/0yqwvuRrJM= +k8s.io/api v0.30.2 h1:+ZhRj+28QT4UOH+BKznu4CBgPWgkXO7XAvMcMl0qKvI= +k8s.io/api v0.30.2/go.mod h1:ULg5g9JvOev2dG0u2hig4Z7tQ2hHIuS+m8MNZ+X6EmI= k8s.io/apiextensions-apiserver v0.30.0 h1:jcZFKMqnICJfRxTgnC4E+Hpcq8UEhT8B2lhBcQ+6uAs= k8s.io/apiextensions-apiserver v0.30.0/go.mod h1:N9ogQFGcrbWqAY9p2mUAL5mGxsLqwgtUce127VtRX5Y= k8s.io/apimachinery v0.20.1/go.mod h1:WlLqWAHZGg07AeltaI0MV5uk1Omp8xaN0JGLY6gkRpU= k8s.io/apimachinery v0.20.4/go.mod h1:WlLqWAHZGg07AeltaI0MV5uk1Omp8xaN0JGLY6gkRpU= k8s.io/apimachinery v0.20.6/go.mod h1:ejZXtW1Ra6V1O5H8xPBGz+T3+4gfkTCeExAHKU57MAc= -k8s.io/apimachinery v0.30.1 h1:ZQStsEfo4n65yAdlGTfP/uSHMQSoYzU/oeEbkmF7P2U= -k8s.io/apimachinery v0.30.1/go.mod h1:iexa2somDaxdnj7bha06bhb43Zpa6eWH8N8dbqVjTUc= +k8s.io/apimachinery v0.30.2 h1:fEMcnBj6qkzzPGSVsAZtQThU62SmQ4ZymlXRC5yFSCg= +k8s.io/apimachinery v0.30.2/go.mod h1:iexa2somDaxdnj7bha06bhb43Zpa6eWH8N8dbqVjTUc= k8s.io/apiserver v0.20.1/go.mod h1:ro5QHeQkgMS7ZGpvf4tSMx6bBOgPfE+f52KwvXfScaU= k8s.io/apiserver v0.20.4/go.mod h1:Mc80thBKOyy7tbvFtB4kJv1kbdD0eIH8k8vianJcbFM= k8s.io/apiserver v0.20.6/go.mod h1:QIJXNt6i6JB+0YQRNcS0hdRHJlMhflFmsBDeSgT1r8Q= k8s.io/apiserver v0.30.0 h1:QCec+U72tMQ+9tR6A0sMBB5Vh6ImCEkoKkTDRABWq6M= k8s.io/apiserver v0.30.0/go.mod h1:smOIBq8t0MbKZi7O7SyIpjPsiKJ8qa+llcFCluKyqiY= -k8s.io/cli-runtime v0.30.0 h1:0vn6/XhOvn1RJ2KJOC6IRR2CGqrpT6QQF4+8pYpWQ48= -k8s.io/cli-runtime v0.30.0/go.mod h1:vATpDMATVTMA79sZ0YUCzlMelf6rUjoBzlp+RnoM+cg= +k8s.io/cli-runtime v0.30.2 h1:ooM40eEJusbgHNEqnHziN9ZpLN5U4WcQGsdLKVxpkKE= +k8s.io/cli-runtime v0.30.2/go.mod h1:Y4g/2XezFyTATQUbvV5WaChoUGhojv/jZAtdp5Zkm0A= k8s.io/client-go v0.20.1/go.mod h1:/zcHdt1TeWSd5HoUe6elJmHSQ6uLLgp4bIJHVEuy+/Y= k8s.io/client-go v0.20.4/go.mod h1:LiMv25ND1gLUdBeYxBIwKpkSC5IsozMMmOOeSJboP+k= 
k8s.io/client-go v0.20.6/go.mod h1:nNQMnOvEUEsOzRRFIIkdmYOjAZrC8bgq0ExboWSU1I0= -k8s.io/client-go v0.30.0 h1:sB1AGGlhY/o7KCyCEQ0bPWzYDL0pwOZO4vAtTSh/gJQ= -k8s.io/client-go v0.30.0/go.mod h1:g7li5O5256qe6TYdAMyX/otJqMhIiGgTapdLchhmOaY= +k8s.io/client-go v0.30.2 h1:sBIVJdojUNPDU/jObC+18tXWcTJVcwyqS9diGdWHk50= +k8s.io/client-go v0.30.2/go.mod h1:JglKSWULm9xlJLx4KCkfLLQ7XwtlbflV6uFFSHTMgVs= k8s.io/component-base v0.20.1/go.mod h1:guxkoJnNoh8LNrbtiQOlyp2Y2XFCZQmrcg2n/DeYNLk= k8s.io/component-base v0.20.4/go.mod h1:t4p9EdiagbVCJKrQ1RsA5/V4rFQNDfRlevJajlGwgjI= k8s.io/component-base v0.20.6/go.mod h1:6f1MPBAeI+mvuts3sIdtpjljHWBQ2cIy38oBIWMYnrM= -k8s.io/component-base v0.30.0 h1:cj6bp38g0ainlfYtaOQuRELh5KSYjhKxM+io7AUIk4o= -k8s.io/component-base v0.30.0/go.mod h1:V9x/0ePFNaKeKYA3bOvIbrNoluTSG+fSJKjLdjOoeXQ= +k8s.io/component-base v0.30.1 h1:bvAtlPh1UrdaZL20D9+sWxsJljMi0QZ3Lmw+kmZAaxQ= +k8s.io/component-base v0.30.1/go.mod h1:e/X9kDiOebwlI41AvBHuWdqFriSRrX50CdwA9TFaHLI= k8s.io/cri-api v0.17.3/go.mod h1:X1sbHmuXhwaHs9xxYffLqJogVsnI+f6cPRcgPel7ywM= k8s.io/cri-api v0.20.1/go.mod h1:2JRbKt+BFLTjtrILYVqQK5jqhI+XNdF6UiGMgczeBCI= k8s.io/cri-api v0.20.4/go.mod h1:2JRbKt+BFLTjtrILYVqQK5jqhI+XNdF6UiGMgczeBCI= @@ -3098,8 +3024,8 @@ k8s.io/klog/v2 v2.120.1/go.mod h1:3Jpz1GvMt720eyJH1ckRHK1EDfpxISzJ7I9OYgaDtPE= k8s.io/kube-openapi v0.0.0-20201113171705-d219536bb9fd/go.mod h1:WOJ3KddDSol4tAGcJo0Tvi+dK12EcqSLqcWsryKMpfM= k8s.io/kube-openapi v0.0.0-20240228011516-70dd3763d340 h1:BZqlfIlq5YbRMFko6/PM7FjZpUb45WallggurYhKGag= k8s.io/kube-openapi v0.0.0-20240228011516-70dd3763d340/go.mod h1:yD4MZYeKMBwQKVht279WycxKyM84kkAx2DPrTXaeb98= -k8s.io/kubectl v0.30.0 h1:xbPvzagbJ6RNYVMVuiHArC1grrV5vSmmIcSZuCdzRyk= -k8s.io/kubectl v0.30.0/go.mod h1:zgolRw2MQXLPwmic2l/+iHs239L49fhSeICuMhQQXTI= +k8s.io/kubectl v0.30.1 h1:sHFIRI3oP0FFZmBAVEE8ErjnTyXDPkBcvO88mH9RjuY= +k8s.io/kubectl v0.30.1/go.mod h1:7j+L0Cc38RYEcx+WH3y44jRBe1Q1jxdGPKkX0h4iDq0= k8s.io/kubernetes v1.13.0/go.mod h1:ocZa8+6APFNC2tX1DZASIbocyYT5jHzqFVsY5aoB7Jk= k8s.io/utils v0.0.0-20201110183641-67b214c5f920/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA= k8s.io/utils v0.0.0-20231127182322-b307cd553661 h1:FepOBzJ0GXm8t0su67ln2wAZjbQ6RxQGZDnzuLcrUTI= diff --git a/integration/aws_cloud_test.go b/integration/aws_cloud_test.go deleted file mode 100644 index 481ce6ca0cf6..000000000000 --- a/integration/aws_cloud_test.go +++ /dev/null @@ -1,78 +0,0 @@ -//go:build integration - -package integration - -import ( - "context" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - "github.com/aquasecurity/trivy/internal/testutil" - awscommands "github.com/aquasecurity/trivy/pkg/cloud/aws/commands" - "github.com/aquasecurity/trivy/pkg/flag" -) - -func TestAwsCommandRun(t *testing.T) { - tests := []struct { - name string - options flag.Options - envs map[string]string - wantErr string - }{ - { - name: "fail without region", - options: flag.Options{ - RegoOptions: flag.RegoOptions{SkipCheckUpdate: true}, - }, - envs: map[string]string{ - "AWS_ACCESS_KEY_ID": "test", - "AWS_SECRET_ACCESS_KEY": "test", - }, - wantErr: "aws region is required", - }, - { - name: "fail without creds", - envs: map[string]string{ - "AWS_PROFILE": "non-existent-profile", - }, - options: flag.Options{ - RegoOptions: flag.RegoOptions{SkipCheckUpdate: true}, - AWSOptions: flag.AWSOptions{ - Region: "us-east-1", - }, - }, - wantErr: "non-existent-profile", - }, - } - - ctx := context.Background() - - localstackC, 
addr, err := testutil.SetupLocalStack(ctx, "2.2.0") - require.NoError(t, err) - defer localstackC.Terminate(ctx) - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - - tt.options.AWSOptions.Endpoint = addr - tt.options.GlobalOptions.Timeout = time.Minute - - for k, v := range tt.envs { - t.Setenv(k, v) - } - - err := awscommands.Run(context.Background(), tt.options) - - if tt.wantErr != "" { - require.Error(t, err) - assert.Contains(t, err.Error(), tt.wantErr, tt.name) - return - } - require.NoError(t, err) - }) - } - -} diff --git a/integration/convert_test.go b/integration/convert_test.go new file mode 100644 index 000000000000..803ba538dcba --- /dev/null +++ b/integration/convert_test.go @@ -0,0 +1,69 @@ +//go:build integration + +package integration + +import ( + "path/filepath" + "testing" + + "github.com/aquasecurity/trivy/pkg/types" +) + +func TestConvert(t *testing.T) { + type args struct { + input string + format string + scanners string + } + tests := []struct { + name string + args args + golden string + override OverrideFunc + }{ + { + name: "npm", + args: args{ + input: "testdata/npm.json.golden", + format: "cyclonedx", + }, + golden: "testdata/npm-cyclonedx.json.golden", + }, + { + name: "npm without package UID", + args: args{ + input: "testdata/fixtures/convert/npm.json.golden", + format: "cyclonedx", + }, + golden: "testdata/npm-cyclonedx.json.golden", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + osArgs := []string{ + "convert", + "--cache-dir", + t.TempDir(), + "-q", + "--format", + tt.args.format, + } + + // Set up the output file + outputFile := filepath.Join(t.TempDir(), "output.json") + if *update { + outputFile = tt.golden + } + + osArgs = append(osArgs, "--output", outputFile) + osArgs = append(osArgs, tt.args.input) + + // Run "trivy convert" + runTest(t, osArgs, tt.golden, outputFile, types.Format(tt.args.format), runOptions{ + fakeUUID: "3ff14136-e09f-4df9-80ea-%012d", + }) + }) + } + +} diff --git a/integration/integration_test.go b/integration/integration_test.go index aeb91dfe783e..e9d534da3e06 100644 --- a/integration/integration_test.go +++ b/integration/integration_test.go @@ -28,9 +28,10 @@ import ( "github.com/aquasecurity/trivy-db/pkg/db" "github.com/aquasecurity/trivy-db/pkg/metadata" + + "github.com/aquasecurity/trivy/internal/dbtest" "github.com/aquasecurity/trivy/pkg/clock" "github.com/aquasecurity/trivy/pkg/commands" - "github.com/aquasecurity/trivy/pkg/dbtest" "github.com/aquasecurity/trivy/pkg/types" "github.com/aquasecurity/trivy/pkg/uuid" diff --git a/integration/repo_test.go b/integration/repo_test.go index e11e5a21a8b4..e07b48b950b7 100644 --- a/integration/repo_test.go +++ b/integration/repo_test.go @@ -153,6 +153,14 @@ func TestRepository(t *testing.T) { }, golden: "testdata/gradle.json.golden", }, + { + name: "sbt", + args: args{ + scanner: types.VulnerabilityScanner, + input: "testdata/fixtures/repo/sbt", + }, + golden: "testdata/sbt.json.golden", + }, { name: "conan", args: args{ @@ -242,6 +250,16 @@ func TestRepository(t *testing.T) { }, golden: "testdata/test-repo.json.golden", }, + { + name: "installed.json", + args: args{ + command: "rootfs", + scanner: types.VulnerabilityScanner, + listAllPkgs: true, + input: "testdata/fixtures/repo/composer-vendor", + }, + golden: "testdata/composer.vendor.json.golden", + }, { name: "dockerfile", args: args{ diff --git a/integration/testdata/alpine-310.sarif.golden 
b/integration/testdata/alpine-310.sarif.golden index 535bd2d09f71..a875ba35fecf 100644 --- a/integration/testdata/alpine-310.sarif.golden +++ b/integration/testdata/alpine-310.sarif.golden @@ -184,6 +184,7 @@ } }, "properties": { + "imageID": "sha256:961769676411f082461f9ef46626dd7a2d1e2b2a38e6a44364bcbecf51e66dd4", "imageName": "testdata/fixtures/images/alpine-310.tar.gz", "repoDigests": null, "repoTags": null diff --git a/integration/testdata/composer.vendor.json.golden b/integration/testdata/composer.vendor.json.golden new file mode 100644 index 000000000000..ebb1f65a0824 --- /dev/null +++ b/integration/testdata/composer.vendor.json.golden @@ -0,0 +1,131 @@ +{ + "SchemaVersion": 2, + "CreatedAt": "2021-08-25T12:20:30.000000005Z", + "ArtifactName": "testdata/fixtures/repo/composer-vendor", + "ArtifactType": "filesystem", + "Metadata": { + "ImageConfig": { + "architecture": "", + "created": "0001-01-01T00:00:00Z", + "os": "", + "rootfs": { + "type": "", + "diff_ids": null + }, + "config": {} + } + }, + "Results": [ + { + "Target": "installed.json", + "Class": "lang-pkgs", + "Type": "composer-vendor", + "Packages": [ + { + "ID": "guzzlehttp/psr7@1.8.3", + "Name": "guzzlehttp/psr7", + "Identifier": { + "PURL": "pkg:composer/guzzlehttp/psr7@1.8.3", + "UID": "25fca97fe23aa7b1" + }, + "Version": "1.8.3", + "Licenses": [ + "MIT" + ], + "DependsOn": [ + "psr/http-message@1.1", + "ralouphie/getallheaders@3.0.3" + ], + "Layer": {}, + "Locations": [ + { + "StartLine": 3, + "EndLine": 115 + } + ] + }, + { + "ID": "psr/http-message@1.1", + "Name": "psr/http-message", + "Identifier": { + "PURL": "pkg:composer/psr/http-message@1.1", + "UID": "299d8ff4461e894" + }, + "Version": "1.1", + "Licenses": [ + "MIT" + ], + "Layer": {}, + "Locations": [ + { + "StartLine": 116, + "EndLine": 171 + } + ] + }, + { + "ID": "ralouphie/getallheaders@3.0.3", + "Name": "ralouphie/getallheaders", + "Identifier": { + "PURL": "pkg:composer/ralouphie/getallheaders@3.0.3", + "UID": "c383e94d979a209c" + }, + "Version": "3.0.3", + "Licenses": [ + "MIT" + ], + "Layer": {}, + "Locations": [ + { + "StartLine": 172, + "EndLine": 218 + } + ] + } + ], + "Vulnerabilities": [ + { + "VulnerabilityID": "CVE-2022-24775", + "PkgID": "guzzlehttp/psr7@1.8.3", + "PkgName": "guzzlehttp/psr7", + "PkgIdentifier": { + "PURL": "pkg:composer/guzzlehttp/psr7@1.8.3", + "UID": "25fca97fe23aa7b1" + }, + "InstalledVersion": "1.8.3", + "FixedVersion": "1.8.4", + "Status": "fixed", + "Layer": {}, + "SeveritySource": "ghsa", + "PrimaryURL": "https://avd.aquasec.com/nvd/cve-2022-24775", + "DataSource": { + "ID": "ghsa", + "Name": "GitHub Security Advisory Composer", + "URL": "https://github.com/advisories?query=type%%3Areviewed+ecosystem%%3Acomposer" + }, + "Title": "Improper Input Validation in guzzlehttp/psr7", + "Description": "### Impact\nIn proper header parsing. An attacker could sneak in a new line character and pass untrusted values. 
\n\n### Patches\nThe issue is patched in 1.8.4 and 2.1.1.\n\n### Workarounds\nThere are no known workarounds.\n", + "Severity": "HIGH", + "CweIDs": [ + "CWE-20" + ], + "VendorSeverity": { + "ghsa": 3 + }, + "CVSS": { + "ghsa": { + "V3Vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:H/A:N", + "V3Score": 7.5 + } + }, + "References": [ + "https://github.com/guzzle/psr7/security/advisories/GHSA-q7rv-6hp3-vh96", + "https://nvd.nist.gov/vuln/detail/CVE-2022-24775" + ], + "PublishedDate": "2022-03-25T19:26:33Z", + "LastModifiedDate": "2022-06-14T20:02:29Z" + } + ] + } + ] +} diff --git a/integration/testdata/fixtures/convert/npm.json.golden b/integration/testdata/fixtures/convert/npm.json.golden new file mode 100644 index 000000000000..a576da82c72e --- /dev/null +++ b/integration/testdata/fixtures/convert/npm.json.golden @@ -0,0 +1,381 @@ +{ + "SchemaVersion": 2, + "CreatedAt": "2021-08-25T12:20:30.000000005Z", + "ArtifactName": "testdata/fixtures/repo/npm", + "ArtifactType": "repository", + "Metadata": { + "ImageConfig": { + "architecture": "", + "created": "0001-01-01T00:00:00Z", + "os": "", + "rootfs": { + "type": "", + "diff_ids": null + }, + "config": {} + } + }, + "Results": [ + { + "Target": "package-lock.json", + "Class": "lang-pkgs", + "Type": "npm", + "Packages": [ + { + "ID": "asap@2.0.6", + "Name": "asap", + "Identifier": { + "PURL": "pkg:npm/asap@2.0.6" + }, + "Version": "2.0.6", + "Layer": {}, + "Locations": [ + { + "StartLine": 6, + "EndLine": 10 + } + ] + }, + { + "ID": "jquery@3.3.9", + "Name": "jquery", + "Identifier": { + "PURL": "pkg:npm/jquery@3.3.9" + }, + "Version": "3.3.9", + "Licenses": [ + "MIT" + ], + "Layer": {}, + "Locations": [ + { + "StartLine": 11, + "EndLine": 15 + } + ] + }, + { + "ID": "js-tokens@4.0.0", + "Name": "js-tokens", + "Identifier": { + "PURL": "pkg:npm/js-tokens@4.0.0" + }, + "Version": "4.0.0", + "Layer": {}, + "Locations": [ + { + "StartLine": 16, + "EndLine": 20 + } + ] + }, + { + "ID": "loose-envify@1.4.0", + "Name": "loose-envify", + "Identifier": { + "PURL": "pkg:npm/loose-envify@1.4.0" + }, + "Version": "1.4.0", + "DependsOn": [ + "js-tokens@4.0.0" + ], + "Layer": {}, + "Locations": [ + { + "StartLine": 21, + "EndLine": 28 + } + ] + }, + { + "ID": "object-assign@4.1.1", + "Name": "object-assign", + "Identifier": { + "PURL": "pkg:npm/object-assign@4.1.1" + }, + "Version": "4.1.1", + "Layer": {}, + "Locations": [ + { + "StartLine": 29, + "EndLine": 33 + } + ] + }, + { + "ID": "promise@8.0.3", + "Name": "promise", + "Identifier": { + "PURL": "pkg:npm/promise@8.0.3" + }, + "Version": "8.0.3", + "Licenses": [ + "MIT" + ], + "DependsOn": [ + "asap@2.0.6" + ], + "Layer": {}, + "Locations": [ + { + "StartLine": 34, + "EndLine": 41 + } + ] + }, + { + "ID": "prop-types@15.7.2", + "Name": "prop-types", + "Identifier": { + "PURL": "pkg:npm/prop-types@15.7.2" + }, + "Version": "15.7.2", + "DependsOn": [ + "loose-envify@1.4.0", + "object-assign@4.1.1", + "react-is@16.8.6" + ], + "Layer": {}, + "Locations": [ + { + "StartLine": 42, + "EndLine": 51 + } + ] + }, + { + "ID": "react@16.8.6", + "Name": "react", + "Identifier": { + "PURL": "pkg:npm/react@16.8.6" + }, + "Version": "16.8.6", + "Licenses": [ + "MIT" + ], + "DependsOn": [ + "loose-envify@1.4.0", + "object-assign@4.1.1", + "prop-types@15.7.2", + "scheduler@0.13.6" + ], + "Layer": {}, + "Locations": [ + { + "StartLine": 52, + "EndLine": 62 + } + ] + }, + { + "ID": "react-is@16.8.6", + "Name": "react-is", + "Identifier": { + "PURL": "pkg:npm/react-is@16.8.6" + }, + "Version": "16.8.6", + 
"Licenses": [ + "MIT" + ], + "Layer": {}, + "Locations": [ + { + "StartLine": 63, + "EndLine": 67 + } + ] + }, + { + "ID": "redux@4.0.1", + "Name": "redux", + "Identifier": { + "PURL": "pkg:npm/redux@4.0.1" + }, + "Version": "4.0.1", + "Licenses": [ + "MIT" + ], + "DependsOn": [ + "loose-envify@1.4.0", + "symbol-observable@1.2.0" + ], + "Layer": {}, + "Locations": [ + { + "StartLine": 68, + "EndLine": 76 + } + ] + }, + { + "ID": "scheduler@0.13.6", + "Name": "scheduler", + "Identifier": { + "PURL": "pkg:npm/scheduler@0.13.6" + }, + "Version": "0.13.6", + "DependsOn": [ + "loose-envify@1.4.0", + "object-assign@4.1.1" + ], + "Layer": {}, + "Locations": [ + { + "StartLine": 77, + "EndLine": 85 + } + ] + }, + { + "ID": "symbol-observable@1.2.0", + "Name": "symbol-observable", + "Identifier": { + "PURL": "pkg:npm/symbol-observable@1.2.0" + }, + "Version": "1.2.0", + "Layer": {}, + "Locations": [ + { + "StartLine": 86, + "EndLine": 90 + } + ] + } + ], + "Vulnerabilities": [ + { + "VulnerabilityID": "CVE-2019-11358", + "PkgID": "jquery@3.3.9", + "PkgName": "jquery", + "PkgIdentifier": { + "PURL": "pkg:npm/jquery@3.3.9" + }, + "InstalledVersion": "3.3.9", + "FixedVersion": "3.4.0", + "Status": "fixed", + "Layer": {}, + "SeveritySource": "ghsa", + "PrimaryURL": "https://avd.aquasec.com/nvd/cve-2019-11358", + "DataSource": { + "ID": "ghsa", + "Name": "GitHub Security Advisory Npm", + "URL": "https://github.com/advisories?query=type%3Areviewed+ecosystem%3Anpm" + }, + "Title": "jquery: Prototype pollution in object's prototype leading to denial of service, remote code execution, or property injection", + "Description": "jQuery before 3.4.0, as used in Drupal, Backdrop CMS, and other products, mishandles jQuery.extend(true, {}, ...) because of Object.prototype pollution. 
If an unsanitized source object contained an enumerable __proto__ property, it could extend the native Object.prototype.", + "Severity": "MEDIUM", + "CweIDs": [ + "CWE-79" + ], + "VendorSeverity": { + "alma": 2, + "amazon": 2, + "arch-linux": 2, + "ghsa": 2, + "nodejs-security-wg": 2, + "nvd": 2, + "oracle-oval": 2, + "redhat": 2, + "ruby-advisory-db": 2, + "ubuntu": 1 + }, + "CVSS": { + "nvd": { + "V2Vector": "AV:N/AC:M/Au:N/C:N/I:P/A:N", + "V3Vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:R/S:C/C:L/I:L/A:N", + "V2Score": 4.3, + "V3Score": 6.1 + }, + "redhat": { + "V3Vector": "CVSS:3.0/AV:N/AC:H/PR:N/UI:N/S:U/C:L/I:L/A:L", + "V3Score": 5.6 + } + }, + "References": [ + "http://lists.opensuse.org/opensuse-security-announce/2019-08/msg00006.html", + "http://lists.opensuse.org/opensuse-security-announce/2019-08/msg00025.html", + "http://packetstormsecurity.com/files/152787/dotCMS-5.1.1-Vulnerable-Dependencies.html", + "http://packetstormsecurity.com/files/153237/RetireJS-CORS-Issue-Script-Execution.html", + "http://packetstormsecurity.com/files/156743/OctoberCMS-Insecure-Dependencies.html", + "http://seclists.org/fulldisclosure/2019/May/10", + "http://seclists.org/fulldisclosure/2019/May/11", + "http://seclists.org/fulldisclosure/2019/May/13", + "http://www.openwall.com/lists/oss-security/2019/06/03/2", + "http://www.securityfocus.com/bid/108023", + "https://access.redhat.com/errata/RHBA-2019:1570", + "https://access.redhat.com/errata/RHSA-2019:1456", + "https://access.redhat.com/errata/RHSA-2019:2587", + "https://access.redhat.com/errata/RHSA-2019:3023", + "https://access.redhat.com/errata/RHSA-2019:3024", + "https://access.redhat.com/security/cve/CVE-2019-11358", + "https://backdropcms.org/security/backdrop-sa-core-2019-009", + "https://blog.jquery.com/2019/04/10/jquery-3-4-0-released/", + "https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2019-11358", + "https://github.com/DanielRuf/snyk-js-jquery-174006?files=1", + "https://github.com/advisories/GHSA-6c3j-c64m-qhgq", + "https://github.com/jquery/jquery/commit/753d591aea698e57d6db58c9f722cd0808619b1b", + "https://github.com/jquery/jquery/pull/4333", + "https://github.com/rails/jquery-rails/blob/master/CHANGELOG.md#434", + "https://hackerone.com/reports/454365", + "https://kb.pulsesecure.net/articles/Pulse_Security_Advisories/SA44601", + "https://linux.oracle.com/cve/CVE-2019-11358.html", + "https://linux.oracle.com/errata/ELSA-2020-4847.html", + "https://lists.apache.org/thread.html/08720ef215ee7ab3386c05a1a90a7d1c852bf0706f176a7816bf65fc@%3Ccommits.airflow.apache.org%3E", + "https://lists.apache.org/thread.html/519eb0fd45642dcecd9ff74cb3e71c20a4753f7d82e2f07864b5108f@%3Cdev.drill.apache.org%3E", + "https://lists.apache.org/thread.html/5928aa293e39d248266472210c50f176cac1535220f2486e6a7fa844@%3Ccommits.airflow.apache.org%3E", + "https://lists.apache.org/thread.html/6097cdbd6f0a337bedd9bb5cc441b2d525ff002a96531de367e4259f@%3Ccommits.airflow.apache.org%3E", + "https://lists.apache.org/thread.html/88fb0362fd40e5b605ea8149f63241537b8b6fb5bfa315391fc5cbb7@%3Ccommits.airflow.apache.org%3E", + "https://lists.apache.org/thread.html/b0656d359c7d40ec9f39c8cc61bca66802ef9a2a12ee199f5b0c1442@%3Cdev.drill.apache.org%3E", + "https://lists.apache.org/thread.html/b736d0784cf02f5a30fbb4c5902762a15ad6d47e17e2c5a17b7d6205@%3Ccommits.airflow.apache.org%3E", + "https://lists.apache.org/thread.html/ba79cf1658741e9f146e4c59b50aee56656ea95d841d358d006c18b6@%3Ccommits.roller.apache.org%3E", + 
"https://lists.apache.org/thread.html/bcce5a9c532b386c68dab2f6b3ce8b0cc9b950ec551766e76391caa3@%3Ccommits.nifi.apache.org%3E", + "https://lists.apache.org/thread.html/f9bc3e55f4e28d1dcd1a69aae6d53e609a758e34d2869b4d798e13cc@%3Cissues.drill.apache.org%3E", + "https://lists.apache.org/thread.html/r2041a75d3fc09dec55adfd95d598b38d22715303f65c997c054844c9@%3Cissues.flink.apache.org%3E", + "https://lists.apache.org/thread.html/r2baacab6e0acb5a2092eb46ae04fd6c3e8277b4fd79b1ffb7f3254fa@%3Cissues.flink.apache.org%3E", + "https://lists.apache.org/thread.html/r38f0d1aa3c923c22977fe7376508f030f22e22c1379fbb155bf29766@%3Cdev.syncope.apache.org%3E", + "https://lists.apache.org/thread.html/r41b5bfe009c845f67d4f68948cc9419ac2d62e287804aafd72892b08@%3Cissues.flink.apache.org%3E", + "https://lists.apache.org/thread.html/r7aac081cbddb6baa24b75e74abf0929bf309b176755a53e3ed810355@%3Cdev.flink.apache.org%3E", + "https://lists.apache.org/thread.html/r7d64895cc4dff84d0becfc572b20c0e4bf9bfa7b10c6f5f73e783734@%3Cdev.storm.apache.org%3E", + "https://lists.apache.org/thread.html/r7e8ebccb7c022e41295f6fdb7b971209b83702339f872ddd8cf8bf73@%3Cissues.flink.apache.org%3E", + "https://lists.apache.org/thread.html/rac25da84ecdcd36f6de5ad0d255f4e967209bbbebddb285e231da37d@%3Cissues.flink.apache.org%3E", + "https://lists.apache.org/thread.html/rca37935d661f4689cb4119f1b3b224413b22be161b678e6e6ce0c69b@%3Ccommits.nifi.apache.org%3E", + "https://lists.debian.org/debian-lts-announce/2019/05/msg00006.html", + "https://lists.debian.org/debian-lts-announce/2019/05/msg00029.html", + "https://lists.debian.org/debian-lts-announce/2020/02/msg00024.html", + "https://lists.fedoraproject.org/archives/list/package-announce@lists.fedoraproject.org/message/4UOAZIFCSZ3ENEFOR5IXX6NFAD3HV7FA/", + "https://lists.fedoraproject.org/archives/list/package-announce@lists.fedoraproject.org/message/5IABSKTYZ5JUGL735UKGXL5YPRYOPUYI/", + "https://lists.fedoraproject.org/archives/list/package-announce@lists.fedoraproject.org/message/KYH3OAGR2RTCHRA5NOKX2TES7SNQMWGO/", + "https://lists.fedoraproject.org/archives/list/package-announce@lists.fedoraproject.org/message/QV3PKZC3PQCO3273HAT76PAQZFBEO4KP/", + "https://lists.fedoraproject.org/archives/list/package-announce@lists.fedoraproject.org/message/RLXRX23725JL366CNZGJZ7AQQB7LHQ6F/", + "https://lists.fedoraproject.org/archives/list/package-announce@lists.fedoraproject.org/message/WZW27UCJ5CYFL4KFFFMYMIBNMIU2ALG5/", + "https://nvd.nist.gov/vuln/detail/CVE-2019-11358", + "https://seclists.org/bugtraq/2019/Apr/32", + "https://seclists.org/bugtraq/2019/Jun/12", + "https://seclists.org/bugtraq/2019/May/18", + "https://security.netapp.com/advisory/ntap-20190919-0001/", + "https://snyk.io/vuln/SNYK-JS-JQUERY-174006", + "https://www.debian.org/security/2019/dsa-4434", + "https://www.debian.org/security/2019/dsa-4460", + "https://www.drupal.org/sa-core-2019-006", + "https://www.oracle.com//security-alerts/cpujul2021.html", + "https://www.oracle.com/security-alerts/cpuApr2021.html", + "https://www.oracle.com/security-alerts/cpuapr2020.html", + "https://www.oracle.com/security-alerts/cpujan2020.html", + "https://www.oracle.com/security-alerts/cpujan2021.html", + "https://www.oracle.com/security-alerts/cpujul2020.html", + "https://www.oracle.com/security-alerts/cpuoct2020.html", + "https://www.oracle.com/security-alerts/cpuoct2021.html", + "https://www.oracle.com/technetwork/security-advisory/cpujul2019-5072835.html", + "https://www.oracle.com/technetwork/security-advisory/cpuoct2019-5072832.html", + 
"https://www.privacy-wise.com/mitigating-cve-2019-11358-in-old-versions-of-jquery/", + "https://www.synology.com/security/advisory/Synology_SA_19_19", + "https://www.tenable.com/security/tns-2019-08", + "https://www.tenable.com/security/tns-2020-02" + ], + "PublishedDate": "2019-04-20T00:29:00Z", + "LastModifiedDate": "2021-10-20T11:15:00Z" + } + ] + } + ] +} diff --git a/integration/testdata/fixtures/repo/composer-vendor/installed.json b/integration/testdata/fixtures/repo/composer-vendor/installed.json new file mode 100644 index 000000000000..532876cd7ff5 --- /dev/null +++ b/integration/testdata/fixtures/repo/composer-vendor/installed.json @@ -0,0 +1,222 @@ +{ + "packages": [ + { + "name": "guzzlehttp/psr7", + "version": "1.8.3", + "version_normalized": "1.8.3.0", + "source": { + "type": "git", + "url": "https://github.com/guzzle/psr7.git", + "reference": "1afdd860a2566ed3c2b0b4a3de6e23434a79ec85" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/guzzle/psr7/zipball/1afdd860a2566ed3c2b0b4a3de6e23434a79ec85", + "reference": "1afdd860a2566ed3c2b0b4a3de6e23434a79ec85", + "shasum": "" + }, + "require": { + "php": ">=5.4.0", + "psr/http-message": "~1.0", + "ralouphie/getallheaders": "^2.0.5 || ^3.0.0" + }, + "provide": { + "psr/http-message-implementation": "1.0" + }, + "require-dev": { + "ext-zlib": "*", + "phpunit/phpunit": "~4.8.36 || ^5.7.27 || ^6.5.14 || ^7.5.20 || ^8.5.8 || ^9.3.10" + }, + "suggest": { + "laminas/laminas-httphandlerrunner": "Emit PSR-7 responses" + }, + "time": "2021-10-05T13:56:00+00:00", + "type": "library", + "extra": { + "branch-alias": { + "dev-master": "1.7-dev" + } + }, + "installation-source": "dist", + "autoload": { + "files": [ + "src/functions_include.php" + ], + "psr-4": { + "GuzzleHttp\\Psr7\\": "src/" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Graham Campbell", + "email": "hello@gjcampbell.co.uk", + "homepage": "https://github.com/GrahamCampbell" + }, + { + "name": "Michael Dowling", + "email": "mtdowling@gmail.com", + "homepage": "https://github.com/mtdowling" + }, + { + "name": "George Mponos", + "email": "gmponos@gmail.com", + "homepage": "https://github.com/gmponos" + }, + { + "name": "Tobias Nyholm", + "email": "tobias.nyholm@gmail.com", + "homepage": "https://github.com/Nyholm" + }, + { + "name": "Márk Sági-Kazár", + "email": "mark.sagikazar@gmail.com", + "homepage": "https://github.com/sagikazarmark" + }, + { + "name": "Tobias Schultze", + "email": "webmaster@tubo-world.de", + "homepage": "https://github.com/Tobion" + } + ], + "description": "PSR-7 message implementation that also provides common utility methods", + "keywords": [ + "http", + "message", + "psr-7", + "request", + "response", + "stream", + "uri", + "url" + ], + "support": { + "issues": "https://github.com/guzzle/psr7/issues", + "source": "https://github.com/guzzle/psr7/tree/1.8.3" + }, + "funding": [ + { + "url": "https://github.com/GrahamCampbell", + "type": "github" + }, + { + "url": "https://github.com/Nyholm", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/guzzlehttp/psr7", + "type": "tidelift" + } + ], + "install-path": "../guzzlehttp/psr7" + }, + { + "name": "psr/http-message", + "version": "1.1", + "version_normalized": "1.1.0.0", + "source": { + "type": "git", + "url": "https://github.com/php-fig/http-message.git", + "reference": 
"cb6ce4845ce34a8ad9e68117c10ee90a29919eba" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/php-fig/http-message/zipball/cb6ce4845ce34a8ad9e68117c10ee90a29919eba", + "reference": "cb6ce4845ce34a8ad9e68117c10ee90a29919eba", + "shasum": "" + }, + "require": { + "php": "^7.2 || ^8.0" + }, + "time": "2023-04-04T09:50:52+00:00", + "type": "library", + "extra": { + "branch-alias": { + "dev-master": "1.1.x-dev" + } + }, + "installation-source": "dist", + "autoload": { + "psr-4": { + "Psr\\Http\\Message\\": "src/" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "PHP-FIG", + "homepage": "http://www.php-fig.org/" + } + ], + "description": "Common interface for HTTP messages", + "homepage": "https://github.com/php-fig/http-message", + "keywords": [ + "http", + "http-message", + "psr", + "psr-7", + "request", + "response" + ], + "support": { + "source": "https://github.com/php-fig/http-message/tree/1.1" + }, + "install-path": "../psr/http-message" + }, + { + "name": "ralouphie/getallheaders", + "version": "3.0.3", + "version_normalized": "3.0.3.0", + "source": { + "type": "git", + "url": "https://github.com/ralouphie/getallheaders.git", + "reference": "120b605dfeb996808c31b6477290a714d356e822" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/ralouphie/getallheaders/zipball/120b605dfeb996808c31b6477290a714d356e822", + "reference": "120b605dfeb996808c31b6477290a714d356e822", + "shasum": "" + }, + "require": { + "php": ">=5.6" + }, + "require-dev": { + "php-coveralls/php-coveralls": "^2.1", + "phpunit/phpunit": "^5 || ^6.5" + }, + "time": "2019-03-08T08:55:37+00:00", + "type": "library", + "installation-source": "dist", + "autoload": { + "files": [ + "src/getallheaders.php" + ] + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Ralph Khattar", + "email": "ralph.khattar@gmail.com" + } + ], + "description": "A polyfill for getallheaders.", + "support": { + "issues": "https://github.com/ralouphie/getallheaders/issues", + "source": "https://github.com/ralouphie/getallheaders/tree/develop" + }, + "install-path": "../ralouphie/getallheaders" + } + ], + "dev": true, + "dev-package-names": [] +} diff --git a/integration/testdata/fixtures/repo/sbt/build.sbt.lock b/integration/testdata/fixtures/repo/sbt/build.sbt.lock new file mode 100644 index 000000000000..33bcdbee245e --- /dev/null +++ b/integration/testdata/fixtures/repo/sbt/build.sbt.lock @@ -0,0 +1,29 @@ +{ + "lockVersion" : 1, + "timestamp" : "2024-06-06T11:03:09.964557Z", + "configurations" : [ + "compile", + "optional", + "provided", + "runtime", + "test" + ], + "dependencies" : [ + { + "org" : "com.fasterxml.jackson.core", + "name" : "jackson-databind", + "version" : "2.9.1", + "artifacts" : [ + { + "name" : "jackson-databind.jar", + "hash" : "sha1:716da1830a2043f18882fc036ec26eb32cbe5aff" + } + ], + "configurations" : [ + "compile", + "runtime", + "test" + ] + } + ] +} \ No newline at end of file diff --git a/integration/testdata/helm.json.golden b/integration/testdata/helm.json.golden index c9721e205272..518458ac1088 100644 --- a/integration/testdata/helm.json.golden +++ b/integration/testdata/helm.json.golden @@ -21,7 +21,7 @@ "Class": "config", "Type": "helm", "MisconfSummary": { - "Successes": 125, + "Successes": 80, "Failures": 14, "Exceptions": 0 }, diff --git a/integration/testdata/helm_testchart.json.golden 
b/integration/testdata/helm_testchart.json.golden index ce6df6b17cee..2e659b13e68a 100644 --- a/integration/testdata/helm_testchart.json.golden +++ b/integration/testdata/helm_testchart.json.golden @@ -21,7 +21,7 @@ "Class": "config", "Type": "helm", "MisconfSummary": { - "Successes": 135, + "Successes": 90, "Failures": 4, "Exceptions": 0 }, @@ -341,7 +341,7 @@ "Class": "config", "Type": "helm", "MisconfSummary": { - "Successes": 106, + "Successes": 61, "Failures": 0, "Exceptions": 0 } @@ -351,7 +351,7 @@ "Class": "config", "Type": "helm", "MisconfSummary": { - "Successes": 105, + "Successes": 60, "Failures": 0, "Exceptions": 0 } diff --git a/integration/testdata/helm_testchart.overridden.json.golden b/integration/testdata/helm_testchart.overridden.json.golden index 573d789ef7b4..1b1ada2cd9a3 100644 --- a/integration/testdata/helm_testchart.overridden.json.golden +++ b/integration/testdata/helm_testchart.overridden.json.golden @@ -21,7 +21,7 @@ "Class": "config", "Type": "helm", "MisconfSummary": { - "Successes": 133, + "Successes": 88, "Failures": 6, "Exceptions": 0 }, @@ -568,7 +568,7 @@ "Class": "config", "Type": "helm", "MisconfSummary": { - "Successes": 106, + "Successes": 61, "Failures": 0, "Exceptions": 0 } @@ -578,7 +578,7 @@ "Class": "config", "Type": "helm", "MisconfSummary": { - "Successes": 105, + "Successes": 60, "Failures": 0, "Exceptions": 0 } diff --git a/integration/testdata/mariner-1.0.json.golden b/integration/testdata/mariner-1.0.json.golden index 1d549e1ef188..7325bf74f6e6 100644 --- a/integration/testdata/mariner-1.0.json.golden +++ b/integration/testdata/mariner-1.0.json.golden @@ -42,7 +42,7 @@ "VulnerabilityID": "CVE-2022-0261", "PkgName": "vim", "PkgIdentifier": { - "PURL": "pkg:cbl-mariner/vim@8.2.4081-1.cm1?arch=x86_64", + "PURL": "pkg:rpm/cbl-mariner/vim@8.2.4081-1.cm1?arch=x86_64\u0026distro=cbl-mariner-1.0.20220122", "UID": "3f08cd76fa5ba73d" }, "InstalledVersion": "8.2.4081-1.cm1", @@ -79,7 +79,7 @@ "VulnerabilityID": "CVE-2022-0158", "PkgName": "vim", "PkgIdentifier": { - "PURL": "pkg:cbl-mariner/vim@8.2.4081-1.cm1?arch=x86_64", + "PURL": "pkg:rpm/cbl-mariner/vim@8.2.4081-1.cm1?arch=x86_64\u0026distro=cbl-mariner-1.0.20220122", "UID": "3f08cd76fa5ba73d" }, "InstalledVersion": "8.2.4081-1.cm1", diff --git a/integration/testdata/npm-cyclonedx.json.golden b/integration/testdata/npm-cyclonedx.json.golden new file mode 100644 index 000000000000..d7bcc56af462 --- /dev/null +++ b/integration/testdata/npm-cyclonedx.json.golden @@ -0,0 +1,725 @@ +{ + "$schema": "http://cyclonedx.org/schema/bom-1.6.schema.json", + "bomFormat": "CycloneDX", + "specVersion": "1.6", + "serialNumber": "urn:uuid:3ff14136-e09f-4df9-80ea-000000000015", + "version": 1, + "metadata": { + "timestamp": "2021-08-25T12:20:30+00:00", + "tools": { + "components": [ + { + "type": "application", + "group": "aquasecurity", + "name": "trivy", + "version": "dev" + } + ] + }, + "component": { + "bom-ref": "3ff14136-e09f-4df9-80ea-000000000001", + "type": "application", + "name": "testdata/fixtures/repo/npm", + "properties": [ + { + "name": "aquasecurity:trivy:SchemaVersion", + "value": "2" + } + ] + } + }, + "components": [ + { + "bom-ref": "3ff14136-e09f-4df9-80ea-000000000002", + "type": "application", + "name": "package-lock.json", + "properties": [ + { + "name": "aquasecurity:trivy:Class", + "value": "lang-pkgs" + }, + { + "name": "aquasecurity:trivy:Type", + "value": "npm" + } + ] + }, + { + "bom-ref": "pkg:npm/asap@2.0.6", + "type": "library", + "name": "asap", + "version": "2.0.6", + "purl": 
"pkg:npm/asap@2.0.6", + "properties": [ + { + "name": "aquasecurity:trivy:PkgID", + "value": "asap@2.0.6" + }, + { + "name": "aquasecurity:trivy:PkgType", + "value": "npm" + } + ] + }, + { + "bom-ref": "pkg:npm/jquery@3.3.9", + "type": "library", + "name": "jquery", + "version": "3.3.9", + "licenses": [ + { + "license": { + "name": "MIT" + } + } + ], + "purl": "pkg:npm/jquery@3.3.9", + "properties": [ + { + "name": "aquasecurity:trivy:PkgID", + "value": "jquery@3.3.9" + }, + { + "name": "aquasecurity:trivy:PkgType", + "value": "npm" + } + ] + }, + { + "bom-ref": "pkg:npm/js-tokens@4.0.0", + "type": "library", + "name": "js-tokens", + "version": "4.0.0", + "purl": "pkg:npm/js-tokens@4.0.0", + "properties": [ + { + "name": "aquasecurity:trivy:PkgID", + "value": "js-tokens@4.0.0" + }, + { + "name": "aquasecurity:trivy:PkgType", + "value": "npm" + } + ] + }, + { + "bom-ref": "pkg:npm/loose-envify@1.4.0", + "type": "library", + "name": "loose-envify", + "version": "1.4.0", + "purl": "pkg:npm/loose-envify@1.4.0", + "properties": [ + { + "name": "aquasecurity:trivy:PkgID", + "value": "loose-envify@1.4.0" + }, + { + "name": "aquasecurity:trivy:PkgType", + "value": "npm" + } + ] + }, + { + "bom-ref": "pkg:npm/object-assign@4.1.1", + "type": "library", + "name": "object-assign", + "version": "4.1.1", + "purl": "pkg:npm/object-assign@4.1.1", + "properties": [ + { + "name": "aquasecurity:trivy:PkgID", + "value": "object-assign@4.1.1" + }, + { + "name": "aquasecurity:trivy:PkgType", + "value": "npm" + } + ] + }, + { + "bom-ref": "pkg:npm/promise@8.0.3", + "type": "library", + "name": "promise", + "version": "8.0.3", + "licenses": [ + { + "license": { + "name": "MIT" + } + } + ], + "purl": "pkg:npm/promise@8.0.3", + "properties": [ + { + "name": "aquasecurity:trivy:PkgID", + "value": "promise@8.0.3" + }, + { + "name": "aquasecurity:trivy:PkgType", + "value": "npm" + } + ] + }, + { + "bom-ref": "pkg:npm/prop-types@15.7.2", + "type": "library", + "name": "prop-types", + "version": "15.7.2", + "purl": "pkg:npm/prop-types@15.7.2", + "properties": [ + { + "name": "aquasecurity:trivy:PkgID", + "value": "prop-types@15.7.2" + }, + { + "name": "aquasecurity:trivy:PkgType", + "value": "npm" + } + ] + }, + { + "bom-ref": "pkg:npm/react-is@16.8.6", + "type": "library", + "name": "react-is", + "version": "16.8.6", + "licenses": [ + { + "license": { + "name": "MIT" + } + } + ], + "purl": "pkg:npm/react-is@16.8.6", + "properties": [ + { + "name": "aquasecurity:trivy:PkgID", + "value": "react-is@16.8.6" + }, + { + "name": "aquasecurity:trivy:PkgType", + "value": "npm" + } + ] + }, + { + "bom-ref": "pkg:npm/react@16.8.6", + "type": "library", + "name": "react", + "version": "16.8.6", + "licenses": [ + { + "license": { + "name": "MIT" + } + } + ], + "purl": "pkg:npm/react@16.8.6", + "properties": [ + { + "name": "aquasecurity:trivy:PkgID", + "value": "react@16.8.6" + }, + { + "name": "aquasecurity:trivy:PkgType", + "value": "npm" + } + ] + }, + { + "bom-ref": "pkg:npm/redux@4.0.1", + "type": "library", + "name": "redux", + "version": "4.0.1", + "licenses": [ + { + "license": { + "name": "MIT" + } + } + ], + "purl": "pkg:npm/redux@4.0.1", + "properties": [ + { + "name": "aquasecurity:trivy:PkgID", + "value": "redux@4.0.1" + }, + { + "name": "aquasecurity:trivy:PkgType", + "value": "npm" + } + ] + }, + { + "bom-ref": "pkg:npm/scheduler@0.13.6", + "type": "library", + "name": "scheduler", + "version": "0.13.6", + "purl": "pkg:npm/scheduler@0.13.6", + "properties": [ + { + "name": "aquasecurity:trivy:PkgID", + "value": 
"scheduler@0.13.6" + }, + { + "name": "aquasecurity:trivy:PkgType", + "value": "npm" + } + ] + }, + { + "bom-ref": "pkg:npm/symbol-observable@1.2.0", + "type": "library", + "name": "symbol-observable", + "version": "1.2.0", + "purl": "pkg:npm/symbol-observable@1.2.0", + "properties": [ + { + "name": "aquasecurity:trivy:PkgID", + "value": "symbol-observable@1.2.0" + }, + { + "name": "aquasecurity:trivy:PkgType", + "value": "npm" + } + ] + } + ], + "dependencies": [ + { + "ref": "3ff14136-e09f-4df9-80ea-000000000001", + "dependsOn": [ + "3ff14136-e09f-4df9-80ea-000000000002" + ] + }, + { + "ref": "3ff14136-e09f-4df9-80ea-000000000002", + "dependsOn": [ + "pkg:npm/asap@2.0.6", + "pkg:npm/jquery@3.3.9", + "pkg:npm/js-tokens@4.0.0", + "pkg:npm/loose-envify@1.4.0", + "pkg:npm/object-assign@4.1.1", + "pkg:npm/promise@8.0.3", + "pkg:npm/prop-types@15.7.2", + "pkg:npm/react-is@16.8.6", + "pkg:npm/react@16.8.6", + "pkg:npm/redux@4.0.1", + "pkg:npm/scheduler@0.13.6", + "pkg:npm/symbol-observable@1.2.0" + ] + }, + { + "ref": "pkg:npm/asap@2.0.6", + "dependsOn": [] + }, + { + "ref": "pkg:npm/jquery@3.3.9", + "dependsOn": [] + }, + { + "ref": "pkg:npm/js-tokens@4.0.0", + "dependsOn": [] + }, + { + "ref": "pkg:npm/loose-envify@1.4.0", + "dependsOn": [ + "pkg:npm/js-tokens@4.0.0" + ] + }, + { + "ref": "pkg:npm/object-assign@4.1.1", + "dependsOn": [] + }, + { + "ref": "pkg:npm/promise@8.0.3", + "dependsOn": [ + "pkg:npm/asap@2.0.6" + ] + }, + { + "ref": "pkg:npm/prop-types@15.7.2", + "dependsOn": [ + "pkg:npm/loose-envify@1.4.0", + "pkg:npm/object-assign@4.1.1", + "pkg:npm/react-is@16.8.6" + ] + }, + { + "ref": "pkg:npm/react-is@16.8.6", + "dependsOn": [] + }, + { + "ref": "pkg:npm/react@16.8.6", + "dependsOn": [ + "pkg:npm/loose-envify@1.4.0", + "pkg:npm/object-assign@4.1.1", + "pkg:npm/prop-types@15.7.2", + "pkg:npm/scheduler@0.13.6" + ] + }, + { + "ref": "pkg:npm/redux@4.0.1", + "dependsOn": [ + "pkg:npm/loose-envify@1.4.0", + "pkg:npm/symbol-observable@1.2.0" + ] + }, + { + "ref": "pkg:npm/scheduler@0.13.6", + "dependsOn": [ + "pkg:npm/loose-envify@1.4.0", + "pkg:npm/object-assign@4.1.1" + ] + }, + { + "ref": "pkg:npm/symbol-observable@1.2.0", + "dependsOn": [] + } + ], + "vulnerabilities": [ + { + "id": "CVE-2019-11358", + "source": { + "name": "ghsa", + "url": "https://github.com/advisories?query=type%3Areviewed+ecosystem%3Anpm" + }, + "ratings": [ + { + "source": { + "name": "alma" + }, + "severity": "medium" + }, + { + "source": { + "name": "amazon" + }, + "severity": "medium" + }, + { + "source": { + "name": "arch-linux" + }, + "severity": "medium" + }, + { + "source": { + "name": "ghsa" + }, + "severity": "medium" + }, + { + "source": { + "name": "nodejs-security-wg" + }, + "severity": "medium" + }, + { + "source": { + "name": "nvd" + }, + "score": 4.3, + "severity": "medium", + "method": "CVSSv2", + "vector": "AV:N/AC:M/Au:N/C:N/I:P/A:N" + }, + { + "source": { + "name": "nvd" + }, + "score": 6.1, + "severity": "medium", + "method": "CVSSv31", + "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:R/S:C/C:L/I:L/A:N" + }, + { + "source": { + "name": "oracle-oval" + }, + "severity": "medium" + }, + { + "source": { + "name": "redhat" + }, + "score": 5.6, + "severity": "medium", + "method": "CVSSv3", + "vector": "CVSS:3.0/AV:N/AC:H/PR:N/UI:N/S:U/C:L/I:L/A:L" + }, + { + "source": { + "name": "ruby-advisory-db" + }, + "severity": "medium" + }, + { + "source": { + "name": "ubuntu" + }, + "severity": "low" + } + ], + "cwes": [ + 79 + ], + "description": "jQuery before 3.4.0, as used in Drupal, Backdrop CMS, and 
other products, mishandles jQuery.extend(true, {}, ...) because of Object.prototype pollution. If an unsanitized source object contained an enumerable __proto__ property, it could extend the native Object.prototype.", + "recommendation": "Upgrade jquery to version 3.4.0", + "advisories": [ + { + "url": "https://avd.aquasec.com/nvd/cve-2019-11358" + }, + { + "url": "http://lists.opensuse.org/opensuse-security-announce/2019-08/msg00006.html" + }, + { + "url": "http://lists.opensuse.org/opensuse-security-announce/2019-08/msg00025.html" + }, + { + "url": "http://packetstormsecurity.com/files/152787/dotCMS-5.1.1-Vulnerable-Dependencies.html" + }, + { + "url": "http://packetstormsecurity.com/files/153237/RetireJS-CORS-Issue-Script-Execution.html" + }, + { + "url": "http://packetstormsecurity.com/files/156743/OctoberCMS-Insecure-Dependencies.html" + }, + { + "url": "http://seclists.org/fulldisclosure/2019/May/10" + }, + { + "url": "http://seclists.org/fulldisclosure/2019/May/11" + }, + { + "url": "http://seclists.org/fulldisclosure/2019/May/13" + }, + { + "url": "http://www.openwall.com/lists/oss-security/2019/06/03/2" + }, + { + "url": "http://www.securityfocus.com/bid/108023" + }, + { + "url": "https://access.redhat.com/errata/RHBA-2019:1570" + }, + { + "url": "https://access.redhat.com/errata/RHSA-2019:1456" + }, + { + "url": "https://access.redhat.com/errata/RHSA-2019:2587" + }, + { + "url": "https://access.redhat.com/errata/RHSA-2019:3023" + }, + { + "url": "https://access.redhat.com/errata/RHSA-2019:3024" + }, + { + "url": "https://access.redhat.com/security/cve/CVE-2019-11358" + }, + { + "url": "https://backdropcms.org/security/backdrop-sa-core-2019-009" + }, + { + "url": "https://blog.jquery.com/2019/04/10/jquery-3-4-0-released/" + }, + { + "url": "https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2019-11358" + }, + { + "url": "https://github.com/DanielRuf/snyk-js-jquery-174006?files=1" + }, + { + "url": "https://github.com/advisories/GHSA-6c3j-c64m-qhgq" + }, + { + "url": "https://github.com/jquery/jquery/commit/753d591aea698e57d6db58c9f722cd0808619b1b" + }, + { + "url": "https://github.com/jquery/jquery/pull/4333" + }, + { + "url": "https://github.com/rails/jquery-rails/blob/master/CHANGELOG.md#434" + }, + { + "url": "https://hackerone.com/reports/454365" + }, + { + "url": "https://kb.pulsesecure.net/articles/Pulse_Security_Advisories/SA44601" + }, + { + "url": "https://linux.oracle.com/cve/CVE-2019-11358.html" + }, + { + "url": "https://linux.oracle.com/errata/ELSA-2020-4847.html" + }, + { + "url": "https://lists.apache.org/thread.html/08720ef215ee7ab3386c05a1a90a7d1c852bf0706f176a7816bf65fc@%3Ccommits.airflow.apache.org%3E" + }, + { + "url": "https://lists.apache.org/thread.html/519eb0fd45642dcecd9ff74cb3e71c20a4753f7d82e2f07864b5108f@%3Cdev.drill.apache.org%3E" + }, + { + "url": "https://lists.apache.org/thread.html/5928aa293e39d248266472210c50f176cac1535220f2486e6a7fa844@%3Ccommits.airflow.apache.org%3E" + }, + { + "url": "https://lists.apache.org/thread.html/6097cdbd6f0a337bedd9bb5cc441b2d525ff002a96531de367e4259f@%3Ccommits.airflow.apache.org%3E" + }, + { + "url": "https://lists.apache.org/thread.html/88fb0362fd40e5b605ea8149f63241537b8b6fb5bfa315391fc5cbb7@%3Ccommits.airflow.apache.org%3E" + }, + { + "url": "https://lists.apache.org/thread.html/b0656d359c7d40ec9f39c8cc61bca66802ef9a2a12ee199f5b0c1442@%3Cdev.drill.apache.org%3E" + }, + { + "url": 
"https://lists.apache.org/thread.html/b736d0784cf02f5a30fbb4c5902762a15ad6d47e17e2c5a17b7d6205@%3Ccommits.airflow.apache.org%3E" + }, + { + "url": "https://lists.apache.org/thread.html/ba79cf1658741e9f146e4c59b50aee56656ea95d841d358d006c18b6@%3Ccommits.roller.apache.org%3E" + }, + { + "url": "https://lists.apache.org/thread.html/bcce5a9c532b386c68dab2f6b3ce8b0cc9b950ec551766e76391caa3@%3Ccommits.nifi.apache.org%3E" + }, + { + "url": "https://lists.apache.org/thread.html/f9bc3e55f4e28d1dcd1a69aae6d53e609a758e34d2869b4d798e13cc@%3Cissues.drill.apache.org%3E" + }, + { + "url": "https://lists.apache.org/thread.html/r2041a75d3fc09dec55adfd95d598b38d22715303f65c997c054844c9@%3Cissues.flink.apache.org%3E" + }, + { + "url": "https://lists.apache.org/thread.html/r2baacab6e0acb5a2092eb46ae04fd6c3e8277b4fd79b1ffb7f3254fa@%3Cissues.flink.apache.org%3E" + }, + { + "url": "https://lists.apache.org/thread.html/r38f0d1aa3c923c22977fe7376508f030f22e22c1379fbb155bf29766@%3Cdev.syncope.apache.org%3E" + }, + { + "url": "https://lists.apache.org/thread.html/r41b5bfe009c845f67d4f68948cc9419ac2d62e287804aafd72892b08@%3Cissues.flink.apache.org%3E" + }, + { + "url": "https://lists.apache.org/thread.html/r7aac081cbddb6baa24b75e74abf0929bf309b176755a53e3ed810355@%3Cdev.flink.apache.org%3E" + }, + { + "url": "https://lists.apache.org/thread.html/r7d64895cc4dff84d0becfc572b20c0e4bf9bfa7b10c6f5f73e783734@%3Cdev.storm.apache.org%3E" + }, + { + "url": "https://lists.apache.org/thread.html/r7e8ebccb7c022e41295f6fdb7b971209b83702339f872ddd8cf8bf73@%3Cissues.flink.apache.org%3E" + }, + { + "url": "https://lists.apache.org/thread.html/rac25da84ecdcd36f6de5ad0d255f4e967209bbbebddb285e231da37d@%3Cissues.flink.apache.org%3E" + }, + { + "url": "https://lists.apache.org/thread.html/rca37935d661f4689cb4119f1b3b224413b22be161b678e6e6ce0c69b@%3Ccommits.nifi.apache.org%3E" + }, + { + "url": "https://lists.debian.org/debian-lts-announce/2019/05/msg00006.html" + }, + { + "url": "https://lists.debian.org/debian-lts-announce/2019/05/msg00029.html" + }, + { + "url": "https://lists.debian.org/debian-lts-announce/2020/02/msg00024.html" + }, + { + "url": "https://lists.fedoraproject.org/archives/list/package-announce@lists.fedoraproject.org/message/4UOAZIFCSZ3ENEFOR5IXX6NFAD3HV7FA/" + }, + { + "url": "https://lists.fedoraproject.org/archives/list/package-announce@lists.fedoraproject.org/message/5IABSKTYZ5JUGL735UKGXL5YPRYOPUYI/" + }, + { + "url": "https://lists.fedoraproject.org/archives/list/package-announce@lists.fedoraproject.org/message/KYH3OAGR2RTCHRA5NOKX2TES7SNQMWGO/" + }, + { + "url": "https://lists.fedoraproject.org/archives/list/package-announce@lists.fedoraproject.org/message/QV3PKZC3PQCO3273HAT76PAQZFBEO4KP/" + }, + { + "url": "https://lists.fedoraproject.org/archives/list/package-announce@lists.fedoraproject.org/message/RLXRX23725JL366CNZGJZ7AQQB7LHQ6F/" + }, + { + "url": "https://lists.fedoraproject.org/archives/list/package-announce@lists.fedoraproject.org/message/WZW27UCJ5CYFL4KFFFMYMIBNMIU2ALG5/" + }, + { + "url": "https://nvd.nist.gov/vuln/detail/CVE-2019-11358" + }, + { + "url": "https://seclists.org/bugtraq/2019/Apr/32" + }, + { + "url": "https://seclists.org/bugtraq/2019/Jun/12" + }, + { + "url": "https://seclists.org/bugtraq/2019/May/18" + }, + { + "url": "https://security.netapp.com/advisory/ntap-20190919-0001/" + }, + { + "url": "https://snyk.io/vuln/SNYK-JS-JQUERY-174006" + }, + { + "url": "https://www.debian.org/security/2019/dsa-4434" + }, + { + "url": "https://www.debian.org/security/2019/dsa-4460" + }, + { + 
"url": "https://www.drupal.org/sa-core-2019-006" + }, + { + "url": "https://www.oracle.com//security-alerts/cpujul2021.html" + }, + { + "url": "https://www.oracle.com/security-alerts/cpuApr2021.html" + }, + { + "url": "https://www.oracle.com/security-alerts/cpuapr2020.html" + }, + { + "url": "https://www.oracle.com/security-alerts/cpujan2020.html" + }, + { + "url": "https://www.oracle.com/security-alerts/cpujan2021.html" + }, + { + "url": "https://www.oracle.com/security-alerts/cpujul2020.html" + }, + { + "url": "https://www.oracle.com/security-alerts/cpuoct2020.html" + }, + { + "url": "https://www.oracle.com/security-alerts/cpuoct2021.html" + }, + { + "url": "https://www.oracle.com/technetwork/security-advisory/cpujul2019-5072835.html" + }, + { + "url": "https://www.oracle.com/technetwork/security-advisory/cpuoct2019-5072832.html" + }, + { + "url": "https://www.privacy-wise.com/mitigating-cve-2019-11358-in-old-versions-of-jquery/" + }, + { + "url": "https://www.synology.com/security/advisory/Synology_SA_19_19" + }, + { + "url": "https://www.tenable.com/security/tns-2019-08" + }, + { + "url": "https://www.tenable.com/security/tns-2020-02" + } + ], + "published": "2019-04-20T00:29:00+00:00", + "updated": "2021-10-20T11:15:00+00:00", + "affects": [ + { + "ref": "pkg:npm/jquery@3.3.9", + "versions": [ + { + "version": "3.3.9", + "status": "affected" + } + ] + } + ] + } + ] +} diff --git a/integration/testdata/sbt.json.golden b/integration/testdata/sbt.json.golden new file mode 100644 index 000000000000..94bf111fd221 --- /dev/null +++ b/integration/testdata/sbt.json.golden @@ -0,0 +1,149 @@ +{ + "SchemaVersion": 2, + "CreatedAt": "2021-08-25T12:20:30.000000005Z", + "ArtifactName": "testdata/fixtures/repo/sbt", + "ArtifactType": "repository", + "Metadata": { + "ImageConfig": { + "architecture": "", + "created": "0001-01-01T00:00:00Z", + "os": "", + "rootfs": { + "type": "", + "diff_ids": null + }, + "config": {} + } + }, + "Results": [ + { + "Target": "build.sbt.lock", + "Class": "lang-pkgs", + "Type": "sbt", + "Vulnerabilities": [ + { + "VulnerabilityID": "CVE-2020-9548", + "PkgID": "com.fasterxml.jackson.core:jackson-databind:2.9.1", + "PkgName": "com.fasterxml.jackson.core:jackson-databind", + "PkgIdentifier": { + "PURL": "pkg:maven/com.fasterxml.jackson.core/jackson-databind@2.9.1", + "UID": "9ccd2eb3e03373ff" + }, + "InstalledVersion": "2.9.1", + "FixedVersion": "2.9.10.4", + "Status": "fixed", + "Layer": {}, + "SeveritySource": "ghsa", + "PrimaryURL": "https://avd.aquasec.com/nvd/cve-2020-9548", + "DataSource": { + "ID": "ghsa", + "Name": "GitHub Security Advisory Maven", + "URL": "https://github.com/advisories?query=type%3Areviewed+ecosystem%3Amaven" + }, + "Title": "jackson-databind: Serialization gadgets in anteros-core", + "Description": "FasterXML jackson-databind 2.x before 2.9.10.4 mishandles the interaction between serialization gadgets and typing, related to br.com.anteros.dbcp.AnterosDBCPConfig (aka anteros-core).", + "Severity": "CRITICAL", + "CweIDs": [ + "CWE-502" + ], + "VendorSeverity": { + "ghsa": 4, + "nvd": 4, + "redhat": 3 + }, + "CVSS": { + "nvd": { + "V2Vector": "AV:N/AC:M/Au:N/C:P/I:P/A:P", + "V3Vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", + "V2Score": 6.8, + "V3Score": 9.8 + }, + "redhat": { + "V3Vector": "CVSS:3.1/AV:N/AC:H/PR:N/UI:N/S:U/C:H/I:H/A:H", + "V3Score": 8.1 + } + }, + "References": [ + "https://access.redhat.com/security/cve/CVE-2020-9548", + "https://github.com/FasterXML/jackson-databind/issues/2634", + 
"https://github.com/advisories/GHSA-p43x-xfjf-5jhr", + "https://lists.apache.org/thread.html/r35d30db00440ef63b791c4b7f7acb036e14d4a23afa2a249cb66c0fd@%3Cissues.zookeeper.apache.org%3E", + "https://lists.apache.org/thread.html/r9464a40d25c3ba1a55622db72f113eb494a889656962d098c70c5bb1@%3Cdev.zookeeper.apache.org%3E", + "https://lists.apache.org/thread.html/r98c9b6e4c9e17792e2cd1ec3e4aa20b61a791939046d3f10888176bb@%3Cissues.zookeeper.apache.org%3E", + "https://lists.apache.org/thread.html/rb6fecb5e96a6d61e175ff49f33f2713798dd05cf03067c169d195596@%3Cissues.zookeeper.apache.org%3E", + "https://lists.apache.org/thread.html/rd5a4457be4623038c3989294429bc063eec433a2e55995d81591e2ca@%3Cissues.zookeeper.apache.org%3E", + "https://lists.apache.org/thread.html/rdd49ab9565bec436a896bc00c4b9fc9dce1598e106c318524fbdfec6@%3Cissues.zookeeper.apache.org%3E", + "https://lists.apache.org/thread.html/rdd4df698d5d8e635144d2994922bf0842e933809eae259521f3b5097@%3Cissues.zookeeper.apache.org%3E", + "https://lists.apache.org/thread.html/rf1bbc0ea4a9f014cf94df9a12a6477d24a27f52741dbc87f2fd52ff2@%3Cissues.geode.apache.org%3E", + "https://lists.debian.org/debian-lts-announce/2020/03/msg00008.html", + "https://medium.com/@cowtowncoder/on-jackson-cves-dont-panic-here-is-what-you-need-to-know-54cd0d6e8062", + "https://nvd.nist.gov/vuln/detail/CVE-2020-9548", + "https://security.netapp.com/advisory/ntap-20200904-0006/", + "https://www.oracle.com/security-alerts/cpujan2021.html", + "https://www.oracle.com/security-alerts/cpujul2020.html", + "https://www.oracle.com/security-alerts/cpuoct2020.html", + "https://www.oracle.com/security-alerts/cpuoct2021.html" + ], + "PublishedDate": "2020-03-02T04:15:00Z", + "LastModifiedDate": "2021-12-02T21:23:00Z" + }, + { + "VulnerabilityID": "CVE-2021-20190", + "PkgID": "com.fasterxml.jackson.core:jackson-databind:2.9.1", + "PkgName": "com.fasterxml.jackson.core:jackson-databind", + "PkgIdentifier": { + "PURL": "pkg:maven/com.fasterxml.jackson.core/jackson-databind@2.9.1", + "UID": "9ccd2eb3e03373ff" + }, + "InstalledVersion": "2.9.1", + "FixedVersion": "2.9.10.7", + "Status": "fixed", + "Layer": {}, + "SeveritySource": "nvd", + "PrimaryURL": "https://avd.aquasec.com/nvd/cve-2021-20190", + "DataSource": { + "ID": "glad", + "Name": "GitLab Advisory Database Community", + "URL": "https://gitlab.com/gitlab-org/advisories-community" + }, + "Title": "jackson-databind: mishandles the interaction between serialization gadgets and typing, related to javax.swing", + "Description": "A flaw was found in jackson-databind before 2.9.10.7. FasterXML mishandles the interaction between serialization gadgets and typing. 
The highest threat from this vulnerability is to data confidentiality and integrity as well as system availability.", + "Severity": "HIGH", + "CweIDs": [ + "CWE-502" + ], + "VendorSeverity": { + "ghsa": 3, + "nvd": 3, + "redhat": 3 + }, + "CVSS": { + "nvd": { + "V2Vector": "AV:N/AC:M/Au:N/C:P/I:P/A:C", + "V3Vector": "CVSS:3.1/AV:N/AC:H/PR:N/UI:N/S:U/C:H/I:H/A:H", + "V2Score": 8.3, + "V3Score": 8.1 + }, + "redhat": { + "V3Vector": "CVSS:3.1/AV:N/AC:H/PR:N/UI:N/S:U/C:H/I:H/A:H", + "V3Score": 8.1 + } + }, + "References": [ + "https://access.redhat.com/security/cve/CVE-2021-20190", + "https://bugzilla.redhat.com/show_bug.cgi?id=1916633", + "https://github.com/FasterXML/jackson-databind/commit/7dbf51bf78d157098074a20bd9da39bd48c18e4a", + "https://github.com/FasterXML/jackson-databind/issues/2854", + "https://github.com/advisories/GHSA-5949-rw7g-wx7w", + "https://lists.apache.org/thread.html/r380e9257bacb8551ee6fcf2c59890ae9477b2c78e553fa9ea08e9d9a@%3Ccommits.nifi.apache.org%3E", + "https://lists.debian.org/debian-lts-announce/2021/04/msg00025.html", + "https://nvd.nist.gov/vuln/detail/CVE-2021-20190", + "https://security.netapp.com/advisory/ntap-20210219-0008/" + ], + "PublishedDate": "2021-01-19T17:15:00Z", + "LastModifiedDate": "2021-07-20T23:15:00Z" + } + ] + } + ] +} diff --git a/pkg/dbtest/db.go b/internal/dbtest/db.go similarity index 100% rename from pkg/dbtest/db.go rename to internal/dbtest/db.go diff --git a/internal/dbtest/fake.go b/internal/dbtest/fake.go new file mode 100644 index 000000000000..9f2484bbf94a --- /dev/null +++ b/internal/dbtest/fake.go @@ -0,0 +1,84 @@ +package dbtest + +import ( + "archive/tar" + "os" + "path/filepath" + "testing" + + v1 "github.com/google/go-containerregistry/pkg/v1" + fakei "github.com/google/go-containerregistry/pkg/v1/fake" + "github.com/google/go-containerregistry/pkg/v1/tarball" + "github.com/google/go-containerregistry/pkg/v1/types" + "github.com/samber/lo" + "github.com/stretchr/testify/require" + + ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" + "github.com/aquasecurity/trivy/pkg/oci" +) + +const defaultMediaType = "application/vnd.aquasec.trivy.db.layer.v1.tar+gzip" + +type fakeLayer struct { + v1.Layer +} + +func (f fakeLayer) MediaType() (types.MediaType, error) { + return f.Layer.MediaType() +} + +func NewFakeLayer(t *testing.T, input string, mediaType types.MediaType) v1.Layer { + layer, err := tarball.LayerFromFile(input, tarball.WithMediaType(mediaType)) + require.NoError(t, err) + + return fakeLayer{layer} +} + +type FakeDBOptions struct { + MediaType types.MediaType +} + +func NewFakeDB(t *testing.T, dbPath string, opts FakeDBOptions) *oci.Artifact { + mediaType := lo.Ternary(opts.MediaType != "", opts.MediaType, defaultMediaType) + img := new(fakei.FakeImage) + img.LayersReturns([]v1.Layer{NewFakeLayer(t, dbPath, mediaType)}, nil) + img.ManifestReturns(&v1.Manifest{ + Layers: []v1.Descriptor{ + { + MediaType: mediaType, + Size: 100, + Digest: v1.Hash{ + Algorithm: "sha256", + Hex: "aec482bc254b5dd025d3eaf5bb35997d3dba783e394e8f91d5a415963151bfb8", + }, + Annotations: map[string]string{ + "org.opencontainers.image.title": "db.tar.gz", + }, + }, + }, + }, nil) + + // Mock OCI artifact + opt := ftypes.RegistryOptions{ + Insecure: false, + } + art, err := oci.NewArtifact("dummy", true, opt, oci.WithImage(img)) + require.NoError(t, err) + + return art +} + +func ArchiveDir(t *testing.T, dir string) string { + tmpDBPath := 
filepath.Join(t.TempDir(), "db.tar") + f, err := os.Create(tmpDBPath) + require.NoError(t, err) + defer f.Close() + + tr := tar.NewWriter(f) + defer tr.Close() + + err = tr.AddFS(os.DirFS(dir)) + require.NoError(t, err) + + return tmpDBPath +} diff --git a/mkdocs.yml b/mkdocs.yml index 92bbbb24ac21..2222a30220fb 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -152,7 +152,7 @@ nav: - Configuration: - CLI: - Overview: docs/references/configuration/cli/trivy.md - - AWS: docs/references/configuration/cli/trivy_aws.md + - Clean: docs/references/configuration/cli/trivy_clean.md - Config: docs/references/configuration/cli/trivy_config.md - Convert: docs/references/configuration/cli/trivy_convert.md - Filesystem: docs/references/configuration/cli/trivy_filesystem.md diff --git a/pkg/fanal/cache/cache.go b/pkg/cache/cache.go similarity index 98% rename from pkg/fanal/cache/cache.go rename to pkg/cache/cache.go index b2f5fa704ae7..1280c84fe156 100644 --- a/pkg/fanal/cache/cache.go +++ b/pkg/cache/cache.go @@ -5,7 +5,7 @@ import ( ) const ( - cacheDirName = "fanal" + scanCacheDirName = "fanal" // artifactBucket stores artifact information with artifact ID such as image ID artifactBucket = "artifact" diff --git a/pkg/cache/client.go b/pkg/cache/client.go new file mode 100644 index 000000000000..667900366e3e --- /dev/null +++ b/pkg/cache/client.go @@ -0,0 +1,73 @@ +package cache + +import ( + "strings" + "time" + + "golang.org/x/xerrors" + + "github.com/aquasecurity/trivy/pkg/log" +) + +const ( + TypeUnknown Type = "unknown" + TypeFS Type = "fs" + TypeRedis Type = "redis" + TypeMemory Type = "memory" +) + +type Type string + +type Options struct { + Backend string + CacheDir string + RedisCACert string + RedisCert string + RedisKey string + RedisTLS bool + TTL time.Duration +} + +func NewType(backend string) Type { + // "redis://" or "fs" are allowed for now + // An empty value is also allowed for testability + switch { + case strings.HasPrefix(backend, "redis://"): + return TypeRedis + case backend == "fs", backend == "": + return TypeFS + case backend == "memory": + return TypeMemory + default: + return TypeUnknown + } +} + +// New returns a new cache client +func New(opts Options) (Cache, func(), error) { + cleanup := func() {} // To avoid panic + + var cache Cache + t := NewType(opts.Backend) + log.Debug("Initializing scan cache...", log.String("type", string(t))) + switch t { + case TypeRedis: + redisCache, err := NewRedisCache(opts.Backend, opts.RedisCACert, opts.RedisCert, opts.RedisKey, opts.RedisTLS, opts.TTL) + if err != nil { + return nil, cleanup, xerrors.Errorf("unable to initialize redis cache: %w", err) + } + cache = redisCache + case TypeFS: + // standalone mode + fsCache, err := NewFSCache(opts.CacheDir) + if err != nil { + return nil, cleanup, xerrors.Errorf("unable to initialize fs cache: %w", err) + } + cache = fsCache + case TypeMemory: + cache = NewMemoryCache() + default: + return nil, cleanup, xerrors.Errorf("unknown cache type: %s", t) + } + return cache, func() { _ = cache.Close() }, nil +} diff --git a/pkg/cache/client_test.go b/pkg/cache/client_test.go new file mode 100644 index 000000000000..c72eb3de4d13 --- /dev/null +++ b/pkg/cache/client_test.go @@ -0,0 +1,121 @@ +package cache_test + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy/pkg/cache" +) + +func TestNew(t *testing.T) { + tests := []struct { + name string + opts cache.Options + wantType any + 
wantErr string + }{ + { + name: "fs backend", + opts: cache.Options{ + Backend: "fs", + CacheDir: "/tmp/cache", + }, + wantType: cache.FSCache{}, + }, + { + name: "redis backend", + opts: cache.Options{ + Backend: "redis://localhost:6379", + }, + wantType: cache.RedisCache{}, + }, + { + name: "unknown backend", + opts: cache.Options{ + Backend: "unknown", + }, + wantErr: "unknown cache type", + }, + { + name: "invalid redis URL", + opts: cache.Options{ + Backend: "redis://invalid-url:foo/bar", + }, + wantErr: "failed to parse Redis URL", + }, + { + name: "incomplete TLS options", + opts: cache.Options{ + Backend: "redis://localhost:6379", + RedisCACert: "testdata/ca-cert.pem", + RedisTLS: true, + }, + wantErr: "you must provide Redis CA, cert and key file path when using TLS", + }, + { + name: "invalid TLS file paths", + opts: cache.Options{ + Backend: "redis://localhost:6379", + RedisCACert: "testdata/non-existent-ca-cert.pem", + RedisCert: "testdata/non-existent-cert.pem", + RedisKey: "testdata/non-existent-key.pem", + RedisTLS: true, + }, + wantErr: "failed to get TLS config", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + c, cleanup, err := cache.New(tt.opts) + defer cleanup() + + if tt.wantErr != "" { + assert.ErrorContains(t, err, tt.wantErr) + return + } + + require.NoError(t, err) + assert.NotNil(t, c) + assert.IsType(t, tt.wantType, c) + }) + } +} + +func TestNewType(t *testing.T) { + tests := []struct { + name string + backend string + wantType cache.Type + }{ + { + name: "redis backend", + backend: "redis://localhost:6379", + wantType: cache.TypeRedis, + }, + { + name: "fs backend", + backend: "fs", + wantType: cache.TypeFS, + }, + { + name: "empty backend", + backend: "", + wantType: cache.TypeFS, + }, + { + name: "unknown backend", + backend: "unknown", + wantType: cache.TypeUnknown, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := cache.NewType(tt.backend) + assert.Equal(t, tt.wantType, got) + }) + } +} diff --git a/pkg/cache/dir.go b/pkg/cache/dir.go new file mode 100644 index 000000000000..2a67d269bdfe --- /dev/null +++ b/pkg/cache/dir.go @@ -0,0 +1,15 @@ +package cache + +import ( + "os" + "path/filepath" +) + +// DefaultDir returns/creates the cache-dir to be used for trivy operations +func DefaultDir() string { + tmpDir, err := os.UserCacheDir() + if err != nil { + tmpDir = os.TempDir() + } + return filepath.Join(tmpDir, "trivy") +} diff --git a/pkg/fanal/cache/fs.go b/pkg/cache/fs.go similarity index 97% rename from pkg/fanal/cache/fs.go rename to pkg/cache/fs.go index 08fa696e6555..edfac70b04e5 100644 --- a/pkg/fanal/cache/fs.go +++ b/pkg/cache/fs.go @@ -20,7 +20,7 @@ type FSCache struct { } func NewFSCache(cacheDir string) (FSCache, error) { - dir := filepath.Join(cacheDir, cacheDirName) + dir := filepath.Join(cacheDir, scanCacheDirName) if err := os.MkdirAll(dir, 0700); err != nil { return FSCache{}, xerrors.Errorf("failed to create cache dir: %w", err) } @@ -31,7 +31,10 @@ func NewFSCache(cacheDir string) (FSCache, error) { } err = db.Update(func(tx *bolt.Tx) error { - for _, bucket := range []string{artifactBucket, blobBucket} { + for _, bucket := range []string{ + artifactBucket, + blobBucket, + } { if _, err := tx.CreateBucketIfNotExists([]byte(bucket)); err != nil { return xerrors.Errorf("unable to create %s bucket: %w", bucket, err) } diff --git a/pkg/fanal/cache/fs_test.go b/pkg/cache/fs_test.go similarity index 99% rename from pkg/fanal/cache/fs_test.go rename to pkg/cache/fs_test.go index 
4eb059f5c508..9323391a3af4 100644 --- a/pkg/fanal/cache/fs_test.go +++ b/pkg/cache/fs_test.go @@ -373,7 +373,7 @@ func TestFSCache_PutArtifact(t *testing.T) { require.NoError(t, err, tt.name) } - fs.db.View(func(tx *bolt.Tx) error { + err = fs.db.View(func(tx *bolt.Tx) error { // check decompressedDigestBucket imageBucket := tx.Bucket([]byte(artifactBucket)) b := imageBucket.Get([]byte(tt.args.imageID)) @@ -381,6 +381,7 @@ func TestFSCache_PutArtifact(t *testing.T) { return nil }) + require.NoError(t, err) }) } } diff --git a/pkg/fanal/cache/key.go b/pkg/cache/key.go similarity index 100% rename from pkg/fanal/cache/key.go rename to pkg/cache/key.go diff --git a/pkg/fanal/cache/key_test.go b/pkg/cache/key_test.go similarity index 100% rename from pkg/fanal/cache/key_test.go rename to pkg/cache/key_test.go diff --git a/pkg/cache/memory.go b/pkg/cache/memory.go new file mode 100644 index 000000000000..485c6ff4624f --- /dev/null +++ b/pkg/cache/memory.go @@ -0,0 +1,98 @@ +package cache + +import ( + "sync" + + "golang.org/x/xerrors" + + "github.com/aquasecurity/trivy/pkg/fanal/types" +) + +var _ Cache = &MemoryCache{} + +type MemoryCache struct { + artifacts sync.Map // Map to store artifact information + blobs sync.Map // Map to store blob information +} + +func NewMemoryCache() *MemoryCache { + return &MemoryCache{} +} + +// PutArtifact stores the artifact information in the memory cache +func (c *MemoryCache) PutArtifact(artifactID string, artifactInfo types.ArtifactInfo) error { + c.artifacts.Store(artifactID, artifactInfo) + return nil +} + +// PutBlob stores the blob information in the memory cache +func (c *MemoryCache) PutBlob(blobID string, blobInfo types.BlobInfo) error { + c.blobs.Store(blobID, blobInfo) + return nil +} + +// DeleteBlobs removes the specified blobs from the memory cache +func (c *MemoryCache) DeleteBlobs(blobIDs []string) error { + for _, blobID := range blobIDs { + c.blobs.Delete(blobID) + } + return nil +} + +// GetArtifact retrieves the artifact information from the memory cache +func (c *MemoryCache) GetArtifact(artifactID string) (types.ArtifactInfo, error) { + info, ok := c.artifacts.Load(artifactID) + if !ok { + return types.ArtifactInfo{}, xerrors.Errorf("artifact (%s) not found in memory cache", artifactID) + } + artifactInfo, ok := info.(types.ArtifactInfo) + if !ok { + return types.ArtifactInfo{}, xerrors.Errorf("invalid type for artifact (%s) in memory cache", artifactID) + } + return artifactInfo, nil +} + +// GetBlob retrieves the blob information from the memory cache +func (c *MemoryCache) GetBlob(blobID string) (types.BlobInfo, error) { + info, ok := c.blobs.Load(blobID) + if !ok { + return types.BlobInfo{}, xerrors.Errorf("blob (%s) not found in memory cache", blobID) + } + blobInfo, ok := info.(types.BlobInfo) + if !ok { + return types.BlobInfo{}, xerrors.Errorf("invalid type for blob (%s) in memory cache", blobID) + } + return blobInfo, nil +} + +// MissingBlobs determines the missing artifact and blob information in the memory cache +func (c *MemoryCache) MissingBlobs(artifactID string, blobIDs []string) (bool, []string, error) { + var missingArtifact bool + var missingBlobIDs []string + + if _, err := c.GetArtifact(artifactID); err != nil { + missingArtifact = true + } + + for _, blobID := range blobIDs { + if _, err := c.GetBlob(blobID); err != nil { + missingBlobIDs = append(missingBlobIDs, blobID) + } + } + + return missingArtifact, missingBlobIDs, nil +} + +// Close clears the artifact and blob information from the memory cache 
+func (c *MemoryCache) Close() error { + c.artifacts = sync.Map{} + c.blobs = sync.Map{} + return nil +} + +// Clear clears the artifact and blob information from the memory cache +func (c *MemoryCache) Clear() error { + c.artifacts = sync.Map{} + c.blobs = sync.Map{} + return nil +} diff --git a/pkg/cache/memory_test.go b/pkg/cache/memory_test.go new file mode 100644 index 000000000000..3d88b565c7f9 --- /dev/null +++ b/pkg/cache/memory_test.go @@ -0,0 +1,396 @@ +package cache_test + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy/pkg/cache" + "github.com/aquasecurity/trivy/pkg/fanal/types" +) + +func TestMemoryCache_PutArtifact(t *testing.T) { + tests := []struct { + name string + artifactID string + artifactInfo types.ArtifactInfo + }{ + { + name: "happy path", + artifactID: "sha256:8652b9f0cb4c0599575e5a003f5906876e10c1ceb2ab9fe1786712dac14a50cf", + artifactInfo: types.ArtifactInfo{ + SchemaVersion: 2, + Architecture: "amd64", + Created: time.Date(2020, 11, 14, 0, 20, 4, 0, time.UTC), + DockerVersion: "19.03.12", + OS: "linux", + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + c := cache.NewMemoryCache() + + err := c.PutArtifact(tt.artifactID, tt.artifactInfo) + require.NoError(t, err) + + got, err := c.GetArtifact(tt.artifactID) + require.NoError(t, err) + assert.Equal(t, tt.artifactInfo, got) + }) + } +} + +func TestMemoryCache_PutBlob(t *testing.T) { + tests := []struct { + name string + blobID string + blobInfo types.BlobInfo + }{ + { + name: "happy path", + blobID: "sha256:03901b4a2ea88eeaad62dbe59b072b28b6efa00491962b8741081c5df50c65e0", + blobInfo: types.BlobInfo{ + SchemaVersion: 2, + Digest: "sha256:9d48c3bd43c520dc2784e868a780e976b207cbf493eaff8c6596eb871cbd9609", + DiffID: "sha256:03901b4a2ea88eeaad62dbe59b072b28b6efa00491962b8741081c5df50c65e0", + OS: types.OS{ + Family: "alpine", + Name: "3.10.2", + }, + PackageInfos: []types.PackageInfo{ + { + FilePath: "lib/apk/db/installed", + Packages: []types.Package{ + { + Name: "musl", + Version: "1.1.22-r3", + SrcName: "musl", + SrcVersion: "1.1.22-r3", + }, + }, + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + c := cache.NewMemoryCache() + + err := c.PutBlob(tt.blobID, tt.blobInfo) + require.NoError(t, err) + + got, err := c.GetBlob(tt.blobID) + require.NoError(t, err) + assert.Equal(t, tt.blobInfo, got) + }) + } +} + +func TestMemoryCache_GetArtifact(t *testing.T) { + tests := []struct { + name string + artifactID string + artifactInfo types.ArtifactInfo + wantErr bool + }{ + { + name: "happy path", + artifactID: "sha256:8652b9f0cb4c0599575e5a003f5906876e10c1ceb2ab9fe1786712dac14a50cf", + artifactInfo: types.ArtifactInfo{ + SchemaVersion: 2, + Architecture: "amd64", + Created: time.Date(2020, 11, 14, 0, 20, 4, 0, time.UTC), + DockerVersion: "19.03.12", + OS: "linux", + }, + wantErr: false, + }, + { + name: "not found", + artifactID: "sha256:nonexistent", + wantErr: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + c := cache.NewMemoryCache() + + if !tt.wantErr { + err := c.PutArtifact(tt.artifactID, tt.artifactInfo) + require.NoError(t, err) + } + + got, err := c.GetArtifact(tt.artifactID) + if tt.wantErr { + require.ErrorContains(t, err, "not found in memory cache") + return + } + require.NoError(t, err) + assert.Equal(t, tt.artifactInfo, got) + }) + } +} + +func 
TestMemoryCache_GetBlob(t *testing.T) { + tests := []struct { + name string + blobID string + blobInfo types.BlobInfo + wantErr bool + }{ + { + name: "happy path", + blobID: "sha256:03901b4a2ea88eeaad62dbe59b072b28b6efa00491962b8741081c5df50c65e0", + blobInfo: types.BlobInfo{ + SchemaVersion: 2, + Digest: "sha256:9d48c3bd43c520dc2784e868a780e976b207cbf493eaff8c6596eb871cbd9609", + DiffID: "sha256:03901b4a2ea88eeaad62dbe59b072b28b6efa00491962b8741081c5df50c65e0", + OS: types.OS{ + Family: "alpine", + Name: "3.10.2", + }, + }, + wantErr: false, + }, + { + name: "not found", + blobID: "sha256:nonexistent", + wantErr: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + c := cache.NewMemoryCache() + + if !tt.wantErr { + err := c.PutBlob(tt.blobID, tt.blobInfo) + require.NoError(t, err) + } + + got, err := c.GetBlob(tt.blobID) + if tt.wantErr { + require.ErrorContains(t, err, "not found in memory cache") + return + } + require.NoError(t, err) + assert.Equal(t, tt.blobInfo, got) + }) + } +} + +func TestMemoryCache_MissingBlobs(t *testing.T) { + tests := []struct { + name string + artifactID string + blobIDs []string + putArtifact bool + putBlobs []string + wantMissingArtifact bool + wantMissingBlobIDs []string + }{ + { + name: "missing both artifact and blob", + artifactID: "sha256:artifact1", + blobIDs: []string{ + "sha256:blob1", + "sha256:blob2", + }, + putArtifact: false, + putBlobs: []string{}, + wantMissingArtifact: true, + wantMissingBlobIDs: []string{ + "sha256:blob1", + "sha256:blob2", + }, + }, + { + name: "missing artifact only", + artifactID: "sha256:artifact1", + blobIDs: []string{ + "sha256:blob1", + "sha256:blob2", + }, + putArtifact: false, + putBlobs: []string{ + "sha256:blob1", + "sha256:blob2", + }, + wantMissingArtifact: true, + wantMissingBlobIDs: nil, + }, + { + name: "missing one blob", + artifactID: "sha256:artifact1", + blobIDs: []string{ + "sha256:blob1", + "sha256:blob2", + }, + putArtifact: true, + putBlobs: []string{"sha256:blob1"}, + wantMissingArtifact: false, + wantMissingBlobIDs: []string{"sha256:blob2"}, + }, + { + name: "no missing blobs", + artifactID: "sha256:artifact1", + blobIDs: []string{ + "sha256:blob1", + "sha256:blob2", + }, + putArtifact: true, + putBlobs: []string{ + "sha256:blob1", + "sha256:blob2", + }, + wantMissingArtifact: false, + wantMissingBlobIDs: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + c := cache.NewMemoryCache() + + if tt.putArtifact { + err := c.PutArtifact(tt.artifactID, types.ArtifactInfo{}) + require.NoError(t, err) + } + + for _, blobID := range tt.putBlobs { + err := c.PutBlob(blobID, types.BlobInfo{}) + require.NoError(t, err) + } + + gotMissingArtifact, gotMissingBlobIDs, err := c.MissingBlobs(tt.artifactID, tt.blobIDs) + require.NoError(t, err) + assert.Equal(t, tt.wantMissingArtifact, gotMissingArtifact) + assert.Equal(t, tt.wantMissingBlobIDs, gotMissingBlobIDs) + }) + } +} + +func TestMemoryCache_DeleteBlobs(t *testing.T) { + tests := []struct { + name string + blobIDs []string + }{ + { + name: "delete existing blobs", + blobIDs: []string{ + "sha256:blob1", + "sha256:blob2", + }, + }, + { + name: "delete non-existing blobs", + blobIDs: []string{ + "sha256:nonexistent1", + "sha256:nonexistent2", + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + c := cache.NewMemoryCache() + + // Put some blobs in the cache + for _, blobID := range tt.blobIDs { + err := c.PutBlob(blobID, types.BlobInfo{}) + require.NoError(t, err) + } 
+ + err := c.DeleteBlobs(tt.blobIDs) + require.NoError(t, err) + + // Check that the blobs are no longer in the cache + for _, blobID := range tt.blobIDs { + _, err := c.GetBlob(blobID) + require.Error(t, err) + assert.Contains(t, err.Error(), "not found in memory cache") + } + }) + } +} + +func TestMemoryCache_Clear(t *testing.T) { + tests := []struct { + name string + artifactID string + blobID string + }{ + { + name: "clear cache", + artifactID: "sha256:artifact1", + blobID: "sha256:blob1", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + c := cache.NewMemoryCache() + + err := c.PutArtifact(tt.artifactID, types.ArtifactInfo{}) + require.NoError(t, err) + + err = c.PutBlob(tt.blobID, types.BlobInfo{}) + require.NoError(t, err) + + err = c.Clear() + require.NoError(t, err) + + _, err = c.GetArtifact(tt.artifactID) + require.Error(t, err) + assert.Contains(t, err.Error(), "not found in memory cache") + + _, err = c.GetBlob(tt.blobID) + require.Error(t, err) + assert.Contains(t, err.Error(), "not found in memory cache") + }) + } +} + +func TestMemoryCache_Close(t *testing.T) { + tests := []struct { + name string + artifactID string + blobID string + }{ + { + name: "close cache", + artifactID: "sha256:artifact1", + blobID: "sha256:blob1", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + c := cache.NewMemoryCache() + + err := c.PutArtifact(tt.artifactID, types.ArtifactInfo{}) + require.NoError(t, err) + + err = c.PutBlob(tt.blobID, types.BlobInfo{}) + require.NoError(t, err) + + err = c.Close() + require.NoError(t, err) + + _, err = c.GetArtifact(tt.artifactID) + require.Error(t, err) + assert.Contains(t, err.Error(), "not found in memory cache") + + _, err = c.GetBlob(tt.blobID) + require.Error(t, err) + assert.Contains(t, err.Error(), "not found in memory cache") + }) + } +} diff --git a/pkg/fanal/cache/mock_artifact_cache.go b/pkg/cache/mock_artifact_cache.go similarity index 100% rename from pkg/fanal/cache/mock_artifact_cache.go rename to pkg/cache/mock_artifact_cache.go diff --git a/pkg/fanal/cache/mock_cache.go b/pkg/cache/mock_cache.go similarity index 100% rename from pkg/fanal/cache/mock_cache.go rename to pkg/cache/mock_cache.go diff --git a/pkg/fanal/cache/mock_local_artifact_cache.go b/pkg/cache/mock_local_artifact_cache.go similarity index 100% rename from pkg/fanal/cache/mock_local_artifact_cache.go rename to pkg/cache/mock_local_artifact_cache.go diff --git a/pkg/cache/nop.go b/pkg/cache/nop.go index 6b52e91108c6..4a76cd84e414 100644 --- a/pkg/cache/nop.go +++ b/pkg/cache/nop.go @@ -1,16 +1,11 @@ package cache -import "github.com/aquasecurity/trivy/pkg/fanal/cache" +import "github.com/aquasecurity/trivy/pkg/fanal/types" -func NopCache(ac cache.ArtifactCache) cache.Cache { - return nopCache{ArtifactCache: ac} -} +type NopCache struct{} -type nopCache struct { - cache.ArtifactCache - cache.LocalArtifactCache -} - -func (nopCache) Close() error { - return nil -} +func NewNopCache() NopCache { return NopCache{} } +func (NopCache) GetArtifact(string) (types.ArtifactInfo, error) { return types.ArtifactInfo{}, nil } +func (NopCache) GetBlob(string) (types.BlobInfo, error) { return types.BlobInfo{}, nil } +func (NopCache) Close() error { return nil } +func (NopCache) Clear() error { return nil } diff --git a/pkg/fanal/cache/redis.go b/pkg/cache/redis.go similarity index 58% rename from pkg/fanal/cache/redis.go rename to pkg/cache/redis.go index af9d2622b531..2a4a12bda3f7 100644 --- a/pkg/fanal/cache/redis.go 
+++ b/pkg/cache/redis.go @@ -2,33 +2,118 @@ package cache import ( "context" + "crypto/tls" + "crypto/x509" "encoding/json" "fmt" + "os" + "strings" "time" "github.com/go-redis/redis/v8" "github.com/hashicorp/go-multierror" + "github.com/samber/lo" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/fanal/types" + "github.com/aquasecurity/trivy/pkg/log" ) -var _ Cache = &RedisCache{} +var _ Cache = (*RedisCache)(nil) -const ( - redisPrefix = "fanal" -) +const redisPrefix = "fanal" + +type RedisOptions struct { + Backend string + TLS bool + TLSOptions RedisTLSOptions +} + +func NewRedisOptions(backend, caCert, cert, key string, enableTLS bool) (RedisOptions, error) { + tlsOpts, err := NewRedisTLSOptions(caCert, cert, key) + if err != nil { + return RedisOptions{}, xerrors.Errorf("redis TLS option error: %w", err) + } + + return RedisOptions{ + Backend: backend, + TLS: enableTLS, + TLSOptions: tlsOpts, + }, nil +} + +// BackendMasked returns the redis connection string masking credentials +func (o *RedisOptions) BackendMasked() string { + endIndex := strings.Index(o.Backend, "@") + if endIndex == -1 { + return o.Backend + } + + startIndex := strings.Index(o.Backend, "//") + + return fmt.Sprintf("%s****%s", o.Backend[:startIndex+2], o.Backend[endIndex:]) +} + +// RedisTLSOptions holds the options for redis cache +type RedisTLSOptions struct { + CACert string + Cert string + Key string +} + +func NewRedisTLSOptions(caCert, cert, key string) (RedisTLSOptions, error) { + opts := RedisTLSOptions{ + CACert: caCert, + Cert: cert, + Key: key, + } + + // If one of redis option not nil, make sure CA, cert, and key provided + if !lo.IsEmpty(opts) { + if opts.CACert == "" || opts.Cert == "" || opts.Key == "" { + return RedisTLSOptions{}, xerrors.Errorf("you must provide Redis CA, cert and key file path when using TLS") + } + } + return opts, nil +} type RedisCache struct { client *redis.Client expiration time.Duration } -func NewRedisCache(options *redis.Options, expiration time.Duration) RedisCache { +func NewRedisCache(backend, caCertPath, certPath, keyPath string, enableTLS bool, ttl time.Duration) (RedisCache, error) { + opts, err := NewRedisOptions(backend, caCertPath, certPath, keyPath, enableTLS) + if err != nil { + return RedisCache{}, xerrors.Errorf("failed to create Redis options: %w", err) + } + + log.Info("Redis scan cache", log.String("url", opts.BackendMasked())) + options, err := redis.ParseURL(opts.Backend) + if err != nil { + return RedisCache{}, xerrors.Errorf("failed to parse Redis URL: %w", err) + } + + if tlsOpts := opts.TLSOptions; !lo.IsEmpty(tlsOpts) { + caCert, cert, err := GetTLSConfig(tlsOpts.CACert, tlsOpts.Cert, tlsOpts.Key) + if err != nil { + return RedisCache{}, xerrors.Errorf("failed to get TLS config: %w", err) + } + + options.TLSConfig = &tls.Config{ + RootCAs: caCert, + Certificates: []tls.Certificate{cert}, + MinVersion: tls.VersionTLS12, + } + } else if opts.TLS { + options.TLSConfig = &tls.Config{ + MinVersion: tls.VersionTLS12, + } + } return RedisCache{ client: redis.NewClient(options), - expiration: expiration, - } + expiration: ttl, + }, nil } func (c RedisCache) PutArtifact(artifactID string, artifactConfig types.ArtifactInfo) error { @@ -145,3 +230,21 @@ func (c RedisCache) Clear() error { } return nil } + +// GetTLSConfig gets tls config from CA, Cert and Key file +func GetTLSConfig(caCertPath, certPath, keyPath string) (*x509.CertPool, tls.Certificate, error) { + cert, err := tls.LoadX509KeyPair(certPath, 
keyPath) + if err != nil { + return nil, tls.Certificate{}, err + } + + caCert, err := os.ReadFile(caCertPath) + if err != nil { + return nil, tls.Certificate{}, err + } + + caCertPool := x509.NewCertPool() + caCertPool.AppendCertsFromPEM(caCert) + + return caCertPool, cert, nil +} diff --git a/pkg/fanal/cache/redis_test.go b/pkg/cache/redis_test.go similarity index 87% rename from pkg/fanal/cache/redis_test.go rename to pkg/cache/redis_test.go index 335e272890f2..3cc8bbd702ad 100644 --- a/pkg/fanal/cache/redis_test.go +++ b/pkg/cache/redis_test.go @@ -7,11 +7,10 @@ import ( "time" "github.com/alicebob/miniredis/v2" - "github.com/go-redis/redis/v8" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/aquasecurity/trivy/pkg/fanal/cache" + "github.com/aquasecurity/trivy/pkg/cache" "github.com/aquasecurity/trivy/pkg/fanal/types" ) @@ -67,18 +66,15 @@ func TestRedisCache_PutArtifact(t *testing.T) { addr = "dummy:16379" } - c := cache.NewRedisCache(&redis.Options{ - Addr: addr, - }, 0) + c, err := cache.NewRedisCache(fmt.Sprintf("redis://%s", addr), "", "", "", false, 0) + require.NoError(t, err) err = c.PutArtifact(tt.args.artifactID, tt.args.artifactConfig) if tt.wantErr != "" { - require.Error(t, err) - assert.Contains(t, err.Error(), tt.wantErr) + require.ErrorContains(t, err, tt.wantErr) return - } else { - require.NoError(t, err) } + require.NoError(t, err) got, err := s.Get(tt.wantKey) require.NoError(t, err) @@ -156,18 +152,15 @@ func TestRedisCache_PutBlob(t *testing.T) { addr = "dummy:16379" } - c := cache.NewRedisCache(&redis.Options{ - Addr: addr, - }, 0) + c, err := cache.NewRedisCache(fmt.Sprintf("redis://%s", addr), "", "", "", false, 0) + require.NoError(t, err) err = c.PutBlob(tt.args.blobID, tt.args.blobConfig) if tt.wantErr != "" { - require.Error(t, err) - assert.Contains(t, err.Error(), tt.wantErr) + require.ErrorContains(t, err, tt.wantErr) return - } else { - require.NoError(t, err) } + require.NoError(t, err) got, err := s.Get(tt.wantKey) require.NoError(t, err) @@ -241,18 +234,15 @@ func TestRedisCache_GetArtifact(t *testing.T) { addr = "dummy:16379" } - c := cache.NewRedisCache(&redis.Options{ - Addr: addr, - }, 0) + c, err := cache.NewRedisCache(fmt.Sprintf("redis://%s", addr), "", "", "", false, 0) + require.NoError(t, err) got, err := c.GetArtifact(tt.artifactID) if tt.wantErr != "" { - require.Error(t, err) - assert.Contains(t, err.Error(), tt.wantErr) + require.ErrorContains(t, err, tt.wantErr) return - } else { - require.NoError(t, err) } + require.NoError(t, err) assert.Equal(t, tt.want, got) }) @@ -334,14 +324,12 @@ func TestRedisCache_GetBlob(t *testing.T) { addr = "dummy:16379" } - c := cache.NewRedisCache(&redis.Options{ - Addr: addr, - }, 0) + c, err := cache.NewRedisCache(fmt.Sprintf("redis://%s", addr), "", "", "", false, 0) + require.NoError(t, err) got, err := c.GetBlob(tt.blobID) if tt.wantErr != "" { - require.Error(t, err) - assert.Contains(t, err.Error(), tt.wantErr) + require.ErrorContains(t, err, tt.wantErr) return } @@ -445,14 +433,12 @@ func TestRedisCache_MissingBlobs(t *testing.T) { addr = "dummy:6379" } - c := cache.NewRedisCache(&redis.Options{ - Addr: addr, - }, 0) + c, err := cache.NewRedisCache(fmt.Sprintf("redis://%s", addr), "", "", "", false, 0) + require.NoError(t, err) missingArtifact, missingBlobIDs, err := c.MissingBlobs(tt.args.artifactID, tt.args.blobIDs) if tt.wantErr != "" { - require.Error(t, err) - assert.Contains(t, err.Error(), 
tt.wantErr) + require.ErrorContains(t, err, tt.wantErr) return } @@ -470,9 +456,9 @@ func TestRedisCache_Close(t *testing.T) { defer s.Close() t.Run("close", func(t *testing.T) { - c := cache.NewRedisCache(&redis.Options{ - Addr: s.Addr(), - }, 0) + c, err := cache.NewRedisCache(fmt.Sprintf("redis://%s", s.Addr()), "", "", "", false, 0) + require.NoError(t, err) + closeErr := c.Close() require.NoError(t, closeErr) time.Sleep(3 * time.Second) // give it some time @@ -492,9 +478,9 @@ func TestRedisCache_Clear(t *testing.T) { s.Set("foo", "bar") t.Run("clear", func(t *testing.T) { - c := cache.NewRedisCache(&redis.Options{ - Addr: s.Addr(), - }, 0) + c, err := cache.NewRedisCache(fmt.Sprintf("redis://%s", s.Addr()), "", "", "", false, 0) + require.NoError(t, err) + require.NoError(t, c.Clear()) for i := 0; i < 200; i++ { assert.False(t, s.Exists(fmt.Sprintf("fanal::key%d", i))) @@ -546,9 +532,8 @@ func TestRedisCache_DeleteBlobs(t *testing.T) { addr = "dummy:16379" } - c := cache.NewRedisCache(&redis.Options{ - Addr: addr, - }, 0) + c, err := cache.NewRedisCache(fmt.Sprintf("redis://%s", addr), "", "", "", false, 0) + require.NoError(t, err) err = c.DeleteBlobs(tt.args.blobIDs) if tt.wantErr != "" { @@ -560,3 +545,27 @@ func TestRedisCache_DeleteBlobs(t *testing.T) { }) } } + +func TestRedisOptions_BackendMasked(t *testing.T) { + tests := []struct { + name string + fields cache.RedisOptions + want string + }{ + { + name: "redis cache backend masked", + fields: cache.RedisOptions{Backend: "redis://root:password@localhost:6379"}, + want: "redis://****@localhost:6379", + }, + { + name: "redis cache backend masked does nothing", + fields: cache.RedisOptions{Backend: "redis://localhost:6379"}, + want: "redis://localhost:6379", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.fields.BackendMasked()) + }) + } +} diff --git a/pkg/cache/remote.go b/pkg/cache/remote.go index 5900bf7a3b91..44c9f63c92d8 100644 --- a/pkg/cache/remote.go +++ b/pkg/cache/remote.go @@ -7,13 +7,20 @@ import ( "golang.org/x/xerrors" - "github.com/aquasecurity/trivy/pkg/fanal/cache" "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/rpc" "github.com/aquasecurity/trivy/pkg/rpc/client" rpcCache "github.com/aquasecurity/trivy/rpc/cache" ) +var _ ArtifactCache = (*RemoteCache)(nil) + +type RemoteOptions struct { + ServerAddr string + CustomHeaders http.Header + Insecure bool +} + // RemoteCache implements remote cache type RemoteCache struct { ctx context.Context // for custom header @@ -21,19 +28,22 @@ type RemoteCache struct { } // NewRemoteCache is the factory method for RemoteCache -func NewRemoteCache(url string, customHeaders http.Header, insecure bool) cache.ArtifactCache { - ctx := client.WithCustomHeaders(context.Background(), customHeaders) +func NewRemoteCache(opts RemoteOptions) *RemoteCache { + ctx := client.WithCustomHeaders(context.Background(), opts.CustomHeaders) httpClient := &http.Client{ Transport: &http.Transport{ Proxy: http.ProxyFromEnvironment, TLSClientConfig: &tls.Config{ - InsecureSkipVerify: insecure, + InsecureSkipVerify: opts.Insecure, }, }, } - c := rpcCache.NewCacheProtobufClient(url, httpClient) - return &RemoteCache{ctx: ctx, client: c} + c := rpcCache.NewCacheProtobufClient(opts.ServerAddr, httpClient) + return &RemoteCache{ + ctx: ctx, + client: c, + } } // PutArtifact sends artifact to remote client diff --git a/pkg/cache/remote_test.go b/pkg/cache/remote_test.go index 
a7a8e27dbaa9..3e1363d5dd4d 100644 --- a/pkg/cache/remote_test.go +++ b/pkg/cache/remote_test.go @@ -15,14 +15,13 @@ import ( "google.golang.org/protobuf/types/known/emptypb" "github.com/aquasecurity/trivy/pkg/cache" - fcache "github.com/aquasecurity/trivy/pkg/fanal/cache" "github.com/aquasecurity/trivy/pkg/fanal/types" rpcCache "github.com/aquasecurity/trivy/rpc/cache" rpcScanner "github.com/aquasecurity/trivy/rpc/scanner" ) type mockCacheServer struct { - cache fcache.Cache + cache cache.Cache } func (s *mockCacheServer) PutArtifact(_ context.Context, in *rpcCache.PutArtifactRequest) (*emptypb.Empty, error) { @@ -47,7 +46,10 @@ func (s *mockCacheServer) MissingBlobs(_ context.Context, in *rpcCache.MissingBl } layerIDs = append(layerIDs, layerID) } - return &rpcCache.MissingBlobsResponse{MissingArtifact: true, MissingBlobIds: layerIDs}, nil + return &rpcCache.MissingBlobsResponse{ + MissingArtifact: true, + MissingBlobIds: layerIDs, + }, nil } func (s *mockCacheServer) DeleteBlobs(_ context.Context, in *rpcCache.DeleteBlobsRequest) (*emptypb.Empty, error) { @@ -143,7 +145,11 @@ func TestRemoteCache_PutArtifact(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - c := cache.NewRemoteCache(ts.URL, tt.args.customHeaders, false) + c := cache.NewRemoteCache(cache.RemoteOptions{ + ServerAddr: ts.URL, + CustomHeaders: tt.args.customHeaders, + Insecure: false, + }) err := c.PutArtifact(tt.args.imageID, tt.args.imageInfo) if tt.wantErr != "" { require.Error(t, err, tt.name) @@ -204,7 +210,11 @@ func TestRemoteCache_PutBlob(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - c := cache.NewRemoteCache(ts.URL, tt.args.customHeaders, false) + c := cache.NewRemoteCache(cache.RemoteOptions{ + ServerAddr: ts.URL, + CustomHeaders: tt.args.customHeaders, + Insecure: false, + }) err := c.PutBlob(tt.args.diffID, tt.args.layerInfo) if tt.wantErr != "" { require.Error(t, err, tt.name) @@ -282,7 +292,11 @@ func TestRemoteCache_MissingBlobs(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - c := cache.NewRemoteCache(ts.URL, tt.args.customHeaders, false) + c := cache.NewRemoteCache(cache.RemoteOptions{ + ServerAddr: ts.URL, + CustomHeaders: tt.args.customHeaders, + Insecure: false, + }) gotMissingImage, gotMissingLayerIDs, err := c.MissingBlobs(tt.args.imageID, tt.args.layerIDs) if tt.wantErr != "" { require.Error(t, err, tt.name) @@ -332,7 +346,11 @@ func TestRemoteCache_PutArtifactInsecure(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - c := cache.NewRemoteCache(ts.URL, nil, tt.args.insecure) + c := cache.NewRemoteCache(cache.RemoteOptions{ + ServerAddr: ts.URL, + CustomHeaders: nil, + Insecure: tt.args.insecure, + }) err := c.PutArtifact(tt.args.imageID, tt.args.imageInfo) if tt.wantErr != "" { require.Error(t, err) diff --git a/pkg/fanal/cache/testdata/broken-image.db b/pkg/cache/testdata/broken-image.db similarity index 100% rename from pkg/fanal/cache/testdata/broken-image.db rename to pkg/cache/testdata/broken-image.db diff --git a/pkg/fanal/cache/testdata/broken-layer.db b/pkg/cache/testdata/broken-layer.db similarity index 100% rename from pkg/fanal/cache/testdata/broken-layer.db rename to pkg/cache/testdata/broken-layer.db diff --git a/pkg/fanal/cache/testdata/different-image-schema.db b/pkg/cache/testdata/different-image-schema.db similarity index 100% rename from pkg/fanal/cache/testdata/different-image-schema.db rename to 
pkg/cache/testdata/different-image-schema.db diff --git a/pkg/fanal/cache/testdata/fanal.db b/pkg/cache/testdata/fanal.db similarity index 100% rename from pkg/fanal/cache/testdata/fanal.db rename to pkg/cache/testdata/fanal.db diff --git a/pkg/fanal/cache/testdata/policy/test.rego b/pkg/cache/testdata/policy/test.rego similarity index 100% rename from pkg/fanal/cache/testdata/policy/test.rego rename to pkg/cache/testdata/policy/test.rego diff --git a/pkg/fanal/cache/testdata/trivy-secret.yaml b/pkg/cache/testdata/trivy-secret.yaml similarity index 100% rename from pkg/fanal/cache/testdata/trivy-secret.yaml rename to pkg/cache/testdata/trivy-secret.yaml diff --git a/pkg/cloud/aws/cache/cache.go b/pkg/cloud/aws/cache/cache.go deleted file mode 100644 index 660cb24b443b..000000000000 --- a/pkg/cloud/aws/cache/cache.go +++ /dev/null @@ -1,132 +0,0 @@ -package cache - -import ( - "encoding/json" - "fmt" - "os" - "path" - "path/filepath" - "strings" - "time" - - "github.com/aquasecurity/trivy/pkg/iac/state" -) - -type Cache struct { - path string - accountID string - region string - maxAge time.Duration -} - -const SchemaVersion = 2 - -type CacheData struct { - SchemaVersion int `json:"schema_version"` - State *state.State `json:"state"` - Services map[string]ServiceMetadata `json:"service_metadata"` - Updated time.Time `json:"updated"` -} - -type ServiceMetadata struct { - Name string `json:"name"` - Updated time.Time `json:"updated"` -} - -var ErrCacheNotFound = fmt.Errorf("cache record not found") -var ErrCacheIncompatible = fmt.Errorf("cache record used incomatible schema") -var ErrCacheExpired = fmt.Errorf("cache record expired") - -func New(cacheDir string, maxCacheAge time.Duration, accountID, region string) *Cache { - return &Cache{ - path: path.Join(cacheDir, "cloud", "aws", accountID, strings.ToLower(region), "data.json"), - accountID: accountID, - region: region, - maxAge: maxCacheAge, - } -} - -func (c *Cache) load() (*CacheData, error) { - - m, err := os.Open(c.path) - if err != nil { - return nil, ErrCacheNotFound - } - defer func() { _ = m.Close() }() - - var data CacheData - if err := json.NewDecoder(m).Decode(&data); err != nil { - return nil, err - } - - if data.SchemaVersion != SchemaVersion { - return nil, ErrCacheIncompatible - } - - if time.Since(data.Updated) > c.maxAge { - return nil, ErrCacheExpired - } - - return &data, nil -} - -func (c *Cache) ListServices(required []string) (included, missing []string) { - - data, err := c.load() - if err != nil { - return nil, required - } - - for _, service := range required { - metadata, ok := data.Services[service] - if !ok { - missing = append(missing, service) - continue - } - if time.Since(metadata.Updated) > c.maxAge { - missing = append(missing, service) - continue - } - included = append(included, service) - } - - return included, missing -} - -func (c *Cache) LoadState() (*state.State, error) { - data, err := c.load() - if err != nil { - return nil, err - } - return data.State, nil -} - -func (c *Cache) AddServices(s *state.State, includedServices []string) error { - data := &CacheData{ - SchemaVersion: SchemaVersion, - State: s, - Services: make(map[string]ServiceMetadata), - Updated: time.Now(), - } - - if previous, err := c.load(); err == nil { - data.Services = previous.Services - } - - for _, service := range includedServices { - data.Services[service] = ServiceMetadata{ - Name: service, - Updated: time.Now(), - } - } - - if err := os.MkdirAll(filepath.Dir(c.path), 0700); err != nil { - return err - } - f, 
err := os.Create(c.path) - if err != nil { - return err - } - defer func() { _ = f.Close() }() - return json.NewEncoder(f).Encode(data) -} diff --git a/pkg/cloud/aws/commands/run.go b/pkg/cloud/aws/commands/run.go deleted file mode 100644 index 708d9314f94f..000000000000 --- a/pkg/cloud/aws/commands/run.go +++ /dev/null @@ -1,180 +0,0 @@ -package commands - -import ( - "context" - "errors" - "sort" - "strings" - - "github.com/aws/aws-sdk-go-v2/service/sts" - "golang.org/x/exp/slices" - "golang.org/x/xerrors" - - "github.com/aquasecurity/trivy-aws/pkg/errs" - awsScanner "github.com/aquasecurity/trivy-aws/pkg/scanner" - "github.com/aquasecurity/trivy/pkg/cloud" - "github.com/aquasecurity/trivy/pkg/cloud/aws/config" - "github.com/aquasecurity/trivy/pkg/cloud/aws/scanner" - "github.com/aquasecurity/trivy/pkg/cloud/report" - "github.com/aquasecurity/trivy/pkg/commands/operation" - "github.com/aquasecurity/trivy/pkg/flag" - "github.com/aquasecurity/trivy/pkg/log" - "github.com/aquasecurity/trivy/pkg/types" -) - -var allSupportedServicesFunc = awsScanner.AllSupportedServices - -func getAccountIDAndRegion(ctx context.Context, region, endpoint string) (string, string, error) { - log.DebugContext(ctx, "Looking for AWS credentials provider...") - - cfg, err := config.LoadDefaultAWSConfig(ctx, region, endpoint) - if err != nil { - return "", "", err - } - - svc := sts.NewFromConfig(cfg) - - log.DebugContext(ctx, "Looking up AWS caller identity...") - result, err := svc.GetCallerIdentity(ctx, &sts.GetCallerIdentityInput{}) - if err != nil { - return "", "", xerrors.Errorf("failed to discover AWS caller identity: %w", err) - } - if result.Account == nil { - return "", "", xerrors.Errorf("missing account id for aws account") - } - log.DebugContext(ctx, "Verified AWS credentials for account!", log.String("account", *result.Account)) - return *result.Account, cfg.Region, nil -} - -func validateServicesInput(services, skipServices []string) error { - for _, s := range services { - for _, ss := range skipServices { - if s == ss { - return xerrors.Errorf("service: %s specified to both skip and include", s) - } - } - } - return nil -} - -func processOptions(ctx context.Context, opt *flag.Options) error { - if err := validateServicesInput(opt.Services, opt.SkipServices); err != nil { - return err - } - - // support comma separated services too - var splitServices []string - for _, service := range opt.Services { - splitServices = append(splitServices, strings.Split(service, ",")...) - } - opt.Services = splitServices - - var splitSkipServices []string - for _, skipService := range opt.SkipServices { - splitSkipServices = append(splitSkipServices, strings.Split(skipService, ",")...) 
- } - opt.SkipServices = splitSkipServices - - if len(opt.Services) != 1 && opt.ARN != "" { - return xerrors.Errorf("you must specify the single --service which the --arn relates to") - } - - if opt.Account == "" || opt.Region == "" { - var err error - opt.Account, opt.Region, err = getAccountIDAndRegion(ctx, opt.Region, opt.Endpoint) - if err != nil { - return err - } - } - - err := filterServices(ctx, opt) - if err != nil { - return err - } - - log.DebugContext(ctx, "Scanning services", log.Any("services", opt.Services)) - return nil -} - -func filterServices(ctx context.Context, opt *flag.Options) error { - switch { - case len(opt.Services) == 0 && len(opt.SkipServices) == 0: - log.DebugContext(ctx, "No service(s) specified, scanning all services...") - opt.Services = allSupportedServicesFunc() - case len(opt.SkipServices) > 0: - log.DebugContext(ctx, "Excluding services", log.Any("services", opt.SkipServices)) - for _, s := range allSupportedServicesFunc() { - if slices.Contains(opt.SkipServices, s) { - continue - } - if !slices.Contains(opt.Services, s) { - opt.Services = append(opt.Services, s) - } - } - case len(opt.Services) > 0: - log.DebugContext(ctx, "Specific services were requested...", - log.String("services", strings.Join(opt.Services, ", "))) - for _, service := range opt.Services { - var found bool - supported := allSupportedServicesFunc() - for _, allowed := range supported { - if allowed == service { - found = true - break - } - } - if !found { - return xerrors.Errorf("service '%s' is not currently supported - supported services are: %s", service, strings.Join(supported, ", ")) - } - } - } - return nil -} - -func Run(ctx context.Context, opt flag.Options) error { - ctx, cancel := context.WithTimeout(ctx, opt.GlobalOptions.Timeout) - defer cancel() - - ctx = log.WithContextPrefix(ctx, "aws") - - var err error - defer func() { - if errors.Is(err, context.DeadlineExceeded) { - log.Warn("Provide a higher timeout value, see https://aquasecurity.github.io/trivy/latest/docs/configuration/") - } - }() - - if err := processOptions(ctx, &opt); err != nil { - return err - } - - results, cached, err := scanner.NewScanner().Scan(ctx, opt) - if err != nil { - var aerr errs.AdapterError - if errors.As(err, &aerr) { - for _, e := range aerr.Errors() { - log.WarnContext(ctx, "Adapter error", log.Err(e)) - } - } else { - return xerrors.Errorf("aws scan error: %w", err) - } - } - - log.DebugContext(ctx, "Writing report to output...") - - sort.Slice(results, func(i, j int) bool { - return results[i].Rule().AVDID < results[j].Rule().AVDID - }) - - res := results.GetFailed() - if opt.MisconfOptions.IncludeNonFailures { - res = results - } - - r := report.New(cloud.ProviderAWS, opt.Account, opt.Region, res, opt.Services) - if err := report.Write(ctx, r, opt, cached); err != nil { - return xerrors.Errorf("unable to write results: %w", err) - } - - return operation.Exit(opt, r.Failed(), types.Metadata{}) -} diff --git a/pkg/cloud/aws/commands/run_test.go b/pkg/cloud/aws/commands/run_test.go deleted file mode 100644 index 325df5330bc5..000000000000 --- a/pkg/cloud/aws/commands/run_test.go +++ /dev/null @@ -1,1284 +0,0 @@ -package commands - -import ( - "bytes" - "context" - "os" - "path/filepath" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - dbTypes "github.com/aquasecurity/trivy-db/pkg/types" - "github.com/aquasecurity/trivy/pkg/clock" - "github.com/aquasecurity/trivy/pkg/compliance/spec" - 
"github.com/aquasecurity/trivy/pkg/flag" - iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" -) - -const expectedS3ScanResult = `{ - "CreatedAt": "2021-08-25T12:20:30.000000005Z", - "ArtifactName": "12345678", - "ArtifactType": "aws_account", - "Metadata": { - "ImageConfig": { - "architecture": "", - "created": "0001-01-01T00:00:00Z", - "os": "", - "rootfs": { - "type": "", - "diff_ids": null - }, - "config": {} - } - }, - "Results": [ - { - "Target": "arn:aws:s3:::examplebucket", - "Class": "config", - "Type": "cloud", - "MisconfSummary": { - "Successes": 1, - "Failures": 8, - "Exceptions": 0 - }, - "Misconfigurations": [ - { - "Type": "AWS", - "ID": "AVD-AWS-0086", - "AVDID": "AVD-AWS-0086", - "Title": "S3 Access block should block public ACL", - "Description": "S3 buckets should block public ACLs on buckets and any objects they contain. By blocking, PUTs with fail if the object has any public ACL a.", - "Message": "No public access block so not blocking public acls", - "Resolution": "Enable blocking any PUT calls with a public ACL specified", - "Severity": "HIGH", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-0086", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-0086" - ], - "Status": "FAIL", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:s3:::examplebucket", - "Provider": "aws", - "Service": "s3", - "Code": { - "Lines": null - } - } - }, - { - "Type": "AWS", - "ID": "AVD-AWS-0087", - "AVDID": "AVD-AWS-0087", - "Title": "S3 Access block should block public policy", - "Description": "S3 bucket policy should have block public policy to prevent users from putting a policy that enable public access.", - "Message": "No public access block so not blocking public policies", - "Resolution": "Prevent policies that allow public access being PUT", - "Severity": "HIGH", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-0087", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-0087" - ], - "Status": "FAIL", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:s3:::examplebucket", - "Provider": "aws", - "Service": "s3", - "Code": { - "Lines": null - } - } - }, - { - "Type": "AWS", - "ID": "AVD-AWS-0088", - "AVDID": "AVD-AWS-0088", - "Title": "Unencrypted S3 bucket.", - "Description": "S3 Buckets should be encrypted to protect the data that is stored within them if access is compromised.", - "Message": "Bucket does not have encryption enabled", - "Resolution": "Configure bucket encryption", - "Severity": "HIGH", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-0088", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-0088" - ], - "Status": "FAIL", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:s3:::examplebucket", - "Provider": "aws", - "Service": "s3", - "Code": { - "Lines": null - } - } - }, - { - "Type": "AWS", - "ID": "AVD-AWS-0090", - "AVDID": "AVD-AWS-0090", - "Title": "S3 Data should be versioned", - "Description": "Versioning in Amazon S3 is a means of keeping multiple variants of an object in the same bucket. \nYou can use the S3 Versioning feature to preserve, retrieve, and restore every version of every object stored in your buckets. 
\nWith versioning you can recover more easily from both unintended user actions and application failures.", - "Message": "Bucket does not have versioning enabled", - "Resolution": "Enable versioning to protect against accidental/malicious removal or modification", - "Severity": "MEDIUM", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-0090", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-0090" - ], - "Status": "FAIL", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:s3:::examplebucket", - "Provider": "aws", - "Service": "s3", - "Code": { - "Lines": null - } - } - }, - { - "Type": "AWS", - "ID": "AVD-AWS-0091", - "AVDID": "AVD-AWS-0091", - "Title": "S3 Access Block should Ignore Public Acl", - "Description": "S3 buckets should ignore public ACLs on buckets and any objects they contain. By ignoring rather than blocking, PUT calls with public ACLs will still be applied but the ACL will be ignored.", - "Message": "No public access block so not ignoring public acls", - "Resolution": "Enable ignoring the application of public ACLs in PUT calls", - "Severity": "HIGH", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-0091", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-0091" - ], - "Status": "FAIL", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:s3:::examplebucket", - "Provider": "aws", - "Service": "s3", - "Code": { - "Lines": null - } - } - }, - { - "Type": "AWS", - "ID": "AVD-AWS-0092", - "AVDID": "AVD-AWS-0092", - "Title": "S3 Buckets not publicly accessible through ACL.", - "Description": "Buckets should not have ACLs that allow public access", - "Resolution": "Don't use canned ACLs or switch to private acl", - "Severity": "HIGH", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-0092", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-0092" - ], - "Status": "PASS", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:s3:::examplebucket", - "Provider": "aws", - "Service": "s3", - "Code": { - "Lines": null - } - } - }, - { - "Type": "AWS", - "ID": "AVD-AWS-0093", - "AVDID": "AVD-AWS-0093", - "Title": "S3 Access block should restrict public bucket to limit access", - "Description": "S3 buckets should restrict public policies for the bucket. By enabling, the restrict_public_buckets, only the bucket owner and AWS Services can access if it has a public policy.", - "Message": "No public access block so not restricting public buckets", - "Resolution": "Limit the access to public buckets to only the owner or AWS Services (eg; CloudFront)", - "Severity": "HIGH", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-0093", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-0093" - ], - "Status": "FAIL", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:s3:::examplebucket", - "Provider": "aws", - "Service": "s3", - "Code": { - "Lines": null - } - } - }, - { - "Type": "AWS", - "ID": "AVD-AWS-0094", - "AVDID": "AVD-AWS-0094", - "Title": "S3 buckets should each define an aws_s3_bucket_public_access_block", - "Description": "The \"block public access\" settings in S3 override individual policies that apply to a given bucket, meaning that all public access can be controlled in one central types for that bucket. 
It is therefore good practice to define these settings for each bucket in order to clearly define the public access that can be allowed for it.", - "Message": "Bucket does not have a corresponding public access block.", - "Resolution": "Define a aws_s3_bucket_public_access_block for the given bucket to control public access policies", - "Severity": "LOW", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-0094", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-0094" - ], - "Status": "FAIL", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:s3:::examplebucket", - "Provider": "aws", - "Service": "s3", - "Code": { - "Lines": null - } - } - }, - { - "Type": "AWS", - "ID": "AVD-AWS-0132", - "AVDID": "AVD-AWS-0132", - "Title": "S3 encryption should use Customer Managed Keys", - "Description": "Encryption using AWS keys provides protection for your S3 buckets. To increase control of the encryption and manage factors like rotation use customer managed keys.", - "Message": "Bucket does not encrypt data with a customer managed key.", - "Resolution": "Enable encryption using customer managed keys", - "Severity": "HIGH", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-0132", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-0132" - ], - "Status": "FAIL", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:s3:::examplebucket", - "Provider": "aws", - "Service": "s3", - "Code": { - "Lines": null - } - } - } - ] - } - ] -} -` - -const expectedS3ScanResultWithExceptions = `{ - "CreatedAt": "2021-08-25T12:20:30.000000005Z", - "ArtifactName": "12345678", - "ArtifactType": "aws_account", - "Metadata": { - "ImageConfig": { - "architecture": "", - "created": "0001-01-01T00:00:00Z", - "os": "", - "rootfs": { - "type": "", - "diff_ids": null - }, - "config": {} - } - }, - "Results": [ - { - "Target": "arn:aws:s3:::examplebucket", - "Class": "config", - "Type": "cloud", - "MisconfSummary": { - "Successes": 0, - "Failures": 1, - "Exceptions": 8 - }, - "Misconfigurations": [ - { - "Type": "AWS", - "ID": "AVD-AWS-0094", - "AVDID": "AVD-AWS-0094", - "Title": "S3 buckets should each define an aws_s3_bucket_public_access_block", - "Description": "The \"block public access\" settings in S3 override individual policies that apply to a given bucket, meaning that all public access can be controlled in one central types for that bucket. 
It is therefore good practice to define these settings for each bucket in order to clearly define the public access that can be allowed for it.", - "Message": "Bucket does not have a corresponding public access block.", - "Resolution": "Define a aws_s3_bucket_public_access_block for the given bucket to control public access policies", - "Severity": "LOW", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-0094", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-0094" - ], - "Status": "FAIL", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:s3:::examplebucket", - "Provider": "aws", - "Service": "s3", - "Code": { - "Lines": null - } - } - } - ] - } - ] -} -` - -const expectedCustomScanResult = `{ - "CreatedAt": "2021-08-25T12:20:30.000000005Z", - "ArtifactName": "12345678", - "ArtifactType": "aws_account", - "Metadata": { - "ImageConfig": { - "architecture": "", - "created": "0001-01-01T00:00:00Z", - "os": "", - "rootfs": { - "type": "", - "diff_ids": null - }, - "config": {} - } - }, - "Results": [ - { - "Target": "", - "Class": "config", - "Type": "cloud", - "MisconfSummary": { - "Successes": 0, - "Failures": 1, - "Exceptions": 0 - }, - "Misconfigurations": [ - { - "Type": "AWS", - "Title": "Bad input data", - "Description": "Just failing rule with input data", - "Message": "Rego check resulted in DENY", - "Namespace": "user.whatever", - "Query": "deny", - "Severity": "LOW", - "References": [ - "" - ], - "Status": "FAIL", - "Layer": {}, - "CauseMetadata": { - "Provider": "cloud", - "Service": "s3", - "Code": { - "Lines": null - } - } - } - ] - }, - { - "Target": "arn:aws:s3:::examplebucket", - "Class": "config", - "Type": "cloud", - "MisconfSummary": { - "Successes": 1, - "Failures": 8, - "Exceptions": 0 - }, - "Misconfigurations": [ - { - "Type": "AWS", - "ID": "AVD-AWS-0086", - "AVDID": "AVD-AWS-0086", - "Title": "S3 Access block should block public ACL", - "Description": "S3 buckets should block public ACLs on buckets and any objects they contain. 
By blocking, PUTs with fail if the object has any public ACL a.", - "Message": "No public access block so not blocking public acls", - "Resolution": "Enable blocking any PUT calls with a public ACL specified", - "Severity": "HIGH", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-0086", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-0086" - ], - "Status": "FAIL", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:s3:::examplebucket", - "Provider": "aws", - "Service": "s3", - "Code": { - "Lines": null - } - } - }, - { - "Type": "AWS", - "ID": "AVD-AWS-0087", - "AVDID": "AVD-AWS-0087", - "Title": "S3 Access block should block public policy", - "Description": "S3 bucket policy should have block public policy to prevent users from putting a policy that enable public access.", - "Message": "No public access block so not blocking public policies", - "Resolution": "Prevent policies that allow public access being PUT", - "Severity": "HIGH", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-0087", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-0087" - ], - "Status": "FAIL", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:s3:::examplebucket", - "Provider": "aws", - "Service": "s3", - "Code": { - "Lines": null - } - } - }, - { - "Type": "AWS", - "ID": "AVD-AWS-0088", - "AVDID": "AVD-AWS-0088", - "Title": "Unencrypted S3 bucket.", - "Description": "S3 Buckets should be encrypted to protect the data that is stored within them if access is compromised.", - "Message": "Bucket does not have encryption enabled", - "Resolution": "Configure bucket encryption", - "Severity": "HIGH", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-0088", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-0088" - ], - "Status": "FAIL", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:s3:::examplebucket", - "Provider": "aws", - "Service": "s3", - "Code": { - "Lines": null - } - } - }, - { - "Type": "AWS", - "ID": "AVD-AWS-0090", - "AVDID": "AVD-AWS-0090", - "Title": "S3 Data should be versioned", - "Description": "Versioning in Amazon S3 is a means of keeping multiple variants of an object in the same bucket. \nYou can use the S3 Versioning feature to preserve, retrieve, and restore every version of every object stored in your buckets. \nWith versioning you can recover more easily from both unintended user actions and application failures.", - "Message": "Bucket does not have versioning enabled", - "Resolution": "Enable versioning to protect against accidental/malicious removal or modification", - "Severity": "MEDIUM", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-0090", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-0090" - ], - "Status": "FAIL", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:s3:::examplebucket", - "Provider": "aws", - "Service": "s3", - "Code": { - "Lines": null - } - } - }, - { - "Type": "AWS", - "ID": "AVD-AWS-0091", - "AVDID": "AVD-AWS-0091", - "Title": "S3 Access Block should Ignore Public Acl", - "Description": "S3 buckets should ignore public ACLs on buckets and any objects they contain. 
By ignoring rather than blocking, PUT calls with public ACLs will still be applied but the ACL will be ignored.", - "Message": "No public access block so not ignoring public acls", - "Resolution": "Enable ignoring the application of public ACLs in PUT calls", - "Severity": "HIGH", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-0091", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-0091" - ], - "Status": "FAIL", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:s3:::examplebucket", - "Provider": "aws", - "Service": "s3", - "Code": { - "Lines": null - } - } - }, - { - "Type": "AWS", - "ID": "AVD-AWS-0092", - "AVDID": "AVD-AWS-0092", - "Title": "S3 Buckets not publicly accessible through ACL.", - "Description": "Buckets should not have ACLs that allow public access", - "Resolution": "Don't use canned ACLs or switch to private acl", - "Severity": "HIGH", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-0092", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-0092" - ], - "Status": "PASS", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:s3:::examplebucket", - "Provider": "aws", - "Service": "s3", - "Code": { - "Lines": null - } - } - }, - { - "Type": "AWS", - "ID": "AVD-AWS-0093", - "AVDID": "AVD-AWS-0093", - "Title": "S3 Access block should restrict public bucket to limit access", - "Description": "S3 buckets should restrict public policies for the bucket. By enabling, the restrict_public_buckets, only the bucket owner and AWS Services can access if it has a public policy.", - "Message": "No public access block so not restricting public buckets", - "Resolution": "Limit the access to public buckets to only the owner or AWS Services (eg; CloudFront)", - "Severity": "HIGH", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-0093", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-0093" - ], - "Status": "FAIL", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:s3:::examplebucket", - "Provider": "aws", - "Service": "s3", - "Code": { - "Lines": null - } - } - }, - { - "Type": "AWS", - "ID": "AVD-AWS-0094", - "AVDID": "AVD-AWS-0094", - "Title": "S3 buckets should each define an aws_s3_bucket_public_access_block", - "Description": "The \"block public access\" settings in S3 override individual policies that apply to a given bucket, meaning that all public access can be controlled in one central types for that bucket. It is therefore good practice to define these settings for each bucket in order to clearly define the public access that can be allowed for it.", - "Message": "Bucket does not have a corresponding public access block.", - "Resolution": "Define a aws_s3_bucket_public_access_block for the given bucket to control public access policies", - "Severity": "LOW", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-0094", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-0094" - ], - "Status": "FAIL", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:s3:::examplebucket", - "Provider": "aws", - "Service": "s3", - "Code": { - "Lines": null - } - } - }, - { - "Type": "AWS", - "ID": "AVD-AWS-0132", - "AVDID": "AVD-AWS-0132", - "Title": "S3 encryption should use Customer Managed Keys", - "Description": "Encryption using AWS keys provides protection for your S3 buckets. 
To increase control of the encryption and manage factors like rotation use customer managed keys.", - "Message": "Bucket does not encrypt data with a customer managed key.", - "Resolution": "Enable encryption using customer managed keys", - "Severity": "HIGH", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-0132", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-0132" - ], - "Status": "FAIL", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:s3:::examplebucket", - "Provider": "aws", - "Service": "s3", - "Code": { - "Lines": null - } - } - } - ] - } - ] -} -` - -const expectedS3AndCloudTrailResult = `{ - "CreatedAt": "2021-08-25T12:20:30.000000005Z", - "ArtifactName": "123456789", - "ArtifactType": "aws_account", - "Metadata": { - "ImageConfig": { - "architecture": "", - "created": "0001-01-01T00:00:00Z", - "os": "", - "rootfs": { - "type": "", - "diff_ids": null - }, - "config": {} - } - }, - "Results": [ - { - "Target": "arn:aws:cloudtrail:us-east-1:12345678:trail/management-events", - "Class": "config", - "Type": "cloud", - "MisconfSummary": { - "Successes": 1, - "Failures": 3, - "Exceptions": 0 - }, - "Misconfigurations": [ - { - "Type": "AWS", - "ID": "AVD-AWS-0014", - "AVDID": "AVD-AWS-0014", - "Title": "Cloudtrail should be enabled in all regions regardless of where your AWS resources are generally homed", - "Description": "When creating Cloudtrail in the AWS Management Console the trail is configured by default to be multi-region, this isn't the case with the Terraform resource. Cloudtrail should cover the full AWS account to ensure you can track changes in regions you are not actively operting in.", - "Resolution": "Enable Cloudtrail in all regions", - "Severity": "MEDIUM", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-0014", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-0014" - ], - "Status": "PASS", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:cloudtrail:us-east-1:12345678:trail/management-events", - "Provider": "aws", - "Service": "cloudtrail", - "Code": { - "Lines": null - } - } - }, - { - "Type": "AWS", - "ID": "AVD-AWS-0015", - "AVDID": "AVD-AWS-0015", - "Title": "CloudTrail should use Customer managed keys to encrypt the logs", - "Description": "Using Customer managed keys provides comprehensive control over cryptographic keys, enabling management of policies, permissions, and rotation, thus enhancing security and compliance measures for sensitive data and systems.", - "Message": "CloudTrail does not use a customer managed key to encrypt the logs.", - "Resolution": "Use Customer managed key", - "Severity": "HIGH", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-0015", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-0015" - ], - "Status": "FAIL", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:cloudtrail:us-east-1:12345678:trail/management-events", - "Provider": "aws", - "Service": "cloudtrail", - "Code": { - "Lines": null - } - } - }, - { - "Type": "AWS", - "ID": "AVD-AWS-0016", - "AVDID": "AVD-AWS-0016", - "Title": "Cloudtrail log validation should be enabled to prevent tampering of log data", - "Description": "Log validation should be activated on Cloudtrail logs to prevent the tampering of the underlying data in the S3 bucket. 
It is feasible that a rogue actor compromising an AWS account might want to modify the log data to remove trace of their actions.", - "Message": "Trail does not have log validation enabled.", - "Resolution": "Turn on log validation for Cloudtrail", - "Severity": "HIGH", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-0016", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-0016" - ], - "Status": "FAIL", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:cloudtrail:us-east-1:12345678:trail/management-events", - "Provider": "aws", - "Service": "cloudtrail", - "Code": { - "Lines": null - } - } - }, - { - "Type": "AWS", - "ID": "AVD-AWS-0162", - "AVDID": "AVD-AWS-0162", - "Title": "CloudTrail logs should be stored in S3 and also sent to CloudWatch Logs", - "Description": "CloudTrail is a web service that records AWS API calls made in a given account. The recorded information includes the identity of the API caller, the time of the API call, the source IP address of the API caller, the request parameters, and the response elements returned by the AWS service.\n\nCloudTrail uses Amazon S3 for log file storage and delivery, so log files are stored durably. In addition to capturing CloudTrail logs in a specified Amazon S3 bucket for long-term analysis, you can perform real-time analysis by configuring CloudTrail to send logs to CloudWatch Logs.\n\nFor a trail that is enabled in all Regions in an account, CloudTrail sends log files from all those Regions to a CloudWatch Logs log group.", - "Message": "Trail does not have CloudWatch logging configured", - "Resolution": "Enable logging to CloudWatch", - "Severity": "LOW", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-0162", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-0162" - ], - "Status": "FAIL", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:cloudtrail:us-east-1:12345678:trail/management-events", - "Provider": "aws", - "Service": "cloudtrail", - "Code": { - "Lines": null - } - } - } - ] - }, - { - "Target": "arn:aws:s3:::examplebucket", - "Class": "config", - "Type": "cloud", - "MisconfSummary": { - "Successes": 1, - "Failures": 8, - "Exceptions": 0 - }, - "Misconfigurations": [ - { - "Type": "AWS", - "ID": "AVD-AWS-0086", - "AVDID": "AVD-AWS-0086", - "Title": "S3 Access block should block public ACL", - "Description": "S3 buckets should block public ACLs on buckets and any objects they contain. 
By blocking, PUTs with fail if the object has any public ACL a.", - "Message": "No public access block so not blocking public acls", - "Resolution": "Enable blocking any PUT calls with a public ACL specified", - "Severity": "HIGH", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-0086", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-0086" - ], - "Status": "FAIL", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:s3:::examplebucket", - "Provider": "aws", - "Service": "s3", - "Code": { - "Lines": null - } - } - }, - { - "Type": "AWS", - "ID": "AVD-AWS-0087", - "AVDID": "AVD-AWS-0087", - "Title": "S3 Access block should block public policy", - "Description": "S3 bucket policy should have block public policy to prevent users from putting a policy that enable public access.", - "Message": "No public access block so not blocking public policies", - "Resolution": "Prevent policies that allow public access being PUT", - "Severity": "HIGH", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-0087", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-0087" - ], - "Status": "FAIL", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:s3:::examplebucket", - "Provider": "aws", - "Service": "s3", - "Code": { - "Lines": null - } - } - }, - { - "Type": "AWS", - "ID": "AVD-AWS-0088", - "AVDID": "AVD-AWS-0088", - "Title": "Unencrypted S3 bucket.", - "Description": "S3 Buckets should be encrypted to protect the data that is stored within them if access is compromised.", - "Message": "Bucket does not have encryption enabled", - "Resolution": "Configure bucket encryption", - "Severity": "HIGH", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-0088", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-0088" - ], - "Status": "FAIL", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:s3:::examplebucket", - "Provider": "aws", - "Service": "s3", - "Code": { - "Lines": null - } - } - }, - { - "Type": "AWS", - "ID": "AVD-AWS-0090", - "AVDID": "AVD-AWS-0090", - "Title": "S3 Data should be versioned", - "Description": "Versioning in Amazon S3 is a means of keeping multiple variants of an object in the same bucket. \nYou can use the S3 Versioning feature to preserve, retrieve, and restore every version of every object stored in your buckets. \nWith versioning you can recover more easily from both unintended user actions and application failures.", - "Message": "Bucket does not have versioning enabled", - "Resolution": "Enable versioning to protect against accidental/malicious removal or modification", - "Severity": "MEDIUM", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-0090", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-0090" - ], - "Status": "FAIL", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:s3:::examplebucket", - "Provider": "aws", - "Service": "s3", - "Code": { - "Lines": null - } - } - }, - { - "Type": "AWS", - "ID": "AVD-AWS-0091", - "AVDID": "AVD-AWS-0091", - "Title": "S3 Access Block should Ignore Public Acl", - "Description": "S3 buckets should ignore public ACLs on buckets and any objects they contain. 
By ignoring rather than blocking, PUT calls with public ACLs will still be applied but the ACL will be ignored.", - "Message": "No public access block so not ignoring public acls", - "Resolution": "Enable ignoring the application of public ACLs in PUT calls", - "Severity": "HIGH", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-0091", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-0091" - ], - "Status": "FAIL", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:s3:::examplebucket", - "Provider": "aws", - "Service": "s3", - "Code": { - "Lines": null - } - } - }, - { - "Type": "AWS", - "ID": "AVD-AWS-0092", - "AVDID": "AVD-AWS-0092", - "Title": "S3 Buckets not publicly accessible through ACL.", - "Description": "Buckets should not have ACLs that allow public access", - "Resolution": "Don't use canned ACLs or switch to private acl", - "Severity": "HIGH", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-0092", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-0092" - ], - "Status": "PASS", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:s3:::examplebucket", - "Provider": "aws", - "Service": "s3", - "Code": { - "Lines": null - } - } - }, - { - "Type": "AWS", - "ID": "AVD-AWS-0093", - "AVDID": "AVD-AWS-0093", - "Title": "S3 Access block should restrict public bucket to limit access", - "Description": "S3 buckets should restrict public policies for the bucket. By enabling, the restrict_public_buckets, only the bucket owner and AWS Services can access if it has a public policy.", - "Message": "No public access block so not restricting public buckets", - "Resolution": "Limit the access to public buckets to only the owner or AWS Services (eg; CloudFront)", - "Severity": "HIGH", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-0093", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-0093" - ], - "Status": "FAIL", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:s3:::examplebucket", - "Provider": "aws", - "Service": "s3", - "Code": { - "Lines": null - } - } - }, - { - "Type": "AWS", - "ID": "AVD-AWS-0094", - "AVDID": "AVD-AWS-0094", - "Title": "S3 buckets should each define an aws_s3_bucket_public_access_block", - "Description": "The \"block public access\" settings in S3 override individual policies that apply to a given bucket, meaning that all public access can be controlled in one central types for that bucket. It is therefore good practice to define these settings for each bucket in order to clearly define the public access that can be allowed for it.", - "Message": "Bucket does not have a corresponding public access block.", - "Resolution": "Define a aws_s3_bucket_public_access_block for the given bucket to control public access policies", - "Severity": "LOW", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-0094", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-0094" - ], - "Status": "FAIL", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:s3:::examplebucket", - "Provider": "aws", - "Service": "s3", - "Code": { - "Lines": null - } - } - }, - { - "Type": "AWS", - "ID": "AVD-AWS-0132", - "AVDID": "AVD-AWS-0132", - "Title": "S3 encryption should use Customer Managed Keys", - "Description": "Encryption using AWS keys provides protection for your S3 buckets. 
To increase control of the encryption and manage factors like rotation use customer managed keys.", - "Message": "Bucket does not encrypt data with a customer managed key.", - "Resolution": "Enable encryption using customer managed keys", - "Severity": "HIGH", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-0132", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-0132" - ], - "Status": "FAIL", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:s3:::examplebucket", - "Provider": "aws", - "Service": "s3", - "Code": { - "Lines": null - } - } - } - ] - } - ] -} -` - -func Test_Run(t *testing.T) { - regoDir := t.TempDir() - - tests := []struct { - name string - options flag.Options - want string - expectErr bool - cacheContent string - regoPolicy string - allServices []string - inputData string - ignoreFile string - }{ - { - name: "succeed with cached infra", - options: flag.Options{ - RegoOptions: flag.RegoOptions{SkipCheckUpdate: true}, - AWSOptions: flag.AWSOptions{ - Region: "us-east-1", - Services: []string{"s3"}, - Account: "12345678", - }, - CloudOptions: flag.CloudOptions{ - MaxCacheAge: time.Hour * 24 * 365 * 100, - }, - MisconfOptions: flag.MisconfOptions{IncludeNonFailures: true}, - }, - cacheContent: "testdata/s3onlycache.json", - allServices: []string{"s3"}, - want: expectedS3ScanResult, - }, - { - name: "custom rego rule with passed results", - options: flag.Options{ - AWSOptions: flag.AWSOptions{ - Region: "us-east-1", - Services: []string{"s3"}, - Account: "12345678", - }, - CloudOptions: flag.CloudOptions{ - MaxCacheAge: time.Hour * 24 * 365 * 100, - }, - RegoOptions: flag.RegoOptions{ - Trace: true, - CheckPaths: []string{ - filepath.Join(regoDir, "policies"), - }, - CheckNamespaces: []string{ - "user", - }, - DataPaths: []string{ - filepath.Join(regoDir, "data"), - }, - SkipCheckUpdate: true, - }, - MisconfOptions: flag.MisconfOptions{IncludeNonFailures: true}, - }, - regoPolicy: `# METADATA -# title: Bad input data -# description: Just failing rule with input data -# scope: package -# schemas: -# - input: schema["input"] -# custom: -# severity: LOW -# service: s3 -# input: -# selector: -# - type: cloud -package user.whatever -import data.settings.DS123.foo - -deny { - foo == true -} -`, - inputData: `{ - "settings": { - "DS123": { - "foo": true - } - } -}`, - cacheContent: filepath.Join("testdata", "s3onlycache.json"), - allServices: []string{"s3"}, - want: expectedCustomScanResult, - }, - { - name: "compliance report summary", - options: flag.Options{ - AWSOptions: flag.AWSOptions{ - Region: "us-east-1", - Services: []string{"s3"}, - Account: "12345678", - }, - CloudOptions: flag.CloudOptions{ - MaxCacheAge: time.Hour * 24 * 365 * 100, - }, - ReportOptions: flag.ReportOptions{ - Compliance: spec.ComplianceSpec{ - Spec: iacTypes.Spec{ - ID: "@testdata/example-spec.yaml", - Title: "my-custom-spec", - Description: "My fancy spec", - Version: "1.2", - Controls: []iacTypes.Control{ - { - ID: "1.1", - Name: "Unencrypted S3 bucket", - Description: "S3 Buckets should be encrypted to protect the data that is stored within them if access is compromised.", - Checks: []iacTypes.SpecCheck{ - {ID: "AVD-AWS-0088"}, - }, - Severity: "HIGH", - }, - }, - }, - }, - Format: "table", - ReportFormat: "summary", - }, - RegoOptions: flag.RegoOptions{SkipCheckUpdate: true}, - }, - cacheContent: "testdata/s3onlycache.json", - allServices: []string{"s3"}, - want: ` -Summary Report for compliance: my-custom-spec 
-┌─────┬──────────┬───────────────────────┬────────┬────────┐ -│ ID │ Severity │ Control Name │ Status │ Issues │ -├─────┼──────────┼───────────────────────┼────────┼────────┤ -│ 1.1 │ HIGH │ Unencrypted S3 bucket │ FAIL │ 1 │ -└─────┴──────────┴───────────────────────┴────────┴────────┘ -`, - }, - { - name: "scan an unsupported service", - options: flag.Options{ - RegoOptions: flag.RegoOptions{SkipCheckUpdate: true}, - AWSOptions: flag.AWSOptions{ - Region: "us-east-1", - Account: "123456789", - Services: []string{"theultimateservice"}, - }, - CloudOptions: flag.CloudOptions{ - MaxCacheAge: time.Hour * 24 * 365 * 100, - }, - MisconfOptions: flag.MisconfOptions{IncludeNonFailures: true}, - }, - cacheContent: "testdata/s3onlycache.json", - expectErr: true, - }, - { - name: "scan every service", - options: flag.Options{ - RegoOptions: flag.RegoOptions{SkipCheckUpdate: true}, - AWSOptions: flag.AWSOptions{ - Region: "us-east-1", - Account: "123456789", - }, - CloudOptions: flag.CloudOptions{ - MaxCacheAge: time.Hour * 24 * 365 * 100, - }, - MisconfOptions: flag.MisconfOptions{IncludeNonFailures: true}, - }, - cacheContent: "testdata/s3andcloudtrailcache.json", - allServices: []string{ - "s3", - "cloudtrail", - }, - want: expectedS3AndCloudTrailResult, - }, - { - name: "skip certain services and include specific services", - options: flag.Options{ - RegoOptions: flag.RegoOptions{SkipCheckUpdate: true}, - AWSOptions: flag.AWSOptions{ - Region: "us-east-1", - Services: []string{"s3"}, - SkipServices: []string{"cloudtrail"}, - Account: "123456789", - }, - CloudOptions: flag.CloudOptions{ - MaxCacheAge: time.Hour * 24 * 365 * 100, - }, - MisconfOptions: flag.MisconfOptions{IncludeNonFailures: true}, - }, - cacheContent: "testdata/s3andcloudtrailcache.json", - allServices: []string{ - "s3", - "cloudtrail", - }, - // we skip cloudtrail but still expect results from it as it is cached - want: expectedS3AndCloudTrailResult, - }, - { - name: "only skip certain services but scan the rest", - options: flag.Options{ - RegoOptions: flag.RegoOptions{SkipCheckUpdate: true}, - AWSOptions: flag.AWSOptions{ - Region: "us-east-1", - SkipServices: []string{ - "cloudtrail", - "iam", - }, - Account: "12345678", - }, - CloudOptions: flag.CloudOptions{ - MaxCacheAge: time.Hour * 24 * 365 * 100, - }, - MisconfOptions: flag.MisconfOptions{IncludeNonFailures: true}, - }, - allServices: []string{ - "s3", - "cloudtrail", - "iam", - }, - cacheContent: "testdata/s3onlycache.json", - want: expectedS3ScanResult, - }, - { - name: "fail - service specified to both include and exclude", - options: flag.Options{ - RegoOptions: flag.RegoOptions{SkipCheckUpdate: true}, - AWSOptions: flag.AWSOptions{ - Region: "us-east-1", - Services: []string{"s3"}, - SkipServices: []string{"s3"}, - Account: "123456789", - }, - CloudOptions: flag.CloudOptions{ - MaxCacheAge: time.Hour * 24 * 365 * 100, - }, - MisconfOptions: flag.MisconfOptions{IncludeNonFailures: true}, - }, - cacheContent: "testdata/s3andcloudtrailcache.json", - expectErr: true, - }, - { - name: "ignore findings with .trivyignore", - options: flag.Options{ - RegoOptions: flag.RegoOptions{SkipCheckUpdate: true}, - AWSOptions: flag.AWSOptions{ - Region: "us-east-1", - Services: []string{"s3"}, - Account: "12345678", - }, - CloudOptions: flag.CloudOptions{ - MaxCacheAge: time.Hour * 24 * 365 * 100, - }, - MisconfOptions: flag.MisconfOptions{IncludeNonFailures: true}, - }, - cacheContent: "testdata/s3onlycache.json", - allServices: []string{"s3"}, - ignoreFile: 
"testdata/.trivyignore", - want: expectedS3ScanResultWithExceptions, - }, - } - - ctx := clock.With(context.Background(), time.Date(2021, 8, 25, 12, 20, 30, 5, time.UTC)) - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - if test.allServices != nil { - oldAllSupportedServicesFunc := allSupportedServicesFunc - allSupportedServicesFunc = func() []string { - return test.allServices - } - defer func() { - allSupportedServicesFunc = oldAllSupportedServicesFunc - }() - } - - output := bytes.NewBuffer(nil) - test.options.SetOutputWriter(output) - test.options.Debug = true - test.options.GlobalOptions.Timeout = time.Minute - if test.options.Format == "" { - test.options.Format = "json" - } - test.options.Severities = []dbTypes.Severity{ - dbTypes.SeverityUnknown, - dbTypes.SeverityLow, - dbTypes.SeverityMedium, - dbTypes.SeverityHigh, - dbTypes.SeverityCritical, - } - - if test.regoPolicy != "" { - require.NoError(t, os.MkdirAll(filepath.Join(regoDir, "policies"), 0755)) - require.NoError(t, os.WriteFile(filepath.Join(regoDir, "policies", "user.rego"), []byte(test.regoPolicy), 0644)) - } - - if test.inputData != "" { - require.NoError(t, os.MkdirAll(filepath.Join(regoDir, "data"), 0755)) - require.NoError(t, os.WriteFile(filepath.Join(regoDir, "data", "data.json"), []byte(test.inputData), 0644)) - } - - if test.cacheContent != "" { - cacheRoot := t.TempDir() - test.options.CacheDir = cacheRoot - cacheFile := filepath.Join(cacheRoot, "cloud", "aws", test.options.Account, test.options.Region, "data.json") - require.NoError(t, os.MkdirAll(filepath.Dir(cacheFile), 0700)) - - cacheData, err := os.ReadFile(test.cacheContent) - require.NoError(t, err, test.name) - - require.NoError(t, os.WriteFile(cacheFile, cacheData, 0600)) - } - - if test.ignoreFile != "" { - test.options.ReportOptions.IgnoreFile = test.ignoreFile - } - - err := Run(ctx, test.options) - if test.expectErr { - require.Error(t, err) - return - } - require.NoError(t, err) - assert.Equal(t, test.want, output.String()) - }) - } -} diff --git a/pkg/cloud/aws/commands/testdata/.trivyignore b/pkg/cloud/aws/commands/testdata/.trivyignore deleted file mode 100644 index 44ef395ee173..000000000000 --- a/pkg/cloud/aws/commands/testdata/.trivyignore +++ /dev/null @@ -1,8 +0,0 @@ -AVD-AWS-0086 -AVD-AWS-0087 -AVD-AWS-0088 -AVD-AWS-0090 -AVD-AWS-0132 -AVD-AWS-0091 -AVD-AWS-0092 -AVD-AWS-0093 \ No newline at end of file diff --git a/pkg/cloud/aws/commands/testdata/example-spec.yaml b/pkg/cloud/aws/commands/testdata/example-spec.yaml deleted file mode 100644 index 19fbf0a3bf31..000000000000 --- a/pkg/cloud/aws/commands/testdata/example-spec.yaml +++ /dev/null @@ -1,13 +0,0 @@ -spec: - id: "0001" - title: my-custom-spec - description: My fancy spec - version: "1.2" - controls: - - id: "1.1" - name: Unencrypted S3 bucket - description: |- - S3 Buckets should be encrypted to protect the data that is stored within them if access is compromised. 
- checks: - - id: AVD-AWS-0088 - severity: HIGH \ No newline at end of file diff --git a/pkg/cloud/aws/commands/testdata/s3andcloudtrailcache.json b/pkg/cloud/aws/commands/testdata/s3andcloudtrailcache.json deleted file mode 100644 index f9cfd2abcec2..000000000000 --- a/pkg/cloud/aws/commands/testdata/s3andcloudtrailcache.json +++ /dev/null @@ -1,420 +0,0 @@ -{ - "schema_version": 2, - "state": { - "AWS": { - "S3": { - "Buckets": [{ - "Metadata": { - "default": false, - "explicit": false, - "managed": true, - "parent": null, - "range": { - "endLine": 0, - "filename": "arn:aws:s3:::examplebucket", - "fsKey": "", - "isLogicalSource": false, - "sourcePrefix": "remote", - "startLine": 0 - }, - "ref": "arn:aws:s3:::examplebucket", - "unresolvable": false - }, - "Name": { - "metadata": { - "default": false, - "explicit": false, - "managed": true, - "parent": null, - "range": { - "endLine": 0, - "filename": "arn:aws:s3:::examplebucket", - "fsKey": "", - "isLogicalSource": false, - "sourcePrefix": "remote", - "startLine": 0 - }, - "ref": "arn:aws:s3:::examplebucket", - "unresolvable": false - }, - "value": "examplebucket" - }, - "PublicAccessBlock": null, - "BucketPolicies": null, - "Encryption": { - "Metadata": { - "default": false, - "explicit": false, - "managed": true, - "parent": null, - "range": { - "endLine": 0, - "filename": "arn:aws:s3:::examplebucket", - "fsKey": "", - "isLogicalSource": false, - "sourcePrefix": "remote", - "startLine": 0 - }, - "ref": "arn:aws:s3:::examplebucket", - "unresolvable": false - }, - "Enabled": { - "metadata": { - "default": true, - "explicit": false, - "managed": true, - "parent": null, - "range": { - "endLine": 0, - "filename": "arn:aws:s3:::examplebucket", - "fsKey": "", - "isLogicalSource": false, - "sourcePrefix": "remote", - "startLine": 0 - }, - "ref": "arn:aws:s3:::examplebucket", - "unresolvable": false - }, - "value": false - }, - "Algorithm": { - "metadata": { - "default": true, - "explicit": false, - "managed": true, - "parent": null, - "range": { - "endLine": 0, - "filename": "arn:aws:s3:::examplebucket", - "fsKey": "", - "isLogicalSource": false, - "sourcePrefix": "remote", - "startLine": 0 - }, - "ref": "arn:aws:s3:::examplebucket", - "unresolvable": false - }, - "value": "" - }, - "KMSKeyId": { - "metadata": { - "default": true, - "explicit": false, - "managed": true, - "parent": null, - "range": { - "endLine": 0, - "filename": "arn:aws:s3:::examplebucket", - "fsKey": "", - "isLogicalSource": false, - "sourcePrefix": "remote", - "startLine": 0 - }, - "ref": "arn:aws:s3:::examplebucket", - "unresolvable": false - }, - "value": "" - } - }, - "Versioning": { - "Metadata": { - "default": false, - "explicit": false, - "managed": true, - "parent": null, - "range": { - "endLine": 0, - "filename": "arn:aws:s3:::examplebucket", - "fsKey": "", - "isLogicalSource": false, - "sourcePrefix": "remote", - "startLine": 0 - }, - "ref": "arn:aws:s3:::examplebucket", - "unresolvable": false - }, - "Enabled": { - "metadata": { - "default": true, - "explicit": false, - "managed": true, - "parent": null, - "range": { - "endLine": 0, - "filename": "arn:aws:s3:::examplebucket", - "fsKey": "", - "isLogicalSource": false, - "sourcePrefix": "remote", - "startLine": 0 - }, - "ref": "arn:aws:s3:::examplebucket", - "unresolvable": false - }, - "value": false - }, - "MFADelete": { - "metadata": { - "default": false, - "explicit": false, - "managed": true, - "parent": null, - "range": { - "endLine": 0, - "filename": "arn:aws:s3:::examplebucket", - "fsKey": "", - 
"isLogicalSource": false, - "sourcePrefix": "remote", - "startLine": 0 - }, - "ref": "arn:aws:s3:::examplebucket", - "unresolvable": false - }, - "value": false - } - }, - "Logging": { - "Metadata": { - "default": false, - "explicit": false, - "managed": true, - "parent": null, - "range": { - "endLine": 0, - "filename": "arn:aws:s3:::examplebucket", - "fsKey": "", - "isLogicalSource": false, - "sourcePrefix": "remote", - "startLine": 0 - }, - "ref": "arn:aws:s3:::examplebucket", - "unresolvable": false - }, - "Enabled": { - "metadata": { - "default": true, - "explicit": false, - "managed": true, - "parent": null, - "range": { - "endLine": 0, - "filename": "arn:aws:s3:::examplebucket", - "fsKey": "", - "isLogicalSource": false, - "sourcePrefix": "remote", - "startLine": 0 - }, - "ref": "arn:aws:s3:::examplebucket", - "unresolvable": false - }, - "value": false - }, - "TargetBucket": { - "metadata": { - "default": true, - "explicit": false, - "managed": true, - "parent": null, - "range": { - "endLine": 0, - "filename": "arn:aws:s3:::examplebucket", - "fsKey": "", - "isLogicalSource": false, - "sourcePrefix": "remote", - "startLine": 0 - }, - "ref": "arn:aws:s3:::examplebucket", - "unresolvable": false - }, - "value": "" - } - }, - "ACL": { - "metadata": { - "default": false, - "explicit": false, - "managed": true, - "parent": null, - "range": { - "endLine": 0, - "filename": "arn:aws:s3:::examplebucket", - "fsKey": "", - "isLogicalSource": false, - "sourcePrefix": "remote", - "startLine": 0 - }, - "ref": "arn:aws:s3:::examplebucket", - "unresolvable": false - }, - "value": "private" - } - }] - }, - "CloudTrail": { - "Trails": [{ - "Metadata": { - "default": false, - "explicit": false, - "managed": true, - "parent": null, - "range": { - "endLine": 0, - "filename": "arn:aws:cloudtrail:us-east-1:12345678:trail/management-events", - "fsKey": "", - "isLogicalSource": false, - "sourcePrefix": "remote", - "startLine": 0 - }, - "ref": "arn:aws:cloudtrail:us-east-1:12345678:trail/management-events", - "unresolvable": false - }, - "Name": { - "metadata": { - "default": false, - "explicit": false, - "managed": true, - "parent": null, - "range": { - "endLine": 0, - "filename": "arn:aws:cloudtrail:us-east-1:12345678:trail/management-events", - "fsKey": "", - "isLogicalSource": false, - "sourcePrefix": "remote", - "startLine": 0 - }, - "ref": "arn:aws:cloudtrail:us-east-1:12345678:trail/management-events", - "unresolvable": false - }, - "value": "management-events" - }, - "EnableLogFileValidation": { - "metadata": { - "default": false, - "explicit": false, - "managed": true, - "parent": null, - "range": { - "endLine": 0, - "filename": "arn:aws:cloudtrail:us-east-1:12345678:trail/management-events", - "fsKey": "", - "isLogicalSource": false, - "sourcePrefix": "remote", - "startLine": 0 - }, - "ref": "arn:aws:cloudtrail:us-east-1:12345678:trail/management-events", - "unresolvable": false - }, - "value": false - }, - "IsMultiRegion": { - "metadata": { - "default": false, - "explicit": false, - "managed": true, - "parent": null, - "range": { - "endLine": 0, - "filename": "arn:aws:cloudtrail:us-east-1:12345678:trail/management-events", - "fsKey": "", - "isLogicalSource": false, - "sourcePrefix": "remote", - "startLine": 0 - }, - "ref": "arn:aws:cloudtrail:us-east-1:12345678:trail/management-events", - "unresolvable": false - }, - "value": true - }, - "KMSKeyID": { - "metadata": { - "default": false, - "explicit": false, - "managed": true, - "parent": null, - "range": { - "endLine": 0, - "filename": 
"arn:aws:cloudtrail:us-east-1:12345678:trail/management-events", - "fsKey": "", - "isLogicalSource": false, - "sourcePrefix": "remote", - "startLine": 0 - }, - "ref": "arn:aws:cloudtrail:us-east-1:12345678:trail/management-events", - "unresolvable": false - }, - "value": "" - }, - "CloudWatchLogsLogGroupArn": { - "metadata": { - "default": true, - "explicit": false, - "managed": true, - "parent": null, - "range": { - "endLine": 0, - "filename": "arn:aws:cloudtrail:us-east-1:12345678:trail/management-events", - "fsKey": "", - "isLogicalSource": false, - "sourcePrefix": "remote", - "startLine": 0 - }, - "ref": "arn:aws:cloudtrail:us-east-1:12345678:trail/management-events", - "unresolvable": false - }, - "value": "" - }, - "IsLogging": { - "metadata": { - "default": false, - "explicit": false, - "managed": true, - "parent": null, - "range": { - "endLine": 0, - "filename": "arn:aws:cloudtrail:us-east-1:12345678:trail/management-events", - "fsKey": "", - "isLogicalSource": false, - "sourcePrefix": "remote", - "startLine": 0 - }, - "ref": "arn:aws:cloudtrail:us-east-1:12345678:trail/management-events", - "unresolvable": false - }, - "value": true - }, - "BucketName": { - "metadata": { - "default": false, - "explicit": false, - "managed": true, - "parent": null, - "range": { - "endLine": 0, - "filename": "arn:aws:cloudtrail:us-east-1:12345678:trail/management-events", - "fsKey": "", - "isLogicalSource": false, - "sourcePrefix": "remote", - "startLine": 0 - }, - "ref": "arn:aws:cloudtrail:us-east-1:12345678:trail/management-events", - "unresolvable": false - }, - "value": "aws-cloudtrail-logs-12345678-d0a47f2f" - }, - "EventSelectors": null - }] - } - } - - }, - "service_metadata": { - "s3": { - "name": "s3", - "updated": "2022-10-04T14:08:36.659817426+01:00" - }, - "cloudtrail": { - "name": "cloudtrail", - "updated": "2022-10-04T14:08:36.659817426+01:00" - } - }, - "updated": "2022-10-04T14:08:36.659817426+01:00" -} diff --git a/pkg/cloud/aws/commands/testdata/s3onlycache.json b/pkg/cloud/aws/commands/testdata/s3onlycache.json deleted file mode 100644 index 43a015aa9ca9..000000000000 --- a/pkg/cloud/aws/commands/testdata/s3onlycache.json +++ /dev/null @@ -1,261 +0,0 @@ -{ - "schema_version": 2, - "state": { - "AWS": { - "S3": { - "Buckets": [{ - "Metadata": { - "default": false, - "explicit": false, - "managed": true, - "parent": null, - "range": { - "endLine": 0, - "filename": "arn:aws:s3:::examplebucket", - "fsKey": "", - "isLogicalSource": false, - "sourcePrefix": "remote", - "startLine": 0 - }, - "ref": "arn:aws:s3:::examplebucket", - "unresolvable": false - }, - "Name": { - "metadata": { - "default": false, - "explicit": false, - "managed": true, - "parent": null, - "range": { - "endLine": 0, - "filename": "arn:aws:s3:::examplebucket", - "fsKey": "", - "isLogicalSource": false, - "sourcePrefix": "remote", - "startLine": 0 - }, - "ref": "arn:aws:s3:::examplebucket", - "unresolvable": false - }, - "value": "examplebucket" - }, - "PublicAccessBlock": null, - "BucketPolicies": null, - "Encryption": { - "Metadata": { - "default": false, - "explicit": false, - "managed": true, - "parent": null, - "range": { - "endLine": 0, - "filename": "arn:aws:s3:::examplebucket", - "fsKey": "", - "isLogicalSource": false, - "sourcePrefix": "remote", - "startLine": 0 - }, - "ref": "arn:aws:s3:::examplebucket", - "unresolvable": false - }, - "Enabled": { - "metadata": { - "default": true, - "explicit": false, - "managed": true, - "parent": null, - "range": { - "endLine": 0, - "filename": 
"arn:aws:s3:::examplebucket", - "fsKey": "", - "isLogicalSource": false, - "sourcePrefix": "remote", - "startLine": 0 - }, - "ref": "arn:aws:s3:::examplebucket", - "unresolvable": false - }, - "value": false - }, - "Algorithm": { - "metadata": { - "default": true, - "explicit": false, - "managed": true, - "parent": null, - "range": { - "endLine": 0, - "filename": "arn:aws:s3:::examplebucket", - "fsKey": "", - "isLogicalSource": false, - "sourcePrefix": "remote", - "startLine": 0 - }, - "ref": "arn:aws:s3:::examplebucket", - "unresolvable": false - }, - "value": "" - }, - "KMSKeyId": { - "metadata": { - "default": true, - "explicit": false, - "managed": true, - "parent": null, - "range": { - "endLine": 0, - "filename": "arn:aws:s3:::examplebucket", - "fsKey": "", - "isLogicalSource": false, - "sourcePrefix": "remote", - "startLine": 0 - }, - "ref": "arn:aws:s3:::examplebucket", - "unresolvable": false - }, - "value": "" - } - }, - "Versioning": { - "Metadata": { - "default": false, - "explicit": false, - "managed": true, - "parent": null, - "range": { - "endLine": 0, - "filename": "arn:aws:s3:::examplebucket", - "fsKey": "", - "isLogicalSource": false, - "sourcePrefix": "remote", - "startLine": 0 - }, - "ref": "arn:aws:s3:::examplebucket", - "unresolvable": false - }, - "Enabled": { - "metadata": { - "default": true, - "explicit": false, - "managed": true, - "parent": null, - "range": { - "endLine": 0, - "filename": "arn:aws:s3:::examplebucket", - "fsKey": "", - "isLogicalSource": false, - "sourcePrefix": "remote", - "startLine": 0 - }, - "ref": "arn:aws:s3:::examplebucket", - "unresolvable": false - }, - "value": false - }, - "MFADelete": { - "metadata": { - "default": false, - "explicit": false, - "managed": true, - "parent": null, - "range": { - "endLine": 0, - "filename": "arn:aws:s3:::examplebucket", - "fsKey": "", - "isLogicalSource": false, - "sourcePrefix": "remote", - "startLine": 0 - }, - "ref": "arn:aws:s3:::examplebucket", - "unresolvable": false - }, - "value": false - } - }, - "Logging": { - "Metadata": { - "default": false, - "explicit": false, - "managed": true, - "parent": null, - "range": { - "endLine": 0, - "filename": "arn:aws:s3:::examplebucket", - "fsKey": "", - "isLogicalSource": false, - "sourcePrefix": "remote", - "startLine": 0 - }, - "ref": "arn:aws:s3:::examplebucket", - "unresolvable": false - }, - "Enabled": { - "metadata": { - "default": true, - "explicit": false, - "managed": true, - "parent": null, - "range": { - "endLine": 0, - "filename": "arn:aws:s3:::examplebucket", - "fsKey": "", - "isLogicalSource": false, - "sourcePrefix": "remote", - "startLine": 0 - }, - "ref": "arn:aws:s3:::examplebucket", - "unresolvable": false - }, - "value": false - }, - "TargetBucket": { - "metadata": { - "default": true, - "explicit": false, - "managed": true, - "parent": null, - "range": { - "endLine": 0, - "filename": "arn:aws:s3:::examplebucket", - "fsKey": "", - "isLogicalSource": false, - "sourcePrefix": "remote", - "startLine": 0 - }, - "ref": "arn:aws:s3:::examplebucket", - "unresolvable": false - }, - "value": "" - } - }, - "ACL": { - "metadata": { - "default": false, - "explicit": false, - "managed": true, - "parent": null, - "range": { - "endLine": 0, - "filename": "arn:aws:s3:::examplebucket", - "fsKey": "", - "isLogicalSource": false, - "sourcePrefix": "remote", - "startLine": 0 - }, - "ref": "arn:aws:s3:::examplebucket", - "unresolvable": false - }, - "value": "private" - } - }] - } - } - }, - "service_metadata": { - "s3": { - "name": "s3", - "updated": 
"2022-10-04T14:08:36.659817426+01:00" - } - }, - "updated": "2022-10-04T14:08:36.659817426+01:00" -} diff --git a/pkg/cloud/aws/config/config.go b/pkg/cloud/aws/config/config.go index e4ef5f2f70dc..e173840cdaec 100644 --- a/pkg/cloud/aws/config/config.go +++ b/pkg/cloud/aws/config/config.go @@ -9,14 +9,14 @@ import ( ) func EndpointResolver(endpoint string) aws.EndpointResolverWithOptionsFunc { - return aws.EndpointResolverWithOptionsFunc(func(_, reg string, options ...any) (aws.Endpoint, error) { + return func(_, reg string, options ...any) (aws.Endpoint, error) { return aws.Endpoint{ PartitionID: "aws", URL: endpoint, SigningRegion: reg, Source: aws.EndpointSourceCustom, }, nil - }) + } } func MakeAWSOptions(region, endpoint string) []func(*awsconfig.LoadOptions) error { diff --git a/pkg/cloud/aws/scanner/progress.go b/pkg/cloud/aws/scanner/progress.go deleted file mode 100644 index a313dd482c6c..000000000000 --- a/pkg/cloud/aws/scanner/progress.go +++ /dev/null @@ -1,83 +0,0 @@ -package scanner - -import ( - "fmt" - "io" - "os" - - "github.com/aquasecurity/loading/pkg/bar" -) - -type progressTracker struct { - serviceBar *bar.Bar - serviceTotal int - serviceCurrent int - isTTY bool - debugWriter io.Writer -} - -func newProgressTracker(w io.Writer) *progressTracker { - var isTTY bool - if stat, err := os.Stdout.Stat(); err == nil { - isTTY = stat.Mode()&os.ModeCharDevice == os.ModeCharDevice - } - return &progressTracker{ - isTTY: isTTY, - debugWriter: w, - } -} - -func (m *progressTracker) Finish() { - if !m.isTTY || m.serviceBar == nil { - return - } - m.serviceBar.Finish() -} - -func (m *progressTracker) IncrementResource() { - if !m.isTTY { - return - } - m.serviceBar.Increment() -} - -func (m *progressTracker) SetTotalResources(i int) { - if !m.isTTY { - return - } - m.serviceBar.SetTotal(i) -} - -func (m *progressTracker) SetTotalServices(i int) { - m.serviceTotal = i -} - -func (m *progressTracker) SetServiceLabel(label string) { - if !m.isTTY { - return - } - m.serviceBar.SetLabel("└╴" + label) - m.serviceBar.SetCurrent(0) -} - -func (m *progressTracker) FinishService() { - if !m.isTTY { - return - } - m.serviceCurrent++ - m.serviceBar.Finish() -} - -func (m *progressTracker) StartService(name string) { - if !m.isTTY { - return - } - - fmt.Fprintf(m.debugWriter, "[%d/%d] Scanning %s...\n", m.serviceCurrent+1, m.serviceTotal, name) - m.serviceBar = bar.New( - bar.OptionHideOnFinish(true), - bar.OptionWithAutoComplete(false), - bar.OptionWithRenderFunc(bar.RenderColoured(0xff, 0x66, 0x00)), - ) - m.SetServiceLabel("Initializing...") -} diff --git a/pkg/cloud/aws/scanner/scanner.go b/pkg/cloud/aws/scanner/scanner.go deleted file mode 100644 index 6ea5f919f24d..000000000000 --- a/pkg/cloud/aws/scanner/scanner.go +++ /dev/null @@ -1,176 +0,0 @@ -package scanner - -import ( - "context" - "fmt" - "io/fs" - - "golang.org/x/xerrors" - - aws "github.com/aquasecurity/trivy-aws/pkg/scanner" - "github.com/aquasecurity/trivy/pkg/cloud/aws/cache" - "github.com/aquasecurity/trivy/pkg/commands/operation" - "github.com/aquasecurity/trivy/pkg/flag" - "github.com/aquasecurity/trivy/pkg/iac/framework" - "github.com/aquasecurity/trivy/pkg/iac/scan" - "github.com/aquasecurity/trivy/pkg/iac/scanners/options" - "github.com/aquasecurity/trivy/pkg/iac/state" - "github.com/aquasecurity/trivy/pkg/log" - "github.com/aquasecurity/trivy/pkg/misconf" -) - -type AWSScanner struct { - logger *log.Logger -} - -func NewScanner() 
*AWSScanner { - return &AWSScanner{ - logger: log.WithPrefix("aws"), - } -} - -func (s *AWSScanner) Scan(ctx context.Context, option flag.Options) (scan.Results, bool, error) { - - awsCache := cache.New(option.CacheDir, option.MaxCacheAge, option.Account, option.Region) - included, missing := awsCache.ListServices(option.Services) - - prefixedLogger := log.NewWriteLogger(log.WithPrefix("aws")) - - var scannerOpts []options.ScannerOption - if !option.NoProgress { - tracker := newProgressTracker(prefixedLogger) - defer tracker.Finish() - scannerOpts = append(scannerOpts, aws.ScannerWithProgressTracker(tracker)) - } - - if len(missing) > 0 { - scannerOpts = append(scannerOpts, aws.ScannerWithAWSServices(missing...)) - } - - if option.Debug { - scannerOpts = append(scannerOpts, options.ScannerWithDebug(prefixedLogger)) - } - - if option.Trace { - scannerOpts = append(scannerOpts, options.ScannerWithTrace(prefixedLogger)) - } - - if option.Region != "" { - scannerOpts = append( - scannerOpts, - aws.ScannerWithAWSRegion(option.Region), - ) - } - - if option.Endpoint != "" { - scannerOpts = append( - scannerOpts, - aws.ScannerWithAWSEndpoint(option.Endpoint), - ) - } - - var policyPaths []string - var downloadedPolicyPaths []string - var err error - - downloadedPolicyPaths, err = operation.InitBuiltinPolicies(context.Background(), option.CacheDir, option.Quiet, option.SkipCheckUpdate, option.MisconfOptions.ChecksBundleRepository, option.RegistryOpts()) - if err != nil { - if !option.SkipCheckUpdate { - s.logger.Error("Falling back to embedded checks", log.Err(err)) - } - } else { - s.logger.Debug("Checks successfully loaded from disk") - policyPaths = append(policyPaths, downloadedPolicyPaths...) - scannerOpts = append(scannerOpts, - options.ScannerWithEmbeddedPolicies(false), - options.ScannerWithEmbeddedLibraries(false)) - } - - var policyFS fs.FS - policyFS, policyPaths, err = misconf.CreatePolicyFS(append(policyPaths, option.RegoOptions.CheckPaths...)) - if err != nil { - return nil, false, xerrors.Errorf("unable to create policyfs: %w", err) - } - - scannerOpts = append(scannerOpts, - options.ScannerWithPolicyFilesystem(policyFS), - options.ScannerWithPolicyDirs(policyPaths...), - ) - - dataFS, dataPaths, err := misconf.CreateDataFS(option.RegoOptions.DataPaths) - if err != nil { - s.logger.Error("Could not load config data", log.Err(err)) - } - scannerOpts = append(scannerOpts, - options.ScannerWithDataDirs(dataPaths...), - options.ScannerWithDataFilesystem(dataFS), - ) - - scannerOpts = addPolicyNamespaces(option.RegoOptions.CheckNamespaces, scannerOpts) - - if option.Compliance.Spec.ID != "" { - scannerOpts = append(scannerOpts, options.ScannerWithSpec(option.Compliance.Spec.ID)) - } else { - scannerOpts = append(scannerOpts, options.ScannerWithFrameworks( - framework.Default, - framework.CIS_AWS_1_2)) - } - - scanner := aws.New(scannerOpts...) 
- - var freshState *state.State - if len(missing) > 0 || option.CloudOptions.UpdateCache { - var err error - freshState, err = scanner.CreateState(ctx) - if err != nil { - return nil, false, err - } - } - - fullState, err := createState(freshState, awsCache) - if err != nil { - return nil, false, err - } - - if fullState == nil { - return nil, false, fmt.Errorf("no resultant state found") - } - - if err := awsCache.AddServices(fullState, missing); err != nil { - return nil, false, err - } - - defsecResults, err := scanner.Scan(ctx, fullState) - if err != nil { - return nil, false, err - } - - return defsecResults, len(included) > 0, nil -} - -func createState(freshState *state.State, awsCache *cache.Cache) (*state.State, error) { - var fullState *state.State - if previousState, err := awsCache.LoadState(); err == nil { - if freshState != nil { - fullState, err = previousState.Merge(freshState) - if err != nil { - return nil, err - } - } else { - fullState = previousState - } - } else { - fullState = freshState - } - return fullState, nil -} - -func addPolicyNamespaces(namespaces []string, scannerOpts []options.ScannerOption) []options.ScannerOption { - if len(namespaces) > 0 { - scannerOpts = append( - scannerOpts, - options.ScannerWithPolicyNamespaces(namespaces...), - ) - } - return scannerOpts -} diff --git a/pkg/cloud/provider.go b/pkg/cloud/provider.go deleted file mode 100644 index f495e15e2095..000000000000 --- a/pkg/cloud/provider.go +++ /dev/null @@ -1,5 +0,0 @@ -package cloud - -const ( - ProviderAWS = "AWS" -) diff --git a/pkg/cloud/report/convert.go b/pkg/cloud/report/convert.go deleted file mode 100644 index ac8517380cb7..000000000000 --- a/pkg/cloud/report/convert.go +++ /dev/null @@ -1,107 +0,0 @@ -package report - -import ( - "fmt" - "strings" - "time" - - "github.com/aws/aws-sdk-go-v2/aws/arn" - - ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" - "github.com/aquasecurity/trivy/pkg/iac/rego" - "github.com/aquasecurity/trivy/pkg/iac/scan" - "github.com/aquasecurity/trivy/pkg/types" -) - -func ConvertResults(results scan.Results, provider string, scoped []string) map[string]ResultsAtTime { - convertedResults := make(map[string]ResultsAtTime) - resultsByServiceAndARN := make(map[string]map[string]scan.Results) - for _, result := range results { - - service := result.Rule().Service - resource := result.Flatten().Resource - if service == "" || service == "general" { - if parsed, err := arn.Parse(resource); err == nil { - service = parsed.Service - } - } - - existingService, ok := resultsByServiceAndARN[service] - if !ok { - existingService = make(map[string]scan.Results) - } - - existingService[resource] = append(existingService[resource], result) - resultsByServiceAndARN[service] = existingService - } - // ensure we have entries for all scoped services, even if there are no results - for _, service := range scoped { - if _, ok := resultsByServiceAndARN[service]; !ok { - resultsByServiceAndARN[service] = nil - } - } - for service, arnResults := range resultsByServiceAndARN { - - var convertedArnResults []types.Result - - for arn, serviceResults := range arnResults { - - arnResult := types.Result{ - Target: arn, - Class: types.ClassConfig, - Type: ftypes.Cloud, - } - - for _, result := range serviceResults { - - var primaryURL string - - // empty namespace implies a go rule from defsec, "builtin" refers to a built-in rego rule - // this ensures we don't generate bad links for custom policies - if result.RegoNamespace() == "" || 
rego.IsBuiltinNamespace(result.RegoNamespace()) { - primaryURL = fmt.Sprintf("https://avd.aquasec.com/misconfig/%s", strings.ToLower(result.Rule().AVDID)) - } - - status := types.MisconfStatusFailure - switch result.Status() { - case scan.StatusPassed: - status = types.MisconfStatusPassed - case scan.StatusIgnored: - status = types.MisconfStatusException - } - - flat := result.Flatten() - - arnResult.Misconfigurations = append(arnResult.Misconfigurations, types.DetectedMisconfiguration{ - Type: provider, - ID: result.Rule().AVDID, - AVDID: result.Rule().AVDID, - Title: result.Rule().Summary, - Description: strings.TrimSpace(result.Rule().Explanation), - Message: strings.TrimSpace(result.Description()), - Namespace: result.RegoNamespace(), - Query: result.RegoRule(), - Resolution: result.Rule().Resolution, - Severity: string(result.Severity()), - PrimaryURL: primaryURL, - References: []string{primaryURL}, - Status: status, - CauseMetadata: ftypes.CauseMetadata{ - Resource: flat.Resource, - Provider: string(flat.RuleProvider), - Service: service, - StartLine: flat.Location.StartLine, - EndLine: flat.Location.EndLine, - }, - }) - } - - convertedArnResults = append(convertedArnResults, arnResult) - } - convertedResults[service] = ResultsAtTime{ - Results: convertedArnResults, - CreationTime: time.Now(), - } - } - return convertedResults -} diff --git a/pkg/cloud/report/convert_test.go b/pkg/cloud/report/convert_test.go deleted file mode 100644 index b8a0b728c53f..000000000000 --- a/pkg/cloud/report/convert_test.go +++ /dev/null @@ -1,242 +0,0 @@ -package report - -import ( - "sort" - "testing" - - "github.com/aws/aws-sdk-go-v2/aws/arn" - "github.com/stretchr/testify/assert" - - fanaltypes "github.com/aquasecurity/trivy/pkg/fanal/types" - "github.com/aquasecurity/trivy/pkg/iac/scan" - iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" - "github.com/aquasecurity/trivy/pkg/types" -) - -func Test_ResultConversion(t *testing.T) { - - tests := []struct { - name string - results scan.Results - provider string - scoped []string - expected map[string]ResultsAtTime - }{ - { - name: "no results", - results: scan.Results{}, - provider: "AWS", - expected: make(map[string]ResultsAtTime), - }, - { - name: "no results, multiple scoped services", - results: scan.Results{}, - provider: "AWS", - scoped: []string{"s3", "ec2"}, - expected: map[string]ResultsAtTime{ - "s3": {}, - "ec2": {}, - }, - }, - { - name: "multiple results", - results: func() scan.Results { - - baseRule := scan.Rule{ - AVDID: "AVD-AWS-9999", - Aliases: []string{"AWS999"}, - ShortCode: "no-bad-stuff", - Summary: "Do not use bad stuff", - Explanation: "Bad stuff is... 
bad", - Impact: "Bad things", - Resolution: "Remove bad stuff", - Provider: "AWS", - Severity: "HIGH", - } - - var s3Results scan.Results - s3Results.Add( - "something failed", - iacTypes.NewRemoteMetadata((arn.ARN{ - Partition: "aws", - Service: "s3", - Region: "us-east-1", - AccountID: "1234567890", - Resource: "bucket1", - }).String()), - ) - s3Results.Add( - "something else failed", - iacTypes.NewRemoteMetadata((arn.ARN{ - Partition: "aws", - Service: "s3", - Region: "us-east-1", - AccountID: "1234567890", - Resource: "bucket2", - }).String()), - ) - s3Results.Add( - "something else failed again", - iacTypes.NewRemoteMetadata((arn.ARN{ - Partition: "aws", - Service: "s3", - Region: "us-east-1", - AccountID: "1234567890", - Resource: "bucket2", - }).String()), - ) - baseRule.Service = "s3" - s3Results.SetRule(baseRule) - var ec2Results scan.Results - ec2Results.Add( - "instance is bad", - iacTypes.NewRemoteMetadata((arn.ARN{ - Partition: "aws", - Service: "ec2", - Region: "us-east-1", - AccountID: "1234567890", - Resource: "instance1", - }).String()), - ) - baseRule.Service = "ec2" - ec2Results.SetRule(baseRule) - return append(s3Results, ec2Results...) - }(), - provider: "AWS", - expected: map[string]ResultsAtTime{ - "s3": { - Results: types.Results{ - { - Target: "arn:aws:s3:us-east-1:1234567890:bucket1", - Class: "config", - Type: "cloud", - Misconfigurations: []types.DetectedMisconfiguration{ - { - Type: "AWS", - ID: "AVD-AWS-9999", - AVDID: "AVD-AWS-9999", - Title: "Do not use bad stuff", - Description: "Bad stuff is... bad", - Message: "something failed", - Resolution: "Remove bad stuff", - Severity: "HIGH", - PrimaryURL: "https://avd.aquasec.com/misconfig/avd-aws-9999", - References: []string{ - "https://avd.aquasec.com/misconfig/avd-aws-9999", - }, - Status: "FAIL", - CauseMetadata: fanaltypes.CauseMetadata{ - Resource: "arn:aws:s3:us-east-1:1234567890:bucket1", - Provider: "AWS", - Service: "s3", - StartLine: 0, - EndLine: 0, - Code: fanaltypes.Code{}, - }, - }, - }, - }, - { - Target: "arn:aws:s3:us-east-1:1234567890:bucket2", - Class: "config", - Type: "cloud", - Misconfigurations: []types.DetectedMisconfiguration{ - { - Type: "AWS", - ID: "AVD-AWS-9999", - AVDID: "AVD-AWS-9999", - Title: "Do not use bad stuff", - Description: "Bad stuff is... bad", - Message: "something else failed", - Resolution: "Remove bad stuff", - Severity: "HIGH", - PrimaryURL: "https://avd.aquasec.com/misconfig/avd-aws-9999", - References: []string{ - "https://avd.aquasec.com/misconfig/avd-aws-9999", - }, - Status: "FAIL", - CauseMetadata: fanaltypes.CauseMetadata{ - Resource: "arn:aws:s3:us-east-1:1234567890:bucket2", - Provider: "AWS", - Service: "s3", - }, - }, - { - Type: "AWS", - ID: "AVD-AWS-9999", - AVDID: "AVD-AWS-9999", - Title: "Do not use bad stuff", - Description: "Bad stuff is... 
bad", - Message: "something else failed again", - Resolution: "Remove bad stuff", - Severity: "HIGH", - PrimaryURL: "https://avd.aquasec.com/misconfig/avd-aws-9999", - References: []string{ - "https://avd.aquasec.com/misconfig/avd-aws-9999", - }, - Status: "FAIL", - CauseMetadata: fanaltypes.CauseMetadata{ - Resource: "arn:aws:s3:us-east-1:1234567890:bucket2", - Provider: "AWS", - Service: "s3", - }, - }, - }, - }, - }, - }, - "ec2": { - Results: types.Results{ - { - Target: "arn:aws:ec2:us-east-1:1234567890:instance1", - Class: "config", - Type: "cloud", - Misconfigurations: []types.DetectedMisconfiguration{ - { - Type: "AWS", - ID: "AVD-AWS-9999", - AVDID: "AVD-AWS-9999", - Title: "Do not use bad stuff", - Description: "Bad stuff is... bad", - Message: "instance is bad", - Resolution: "Remove bad stuff", - Severity: "HIGH", - PrimaryURL: "https://avd.aquasec.com/misconfig/avd-aws-9999", - References: []string{ - "https://avd.aquasec.com/misconfig/avd-aws-9999", - }, - Status: "FAIL", - CauseMetadata: fanaltypes.CauseMetadata{ - Resource: "arn:aws:ec2:us-east-1:1234567890:instance1", - Provider: "AWS", - Service: "ec2", - }, - }, - }, - }, - }, - }, - }, - }, - } - - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - converted := ConvertResults(test.results, test.provider, test.scoped) - assertConvertedResultsMatch(t, test.expected, converted) - }) - } - -} - -func assertConvertedResultsMatch(t *testing.T, expected, actual map[string]ResultsAtTime) { - assert.Equal(t, len(expected), len(actual)) - for service, resultsAtTime := range expected { - _, ok := actual[service] - assert.True(t, ok) - sort.Slice(actual[service].Results, func(i, j int) bool { - return actual[service].Results[i].Target < actual[service].Results[j].Target - }) - assert.ElementsMatch(t, resultsAtTime.Results, actual[service].Results) - } -} diff --git a/pkg/cloud/report/report.go b/pkg/cloud/report/report.go deleted file mode 100644 index 55d992fc505e..000000000000 --- a/pkg/cloud/report/report.go +++ /dev/null @@ -1,160 +0,0 @@ -package report - -import ( - "context" - "io" - "os" - "sort" - "time" - - "golang.org/x/xerrors" - - "github.com/aquasecurity/tml" - "github.com/aquasecurity/trivy/pkg/clock" - cr "github.com/aquasecurity/trivy/pkg/compliance/report" - "github.com/aquasecurity/trivy/pkg/fanal/artifact" - "github.com/aquasecurity/trivy/pkg/flag" - "github.com/aquasecurity/trivy/pkg/iac/scan" - pkgReport "github.com/aquasecurity/trivy/pkg/report" - "github.com/aquasecurity/trivy/pkg/result" - "github.com/aquasecurity/trivy/pkg/types" -) - -const ( - tableFormat = "table" -) - -// Report represents an AWS scan report -type Report struct { - Provider string - AccountID string - Region string - Results map[string]ResultsAtTime - ServicesInScope []string -} - -type ResultsAtTime struct { - Results types.Results - CreationTime time.Time -} - -func New(provider, accountID, region string, defsecResults scan.Results, scopedServices []string) *Report { - return &Report{ - Provider: provider, - AccountID: accountID, - Results: ConvertResults(defsecResults, provider, scopedServices), - ServicesInScope: scopedServices, - Region: region, - } -} - -// Failed returns whether the aws report includes any "failed" results -func (r *Report) Failed() bool { - for _, set := range r.Results { - if set.Results.Failed() { - return true - } - } - return false -} - -// Write writes the results in the give format -func Write(ctx context.Context, 
rep *Report, opt flag.Options, fromCache bool) error { - output, cleanup, err := opt.OutputWriter(ctx) - if err != nil { - return xerrors.Errorf("failed to create output file: %w", err) - } - defer cleanup() - - if opt.Compliance.Spec.ID != "" { - return writeCompliance(ctx, rep, opt, output) - } - - ignoreConf, err := result.ParseIgnoreFile(ctx, opt.IgnoreFile) - if err != nil { - return xerrors.Errorf("%s error: %w", opt.IgnoreFile, err) - } - - var filtered []types.Result - - // filter results - for _, resultsAtTime := range rep.Results { - for _, res := range resultsAtTime.Results { - resCopy := res - if err := result.FilterResult(ctx, &resCopy, ignoreConf, opt.FilterOpts()); err != nil { - return err - } - sort.Slice(resCopy.Misconfigurations, func(i, j int) bool { - return resCopy.Misconfigurations[i].CauseMetadata.Resource < resCopy.Misconfigurations[j].CauseMetadata.Resource - }) - filtered = append(filtered, resCopy) - } - } - sort.Slice(filtered, func(i, j int) bool { - return filtered[i].Target < filtered[j].Target - }) - - base := types.Report{ - CreatedAt: clock.Now(ctx), - ArtifactName: rep.AccountID, - ArtifactType: artifact.TypeAWSAccount, - Results: filtered, - } - - switch opt.Format { - case tableFormat: - - // ensure color/formatting is disabled for pipes/non-pty - var useANSI bool - if output == os.Stdout { - if o, err := os.Stdout.Stat(); err == nil { - useANSI = (o.Mode() & os.ModeCharDevice) == os.ModeCharDevice - } - } - if !useANSI { - tml.DisableFormatting() - } - - switch { - case len(opt.Services) == 1 && opt.ARN == "": - if err := writeResourceTable(rep, filtered, output, opt.Services[0]); err != nil { - return err - } - case len(opt.Services) == 1 && opt.ARN != "": - if err := writeResultsForARN(rep, filtered, output, opt.Services[0], opt.ARN, opt.Severities); err != nil { - return err - } - default: - if err := writeServiceTable(rep, filtered, output); err != nil { - return err - } - } - - // render cache info - if fromCache { - _ = tml.Fprintf(output, "\nThis scan report was loaded from cached results. 
If you'd like to run a fresh scan, use --update-cache.\n") - } - - return nil - default: - return pkgReport.Write(ctx, base, opt) - } -} - -func writeCompliance(ctx context.Context, rep *Report, opt flag.Options, output io.Writer) error { - var crr []types.Results - for _, r := range rep.Results { - crr = append(crr, r.Results) - } - - complianceReport, err := cr.BuildComplianceReport(crr, opt.Compliance) - if err != nil { - return xerrors.Errorf("compliance report build error: %w", err) - } - - return cr.Write(ctx, complianceReport, cr.Option{ - Format: opt.Format, - Report: opt.ReportFormat, - Output: output, - }) -} diff --git a/pkg/cloud/report/resource.go b/pkg/cloud/report/resource.go deleted file mode 100644 index 79b1b8cc2e94..000000000000 --- a/pkg/cloud/report/resource.go +++ /dev/null @@ -1,88 +0,0 @@ -package report - -import ( - "fmt" - "io" - "sort" - "strconv" - - "golang.org/x/term" - - "github.com/aquasecurity/table" - "github.com/aquasecurity/tml" - pkgReport "github.com/aquasecurity/trivy/pkg/report/table" - "github.com/aquasecurity/trivy/pkg/types" -) - -type sortableRow struct { - name string - counts map[string]int -} - -func writeResourceTable(report *Report, results types.Results, output io.Writer, service string) error { - - termWidth, _, err := term.GetSize(0) - if err != nil { - termWidth = 80 - } - maxWidth := termWidth - 48 - if maxWidth < 20 { - maxWidth = 20 - } - - t := table.New(output) - t.SetColumnMaxWidth(maxWidth) - t.SetHeaders("Resource", "Misconfigurations") - t.AddHeaders("Resource", "Critical", "High", "Medium", "Low", "Unknown") - t.SetHeaderVerticalAlignment(table.AlignBottom) - t.SetHeaderAlignment(table.AlignLeft, table.AlignCenter, table.AlignCenter, table.AlignCenter, table.AlignCenter, table.AlignCenter) - t.SetAlignment(table.AlignLeft, table.AlignRight, table.AlignRight, table.AlignRight, table.AlignRight, table.AlignRight) - t.SetRowLines(false) - t.SetAutoMergeHeaders(true) - t.SetHeaderColSpans(0, 1, 5) - - // map resource -> severity -> count - grouped := make(map[string]map[string]int) - for _, result := range results { - for _, misconfiguration := range result.Misconfigurations { - if misconfiguration.CauseMetadata.Service != service { - continue - } - if _, ok := grouped[misconfiguration.CauseMetadata.Resource]; !ok { - grouped[misconfiguration.CauseMetadata.Resource] = make(map[string]int) - } - grouped[misconfiguration.CauseMetadata.Resource][misconfiguration.Severity]++ - } - } - - var sortable []sortableRow - for resource, severityCounts := range grouped { - sortable = append(sortable, sortableRow{ - name: resource, - counts: severityCounts, - }) - } - sort.Slice(sortable, func(i, j int) bool { return sortable[i].name < sortable[j].name }) - for _, row := range sortable { - t.AddRow( - row.name, - pkgReport.ColorizeSeverity(strconv.Itoa(row.counts["CRITICAL"]), "CRITICAL"), - pkgReport.ColorizeSeverity(strconv.Itoa(row.counts["HIGH"]), "HIGH"), - pkgReport.ColorizeSeverity(strconv.Itoa(row.counts["MEDIUM"]), "MEDIUM"), - pkgReport.ColorizeSeverity(strconv.Itoa(row.counts["LOW"]), "LOW"), - pkgReport.ColorizeSeverity(strconv.Itoa(row.counts["UNKNOWN"]), "UNKNOWN"), - ) - } - - // render scan title - _ = tml.Fprintf(output, "\nResource Summary for Service '%s' (%s Account %s)\n", service, report.Provider, report.AccountID) - - // render table - if len(sortable) > 0 { - t.Render() - } else { - _, _ = fmt.Fprint(output, "\nNo problems detected.\n") - } - - return nil -} diff --git 
a/pkg/cloud/report/resource_test.go b/pkg/cloud/report/resource_test.go deleted file mode 100644 index 3f909b8d3b3f..000000000000 --- a/pkg/cloud/report/resource_test.go +++ /dev/null @@ -1,124 +0,0 @@ -package report - -import ( - "bytes" - "context" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - "github.com/aquasecurity/trivy-db/pkg/types" - "github.com/aquasecurity/trivy/pkg/flag" -) - -func Test_ResourceReport(t *testing.T) { - tests := []struct { - name string - options flag.Options - fromCache bool - expected string - }{ - { - name: "simple table output", - options: flag.Options{ - ReportOptions: flag.ReportOptions{ - Format: tableFormat, - Severities: []types.Severity{ - types.SeverityLow, - types.SeverityMedium, - types.SeverityHigh, - types.SeverityCritical, - }, - }, - AWSOptions: flag.AWSOptions{ - Services: []string{"s3"}, - }, - }, - fromCache: false, - expected: ` -Resource Summary for Service 's3' (AWS Account ) -┌─────────────────────────────────────────┬──────────────────────────────────────────┐ -│ │ Misconfigurations │ -│ ├──────────┬──────┬────────┬─────┬─────────┤ -│ Resource │ Critical │ High │ Medium │ Low │ Unknown │ -├─────────────────────────────────────────┼──────────┼──────┼────────┼─────┼─────────┤ -│ arn:aws:s3:us-east-1:1234567890:bucket1 │ 0 │ 1 │ 0 │ 0 │ 0 │ -│ arn:aws:s3:us-east-1:1234567890:bucket2 │ 0 │ 2 │ 0 │ 0 │ 0 │ -└─────────────────────────────────────────┴──────────┴──────┴────────┴─────┴─────────┘ -`, - }, - { - name: "results from cache", - options: flag.Options{ - ReportOptions: flag.ReportOptions{ - Format: tableFormat, - Severities: []types.Severity{ - types.SeverityLow, - types.SeverityMedium, - types.SeverityHigh, - types.SeverityCritical, - }, - }, - AWSOptions: flag.AWSOptions{ - Services: []string{"s3"}, - }, - }, - fromCache: true, - expected: ` -Resource Summary for Service 's3' (AWS Account ) -┌─────────────────────────────────────────┬──────────────────────────────────────────┐ -│ │ Misconfigurations │ -│ ├──────────┬──────┬────────┬─────┬─────────┤ -│ Resource │ Critical │ High │ Medium │ Low │ Unknown │ -├─────────────────────────────────────────┼──────────┼──────┼────────┼─────┼─────────┤ -│ arn:aws:s3:us-east-1:1234567890:bucket1 │ 0 │ 1 │ 0 │ 0 │ 0 │ -│ arn:aws:s3:us-east-1:1234567890:bucket2 │ 0 │ 2 │ 0 │ 0 │ 0 │ -└─────────────────────────────────────────┴──────────┴──────┴────────┴─────┴─────────┘ - -This scan report was loaded from cached results. If you'd like to run a fresh scan, use --update-cache. -`, - }, - { - name: "no problems", - options: flag.Options{ - ReportOptions: flag.ReportOptions{ - Format: tableFormat, - Severities: []types.Severity{ - types.SeverityLow, - }, - }, - AWSOptions: flag.AWSOptions{ - Services: []string{"s3"}, - }, - }, - fromCache: false, - expected: ` -Resource Summary for Service 's3' (AWS Account ) - -No problems detected. 
-`, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - report := New( - "AWS", - tt.options.AWSOptions.Account, - tt.options.AWSOptions.Region, - createTestResults(), - tt.options.AWSOptions.Services, - ) - - output := bytes.NewBuffer(nil) - tt.options.SetOutputWriter(output) - require.NoError(t, Write(context.Background(), report, tt.options, tt.fromCache)) - - assert.Equal(t, "AWS", report.Provider) - assert.Equal(t, tt.options.AWSOptions.Account, report.AccountID) - assert.Equal(t, tt.options.AWSOptions.Region, report.Region) - assert.ElementsMatch(t, tt.options.AWSOptions.Services, report.ServicesInScope) - assert.Equal(t, tt.expected, output.String()) - }) - } -} diff --git a/pkg/cloud/report/result.go b/pkg/cloud/report/result.go deleted file mode 100644 index 103be8a40afc..000000000000 --- a/pkg/cloud/report/result.go +++ /dev/null @@ -1,35 +0,0 @@ -package report - -import ( - "fmt" - "io" - - "github.com/aquasecurity/tml" - dbTypes "github.com/aquasecurity/trivy-db/pkg/types" - renderer "github.com/aquasecurity/trivy/pkg/report/table" - "github.com/aquasecurity/trivy/pkg/types" -) - -func writeResultsForARN(report *Report, results types.Results, output io.Writer, service, arn string, severities []dbTypes.Severity) error { - - // render scan title - _ = tml.Fprintf(output, "\nResults for '%s' (%s Account %s)\n\n", arn, report.Provider, report.AccountID) - - for _, result := range results { - var filtered []types.DetectedMisconfiguration - for _, misconfiguration := range result.Misconfigurations { - if arn != "" && misconfiguration.CauseMetadata.Resource != arn { - continue - } - if service != "" && misconfiguration.CauseMetadata.Service != service { - continue - } - filtered = append(filtered, misconfiguration) - } - if len(filtered) > 0 { - _, _ = fmt.Fprint(output, renderer.NewMisconfigRenderer(result, severities, false, false, true).Render()) - } - } - - return nil -} diff --git a/pkg/cloud/report/result_test.go b/pkg/cloud/report/result_test.go deleted file mode 100644 index 6afc67305c4f..000000000000 --- a/pkg/cloud/report/result_test.go +++ /dev/null @@ -1,83 +0,0 @@ -package report - -import ( - "bytes" - "context" - "strings" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - "github.com/aquasecurity/trivy-db/pkg/types" - "github.com/aquasecurity/trivy/pkg/flag" -) - -func Test_ARNReport(t *testing.T) { - tests := []struct { - name string - options flag.Options - fromCache bool - expected string - }{ - { - name: "simple output", - options: flag.Options{ - ReportOptions: flag.ReportOptions{ - Format: tableFormat, - Severities: []types.Severity{ - types.SeverityLow, - types.SeverityMedium, - types.SeverityHigh, - types.SeverityCritical, - }, - }, - AWSOptions: flag.AWSOptions{ - Services: []string{"s3"}, - ARN: "arn:aws:s3:us-east-1:1234567890:bucket1", - Account: "1234567890", - }, - }, - fromCache: false, - expected: ` -Results for 'arn:aws:s3:us-east-1:1234567890:bucket1' (AWS Account 1234567890) - - -arn:aws:s3:us-east-1:1234567890:bucket1 (cloud) - -Tests: 1 (SUCCESSES: 0, FAILURES: 1, EXCEPTIONS: 0) -Failures: 1 (LOW: 0, MEDIUM: 0, HIGH: 1, CRITICAL: 0) - -HIGH: something failed -════════════════════════════════════════ -Bad stuff is... 
bad - -See https://avd.aquasec.com/misconfig/avd-aws-9999 -──────────────────────────────────────── - - -`, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - report := New( - "AWS", - tt.options.AWSOptions.Account, - tt.options.AWSOptions.Region, - createTestResults(), - tt.options.AWSOptions.Services, - ) - - output := bytes.NewBuffer(nil) - tt.options.SetOutputWriter(output) - require.NoError(t, Write(context.Background(), report, tt.options, tt.fromCache)) - - assert.Equal(t, "AWS", report.Provider) - assert.Equal(t, tt.options.AWSOptions.Account, report.AccountID) - assert.Equal(t, tt.options.AWSOptions.Region, report.Region) - assert.ElementsMatch(t, tt.options.AWSOptions.Services, report.ServicesInScope) - assert.Equal(t, tt.expected, strings.ReplaceAll(output.String(), "\r\n", "\n")) - }) - } -} diff --git a/pkg/cloud/report/service.go b/pkg/cloud/report/service.go deleted file mode 100644 index e25d8ea393f9..000000000000 --- a/pkg/cloud/report/service.go +++ /dev/null @@ -1,85 +0,0 @@ -package report - -import ( - "fmt" - "io" - "sort" - "strconv" - "time" - - "github.com/aquasecurity/table" - "github.com/aquasecurity/tml" - pkgReport "github.com/aquasecurity/trivy/pkg/report/table" - "github.com/aquasecurity/trivy/pkg/types" -) - -func writeServiceTable(report *Report, results types.Results, output io.Writer) error { - - t := table.New(output) - - t.SetHeaders("Service", "Misconfigurations", "Last Scanned") - t.AddHeaders("Service", "Critical", "High", "Medium", "Low", "Unknown", "Last Scanned") - t.SetRowLines(false) - t.SetHeaderVerticalAlignment(table.AlignBottom) - t.SetHeaderAlignment(table.AlignLeft, table.AlignCenter, table.AlignCenter, table.AlignCenter, table.AlignCenter, table.AlignCenter, table.AlignLeft) - t.SetAlignment(table.AlignLeft, table.AlignRight, table.AlignRight, table.AlignRight, table.AlignRight, table.AlignRight, table.AlignLeft) - t.SetAutoMergeHeaders(true) - t.SetHeaderColSpans(0, 1, 5, 1) - - // map service -> severity -> count - grouped := make(map[string]map[string]int) - // set zero counts for all services - for _, service := range report.ServicesInScope { - grouped[service] = make(map[string]int) - } - for _, result := range results { - for _, misconfiguration := range result.Misconfigurations { - service := misconfiguration.CauseMetadata.Service - if _, ok := grouped[service]; !ok { - grouped[service] = make(map[string]int) - } - grouped[service][misconfiguration.Severity]++ - } - } - - var sortable []sortableRow - for service, severityCounts := range grouped { - sortable = append(sortable, sortableRow{ - name: service, - counts: severityCounts, - }) - } - sort.Slice(sortable, func(i, j int) bool { return sortable[i].name < sortable[j].name }) - for _, row := range sortable { - var lastScanned string - scanAgo := time.Since(report.Results[row.name].CreationTime).Truncate(time.Minute) - switch { - case scanAgo.Hours() >= 48: - lastScanned = fmt.Sprintf("%d days ago", int(scanAgo.Hours()/24)) - case scanAgo.Hours() > 1: - lastScanned = fmt.Sprintf("%d hours ago", int(scanAgo.Hours())) - case scanAgo.Minutes() > 1: - lastScanned = fmt.Sprintf("%d minutes ago", int(scanAgo.Minutes())) - default: - lastScanned = "just now" - } - - t.AddRow( - row.name, - pkgReport.ColorizeSeverity(strconv.Itoa(row.counts["CRITICAL"]), "CRITICAL"), - pkgReport.ColorizeSeverity(strconv.Itoa(row.counts["HIGH"]), "HIGH"), - pkgReport.ColorizeSeverity(strconv.Itoa(row.counts["MEDIUM"]), "MEDIUM"), - 
pkgReport.ColorizeSeverity(strconv.Itoa(row.counts["LOW"]), "LOW"), - pkgReport.ColorizeSeverity(strconv.Itoa(row.counts["UNKNOWN"]), "UNKNOWN"), - lastScanned, - ) - } - - // render scan title - _ = tml.Fprintf(output, "\nScan Overview for %s Account %s\n", report.Provider, report.AccountID) - - // render table - t.Render() - - return nil -} diff --git a/pkg/cloud/report/service_test.go b/pkg/cloud/report/service_test.go deleted file mode 100644 index 8e35bb0194e2..000000000000 --- a/pkg/cloud/report/service_test.go +++ /dev/null @@ -1,420 +0,0 @@ -package report - -import ( - "bytes" - "context" - "testing" - "time" - - "github.com/aws/aws-sdk-go-v2/aws/arn" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - "github.com/aquasecurity/trivy-db/pkg/types" - "github.com/aquasecurity/trivy/pkg/clock" - "github.com/aquasecurity/trivy/pkg/flag" - "github.com/aquasecurity/trivy/pkg/iac/scan" - iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" -) - -func Test_ServiceReport(t *testing.T) { - tests := []struct { - name string - options flag.Options - fromCache bool - expected string - }{ - { - name: "simple table output", - options: flag.Options{ - ReportOptions: flag.ReportOptions{ - Format: tableFormat, - Severities: []types.Severity{ - types.SeverityLow, - types.SeverityMedium, - types.SeverityHigh, - types.SeverityCritical, - }, - }, - }, - fromCache: false, - expected: ` -Scan Overview for AWS Account -┌─────────┬──────────────────────────────────────────────────┬──────────────┐ -│ │ Misconfigurations │ │ -│ ├──────────┬──────────────┬────────┬─────┬─────────┤ │ -│ Service │ Critical │ High │ Medium │ Low │ Unknown │ Last Scanned │ -├─────────┼──────────┼──────────────┼────────┼─────┼─────────┼──────────────┤ -│ ec2 │ 0 │ 1 │ 0 │ 0 │ 0 │ just now │ -│ s3 │ 0 │ 3 │ 0 │ 0 │ 0 │ just now │ -└─────────┴──────────┴──────────────┴────────┴─────┴─────────┴──────────────┘ -`, - }, - { - name: "results from cache", - options: flag.Options{ - ReportOptions: flag.ReportOptions{ - Format: tableFormat, - Severities: []types.Severity{ - types.SeverityLow, - types.SeverityMedium, - types.SeverityHigh, - types.SeverityCritical, - }, - }, - }, - fromCache: true, - expected: ` -Scan Overview for AWS Account -┌─────────┬──────────────────────────────────────────────────┬──────────────┐ -│ │ Misconfigurations │ │ -│ ├──────────┬──────────────┬────────┬─────┬─────────┤ │ -│ Service │ Critical │ High │ Medium │ Low │ Unknown │ Last Scanned │ -├─────────┼──────────┼──────────────┼────────┼─────┼─────────┼──────────────┤ -│ ec2 │ 0 │ 1 │ 0 │ 0 │ 0 │ just now │ -│ s3 │ 0 │ 3 │ 0 │ 0 │ 0 │ just now │ -└─────────┴──────────┴──────────────┴────────┴─────┴─────────┴──────────────┘ - -This scan report was loaded from cached results. If you'd like to run a fresh scan, use --update-cache. 
-`, - }, - { - name: "filter severities", - options: flag.Options{ - ReportOptions: flag.ReportOptions{ - Format: tableFormat, - Severities: []types.Severity{ - types.SeverityMedium, - }, - }, - AWSOptions: flag.AWSOptions{ - Services: []string{ - "s3", - "ec2", - }, - }, - }, - fromCache: false, - expected: ` -Scan Overview for AWS Account -┌─────────┬──────────────────────────────────────────────────┬──────────────┐ -│ │ Misconfigurations │ │ -│ ├──────────┬──────────────┬────────┬─────┬─────────┤ │ -│ Service │ Critical │ High │ Medium │ Low │ Unknown │ Last Scanned │ -├─────────┼──────────┼──────────────┼────────┼─────┼─────────┼──────────────┤ -│ ec2 │ 0 │ 0 │ 0 │ 0 │ 0 │ just now │ -│ s3 │ 0 │ 0 │ 0 │ 0 │ 0 │ just now │ -└─────────┴──────────┴──────────────┴────────┴─────┴─────────┴──────────────┘ -`, - }, - { - name: "scoped services without results", - options: flag.Options{ - ReportOptions: flag.ReportOptions{ - Format: tableFormat, - Severities: []types.Severity{ - types.SeverityLow, - types.SeverityMedium, - types.SeverityHigh, - types.SeverityCritical, - }, - }, - AWSOptions: flag.AWSOptions{ - Services: []string{ - "ec2", - "s3", - "iam", - }, - }, - }, - fromCache: false, - expected: ` -Scan Overview for AWS Account -┌─────────┬──────────────────────────────────────────────────┬──────────────┐ -│ │ Misconfigurations │ │ -│ ├──────────┬──────────────┬────────┬─────┬─────────┤ │ -│ Service │ Critical │ High │ Medium │ Low │ Unknown │ Last Scanned │ -├─────────┼──────────┼──────────────┼────────┼─────┼─────────┼──────────────┤ -│ ec2 │ 0 │ 1 │ 0 │ 0 │ 0 │ just now │ -│ iam │ 0 │ 0 │ 0 │ 0 │ 0 │ just now │ -│ s3 │ 0 │ 3 │ 0 │ 0 │ 0 │ just now │ -└─────────┴──────────┴──────────────┴────────┴─────┴─────────┴──────────────┘ -`, - }, - { - name: "json output", - options: flag.Options{ - ReportOptions: flag.ReportOptions{ - Format: "json", - Severities: []types.Severity{ - types.SeverityLow, - types.SeverityMedium, - types.SeverityHigh, - types.SeverityCritical, - }, - }, - }, - fromCache: false, - expected: `{ - "CreatedAt": "2021-08-25T12:20:30.000000005Z", - "ArtifactType": "aws_account", - "Metadata": { - "ImageConfig": { - "architecture": "", - "created": "0001-01-01T00:00:00Z", - "os": "", - "rootfs": { - "type": "", - "diff_ids": null - }, - "config": {} - } - }, - "Results": [ - { - "Target": "arn:aws:ec2:us-east-1:1234567890:instance1", - "Class": "config", - "Type": "cloud", - "MisconfSummary": { - "Successes": 0, - "Failures": 1, - "Exceptions": 0 - }, - "Misconfigurations": [ - { - "Type": "AWS", - "ID": "AVD-AWS-9999", - "AVDID": "AVD-AWS-9999", - "Title": "Do not use bad stuff", - "Description": "Bad stuff is... bad", - "Message": "instance is bad", - "Resolution": "Remove bad stuff", - "Severity": "HIGH", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-9999", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-9999" - ], - "Status": "FAIL", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:ec2:us-east-1:1234567890:instance1", - "Provider": "AWS", - "Service": "ec2", - "Code": { - "Lines": null - } - } - } - ] - }, - { - "Target": "arn:aws:s3:us-east-1:1234567890:bucket1", - "Class": "config", - "Type": "cloud", - "MisconfSummary": { - "Successes": 0, - "Failures": 1, - "Exceptions": 0 - }, - "Misconfigurations": [ - { - "Type": "AWS", - "ID": "AVD-AWS-9999", - "AVDID": "AVD-AWS-9999", - "Title": "Do not use bad stuff", - "Description": "Bad stuff is... 
bad", - "Message": "something failed", - "Resolution": "Remove bad stuff", - "Severity": "HIGH", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-9999", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-9999" - ], - "Status": "FAIL", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:s3:us-east-1:1234567890:bucket1", - "Provider": "AWS", - "Service": "s3", - "Code": { - "Lines": null - } - } - } - ] - }, - { - "Target": "arn:aws:s3:us-east-1:1234567890:bucket2", - "Class": "config", - "Type": "cloud", - "MisconfSummary": { - "Successes": 0, - "Failures": 2, - "Exceptions": 0 - }, - "Misconfigurations": [ - { - "Type": "AWS", - "ID": "AVD-AWS-9999", - "AVDID": "AVD-AWS-9999", - "Title": "Do not use bad stuff", - "Description": "Bad stuff is... bad", - "Message": "something else failed", - "Resolution": "Remove bad stuff", - "Severity": "HIGH", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-9999", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-9999" - ], - "Status": "FAIL", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:s3:us-east-1:1234567890:bucket2", - "Provider": "AWS", - "Service": "s3", - "Code": { - "Lines": null - } - } - }, - { - "Type": "AWS", - "ID": "AVD-AWS-9999", - "AVDID": "AVD-AWS-9999", - "Title": "Do not use bad stuff", - "Description": "Bad stuff is... bad", - "Message": "something else failed again", - "Resolution": "Remove bad stuff", - "Severity": "HIGH", - "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-9999", - "References": [ - "https://avd.aquasec.com/misconfig/avd-aws-9999" - ], - "Status": "FAIL", - "Layer": {}, - "CauseMetadata": { - "Resource": "arn:aws:s3:us-east-1:1234567890:bucket2", - "Provider": "AWS", - "Service": "s3", - "Code": { - "Lines": null - } - } - } - ] - }, - { - "Target": "arn:aws:s3:us-east-1:1234567890:bucket3", - "Class": "config", - "Type": "cloud", - "MisconfSummary": { - "Successes": 1, - "Failures": 0, - "Exceptions": 0 - } - } - ] -}`, - }, - } - ctx := clock.With(context.Background(), time.Date(2021, 8, 25, 12, 20, 30, 5, time.UTC)) - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - report := New( - "AWS", - tt.options.AWSOptions.Account, - tt.options.AWSOptions.Region, - createTestResults(), - tt.options.AWSOptions.Services, - ) - - output := bytes.NewBuffer(nil) - tt.options.SetOutputWriter(output) - require.NoError(t, Write(ctx, report, tt.options, tt.fromCache)) - - assert.Equal(t, "AWS", report.Provider) - assert.Equal(t, tt.options.AWSOptions.Account, report.AccountID) - assert.Equal(t, tt.options.AWSOptions.Region, report.Region) - assert.ElementsMatch(t, tt.options.AWSOptions.Services, report.ServicesInScope) - - if tt.options.Format == "json" { - // json output can be formatted/ordered differently - we just care that the data matches - assert.JSONEq(t, tt.expected, output.String()) - } else { - assert.Equal(t, tt.expected, output.String()) - } - }) - } -} - -func createTestResults() scan.Results { - - baseRule := scan.Rule{ - AVDID: "AVD-AWS-9999", - Aliases: []string{"AWS999"}, - ShortCode: "no-bad-stuff", - Summary: "Do not use bad stuff", - Explanation: "Bad stuff is... 
bad", - Impact: "Bad things", - Resolution: "Remove bad stuff", - Provider: "AWS", - Severity: "HIGH", - } - - var s3Results scan.Results - s3Results.Add( - "something failed", - iacTypes.NewRemoteMetadata((arn.ARN{ - Partition: "aws", - Service: "s3", - Region: "us-east-1", - AccountID: "1234567890", - Resource: "bucket1", - }).String()), - ) - s3Results.Add( - "something else failed", - iacTypes.NewRemoteMetadata((arn.ARN{ - Partition: "aws", - Service: "s3", - Region: "us-east-1", - AccountID: "1234567890", - Resource: "bucket2", - }).String()), - ) - s3Results.Add( - "something else failed again", - iacTypes.NewRemoteMetadata((arn.ARN{ - Partition: "aws", - Service: "s3", - Region: "us-east-1", - AccountID: "1234567890", - Resource: "bucket2", - }).String()), - ) - s3Results.AddPassed( - iacTypes.NewRemoteMetadata((arn.ARN{ - Partition: "aws", - Service: "s3", - Region: "us-east-1", - AccountID: "1234567890", - Resource: "bucket3", - }).String()), - ) - baseRule.Service = "s3" - s3Results.SetRule(baseRule) - var ec2Results scan.Results - ec2Results.Add( - "instance is bad", - iacTypes.NewRemoteMetadata((arn.ARN{ - Partition: "aws", - Service: "ec2", - Region: "us-east-1", - AccountID: "1234567890", - Resource: "instance1", - }).String()), - ) - baseRule.Service = "ec2" - ec2Results.SetRule(baseRule) - return append(s3Results, ec2Results...) -} diff --git a/pkg/commands/app.go b/pkg/commands/app.go index 7746e1b70784..3f88b9e9b37e 100644 --- a/pkg/commands/app.go +++ b/pkg/commands/app.go @@ -7,17 +7,15 @@ import ( "fmt" "io" "os" - "sort" - "strings" "time" "github.com/spf13/cobra" "github.com/spf13/viper" "golang.org/x/xerrors" - awsScanner "github.com/aquasecurity/trivy-aws/pkg/scanner" - awscommands "github.com/aquasecurity/trivy/pkg/cloud/aws/commands" + "github.com/aquasecurity/trivy/pkg/cache" "github.com/aquasecurity/trivy/pkg/commands/artifact" + "github.com/aquasecurity/trivy/pkg/commands/clean" "github.com/aquasecurity/trivy/pkg/commands/convert" "github.com/aquasecurity/trivy/pkg/commands/server" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" @@ -65,6 +63,7 @@ Use "{{.CommandPath}} [command] --help" for more information about a command.{{e // NewApp is the factory method to return Trivy CLI func NewApp() *cobra.Command { + cobra.EnableTraverseRunHooks = true // To execute persistent pre-run hooks from all parents. globalFlags := flag.NewGlobalFlagGroup() rootCmd := NewRootCommand(globalFlags) rootCmd.AddGroup( @@ -92,13 +91,13 @@ func NewApp() *cobra.Command { NewServerCommand(globalFlags), NewConfigCommand(globalFlags), NewConvertCommand(globalFlags), - NewPluginCommand(), + NewPluginCommand(globalFlags), NewModuleCommand(globalFlags), NewKubernetesCommand(globalFlags), NewSBOMCommand(globalFlags), NewVersionCommand(globalFlags), - NewAWSCommand(globalFlags), NewVMCommand(globalFlags), + NewCleanCommand(globalFlags), ) if plugins := loadPluginCommands(); len(plugins) > 0 { @@ -109,15 +108,23 @@ func NewApp() *cobra.Command { rootCmd.AddCommand(plugins...) } + // TODO(simar7): Only for backwards support guidance, delete the subcommand after a while. + if cmd, _, _ := rootCmd.Find([]string{"aws"}); cmd == cmd.Root() { // "trivy aws" not installed + rootCmd.AddCommand(&cobra.Command{ + Hidden: true, + Long: "Trivy AWS is now available as an optional plugin. 
See github.com/aquasecurity/trivy-aws for details.", + Use: "aws", + }) + } + return rootCmd } func loadPluginCommands() []*cobra.Command { ctx := context.Background() - manager := plugin.NewManager() var commands []*cobra.Command - plugins, err := manager.LoadAll(ctx) + plugins, err := plugin.NewManager().LoadAll(ctx) if err != nil { log.DebugContext(ctx, "No plugins loaded") return nil @@ -242,7 +249,7 @@ func NewImageCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command { reportFlagGroup.ReportFormat = report compliance := flag.ComplianceFlag.Clone() - compliance.Values = []string{types.ComplianceDockerCIS} + compliance.Values = []string{types.ComplianceDockerCIS160} reportFlagGroup.Compliance = compliance // override usage as the accepted values differ for each subcommand. misconfFlagGroup := flag.NewMisconfFlagGroup() @@ -324,12 +331,6 @@ func NewImageCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command { } func NewFilesystemCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command { - reportFlagGroup := flag.NewReportFlagGroup() - reportFormat := flag.ReportFormatFlag.Clone() - reportFormat.Usage = "specify a compliance report format for the output" // @TODO: support --report summary for non compliance reports - reportFlagGroup.ReportFormat = reportFormat - reportFlagGroup.ExitOnEOL = nil // disable '--exit-on-eol' - fsFlags := &flag.Flags{ GlobalFlagGroup: globalFlags, CacheFlagGroup: flag.NewCacheFlagGroup(), @@ -340,12 +341,16 @@ func NewFilesystemCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command { RemoteFlagGroup: flag.NewClientFlags(), // for client/server mode RegistryFlagGroup: flag.NewRegistryFlagGroup(), RegoFlagGroup: flag.NewRegoFlagGroup(), - ReportFlagGroup: reportFlagGroup, + ReportFlagGroup: flag.NewReportFlagGroup(), ScanFlagGroup: flag.NewScanFlagGroup(), SecretFlagGroup: flag.NewSecretFlagGroup(), VulnerabilityFlagGroup: flag.NewVulnerabilityFlagGroup(), } + fsFlags.CacheFlagGroup.CacheBackend.Default = string(cache.TypeMemory) // Use memory cache by default + fsFlags.ReportFlagGroup.ReportFormat.Usage = "specify a compliance report format for the output" // @TODO: support --report summary for non compliance reports + fsFlags.ReportFlagGroup.ExitOnEOL = nil // disable '--exit-on-eol' + cmd := &cobra.Command{ Use: "filesystem [flags] PATH", Aliases: []string{"fs"}, @@ -399,10 +404,11 @@ func NewRootfsCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command { SecretFlagGroup: flag.NewSecretFlagGroup(), VulnerabilityFlagGroup: flag.NewVulnerabilityFlagGroup(), } - rootfsFlags.ReportFlagGroup.ReportFormat = nil // TODO: support --report summary - rootfsFlags.ReportFlagGroup.Compliance = nil // disable '--compliance' - rootfsFlags.ReportFlagGroup.ReportFormat = nil // disable '--report' - rootfsFlags.ScanFlagGroup.IncludeDevDeps = nil // disable '--include-dev-deps' + rootfsFlags.ReportFlagGroup.ReportFormat = nil // TODO: support --report summary + rootfsFlags.ReportFlagGroup.Compliance = nil // disable '--compliance' + rootfsFlags.ReportFlagGroup.ReportFormat = nil // disable '--report' + rootfsFlags.ScanFlagGroup.IncludeDevDeps = nil // disable '--include-dev-deps' + rootfsFlags.CacheFlagGroup.CacheBackend.Default = string(cache.TypeMemory) // Use memory cache by default cmd := &cobra.Command{ Use: "rootfs [flags] ROOTDIR", @@ -463,6 +469,8 @@ func NewRepositoryCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command { repoFlags.ReportFlagGroup.Compliance = nil // disable '--compliance' repoFlags.ReportFlagGroup.ExitOnEOL = nil // disable 
'--exit-on-eol' + repoFlags.CacheFlagGroup.CacheBackend.Default = string(cache.TypeMemory) // Use memory cache by default + cmd := &cobra.Command{ Use: "repository [flags] (REPO_PATH | REPO_URL)", Aliases: []string{"repo"}, @@ -645,15 +653,6 @@ func NewServerCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command { } func NewConfigCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command { - reportFlagGroup := flag.NewReportFlagGroup() - reportFlagGroup.DependencyTree = nil // disable '--dependency-tree' - reportFlagGroup.ListAllPkgs = nil // disable '--list-all-pkgs' - reportFlagGroup.ExitOnEOL = nil // disable '--exit-on-eol' - reportFlagGroup.ShowSuppressed = nil // disable '--show-suppressed' - reportFormat := flag.ReportFormatFlag.Clone() - reportFormat.Usage = "specify a compliance report format for the output" // @TODO: support --report summary for non compliance reports - reportFlagGroup.ReportFormat = reportFormat - scanFlags := &flag.ScanFlagGroup{ // Enable only '--skip-dirs' and '--skip-files' and disable other flags SkipDirs: flag.SkipDirsFlag.Clone(), @@ -672,10 +671,17 @@ func NewConfigCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command { // disable unneeded flags K8sVersion: flag.K8sVersionFlag.Clone(), }, - ReportFlagGroup: reportFlagGroup, + ReportFlagGroup: flag.NewReportFlagGroup(), ScanFlagGroup: scanFlags, } + configFlags.ReportFlagGroup.DependencyTree = nil // disable '--dependency-tree' + configFlags.ReportFlagGroup.ListAllPkgs = nil // disable '--list-all-pkgs' + configFlags.ReportFlagGroup.ExitOnEOL = nil // disable '--exit-on-eol' + configFlags.ReportFlagGroup.ShowSuppressed = nil // disable '--show-suppressed' + configFlags.ReportFlagGroup.ReportFormat.Usage = "specify a compliance report format for the output" // @TODO: support --report summary for non compliance reports + configFlags.CacheFlagGroup.CacheBackend.Default = string(cache.TypeMemory) + cmd := &cobra.Command{ Use: "config [flags] DIR", Aliases: []string{"conf"}, @@ -714,7 +720,11 @@ func NewConfigCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command { return cmd } -func NewPluginCommand() *cobra.Command { +func NewPluginCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command { + var pluginOptions flag.Options + pluginFlags := &flag.Flags{ + GlobalFlagGroup: globalFlags, + } cmd := &cobra.Command{ Use: "plugin subcommand", Aliases: []string{"p"}, @@ -722,6 +732,13 @@ func NewPluginCommand() *cobra.Command { Short: "Manage plugins", SilenceErrors: true, SilenceUsage: true, + PersistentPreRunE: func(cmd *cobra.Command, args []string) (err error) { + pluginOptions, err = pluginFlags.ToOptions(args) + if err != nil { + return err + } + return nil + }, } cmd.AddCommand( &cobra.Command{ @@ -741,7 +758,7 @@ func NewPluginCommand() *cobra.Command { DisableFlagsInUseLine: true, Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { - if _, err := plugin.Install(cmd.Context(), args[0], plugin.Options{}); err != nil { + if _, err := plugin.Install(cmd.Context(), args[0], plugin.Options{Insecure: pluginOptions.Insecure}); err != nil { return xerrors.Errorf("plugin install error: %w", err) } return nil @@ -800,7 +817,10 @@ func NewPluginCommand() *cobra.Command { Short: "Run a plugin on the fly", Args: cobra.MinimumNArgs(1), RunE: func(cmd *cobra.Command, args []string) error { - return plugin.Run(cmd.Context(), args[0], plugin.Options{Args: args[1:]}) + return plugin.Run(cmd.Context(), args[0], plugin.Options{ + Args: args[1:], + Insecure: pluginOptions.Insecure, + }) }, }, 
&cobra.Command{ @@ -811,7 +831,7 @@ func NewPluginCommand() *cobra.Command { SilenceUsage: true, Args: cobra.NoArgs, RunE: func(cmd *cobra.Command, _ []string) error { - if err := plugin.Update(cmd.Context()); err != nil { + if err := plugin.Update(cmd.Context(), plugin.Options{Insecure: pluginOptions.Insecure}); err != nil { return xerrors.Errorf("plugin update error: %w", err) } return nil @@ -941,10 +961,12 @@ func NewKubernetesCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command { reportFlagGroup := flag.NewReportFlagGroup() compliance := flag.ComplianceFlag.Clone() compliance.Values = []string{ - types.ComplianceK8sNsa, - types.ComplianceK8sCIS, - types.ComplianceK8sPSSBaseline, - types.ComplianceK8sPSSRestricted, + types.ComplianceK8sNsa10, + types.ComplianceK8sCIS123, + types.ComplianceEksCIS14, + types.ComplianceRke2CIS124, + types.ComplianceK8sPSSBaseline01, + types.ComplianceK8sPSSRestricted01, } reportFlagGroup.Compliance = compliance // override usage as the accepted values differ for each subcommand. reportFlagGroup.ExitOnEOL = nil // disable '--exit-on-eol' @@ -1019,80 +1041,6 @@ func NewKubernetesCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command { return cmd } -func NewAWSCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command { - reportFlagGroup := flag.NewReportFlagGroup() - compliance := flag.ComplianceFlag - compliance.Values = []string{ - types.ComplianceAWSCIS12, - types.ComplianceAWSCIS14, - } - reportFlagGroup.Compliance = &compliance // override usage as the accepted values differ for each subcommand. - reportFlagGroup.ExitOnEOL = nil // disable '--exit-on-eol' - reportFlagGroup.ShowSuppressed = nil // disable '--show-suppressed' - - awsFlags := &flag.Flags{ - GlobalFlagGroup: globalFlags, - AWSFlagGroup: flag.NewAWSFlagGroup(), - CloudFlagGroup: flag.NewCloudFlagGroup(), - MisconfFlagGroup: flag.NewMisconfFlagGroup(), - RegoFlagGroup: flag.NewRegoFlagGroup(), - ReportFlagGroup: reportFlagGroup, - } - - services := awsScanner.AllSupportedServices() - sort.Strings(services) - - cmd := &cobra.Command{ - Use: "aws [flags]", - Aliases: []string{}, - GroupID: groupScanning, - Args: cobra.ExactArgs(0), - Short: "[EXPERIMENTAL] Scan AWS account", - Long: fmt.Sprintf(`Scan an AWS account for misconfigurations. Trivy uses the same authentication methods as the AWS CLI. 
See https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-configure.html - -The following services are supported: - -- %s -`, strings.Join(services, "\n- ")), - Example: ` # basic scanning - $ trivy aws --region us-east-1 - - # limit scan to a single service: - $ trivy aws --region us-east-1 --service s3 - - # limit scan to multiple services: - $ trivy aws --region us-east-1 --service s3 --service ec2 - - # force refresh of cache for fresh results - $ trivy aws --region us-east-1 --update-cache -`, - PreRunE: func(cmd *cobra.Command, args []string) error { - if err := awsFlags.Bind(cmd); err != nil { - return xerrors.Errorf("flag bind error: %w", err) - } - return nil - }, - RunE: func(cmd *cobra.Command, args []string) error { - opts, err := awsFlags.ToOptions(args) - if err != nil { - return xerrors.Errorf("flag error: %w", err) - } - if opts.Timeout < time.Hour { - opts.Timeout = time.Hour - log.Info("Timeout is set to less than 1 hour - upgrading to 1 hour for this command.") - } - return awscommands.Run(cmd.Context(), opts) - }, - SilenceErrors: true, - SilenceUsage: true, - } - cmd.SetFlagErrorFunc(flagErrorFunc) - awsFlags.AddFlags(cmd) - cmd.SetUsageTemplate(fmt.Sprintf(usageTemplate, awsFlags.Usages(cmd))) - - return cmd -} - func NewVMCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command { vmFlags := &flag.Flags{ GlobalFlagGroup: globalFlags, @@ -1194,6 +1142,8 @@ func NewSBOMCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command { LicenseFlagGroup: licenseFlagGroup, } + sbomFlags.CacheFlagGroup.CacheBackend.Default = string(cache.TypeMemory) // Use memory cache by default + cmd := &cobra.Command{ Use: "sbom [flags] SBOM_PATH", Short: "Scan SBOM for vulnerabilities and licenses", @@ -1231,6 +1181,48 @@ func NewSBOMCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command { return cmd } +func NewCleanCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command { + cleanFlags := &flag.Flags{ + GlobalFlagGroup: globalFlags, + CleanFlagGroup: flag.NewCleanFlagGroup(), + } + cmd := &cobra.Command{ + Use: "clean [flags]", + GroupID: groupUtility, + Short: "Remove cached files", + Example: ` # Remove all caches + $ trivy clean --all + + # Remove scan cache + $ trivy clean --scan-cache + + # Remove vulnerability database + $ trivy clean --vuln-db +`, + PreRunE: func(cmd *cobra.Command, args []string) error { + if err := cleanFlags.Bind(cmd); err != nil { + return xerrors.Errorf("flag bind error: %w", err) + } + return nil + }, + RunE: func(cmd *cobra.Command, args []string) error { + opts, err := cleanFlags.ToOptions(args) + if err != nil { + return xerrors.Errorf("flag error: %w", err) + } + + return clean.Run(cmd.Context(), opts) + }, + SilenceErrors: true, + SilenceUsage: true, + } + cmd.SetFlagErrorFunc(flagErrorFunc) + cleanFlags.AddFlags(cmd) + cmd.SetUsageTemplate(fmt.Sprintf(usageTemplate, cleanFlags.Usages(cmd))) + + return cmd +} + func NewVersionCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command { var versionFormat string cmd := &cobra.Command{ @@ -1270,7 +1262,8 @@ func showVersion(cacheDir, outputFormat string, w io.Writer) error { } func validateArgs(cmd *cobra.Command, args []string) error { - // '--clear-cache', '--download-db-only', '--download-java-db-only', '--reset', '--reset-checks-bundle' and '--generate-default-config' don't conduct the subsequent scanning + // '--clear-cache' (removed), '--download-db-only', '--download-java-db-only', '--reset' (removed), + // '--reset-checks-bundle' (removed) and '--generate-default-config' don't conduct the 
subsequent scanning if viper.GetBool(flag.ClearCacheFlag.ConfigName) || viper.GetBool(flag.DownloadDBOnlyFlag.ConfigName) || viper.GetBool(flag.ResetFlag.ConfigName) || viper.GetBool(flag.GenerateDefaultConfigFlag.ConfigName) || viper.GetBool(flag.DownloadJavaDBOnlyFlag.ConfigName) || viper.GetBool(flag.ResetChecksBundleFlag.ConfigName) { diff --git a/pkg/commands/app_test.go b/pkg/commands/app_test.go index 143de739caf0..7235a3e94c7d 100644 --- a/pkg/commands/app_test.go +++ b/pkg/commands/app_test.go @@ -271,7 +271,7 @@ func TestFlags(t *testing.T) { "--scanners", "license", "--compliance", - "docker-cis", + "docker-cis-1.6.0", }, want: want{ format: types.FormatTable, diff --git a/pkg/commands/artifact/inject.go b/pkg/commands/artifact/inject.go index f0538efdbce3..da2c05ac91e4 100644 --- a/pkg/commands/artifact/inject.go +++ b/pkg/commands/artifact/inject.go @@ -5,10 +5,11 @@ package artifact import ( "context" + "github.com/google/wire" + "github.com/aquasecurity/trivy/pkg/cache" "github.com/aquasecurity/trivy/pkg/fanal/artifact" - "github.com/aquasecurity/trivy/pkg/fanal/cache" "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/rpc/client" "github.com/aquasecurity/trivy/pkg/scanner" @@ -20,8 +21,7 @@ import ( // initializeImageScanner is for container image scanning in standalone mode // e.g. dockerd, container registry, podman, etc. -func initializeImageScanner(ctx context.Context, imageName string, artifactCache cache.ArtifactCache, - localArtifactCache cache.LocalArtifactCache, imageOpt types.ImageOptions, artifactOption artifact.Option) ( +func initializeImageScanner(ctx context.Context, imageName string, imageOpt types.ImageOptions, cacheOptions cache.Options, artifactOption artifact.Option) ( scanner.Scanner, func(), error) { wire.Build(scanner.StandaloneDockerSet) return scanner.Scanner{}, nil, nil @@ -29,33 +29,29 @@ func initializeImageScanner(ctx context.Context, imageName string, artifactCache // initializeArchiveScanner is for container image archive scanning in standalone mode // e.g. 
docker save -o alpine.tar alpine:3.15 -func initializeArchiveScanner(ctx context.Context, filePath string, artifactCache cache.ArtifactCache, - localArtifactCache cache.LocalArtifactCache, artifactOption artifact.Option) (scanner.Scanner, error) { +func initializeArchiveScanner(ctx context.Context, filePath string, cacheOptions cache.Options, artifactOption artifact.Option) ( + scanner.Scanner, func(), error) { wire.Build(scanner.StandaloneArchiveSet) - return scanner.Scanner{}, nil + return scanner.Scanner{}, nil, nil } // initializeFilesystemScanner is for filesystem scanning in standalone mode -func initializeFilesystemScanner(ctx context.Context, path string, artifactCache cache.ArtifactCache, - localArtifactCache cache.LocalArtifactCache, artifactOption artifact.Option) (scanner.Scanner, func(), error) { +func initializeFilesystemScanner(ctx context.Context, path string, cacheOptions cache.Options, artifactOption artifact.Option) (scanner.Scanner, func(), error) { wire.Build(scanner.StandaloneFilesystemSet) return scanner.Scanner{}, nil, nil } -func initializeRepositoryScanner(ctx context.Context, url string, artifactCache cache.ArtifactCache, - localArtifactCache cache.LocalArtifactCache, artifactOption artifact.Option) (scanner.Scanner, func(), error) { +func initializeRepositoryScanner(ctx context.Context, url string, cacheOptions cache.Options, artifactOption artifact.Option) (scanner.Scanner, func(), error) { wire.Build(scanner.StandaloneRepositorySet) return scanner.Scanner{}, nil, nil } -func initializeSBOMScanner(ctx context.Context, filePath string, artifactCache cache.ArtifactCache, - localArtifactCache cache.LocalArtifactCache, artifactOption artifact.Option) (scanner.Scanner, func(), error) { +func initializeSBOMScanner(ctx context.Context, filePath string, cacheOptions cache.Options, artifactOption artifact.Option) (scanner.Scanner, func(), error) { wire.Build(scanner.StandaloneSBOMSet) return scanner.Scanner{}, nil, nil } -func initializeVMScanner(ctx context.Context, filePath string, artifactCache cache.ArtifactCache, - localArtifactCache cache.LocalArtifactCache, artifactOption artifact.Option) ( +func initializeVMScanner(ctx context.Context, filePath string, cacheOptions cache.Options, artifactOption artifact.Option) ( scanner.Scanner, func(), error) { wire.Build(scanner.StandaloneVMSet) return scanner.Scanner{}, nil, nil @@ -67,7 +63,7 @@ func initializeVMScanner(ctx context.Context, filePath string, artifactCache cac // initializeRemoteImageScanner is for container image scanning in client/server mode // e.g. dockerd, container registry, podman, etc. -func initializeRemoteImageScanner(ctx context.Context, imageName string, artifactCache cache.ArtifactCache, +func initializeRemoteImageScanner(ctx context.Context, imageName string, remoteCacheOptions cache.RemoteOptions, remoteScanOptions client.ScannerOption, imageOpt types.ImageOptions, artifactOption artifact.Option) ( scanner.Scanner, func(), error) { wire.Build(scanner.RemoteDockerSet) @@ -76,21 +72,21 @@ func initializeRemoteImageScanner(ctx context.Context, imageName string, artifac // initializeRemoteArchiveScanner is for container image archive scanning in client/server mode // e.g. 
docker save -o alpine.tar alpine:3.15 -func initializeRemoteArchiveScanner(ctx context.Context, filePath string, artifactCache cache.ArtifactCache, - remoteScanOptions client.ScannerOption, artifactOption artifact.Option) (scanner.Scanner, error) { +func initializeRemoteArchiveScanner(ctx context.Context, filePath string, remoteCacheOptions cache.RemoteOptions, + remoteScanOptions client.ScannerOption, artifactOption artifact.Option) (scanner.Scanner, func(), error) { wire.Build(scanner.RemoteArchiveSet) - return scanner.Scanner{}, nil + return scanner.Scanner{}, nil, nil } // initializeRemoteFilesystemScanner is for filesystem scanning in client/server mode -func initializeRemoteFilesystemScanner(ctx context.Context, path string, artifactCache cache.ArtifactCache, +func initializeRemoteFilesystemScanner(ctx context.Context, path string, remoteCacheOptions cache.RemoteOptions, remoteScanOptions client.ScannerOption, artifactOption artifact.Option) (scanner.Scanner, func(), error) { wire.Build(scanner.RemoteFilesystemSet) return scanner.Scanner{}, nil, nil } // initializeRemoteRepositoryScanner is for repository scanning in client/server mode -func initializeRemoteRepositoryScanner(ctx context.Context, url string, artifactCache cache.ArtifactCache, +func initializeRemoteRepositoryScanner(ctx context.Context, url string, remoteCacheOptions cache.RemoteOptions, remoteScanOptions client.ScannerOption, artifactOption artifact.Option) ( scanner.Scanner, func(), error) { wire.Build(scanner.RemoteRepositorySet) @@ -98,14 +94,14 @@ func initializeRemoteRepositoryScanner(ctx context.Context, url string, artifact } // initializeRemoteSBOMScanner is for sbom scanning in client/server mode -func initializeRemoteSBOMScanner(ctx context.Context, path string, artifactCache cache.ArtifactCache, +func initializeRemoteSBOMScanner(ctx context.Context, path string, remoteCacheOptions cache.RemoteOptions, remoteScanOptions client.ScannerOption, artifactOption artifact.Option) (scanner.Scanner, func(), error) { wire.Build(scanner.RemoteSBOMSet) return scanner.Scanner{}, nil, nil } // initializeRemoteVMScanner is for vm scanning in client/server mode -func initializeRemoteVMScanner(ctx context.Context, path string, artifactCache cache.ArtifactCache, +func initializeRemoteVMScanner(ctx context.Context, path string, remoteCacheOptions cache.RemoteOptions, remoteScanOptions client.ScannerOption, artifactOption artifact.Option) (scanner.Scanner, func(), error) { wire.Build(scanner.RemoteVMSet) return scanner.Scanner{}, nil, nil diff --git a/pkg/commands/artifact/run.go b/pkg/commands/artifact/run.go index afdbf2ee6966..db73cf58b391 100644 --- a/pkg/commands/artifact/run.go +++ b/pkg/commands/artifact/run.go @@ -4,20 +4,18 @@ import ( "context" "errors" "fmt" + "slices" "github.com/hashicorp/go-multierror" "github.com/samber/lo" "github.com/spf13/viper" - "golang.org/x/exp/slices" "golang.org/x/xerrors" - "github.com/aquasecurity/go-version/pkg/semver" "github.com/aquasecurity/trivy-db/pkg/db" - tcache "github.com/aquasecurity/trivy/pkg/cache" + "github.com/aquasecurity/trivy/pkg/cache" "github.com/aquasecurity/trivy/pkg/commands/operation" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" "github.com/aquasecurity/trivy/pkg/fanal/artifact" - "github.com/aquasecurity/trivy/pkg/fanal/cache" ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/fanal/walker" 
"github.com/aquasecurity/trivy/pkg/flag" @@ -26,13 +24,12 @@ import ( "github.com/aquasecurity/trivy/pkg/log" "github.com/aquasecurity/trivy/pkg/misconf" "github.com/aquasecurity/trivy/pkg/module" - "github.com/aquasecurity/trivy/pkg/policy" pkgReport "github.com/aquasecurity/trivy/pkg/report" "github.com/aquasecurity/trivy/pkg/result" "github.com/aquasecurity/trivy/pkg/rpc/client" "github.com/aquasecurity/trivy/pkg/scanner" "github.com/aquasecurity/trivy/pkg/types" - "github.com/aquasecurity/trivy/pkg/utils/fsutils" + "github.com/aquasecurity/trivy/pkg/version/doc" ) // TargetKind represents what kind of artifact Trivy scans @@ -46,8 +43,6 @@ const ( TargetImageArchive TargetKind = "archive" TargetSBOM TargetKind = "sbom" TargetVM TargetKind = "vm" - - devVersion = "dev" ) var ( @@ -62,8 +57,8 @@ type ScannerConfig struct { Target string // Cache - ArtifactCache cache.ArtifactCache - LocalArtifactCache cache.LocalArtifactCache + CacheOptions cache.Options + RemoteCacheOptions cache.RemoteOptions // Client/Server options ServerOption client.ScannerOption @@ -94,35 +89,31 @@ type Runner interface { } type runner struct { - cache cache.Cache - dbOpen bool + initializeScanner InitializeScanner + dbOpen bool // WASM modules module *module.Manager } -type runnerOption func(*runner) +type RunnerOption func(*runner) -// WithCacheClient takes a custom cache implementation +// WithInitializeScanner takes a custom scanner initialization function. // It is useful when Trivy is imported as a library. -func WithCacheClient(c cache.Cache) runnerOption { +func WithInitializeScanner(f InitializeScanner) RunnerOption { return func(r *runner) { - r.cache = c + r.initializeScanner = f } } // NewRunner initializes Runner that provides scanning functionalities. // It is possible to return SkipScan and it must be handled by caller. -func NewRunner(ctx context.Context, cliOptions flag.Options, opts ...runnerOption) (Runner, error) { +func NewRunner(ctx context.Context, cliOptions flag.Options, opts ...RunnerOption) (Runner, error) { r := &runner{} for _, opt := range opts { opt(r) } - if err := r.initCache(cliOptions); err != nil { - return nil, xerrors.Errorf("cache error: %w", err) - } - // Update the vulnerability database if needed. 
if err := r.initDB(ctx, cliOptions); err != nil { return nil, xerrors.Errorf("DB error: %w", err) @@ -145,10 +136,6 @@ func NewRunner(ctx context.Context, cliOptions flag.Options, opts ...runnerOptio // Close closes everything func (r *runner) Close(ctx context.Context) error { var errs error - if err := r.cache.Close(); err != nil { - errs = multierror.Append(errs, err) - } - if r.dbOpen { if err := db.Close(); err != nil { errs = multierror.Append(errs, err) @@ -261,7 +248,10 @@ func (r *runner) ScanVM(ctx context.Context, opts flag.Options) (types.Report, e } func (r *runner) scanArtifact(ctx context.Context, opts flag.Options, initializeScanner InitializeScanner) (types.Report, error) { - report, err := scan(ctx, opts, initializeScanner, r.cache) + if r.initializeScanner != nil { + initializeScanner = r.initializeScanner + } + report, err := r.scan(ctx, opts, initializeScanner) if err != nil { return types.Report{}, xerrors.Errorf("scan error: %w", err) } @@ -338,58 +328,6 @@ func (r *runner) initJavaDB(opts flag.Options) error { return nil } -func (r *runner) initCache(opts flag.Options) error { - // Skip initializing cache when custom cache is passed - if r.cache != nil { - return nil - } - - // client/server mode - if opts.ServerAddr != "" { - remoteCache := tcache.NewRemoteCache(opts.ServerAddr, opts.CustomHeaders, opts.Insecure) - r.cache = tcache.NopCache(remoteCache) - return nil - } - - // standalone mode - fsutils.SetCacheDir(opts.CacheDir) - cacheClient, err := operation.NewCache(opts.CacheOptions) - if err != nil { - return xerrors.Errorf("unable to initialize the cache: %w", err) - } - log.Debug("Cache dir", log.String("dir", fsutils.CacheDir())) - - if opts.Reset { - defer cacheClient.Close() - if err = cacheClient.Reset(); err != nil { - return xerrors.Errorf("cache reset error: %w", err) - } - return SkipScan - } - - if opts.ResetChecksBundle { - c, err := policy.NewClient(fsutils.CacheDir(), true, opts.MisconfOptions.ChecksBundleRepository) - if err != nil { - return xerrors.Errorf("failed to instantiate check client: %w", err) - } - if err := c.Clear(); err != nil { - return xerrors.Errorf("failed to remove the cache: %w", err) - } - return SkipScan - } - - if opts.ClearCache { - defer cacheClient.Close() - if err = cacheClient.ClearArtifacts(); err != nil { - return xerrors.Errorf("cache clear error: %w", err) - } - return SkipScan - } - - r.cache = cacheClient - return nil -} - // Run performs artifact scanning func Run(ctx context.Context, opts flag.Options, targetKind TargetKind) (err error) { ctx, cancel := context.WithTimeout(ctx, opts.Timeout) @@ -397,7 +335,8 @@ func Run(ctx context.Context, opts flag.Options, targetKind TargetKind) (err err defer func() { if errors.Is(err, context.DeadlineExceeded) { - log.Warn("Provide a higher timeout value, see https://aquasecurity.github.io/trivy/latest/docs/configuration/") + // e.g. 
https://aquasecurity.github.io/trivy/latest/docs/configuration/ + log.WarnContext(ctx, fmt.Sprintf("Provide a higher timeout value, see %s", doc.URL("/docs/configuration/", ""))) } }() @@ -527,7 +466,7 @@ func filterMisconfigAnalyzers(included, all []analyzer.Type) ([]analyzer.Type, e return lo.Without(all, included...), nil } -func initScannerConfig(opts flag.Options, cacheClient cache.Cache) (ScannerConfig, types.ScanOptions, error) { +func (r *runner) initScannerConfig(opts flag.Options) (ScannerConfig, types.ScanOptions, error) { target := opts.Target if opts.Input != "" { target = opts.Input @@ -593,10 +532,10 @@ func initScannerConfig(opts flag.Options, cacheClient cache.Cache) (ScannerConfi // Do not load config file for secret scanning if opts.Scanners.Enabled(types.SecretScanner) { - ver := canonicalVersion(opts.AppVersion) log.Info("Secret scanning is enabled") log.Info("If your scanning is slow, please try '--scanners vuln' to disable secret scanning") - log.Infof("Please see also https://aquasecurity.github.io/trivy/%s/docs/scanner/secret/#recommendation for faster secret detection", ver) + // e.g. https://aquasecurity.github.io/trivy/latest/docs/scanner/secret/#recommendation + log.Infof("Please see also %s for faster secret detection", doc.URL("/docs/scanner/secret/", "recommendation")) } else { opts.SecretConfigPath = "" } @@ -617,8 +556,8 @@ func initScannerConfig(opts flag.Options, cacheClient cache.Cache) (ScannerConfi return ScannerConfig{ Target: target, - ArtifactCache: cacheClient, - LocalArtifactCache: cacheClient, + CacheOptions: opts.CacheOpts(), + RemoteCacheOptions: opts.RemoteCacheOpts(), ServerOption: client.ScannerOption{ RemoteURL: opts.ServerAddr, CustomHeaders: opts.CustomHeaders, @@ -636,10 +575,9 @@ func initScannerConfig(opts flag.Options, cacheClient cache.Cache) (ScannerConfi RepoTag: opts.RepoTag, SBOMSources: opts.SBOMSources, RekorURL: opts.RekorURL, - //Platform: opts.Platform, - AWSRegion: opts.Region, - AWSEndpoint: opts.Endpoint, - FileChecksum: fileChecksum, + AWSRegion: opts.Region, + AWSEndpoint: opts.Endpoint, + FileChecksum: fileChecksum, // For image scanning ImageOption: ftypes.ImageOptions{ @@ -676,9 +614,8 @@ func initScannerConfig(opts flag.Options, cacheClient cache.Cache) (ScannerConfi }, scanOptions, nil } -func scan(ctx context.Context, opts flag.Options, initializeScanner InitializeScanner, cacheClient cache.Cache) ( - types.Report, error) { - scannerConfig, scanOptions, err := initScannerConfig(opts, cacheClient) +func (r *runner) scan(ctx context.Context, opts flag.Options, initializeScanner InitializeScanner) (types.Report, error) { + scannerConfig, scanOptions, err := r.initScannerConfig(opts) if err != nil { return types.Report{}, err } @@ -694,20 +631,3 @@ func scan(ctx context.Context, opts flag.Options, initializeScanner InitializeSc } return report, nil } - -func canonicalVersion(ver string) string { - if ver == devVersion { - return ver - } - v, err := semver.Parse(ver) - if err != nil { - return devVersion - } - // Replace pre-release with "dev" - // e.g. 
v0.34.0-beta1+snapshot-1 - if v.IsPreRelease() || v.Metadata() != "" { - return devVersion - } - // Add "v" prefix and cut a patch number, "0.34.0" => "v0.34" for the url - return fmt.Sprintf("v%d.%d", v.Major(), v.Minor()) -} diff --git a/pkg/commands/artifact/run_test.go b/pkg/commands/artifact/run_test.go deleted file mode 100644 index 02d35a53d44b..000000000000 --- a/pkg/commands/artifact/run_test.go +++ /dev/null @@ -1,48 +0,0 @@ -package artifact - -import ( - "testing" - - "github.com/stretchr/testify/require" -) - -func TestCanonicalVersion(t *testing.T) { - tests := []struct { - title string - input string - want string - }{ - { - title: "good way", - input: "0.34.0", - want: "v0.34", - }, - { - title: "version with v - isn't right semver version", - input: "v0.34.0", - want: devVersion, - }, - { - title: "dev version", - input: devVersion, - want: devVersion, - }, - { - title: "pre-release", - input: "v0.34.0-beta1+snapshot-1", - want: devVersion, - }, - { - title: "no version", - input: "", - want: devVersion, - }, - } - - for _, test := range tests { - t.Run(test.title, func(t *testing.T) { - got := canonicalVersion(test.input) - require.Equal(t, test.want, got) - }) - } -} diff --git a/pkg/commands/artifact/scanner.go b/pkg/commands/artifact/scanner.go index cf7a58c52693..88430a09961b 100644 --- a/pkg/commands/artifact/scanner.go +++ b/pkg/commands/artifact/scanner.go @@ -11,8 +11,7 @@ import ( // imageStandaloneScanner initializes a container image scanner in standalone mode // $ trivy image alpine:3.15 func imageStandaloneScanner(ctx context.Context, conf ScannerConfig) (scanner.Scanner, func(), error) { - s, cleanup, err := initializeImageScanner(ctx, conf.Target, conf.ArtifactCache, conf.LocalArtifactCache, - conf.ArtifactOption.ImageOption, conf.ArtifactOption) + s, cleanup, err := initializeImageScanner(ctx, conf.Target, conf.ArtifactOption.ImageOption, conf.CacheOptions, conf.ArtifactOption) if err != nil { return scanner.Scanner{}, func() {}, xerrors.Errorf("unable to initialize an image scanner: %w", err) } @@ -22,18 +21,18 @@ func imageStandaloneScanner(ctx context.Context, conf ScannerConfig) (scanner.Sc // archiveStandaloneScanner initializes an image archive scanner in standalone mode // $ trivy image --input alpine.tar func archiveStandaloneScanner(ctx context.Context, conf ScannerConfig) (scanner.Scanner, func(), error) { - s, err := initializeArchiveScanner(ctx, conf.Target, conf.ArtifactCache, conf.LocalArtifactCache, conf.ArtifactOption) + s, cleanup, err := initializeArchiveScanner(ctx, conf.Target, conf.CacheOptions, conf.ArtifactOption) if err != nil { return scanner.Scanner{}, func() {}, xerrors.Errorf("unable to initialize the archive scanner: %w", err) } - return s, func() {}, nil + return s, cleanup, nil } // imageRemoteScanner initializes a container image scanner in client/server mode // $ trivy image --server localhost:4954 alpine:3.15 func imageRemoteScanner(ctx context.Context, conf ScannerConfig) ( scanner.Scanner, func(), error) { - s, cleanup, err := initializeRemoteImageScanner(ctx, conf.Target, conf.ArtifactCache, conf.ServerOption, + s, cleanup, err := initializeRemoteImageScanner(ctx, conf.Target, conf.RemoteCacheOptions, conf.ServerOption, conf.ArtifactOption.ImageOption, conf.ArtifactOption) if err != nil { return scanner.Scanner{}, nil, xerrors.Errorf("unable to initialize a remote image scanner: %w", err) @@ -45,16 +44,16 @@ func imageRemoteScanner(ctx context.Context, conf ScannerConfig) ( // $ trivy image --server 
localhost:4954 --input alpine.tar func archiveRemoteScanner(ctx context.Context, conf ScannerConfig) (scanner.Scanner, func(), error) { // Scan tar file - s, err := initializeRemoteArchiveScanner(ctx, conf.Target, conf.ArtifactCache, conf.ServerOption, conf.ArtifactOption) + s, cleanup, err := initializeRemoteArchiveScanner(ctx, conf.Target, conf.RemoteCacheOptions, conf.ServerOption, conf.ArtifactOption) if err != nil { return scanner.Scanner{}, nil, xerrors.Errorf("unable to initialize the remote archive scanner: %w", err) } - return s, func() {}, nil + return s, cleanup, nil } // filesystemStandaloneScanner initializes a filesystem scanner in standalone mode func filesystemStandaloneScanner(ctx context.Context, conf ScannerConfig) (scanner.Scanner, func(), error) { - s, cleanup, err := initializeFilesystemScanner(ctx, conf.Target, conf.ArtifactCache, conf.LocalArtifactCache, conf.ArtifactOption) + s, cleanup, err := initializeFilesystemScanner(ctx, conf.Target, conf.CacheOptions, conf.ArtifactOption) if err != nil { return scanner.Scanner{}, func() {}, xerrors.Errorf("unable to initialize a filesystem scanner: %w", err) } @@ -63,7 +62,7 @@ func filesystemStandaloneScanner(ctx context.Context, conf ScannerConfig) (scann // filesystemRemoteScanner initializes a filesystem scanner in client/server mode func filesystemRemoteScanner(ctx context.Context, conf ScannerConfig) (scanner.Scanner, func(), error) { - s, cleanup, err := initializeRemoteFilesystemScanner(ctx, conf.Target, conf.ArtifactCache, conf.ServerOption, conf.ArtifactOption) + s, cleanup, err := initializeRemoteFilesystemScanner(ctx, conf.Target, conf.RemoteCacheOptions, conf.ServerOption, conf.ArtifactOption) if err != nil { return scanner.Scanner{}, func() {}, xerrors.Errorf("unable to initialize a remote filesystem scanner: %w", err) } @@ -72,7 +71,7 @@ func filesystemRemoteScanner(ctx context.Context, conf ScannerConfig) (scanner.S // repositoryStandaloneScanner initializes a repository scanner in standalone mode func repositoryStandaloneScanner(ctx context.Context, conf ScannerConfig) (scanner.Scanner, func(), error) { - s, cleanup, err := initializeRepositoryScanner(ctx, conf.Target, conf.ArtifactCache, conf.LocalArtifactCache, conf.ArtifactOption) + s, cleanup, err := initializeRepositoryScanner(ctx, conf.Target, conf.CacheOptions, conf.ArtifactOption) if err != nil { return scanner.Scanner{}, func() {}, xerrors.Errorf("unable to initialize a repository scanner: %w", err) } @@ -81,7 +80,7 @@ func repositoryStandaloneScanner(ctx context.Context, conf ScannerConfig) (scann // repositoryRemoteScanner initializes a repository scanner in client/server mode func repositoryRemoteScanner(ctx context.Context, conf ScannerConfig) (scanner.Scanner, func(), error) { - s, cleanup, err := initializeRemoteRepositoryScanner(ctx, conf.Target, conf.ArtifactCache, conf.ServerOption, + s, cleanup, err := initializeRemoteRepositoryScanner(ctx, conf.Target, conf.RemoteCacheOptions, conf.ServerOption, conf.ArtifactOption) if err != nil { return scanner.Scanner{}, func() {}, xerrors.Errorf("unable to initialize a remote repository scanner: %w", err) @@ -91,7 +90,7 @@ func repositoryRemoteScanner(ctx context.Context, conf ScannerConfig) (scanner.S // sbomStandaloneScanner initializes a SBOM scanner in standalone mode func sbomStandaloneScanner(ctx context.Context, conf ScannerConfig) (scanner.Scanner, func(), error) { - s, cleanup, err := initializeSBOMScanner(ctx, conf.Target, conf.ArtifactCache, conf.LocalArtifactCache, conf.ArtifactOption) + 
s, cleanup, err := initializeSBOMScanner(ctx, conf.Target, conf.CacheOptions, conf.ArtifactOption) if err != nil { return scanner.Scanner{}, func() {}, xerrors.Errorf("unable to initialize a cycloneDX scanner: %w", err) } @@ -100,7 +99,7 @@ func sbomStandaloneScanner(ctx context.Context, conf ScannerConfig) (scanner.Sca // sbomRemoteScanner initializes a SBOM scanner in client/server mode func sbomRemoteScanner(ctx context.Context, conf ScannerConfig) (scanner.Scanner, func(), error) { - s, cleanup, err := initializeRemoteSBOMScanner(ctx, conf.Target, conf.ArtifactCache, conf.ServerOption, conf.ArtifactOption) + s, cleanup, err := initializeRemoteSBOMScanner(ctx, conf.Target, conf.RemoteCacheOptions, conf.ServerOption, conf.ArtifactOption) if err != nil { return scanner.Scanner{}, func() {}, xerrors.Errorf("unable to initialize a remote cycloneDX scanner: %w", err) } @@ -109,7 +108,7 @@ func sbomRemoteScanner(ctx context.Context, conf ScannerConfig) (scanner.Scanner // vmStandaloneScanner initializes a VM scanner in standalone mode func vmStandaloneScanner(ctx context.Context, conf ScannerConfig) (scanner.Scanner, func(), error) { - s, cleanup, err := initializeVMScanner(ctx, conf.Target, conf.ArtifactCache, conf.LocalArtifactCache, conf.ArtifactOption) + s, cleanup, err := initializeVMScanner(ctx, conf.Target, conf.CacheOptions, conf.ArtifactOption) if err != nil { return scanner.Scanner{}, func() {}, xerrors.Errorf("unable to initialize a vm scanner: %w", err) } @@ -118,7 +117,7 @@ func vmStandaloneScanner(ctx context.Context, conf ScannerConfig) (scanner.Scann // vmRemoteScanner initializes a VM scanner in client/server mode func vmRemoteScanner(ctx context.Context, conf ScannerConfig) (scanner.Scanner, func(), error) { - s, cleanup, err := initializeRemoteVMScanner(ctx, conf.Target, conf.ArtifactCache, conf.ServerOption, conf.ArtifactOption) + s, cleanup, err := initializeRemoteVMScanner(ctx, conf.Target, conf.RemoteCacheOptions, conf.ServerOption, conf.ArtifactOption) if err != nil { return scanner.Scanner{}, func() {}, xerrors.Errorf("unable to initialize a remote vm scanner: %w", err) } diff --git a/pkg/commands/artifact/wire_gen.go b/pkg/commands/artifact/wire_gen.go index 5e12c10e54d8..f47c0b0f5649 100644 --- a/pkg/commands/artifact/wire_gen.go +++ b/pkg/commands/artifact/wire_gen.go @@ -9,6 +9,7 @@ package artifact import ( "context" "github.com/aquasecurity/trivy-db/pkg/db" + "github.com/aquasecurity/trivy/pkg/cache" "github.com/aquasecurity/trivy/pkg/fanal/applier" "github.com/aquasecurity/trivy/pkg/fanal/artifact" image2 "github.com/aquasecurity/trivy/pkg/fanal/artifact/image" @@ -16,7 +17,6 @@ import ( "github.com/aquasecurity/trivy/pkg/fanal/artifact/repo" "github.com/aquasecurity/trivy/pkg/fanal/artifact/sbom" "github.com/aquasecurity/trivy/pkg/fanal/artifact/vm" - "github.com/aquasecurity/trivy/pkg/fanal/cache" "github.com/aquasecurity/trivy/pkg/fanal/image" "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/fanal/walker" @@ -32,32 +32,43 @@ import ( // initializeImageScanner is for container image scanning in standalone mode // e.g. dockerd, container registry, podman, etc. 
-func initializeImageScanner(ctx context.Context, imageName string, artifactCache cache.ArtifactCache, localArtifactCache cache.LocalArtifactCache, imageOpt types.ImageOptions, artifactOption artifact.Option) (scanner.Scanner, func(), error) {
-    applierApplier := applier.NewApplier(localArtifactCache)
+func initializeImageScanner(ctx context.Context, imageName string, imageOpt types.ImageOptions, cacheOptions cache.Options, artifactOption artifact.Option) (scanner.Scanner, func(), error) {
+    cacheCache, cleanup, err := cache.New(cacheOptions)
+    if err != nil {
+        return scanner.Scanner{}, nil, err
+    }
+    applierApplier := applier.NewApplier(cacheCache)
     ospkgScanner := ospkg.NewScanner()
     langpkgScanner := langpkg.NewScanner()
     config := db.Config{}
     client := vulnerability.NewClient(config)
     localScanner := local.NewScanner(applierApplier, ospkgScanner, langpkgScanner, client)
-    typesImage, cleanup, err := image.NewContainerImage(ctx, imageName, imageOpt)
+    typesImage, cleanup2, err := image.NewContainerImage(ctx, imageName, imageOpt)
     if err != nil {
+        cleanup()
         return scanner.Scanner{}, nil, err
     }
-    artifactArtifact, err := image2.NewArtifact(typesImage, artifactCache, artifactOption)
+    artifactArtifact, err := image2.NewArtifact(typesImage, cacheCache, artifactOption)
     if err != nil {
+        cleanup2()
         cleanup()
         return scanner.Scanner{}, nil, err
     }
     scannerScanner := scanner.NewScanner(localScanner, artifactArtifact)
     return scannerScanner, func() {
+        cleanup2()
         cleanup()
     }, nil
 }
 
 // initializeArchiveScanner is for container image archive scanning in standalone mode
 // e.g. docker save -o alpine.tar alpine:3.15
-func initializeArchiveScanner(ctx context.Context, filePath string, artifactCache cache.ArtifactCache, localArtifactCache cache.LocalArtifactCache, artifactOption artifact.Option) (scanner.Scanner, error) {
-    applierApplier := applier.NewApplier(localArtifactCache)
+func initializeArchiveScanner(ctx context.Context, filePath string, cacheOptions cache.Options, artifactOption artifact.Option) (scanner.Scanner, func(), error) {
+    cacheCache, cleanup, err := cache.New(cacheOptions)
+    if err != nil {
+        return scanner.Scanner{}, nil, err
+    }
+    applierApplier := applier.NewApplier(cacheCache)
     ospkgScanner := ospkg.NewScanner()
     langpkgScanner := langpkg.NewScanner()
     config := db.Config{}
@@ -65,95 +76,124 @@ func initializeArchiveScanner(ctx context.Context, filePath string, artifactCach
     localScanner := local.NewScanner(applierApplier, ospkgScanner, langpkgScanner, client)
     typesImage, err := image.NewArchiveImage(filePath)
     if err != nil {
-        return scanner.Scanner{}, err
+        cleanup()
+        return scanner.Scanner{}, nil, err
     }
-    artifactArtifact, err := image2.NewArtifact(typesImage, artifactCache, artifactOption)
+    artifactArtifact, err := image2.NewArtifact(typesImage, cacheCache, artifactOption)
     if err != nil {
-        return scanner.Scanner{}, err
+        cleanup()
+        return scanner.Scanner{}, nil, err
     }
     scannerScanner := scanner.NewScanner(localScanner, artifactArtifact)
-    return scannerScanner, nil
+    return scannerScanner, func() {
+        cleanup()
+    }, nil
 }
 
 // initializeFilesystemScanner is for filesystem scanning in standalone mode
-func initializeFilesystemScanner(ctx context.Context, path string, artifactCache cache.ArtifactCache, localArtifactCache cache.LocalArtifactCache, artifactOption artifact.Option) (scanner.Scanner, func(), error) {
-    applierApplier := applier.NewApplier(localArtifactCache)
+func initializeFilesystemScanner(ctx context.Context, path string, cacheOptions cache.Options, artifactOption artifact.Option) (scanner.Scanner, func(), error) {
+    cacheCache, cleanup, err := cache.New(cacheOptions)
+    if err != nil {
+        return scanner.Scanner{}, nil, err
+    }
+    applierApplier := applier.NewApplier(cacheCache)
     ospkgScanner := ospkg.NewScanner()
     langpkgScanner := langpkg.NewScanner()
     config := db.Config{}
     client := vulnerability.NewClient(config)
     localScanner := local.NewScanner(applierApplier, ospkgScanner, langpkgScanner, client)
     fs := walker.NewFS()
-    artifactArtifact, err := local2.NewArtifact(path, artifactCache, fs, artifactOption)
+    artifactArtifact, err := local2.NewArtifact(path, cacheCache, fs, artifactOption)
     if err != nil {
+        cleanup()
         return scanner.Scanner{}, nil, err
     }
     scannerScanner := scanner.NewScanner(localScanner, artifactArtifact)
     return scannerScanner, func() {
+        cleanup()
     }, nil
 }
 
-func initializeRepositoryScanner(ctx context.Context, url string, artifactCache cache.ArtifactCache, localArtifactCache cache.LocalArtifactCache, artifactOption artifact.Option) (scanner.Scanner, func(), error) {
-    applierApplier := applier.NewApplier(localArtifactCache)
+func initializeRepositoryScanner(ctx context.Context, url string, cacheOptions cache.Options, artifactOption artifact.Option) (scanner.Scanner, func(), error) {
+    cacheCache, cleanup, err := cache.New(cacheOptions)
+    if err != nil {
+        return scanner.Scanner{}, nil, err
+    }
+    applierApplier := applier.NewApplier(cacheCache)
     ospkgScanner := ospkg.NewScanner()
     langpkgScanner := langpkg.NewScanner()
     config := db.Config{}
     client := vulnerability.NewClient(config)
     localScanner := local.NewScanner(applierApplier, ospkgScanner, langpkgScanner, client)
     fs := walker.NewFS()
-    artifactArtifact, cleanup, err := repo.NewArtifact(url, artifactCache, fs, artifactOption)
+    artifactArtifact, cleanup2, err := repo.NewArtifact(url, cacheCache, fs, artifactOption)
     if err != nil {
+        cleanup()
         return scanner.Scanner{}, nil, err
     }
     scannerScanner := scanner.NewScanner(localScanner, artifactArtifact)
     return scannerScanner, func() {
+        cleanup2()
         cleanup()
     }, nil
 }
 
-func initializeSBOMScanner(ctx context.Context, filePath string, artifactCache cache.ArtifactCache, localArtifactCache cache.LocalArtifactCache, artifactOption artifact.Option) (scanner.Scanner, func(), error) {
-    applierApplier := applier.NewApplier(localArtifactCache)
+func initializeSBOMScanner(ctx context.Context, filePath string, cacheOptions cache.Options, artifactOption artifact.Option) (scanner.Scanner, func(), error) {
+    cacheCache, cleanup, err := cache.New(cacheOptions)
+    if err != nil {
+        return scanner.Scanner{}, nil, err
+    }
+    applierApplier := applier.NewApplier(cacheCache)
     ospkgScanner := ospkg.NewScanner()
     langpkgScanner := langpkg.NewScanner()
     config := db.Config{}
     client := vulnerability.NewClient(config)
     localScanner := local.NewScanner(applierApplier, ospkgScanner, langpkgScanner, client)
-    artifactArtifact, err := sbom.NewArtifact(filePath, artifactCache, artifactOption)
+    artifactArtifact, err := sbom.NewArtifact(filePath, cacheCache, artifactOption)
     if err != nil {
+        cleanup()
         return scanner.Scanner{}, nil, err
     }
     scannerScanner := scanner.NewScanner(localScanner, artifactArtifact)
     return scannerScanner, func() {
+        cleanup()
     }, nil
 }
 
-func initializeVMScanner(ctx context.Context, filePath string, artifactCache cache.ArtifactCache, localArtifactCache cache.LocalArtifactCache, artifactOption artifact.Option) (scanner.Scanner, func(), error) {
-    applierApplier := applier.NewApplier(localArtifactCache)
+func initializeVMScanner(ctx context.Context, filePath string, cacheOptions cache.Options, artifactOption artifact.Option) (scanner.Scanner, func(), error) {
+    cacheCache, cleanup, err := cache.New(cacheOptions)
+    if err != nil {
+        return scanner.Scanner{}, nil, err
+    }
+    applierApplier := applier.NewApplier(cacheCache)
     ospkgScanner := ospkg.NewScanner()
     langpkgScanner := langpkg.NewScanner()
     config := db.Config{}
     client := vulnerability.NewClient(config)
     localScanner := local.NewScanner(applierApplier, ospkgScanner, langpkgScanner, client)
     walkerVM := walker.NewVM()
-    artifactArtifact, err := vm.NewArtifact(filePath, artifactCache, walkerVM, artifactOption)
+    artifactArtifact, err := vm.NewArtifact(filePath, cacheCache, walkerVM, artifactOption)
     if err != nil {
+        cleanup()
         return scanner.Scanner{}, nil, err
     }
     scannerScanner := scanner.NewScanner(localScanner, artifactArtifact)
     return scannerScanner, func() {
+        cleanup()
     }, nil
 }
 
 // initializeRemoteImageScanner is for container image scanning in client/server mode
 // e.g. dockerd, container registry, podman, etc.
-func initializeRemoteImageScanner(ctx context.Context, imageName string, artifactCache cache.ArtifactCache, remoteScanOptions client.ScannerOption, imageOpt types.ImageOptions, artifactOption artifact.Option) (scanner.Scanner, func(), error) {
+func initializeRemoteImageScanner(ctx context.Context, imageName string, remoteCacheOptions cache.RemoteOptions, remoteScanOptions client.ScannerOption, imageOpt types.ImageOptions, artifactOption artifact.Option) (scanner.Scanner, func(), error) {
     v := _wireValue
     clientScanner := client.NewScanner(remoteScanOptions, v...)
     typesImage, cleanup, err := image.NewContainerImage(ctx, imageName, imageOpt)
     if err != nil {
         return scanner.Scanner{}, nil, err
     }
-    artifactArtifact, err := image2.NewArtifact(typesImage, artifactCache, artifactOption)
+    remoteCache := cache.NewRemoteCache(remoteCacheOptions)
+    artifactArtifact, err := image2.NewArtifact(typesImage, remoteCache, artifactOption)
     if err != nil {
         cleanup()
         return scanner.Scanner{}, nil, err
@@ -170,27 +210,30 @@ var (
 
 // initializeRemoteArchiveScanner is for container image archive scanning in client/server mode
 // e.g. docker save -o alpine.tar alpine:3.15
-func initializeRemoteArchiveScanner(ctx context.Context, filePath string, artifactCache cache.ArtifactCache, remoteScanOptions client.ScannerOption, artifactOption artifact.Option) (scanner.Scanner, error) {
+func initializeRemoteArchiveScanner(ctx context.Context, filePath string, remoteCacheOptions cache.RemoteOptions, remoteScanOptions client.ScannerOption, artifactOption artifact.Option) (scanner.Scanner, func(), error) {
     v := _wireValue
     clientScanner := client.NewScanner(remoteScanOptions, v...)
     typesImage, err := image.NewArchiveImage(filePath)
     if err != nil {
-        return scanner.Scanner{}, err
+        return scanner.Scanner{}, nil, err
     }
-    artifactArtifact, err := image2.NewArtifact(typesImage, artifactCache, artifactOption)
+    remoteCache := cache.NewRemoteCache(remoteCacheOptions)
+    artifactArtifact, err := image2.NewArtifact(typesImage, remoteCache, artifactOption)
     if err != nil {
-        return scanner.Scanner{}, err
+        return scanner.Scanner{}, nil, err
     }
     scannerScanner := scanner.NewScanner(clientScanner, artifactArtifact)
-    return scannerScanner, nil
+    return scannerScanner, func() {
+    }, nil
 }
 
 // initializeRemoteFilesystemScanner is for filesystem scanning in client/server mode
-func initializeRemoteFilesystemScanner(ctx context.Context, path string, artifactCache cache.ArtifactCache, remoteScanOptions client.ScannerOption, artifactOption artifact.Option) (scanner.Scanner, func(), error) {
+func initializeRemoteFilesystemScanner(ctx context.Context, path string, remoteCacheOptions cache.RemoteOptions, remoteScanOptions client.ScannerOption, artifactOption artifact.Option) (scanner.Scanner, func(), error) {
     v := _wireValue
     clientScanner := client.NewScanner(remoteScanOptions, v...)
+    remoteCache := cache.NewRemoteCache(remoteCacheOptions)
     fs := walker.NewFS()
-    artifactArtifact, err := local2.NewArtifact(path, artifactCache, fs, artifactOption)
+    artifactArtifact, err := local2.NewArtifact(path, remoteCache, fs, artifactOption)
     if err != nil {
         return scanner.Scanner{}, nil, err
     }
@@ -200,11 +243,12 @@ func initializeRemoteFilesystemScanner(ctx context.Context, path string, artifac
 }
 
 // initializeRemoteRepositoryScanner is for repository scanning in client/server mode
-func initializeRemoteRepositoryScanner(ctx context.Context, url string, artifactCache cache.ArtifactCache, remoteScanOptions client.ScannerOption, artifactOption artifact.Option) (scanner.Scanner, func(), error) {
+func initializeRemoteRepositoryScanner(ctx context.Context, url string, remoteCacheOptions cache.RemoteOptions, remoteScanOptions client.ScannerOption, artifactOption artifact.Option) (scanner.Scanner, func(), error) {
     v := _wireValue
     clientScanner := client.NewScanner(remoteScanOptions, v...)
+    remoteCache := cache.NewRemoteCache(remoteCacheOptions)
     fs := walker.NewFS()
-    artifactArtifact, cleanup, err := repo.NewArtifact(url, artifactCache, fs, artifactOption)
+    artifactArtifact, cleanup, err := repo.NewArtifact(url, remoteCache, fs, artifactOption)
     if err != nil {
         return scanner.Scanner{}, nil, err
     }
@@ -215,10 +259,11 @@ func initializeRemoteRepositoryScanner(ctx context.Context, url string, artifact
 }
 
 // initializeRemoteSBOMScanner is for sbom scanning in client/server mode
-func initializeRemoteSBOMScanner(ctx context.Context, path string, artifactCache cache.ArtifactCache, remoteScanOptions client.ScannerOption, artifactOption artifact.Option) (scanner.Scanner, func(), error) {
+func initializeRemoteSBOMScanner(ctx context.Context, path string, remoteCacheOptions cache.RemoteOptions, remoteScanOptions client.ScannerOption, artifactOption artifact.Option) (scanner.Scanner, func(), error) {
     v := _wireValue
     clientScanner := client.NewScanner(remoteScanOptions, v...)
-    artifactArtifact, err := sbom.NewArtifact(path, artifactCache, artifactOption)
+    remoteCache := cache.NewRemoteCache(remoteCacheOptions)
+    artifactArtifact, err := sbom.NewArtifact(path, remoteCache, artifactOption)
     if err != nil {
         return scanner.Scanner{}, nil, err
     }
@@ -228,11 +273,12 @@ func initializeRemoteSBOMScanner(ctx context.Context, path string, artifactCache
 }
 
 // initializeRemoteVMScanner is for vm scanning in client/server mode
-func initializeRemoteVMScanner(ctx context.Context, path string, artifactCache cache.ArtifactCache, remoteScanOptions client.ScannerOption, artifactOption artifact.Option) (scanner.Scanner, func(), error) {
+func initializeRemoteVMScanner(ctx context.Context, path string, remoteCacheOptions cache.RemoteOptions, remoteScanOptions client.ScannerOption, artifactOption artifact.Option) (scanner.Scanner, func(), error) {
     v := _wireValue
     clientScanner := client.NewScanner(remoteScanOptions, v...)
+    remoteCache := cache.NewRemoteCache(remoteCacheOptions)
     walkerVM := walker.NewVM()
-    artifactArtifact, err := vm.NewArtifact(path, artifactCache, walkerVM, artifactOption)
+    artifactArtifact, err := vm.NewArtifact(path, remoteCache, walkerVM, artifactOption)
     if err != nil {
         return scanner.Scanner{}, nil, err
     }
diff --git a/pkg/commands/clean/run.go b/pkg/commands/clean/run.go
new file mode 100644
index 000000000000..fb20799a571b
--- /dev/null
+++ b/pkg/commands/clean/run.go
@@ -0,0 +1,104 @@
+package clean
+
+import (
+    "context"
+    "os"
+
+    "golang.org/x/xerrors"
+
+    "github.com/aquasecurity/trivy/pkg/cache"
+    "github.com/aquasecurity/trivy/pkg/db"
+    "github.com/aquasecurity/trivy/pkg/flag"
+    "github.com/aquasecurity/trivy/pkg/javadb"
+    "github.com/aquasecurity/trivy/pkg/log"
+    "github.com/aquasecurity/trivy/pkg/policy"
+)
+
+func Run(ctx context.Context, opts flag.Options) error {
+    ctx, cancel := context.WithTimeout(ctx, opts.Timeout)
+    defer cancel()
+
+    if !opts.CleanAll && !opts.CleanScanCache && !opts.CleanVulnerabilityDB && !opts.CleanJavaDB && !opts.CleanChecksBundle {
+        return xerrors.New("no clean option is specified")
+    }
+
+    if opts.CleanAll {
+        return cleanAll(ctx, opts)
+    }
+
+    if opts.CleanScanCache {
+        if err := cleanScanCache(ctx, opts); err != nil {
+            return xerrors.Errorf("failed to remove scan cache : %w", err)
+        }
+    }
+
+    if opts.CleanVulnerabilityDB {
+        if err := cleanVulnerabilityDB(ctx, opts); err != nil {
+            return xerrors.Errorf("vuln db clean error: %w", err)
+        }
+    }
+
+    if opts.CleanJavaDB {
+        if err := cleanJavaDB(ctx, opts); err != nil {
+            return xerrors.Errorf("java db clean error: %w", err)
+        }
+    }
+
+    if opts.CleanChecksBundle {
+        if err := cleanCheckBundle(opts); err != nil {
+            return xerrors.Errorf("check bundle clean error: %w", err)
+        }
+    }
+    return nil
+}
+
+func cleanAll(ctx context.Context, opts flag.Options) error {
+    log.InfoContext(ctx, "Removing all caches...")
+    if err := os.RemoveAll(opts.CacheDir); err != nil {
+        return xerrors.Errorf("failed to remove the directory (%s) : %w", opts.CacheDir, err)
+    }
+    return nil
+}
+
+func cleanScanCache(ctx context.Context, opts flag.Options) error {
+    log.InfoContext(ctx, "Removing scan cache...")
+    c, cleanup, err := cache.New(opts.CacheOpts())
+    if err != nil {
+        return xerrors.Errorf("failed to instantiate cache client: %w", err)
+    }
+    defer cleanup()
+
+    if err = c.Clear(); err != nil {
+        return xerrors.Errorf("clear scan cache: %w", err)
+    }
+    return nil
+}
+
+func cleanVulnerabilityDB(ctx context.Context, opts flag.Options) error {
+    log.InfoContext(ctx, "Removing vulnerability database...")
+    if err := db.NewClient(opts.CacheDir, true).Clear(ctx); err != nil {
+        return xerrors.Errorf("clear vulnerability database: %w", err)
+
+    }
+    return nil
+}
+
+func cleanJavaDB(ctx context.Context, opts flag.Options) error {
+    log.InfoContext(ctx, "Removing Java database...")
+    if err := javadb.Clear(ctx, opts.CacheDir); err != nil {
+        return xerrors.Errorf("clear Java database: %w", err)
+    }
+    return nil
+}
+
+func cleanCheckBundle(opts flag.Options) error {
+    log.Info("Removing check bundle...")
+    c, err := policy.NewClient(opts.CacheDir, true, opts.MisconfOptions.ChecksBundleRepository)
+    if err != nil {
+        return xerrors.Errorf("failed to instantiate check client: %w", err)
+    }
+    if err := c.Clear(); err != nil {
+        return xerrors.Errorf("clear check bundle: %w", err)
+    }
+    return nil
+}
diff --git a/pkg/commands/clean/run_test.go b/pkg/commands/clean/run_test.go
new file mode 100644
index 000000000000..9b301d238219
--- /dev/null
+++ b/pkg/commands/clean/run_test.go
@@ -0,0 +1,139 @@
+package clean_test
+
+import (
+    "context"
+    "os"
+    "path/filepath"
+    "testing"
+
+    "github.com/stretchr/testify/assert"
+    "github.com/stretchr/testify/require"
+
+    "github.com/aquasecurity/trivy/pkg/cache"
+    "github.com/aquasecurity/trivy/pkg/commands/clean"
+    "github.com/aquasecurity/trivy/pkg/flag"
+)
+
+func TestRun(t *testing.T) {
+    tests := []struct {
+        name      string
+        cleanOpts flag.CleanOptions
+        wantErr   bool
+        checkFunc func(*testing.T, string)
+    }{
+        {
+            name: "clean all",
+            cleanOpts: flag.CleanOptions{
+                CleanAll: true,
+            },
+            wantErr: false,
+            checkFunc: func(t *testing.T, dir string) {
+                assert.NoDirExists(t, dir)
+            },
+        },
+        {
+            name: "clean scan cache",
+            cleanOpts: flag.CleanOptions{
+                CleanScanCache: true,
+            },
+            wantErr: false,
+            checkFunc: func(t *testing.T, dir string) {
+                assert.NoDirExists(t, filepath.Join(dir, "fanal"))
+                assert.DirExists(t, filepath.Join(dir, "db"))
+                assert.DirExists(t, filepath.Join(dir, "java-db"))
+                assert.DirExists(t, filepath.Join(dir, "policy"))
+            },
+        },
+        {
+            name: "clean vulnerability DB",
+            cleanOpts: flag.CleanOptions{
+                CleanVulnerabilityDB: true,
+            },
+            wantErr: false,
+            checkFunc: func(t *testing.T, dir string) {
+                assert.NoDirExists(t, filepath.Join(dir, "db"))
+                assert.DirExists(t, filepath.Join(dir, "fanal"))
+                assert.DirExists(t, filepath.Join(dir, "java-db"))
+                assert.DirExists(t, filepath.Join(dir, "policy"))
+            },
+        },
+        {
+            name: "clean Java DB",
+            cleanOpts: flag.CleanOptions{
+                CleanJavaDB: true,
+            },
+            wantErr: false,
+            checkFunc: func(t *testing.T, dir string) {
+                assert.NoDirExists(t, filepath.Join(dir, "java-db"))
+                assert.DirExists(t, filepath.Join(dir, "fanal"))
+                assert.DirExists(t, filepath.Join(dir, "db"))
+                assert.DirExists(t, filepath.Join(dir, "policy"))
+            },
+        },
+        {
+            name: "clean check bundle",
+            cleanOpts: flag.CleanOptions{
+                CleanChecksBundle: true,
+            },
+            wantErr: false,
+            checkFunc: func(t *testing.T, dir string) {
+                assert.NoDirExists(t, filepath.Join(dir, "policy"))
+                assert.DirExists(t, filepath.Join(dir, "fanal"))
+                assert.DirExists(t, filepath.Join(dir, "db"))
+                assert.DirExists(t, filepath.Join(dir, "java-db"))
+            },
+        },
+        {
+            name:    "no clean option specified",
+            wantErr: true,
+        },
+    }
+
+    for _, tt := range tests {
+        t.Run(tt.name, func(t *testing.T) {
+            // Create a temporary directory for testing
+            tempDir := t.TempDir()
+
+            // Create test directories and files
+            createTestFiles(t, tempDir)
+
+            opts := flag.Options{
+                GlobalOptions: flag.GlobalOptions{
+                    CacheDir: tempDir,
+                },
+                CacheOptions: flag.CacheOptions{
+                    CacheBackend: string(cache.TypeFS),
+                },
+                CleanOptions: tt.cleanOpts,
+            }
+
+            err := clean.Run(context.Background(), opts)
+
+            if tt.wantErr {
+                assert.Error(t, err)
+                return
+            }
+            require.NoError(t, err)
+            if tt.checkFunc != nil {
+                tt.checkFunc(t, tempDir)
+            }
+        })
+    }
+}
+
+func createTestFiles(t *testing.T, dir string) {
+    subdirs := []string{
+        "fanal",
+        "db",
+        "java-db",
+        "policy",
+    }
+    for _, subdir := range subdirs {
+        err := os.MkdirAll(filepath.Join(dir, subdir), 0755)
+        require.NoError(t, err)
+
+        testFile := filepath.Join(dir, subdir, "testfile.txt")
+        err = os.WriteFile(testFile, []byte("test content"), 0644)
+        require.NoError(t, err)
+    }
+}
diff --git a/pkg/commands/convert/run.go b/pkg/commands/convert/run.go
index 428d6b5b0b4b..584bc6f6bddc 100644
--- a/pkg/commands/convert/run.go
+++ b/pkg/commands/convert/run.go
@@ -8,6 +8,8 @@ import (
     "golang.org/x/xerrors"
 
     "github.com/aquasecurity/trivy/pkg/commands/operation"
+    "github.com/aquasecurity/trivy/pkg/dependency"
+    ftypes "github.com/aquasecurity/trivy/pkg/fanal/types"
     "github.com/aquasecurity/trivy/pkg/flag"
     "github.com/aquasecurity/trivy/pkg/log"
     "github.com/aquasecurity/trivy/pkg/report"
@@ -35,6 +37,7 @@ func Run(ctx context.Context, opts flag.Options) (err error) {
         return xerrors.New("AWS and Kubernetes scanning reports are not yet supported")
     }
 
+    compat(&r)
     if err = result.Filter(ctx, r, opts.FilterOpts()); err != nil {
         return xerrors.Errorf("unable to filter results: %w", err)
     }
@@ -46,3 +49,30 @@ func Run(ctx context.Context, opts flag.Options) (err error) {
 
     return operation.Exit(opts, r.Results.Failed(), r.Metadata)
 }
+
+// compat converts the JSON report to the latest format
+func compat(r *types.Report) {
+    for i, res := range r.Results {
+        pkgs := make(map[string]ftypes.Package, len(res.Packages))
+        for j, pkg := range res.Packages {
+            if pkg.Identifier.UID != "" {
+                continue
+            }
+            // Fill in the UID field since older JSON reports don't have it
+            pkg.Identifier.UID = dependency.UID(res.Target, pkg)
+            pkgs[pkg.ID+pkg.FilePath] = pkg
+            r.Results[i].Packages[j] = pkg
+        }
+
+        for j, vuln := range res.Vulnerabilities {
+            if vuln.PkgIdentifier.UID != "" {
+                continue
+            }
+            if pkg, ok := pkgs[vuln.PkgID+vuln.PkgPath]; !ok {
+                continue
+            } else {
+                r.Results[i].Vulnerabilities[j].PkgIdentifier = pkg.Identifier
+            }
+        }
+    }
+}
diff --git a/pkg/commands/operation/operation.go b/pkg/commands/operation/operation.go
index 84783ba073ab..63946710f1b2 100644
--- a/pkg/commands/operation/operation.go
+++ b/pkg/commands/operation/operation.go
@@ -2,114 +2,22 @@ package operation
 
 import (
     "context"
-    "crypto/tls"
-    "crypto/x509"
-    "os"
-    "strings"
     "sync"
 
-    "github.com/go-redis/redis/v8"
     "github.com/google/go-containerregistry/pkg/name"
-    "github.com/google/wire"
-    "github.com/samber/lo"
     "golang.org/x/xerrors"
 
     "github.com/aquasecurity/trivy-db/pkg/metadata"
     "github.com/aquasecurity/trivy/pkg/db"
-    "github.com/aquasecurity/trivy/pkg/fanal/cache"
     ftypes "github.com/aquasecurity/trivy/pkg/fanal/types"
     "github.com/aquasecurity/trivy/pkg/flag"
     "github.com/aquasecurity/trivy/pkg/log"
     "github.com/aquasecurity/trivy/pkg/policy"
     "github.com/aquasecurity/trivy/pkg/types"
-    "github.com/aquasecurity/trivy/pkg/utils/fsutils"
 )
 
 var mu sync.Mutex
 
-// SuperSet binds cache dependencies
-var SuperSet = wire.NewSet(
-    cache.NewFSCache,
-    wire.Bind(new(cache.LocalArtifactCache), new(cache.FSCache)),
-    NewCache,
-)
-
-// Cache implements the local cache
-type Cache struct {
-    cache.Cache
-}
-
-// NewCache is the factory method for Cache
-func NewCache(c flag.CacheOptions) (Cache, error) {
-    if strings.HasPrefix(c.CacheBackend, "redis://") {
-        log.Info("Redis cache", log.String("url", c.CacheBackendMasked()))
-        options, err := redis.ParseURL(c.CacheBackend)
-        if err != nil {
-            return Cache{}, err
-        }
-
-        if !lo.IsEmpty(c.RedisOptions) {
-            caCert, cert, err := GetTLSConfig(c.RedisCACert, c.RedisCert, c.RedisKey)
-            if err != nil {
-                return Cache{}, err
-            }
-
-            options.TLSConfig = &tls.Config{
-                RootCAs:      caCert,
-                Certificates: []tls.Certificate{cert},
-                MinVersion:   tls.VersionTLS12,
-            }
-        } else if c.RedisTLS {
-            options.TLSConfig = &tls.Config{
-                MinVersion: tls.VersionTLS12,
-            }
-        }
-
-        redisCache := cache.NewRedisCache(options, c.CacheTTL)
-        return Cache{Cache: redisCache}, nil
-    }
-
-    if c.CacheTTL != 0 {
-        log.Warn("'--cache-ttl' is only available with Redis cache backend")
-    }
-
-    // standalone mode
-    fsCache, err := cache.NewFSCache(fsutils.CacheDir())
-    if err != nil {
-        return Cache{}, xerrors.Errorf("unable to initialize fs cache: %w", err)
-    }
-    return Cache{Cache: fsCache}, nil
-}
-
-// Reset resets the cache
-func (c Cache) Reset() (err error) {
-    if err := c.ClearDB(); err != nil {
-        return xerrors.Errorf("failed to clear the database: %w", err)
-    }
-    if err := c.ClearArtifacts(); err != nil {
-        return xerrors.Errorf("failed to clear the artifact cache: %w", err)
-    }
-    return nil
-}
-
-// ClearDB clears the DB cache
-func (c Cache) ClearDB() (err error) {
-    log.Info("Removing DB file...")
-    if err = os.RemoveAll(fsutils.CacheDir()); err != nil {
-        return xerrors.Errorf("failed to remove the directory (%s) : %w", fsutils.CacheDir(), err)
-    }
-    return nil
-}
-
-// ClearArtifacts clears the artifact cache
-func (c Cache) ClearArtifacts() error {
-    log.Info("Removing artifact caches...")
-    if err := c.Clear(); err != nil {
-        return xerrors.Errorf("failed to remove the cache: %w", err)
-    }
-    return nil
-}
-
 // DownloadDB downloads the DB
 func DownloadDB(ctx context.Context, appVersion, cacheDir string, dbRepository name.Reference, quiet, skipUpdate bool, opt ftypes.RegistryOptions) error {
@@ -117,7 +25,7 @@ func DownloadDB(ctx context.Context, appVersion, cacheDir string, dbRepository n
     defer mu.Unlock()
 
     client := db.NewClient(cacheDir, quiet, db.WithDBRepository(dbRepository))
-    needsUpdate, err := client.NeedsUpdate(appVersion, skipUpdate)
+    needsUpdate, err := client.NeedsUpdate(ctx, appVersion, skipUpdate)
     if err != nil {
         return xerrors.Errorf("database error: %w", err)
     }
@@ -186,24 +94,6 @@ func InitBuiltinPolicies(ctx context.Context, cacheDir string, quiet, skipUpdate
     return policyPaths, nil
 }
 
-// GetTLSConfig gets tls config from CA, Cert and Key file
-func GetTLSConfig(caCertPath, certPath, keyPath string) (*x509.CertPool, tls.Certificate, error) {
-    cert, err := tls.LoadX509KeyPair(certPath, keyPath)
-    if err != nil {
-        return nil, tls.Certificate{}, err
-    }
-
-    caCert, err := os.ReadFile(caCertPath)
-    if err != nil {
-        return nil, tls.Certificate{}, err
-    }
-
-    caCertPool := x509.NewCertPool()
-    caCertPool.AppendCertsFromPEM(caCert)
-
-    return caCertPool, cert, nil
-}
-
 func Exit(opts flag.Options, failedResults bool, m types.Metadata) error {
     if opts.ExitOnEOL != 0 && m.OS != nil && m.OS.Eosl {
         log.Error("Detected EOL OS", log.String("family", string(m.OS.Family)),
diff --git a/pkg/commands/server/run.go b/pkg/commands/server/run.go
index 70788db6a6f3..c5f7b0da2f0b 100644
--- a/pkg/commands/server/run.go
+++ b/pkg/commands/server/run.go
@@ -6,12 +6,12 @@ import (
     "golang.org/x/xerrors"
 
     "github.com/aquasecurity/trivy-db/pkg/db"
+    "github.com/aquasecurity/trivy/pkg/cache"
     "github.com/aquasecurity/trivy/pkg/commands/operation"
     "github.com/aquasecurity/trivy/pkg/flag"
     "github.com/aquasecurity/trivy/pkg/log"
     "github.com/aquasecurity/trivy/pkg/module"
     rpcServer "github.com/aquasecurity/trivy/pkg/rpc/server"
-    "github.com/aquasecurity/trivy/pkg/utils/fsutils"
 )
 
 // Run runs the scan
@@ -19,17 +19,11 @@ func Run(ctx context.Context, opts flag.Options) (err error) {
     log.InitLogger(opts.Debug, opts.Quiet)
 
     // configure cache dir
-    fsutils.SetCacheDir(opts.CacheDir)
-    cache, err := operation.NewCache(opts.CacheOptions)
+    cacheClient, cleanup, err := cache.New(opts.CacheOpts())
     if err != nil {
         return xerrors.Errorf("server cache error: %w", err)
     }
-    defer cache.Close()
-    log.Debug("Cache", log.String("dir", fsutils.CacheDir()))
-
-    if opts.Reset {
-        return cache.ClearDB()
-    }
+    defer cleanup()
 
     // download the database file
     if err = operation.DownloadDB(ctx, opts.AppVersion, opts.CacheDir, opts.DBRepository,
@@ -57,5 +51,5 @@ func Run(ctx context.Context, opts flag.Options) (err error) {
 
     server := rpcServer.NewServer(opts.AppVersion, opts.Listen, opts.CacheDir, opts.Token, opts.TokenHeader, opts.DBRepository,
         opts.RegistryOpts())
-    return server.ListenAndServe(ctx, cache, opts.SkipDBUpdate)
+    return server.ListenAndServe(ctx, cacheClient, opts.SkipDBUpdate)
 }
diff --git a/pkg/compliance/spec/compliance.go b/pkg/compliance/spec/compliance.go
index bc91b7f664fe..7b0b4f6cffdd 100644
--- a/pkg/compliance/spec/compliance.go
+++ b/pkg/compliance/spec/compliance.go
@@ -5,7 +5,7 @@ import (
     "os"
     "strings"
 
-    "golang.org/x/exp/maps"
+    "github.com/samber/lo"
     "golang.org/x/xerrors"
     "gopkg.in/yaml.v3"
 
@@ -39,7 +39,7 @@ func (cs *ComplianceSpec) Scanners() (types.Scanners, error) {
             scannerTypes[scannerType] = struct{}{}
         }
     }
-    return maps.Keys(scannerTypes), nil
+    return lo.Keys(scannerTypes), nil
 }
 
 // CheckIDs return list of compliance check IDs
diff --git a/pkg/compliance/spec/mapper.go b/pkg/compliance/spec/mapper.go
index 2efa488bff54..6fa5d2bbd45a 100644
--- a/pkg/compliance/spec/mapper.go
+++ b/pkg/compliance/spec/mapper.go
@@ -1,7 +1,7 @@
 package spec
 
 import (
-    "golang.org/x/exp/slices"
+    "slices"
 
     "github.com/aquasecurity/trivy/pkg/types"
 )
diff --git a/pkg/db/db.go b/pkg/db/db.go
index 5ac539f203e8..e87277f93375 100644
--- a/pkg/db/db.go
+++ b/pkg/db/db.go
@@ -4,18 +4,20 @@ import (
     "context"
     "errors"
     "fmt"
+    "os"
     "time"
 
     "github.com/google/go-containerregistry/pkg/name"
     "github.com/google/go-containerregistry/pkg/v1/remote/transport"
     "golang.org/x/xerrors"
-    "k8s.io/utils/clock"
 
     "github.com/aquasecurity/trivy-db/pkg/db"
     "github.com/aquasecurity/trivy-db/pkg/metadata"
+    "github.com/aquasecurity/trivy/pkg/clock"
     "github.com/aquasecurity/trivy/pkg/fanal/types"
     "github.com/aquasecurity/trivy/pkg/log"
     "github.com/aquasecurity/trivy/pkg/oci"
+    "github.com/aquasecurity/trivy/pkg/version/doc"
 )
 
 const (
@@ -28,15 +30,8 @@ var (
     defaultRepository, _ = name.NewTag(DefaultRepository)
 )
 
-// Operation defines the DB operations
-type Operation interface {
-    NeedsUpdate(cliVersion string, skip bool) (need bool, err error)
-    Download(ctx context.Context, dst string, opt types.RegistryOptions) (err error)
-}
-
 type options struct {
     artifact     *oci.Artifact
-    clock        clock.Clock
     dbRepository name.Reference
 }
 
@@ -57,13 +52,6 @@ func WithDBRepository(dbRepository name.Reference) Option {
     }
 }
 
-// WithClock takes a clock
-func WithClock(c clock.Clock) Option {
-    return func(opts *options) {
-        opts.clock = c
-    }
-}
-
 // Client implements DB operations
 type Client struct {
     *options
@@ -76,7 +64,6 @@ type Client struct {
 // NewClient is the factory method for DB client
 func NewClient(cacheDir string, quiet bool, opts ...Option) *Client {
     o := &options{
-        clock:        clock.RealClock{},
         dbRepository: defaultRepository,
     }
 
@@ -93,7 +80,7 @@ func NewClient(cacheDir string, quiet bool, opts ...Option) *Client {
 }
 
 // NeedsUpdate check is DB needs update
-func (c *Client) NeedsUpdate(cliVersion string, skip bool) (bool, error) {
+func (c *Client) NeedsUpdate(ctx context.Context, cliVersion string, skip bool) (bool, error) {
     meta, err := c.metadata.Get()
     if err != nil {
         log.Debug("There is no valid metadata file", log.Err(err))
@@ -124,7 +111,7 @@ func (c *Client) NeedsUpdate(cliVersion string, skip bool) (bool, error) {
         return true, nil
     }
 
-    return !c.isNewDB(meta), nil
+    return !c.isNewDB(ctx, meta), nil
 }
 
 func (c *Client) validate(meta metadata.Metadata) error {
@@ -136,13 +123,14 @@ func (c *Client) validate(meta metadata.Metadata) error {
     return nil
 }
 
-func (c *Client) isNewDB(meta metadata.Metadata) bool {
-    if c.clock.Now().Before(meta.NextUpdate) {
+func (c *Client) isNewDB(ctx context.Context, meta metadata.Metadata) bool {
+    now := clock.Now(ctx)
+    if now.Before(meta.NextUpdate) {
         log.Debug("DB update was skipped because the local DB is the latest")
         return true
     }
 
-    if c.clock.Now().Before(meta.DownloadedAt.Add(time.Hour)) {
+    if now.Before(meta.DownloadedAt.Add(time.Hour)) {
         log.Debug("DB update was skipped because the local DB was downloaded during the last hour")
         return true
     }
@@ -165,13 +153,20 @@ func (c *Client) Download(ctx context.Context, dst string, opt types.RegistryOpt
         return xerrors.Errorf("database download error: %w", err)
     }
 
-    if err = c.updateDownloadedAt(dst); err != nil {
+    if err = c.updateDownloadedAt(ctx, dst); err != nil {
         return xerrors.Errorf("failed to update downloaded_at: %w", err)
     }
     return nil
 }
 
-func (c *Client) updateDownloadedAt(dst string) error {
+func (c *Client) Clear(ctx context.Context) error {
+    if err := os.RemoveAll(db.Dir(c.cacheDir)); err != nil {
+        return xerrors.Errorf("failed to remove vulnerability database: %w", err)
+    }
+    return nil
+}
+
+func (c *Client) updateDownloadedAt(ctx context.Context, dst string) error {
     log.Debug("Updating database metadata...")
 
     // We have to initialize a metadata client here
@@ -182,7 +177,7 @@ func (c *Client) updateDownloadedAt(dst string) error {
         return xerrors.Errorf("unable to get metadata: %w", err)
     }
 
-    meta.DownloadedAt = c.clock.Now().UTC()
+    meta.DownloadedAt = clock.Now(ctx).UTC()
     if err = client.Update(meta); err != nil {
         return xerrors.Errorf("failed to update metadata: %w", err)
     }
@@ -202,7 +197,8 @@ func (c *Client) initOCIArtifact(opt types.RegistryOptions) (*oci.Artifact, erro
         for _, diagnostic := range terr.Errors {
             // For better user experience
             if diagnostic.Code == transport.DeniedErrorCode || diagnostic.Code == transport.UnauthorizedErrorCode {
-                log.Warn("See https://aquasecurity.github.io/trivy/latest/docs/references/troubleshooting/#db")
+                // e.g. https://aquasecurity.github.io/trivy/latest/docs/references/troubleshooting/#db
+                log.Warnf("See %s", doc.URL("/docs/references/troubleshooting/", "db"))
                 break
             }
         }
diff --git a/pkg/db/db_test.go b/pkg/db/db_test.go
index e627a684a696..d7eca907fe32 100644
--- a/pkg/db/db_test.go
+++ b/pkg/db/db_test.go
@@ -6,39 +6,17 @@ import (
     "testing"
     "time"
 
-    v1 "github.com/google/go-containerregistry/pkg/v1"
-    fakei "github.com/google/go-containerregistry/pkg/v1/fake"
-    "github.com/google/go-containerregistry/pkg/v1/tarball"
-    "github.com/google/go-containerregistry/pkg/v1/types"
     "github.com/stretchr/testify/assert"
     "github.com/stretchr/testify/require"
-    "k8s.io/utils/clock"
-    clocktesting "k8s.io/utils/clock/testing"
 
     tdb "github.com/aquasecurity/trivy-db/pkg/db"
     "github.com/aquasecurity/trivy-db/pkg/metadata"
+    "github.com/aquasecurity/trivy/internal/dbtest"
+    "github.com/aquasecurity/trivy/pkg/clock"
     "github.com/aquasecurity/trivy/pkg/db"
     ftypes "github.com/aquasecurity/trivy/pkg/fanal/types"
-    "github.com/aquasecurity/trivy/pkg/oci"
 )
 
-const mediaType = "application/vnd.aquasec.trivy.db.layer.v1.tar+gzip"
-
-type fakeLayer struct {
-    v1.Layer
-}
-
-func (f fakeLayer) MediaType() (types.MediaType, error) {
-    return mediaType, nil
-}
-
-func newFakeLayer(t *testing.T, input string) v1.Layer {
-    layer, err := tarball.LayerFromFile(input)
-    require.NoError(t, err)
-
-    return fakeLayer{layer}
-}
-
 func TestClient_NeedsUpdate(t *testing.T) {
     timeNextUpdateDay1 := time.Date(2019, 9, 1, 0, 0, 0, 0, time.UTC)
     timeNextUpdateDay2 := time.Date(2019, 10, 2, 0, 0, 0, 0, time.UTC)
@@ -46,14 +24,12 @@ func TestClient_NeedsUpdate(t *testing.T) {
     tests := []struct {
         name     string
         skip     bool
-        clock    clock.Clock
         metadata metadata.Metadata
         want     bool
         wantErr  string
     }{
         {
-            name:  "happy path",
-            clock: clocktesting.NewFakeClock(time.Date(2019, 10, 1, 0, 0, 0, 0, time.UTC)),
+            name: "happy path",
             metadata: metadata.Metadata{
                 Version:    tdb.SchemaVersion,
                 NextUpdate: timeNextUpdateDay1,
             },
         },
         {
             name:     "happy path for first run",
-            clock:    clocktesting.NewFakeClock(time.Date(2019, 10, 1, 0, 0, 0, 0, time.UTC)),
             metadata: metadata.Metadata{},
             want:     true,
         },
         {
-            name:  "happy path with old schema version",
-            clock: clocktesting.NewFakeClock(time.Date(2019, 10, 1, 0, 0, 0, 0, time.UTC)),
+            name: "happy path with old schema version",
             metadata: metadata.Metadata{
                 Version:    0,
                 NextUpdate: timeNextUpdateDay1,
@@ -76,8 +50,7 @@ func TestClient_NeedsUpdate(t *testing.T) {
             want: true,
         },
         {
-            name:  "happy path with --skip-update",
-            clock: clocktesting.NewFakeClock(time.Date(2019, 10, 1, 0, 0, 0, 0, time.UTC)),
+            name: "happy path with --skip-update",
             metadata: metadata.Metadata{
                 Version:    tdb.SchemaVersion,
                 NextUpdate: timeNextUpdateDay1,
@@ -86,8 +59,7 @@ func TestClient_NeedsUpdate(t *testing.T) {
             skip: true,
             want: false,
         },
         {
-            name:  "skip downloading DB",
-            clock: clocktesting.NewFakeClock(time.Date(2019, 10, 1, 0, 0, 0, 0, time.UTC)),
+            name: "skip downloading DB",
             metadata: metadata.Metadata{
                 Version:    tdb.SchemaVersion,
                 NextUpdate: timeNextUpdateDay2,
@@ -95,8 +67,7 @@ func TestClient_NeedsUpdate(t *testing.T) {
             want: false,
         },
         {
-            name:  "newer schema version",
-            clock: clocktesting.NewFakeClock(time.Date(2019, 10, 1, 0, 0, 0, 0, time.UTC)),
+            name: "newer schema version",
             metadata: metadata.Metadata{
                 Version:    tdb.SchemaVersion + 1,
                 NextUpdate: timeNextUpdateDay2,
@@ -106,14 +77,12 @@ func TestClient_NeedsUpdate(t *testing.T) {
         },
         {
             name:     "--skip-update on the first run",
-            clock:    clocktesting.NewFakeClock(time.Date(2019, 10, 1, 0, 0, 0, 0, time.UTC)),
             metadata: metadata.Metadata{},
             skip:     true,
             wantErr:  "--skip-update cannot be specified on the first run",
         },
         {
-            name:  "--skip-update with different schema version",
-            clock: clocktesting.NewFakeClock(time.Date(2019, 10, 1, 0, 0, 0, 0, time.UTC)),
+            name: "--skip-update with different schema version",
             metadata: metadata.Metadata{
                 Version:    0,
                 NextUpdate: timeNextUpdateDay1,
@@ -123,8 +92,7 @@ func TestClient_NeedsUpdate(t *testing.T) {
                 0, tdb.SchemaVersion),
         },
         {
-            name:  "happy with old DownloadedAt",
-            clock: clocktesting.NewFakeClock(time.Date(2019, 10, 1, 0, 0, 0, 0, time.UTC)),
+            name: "happy with old DownloadedAt",
            metadata: metadata.Metadata{
                 Version:    tdb.SchemaVersion,
                 NextUpdate: timeNextUpdateDay1,
@@ -133,8 +101,7 @@ func TestClient_NeedsUpdate(t *testing.T) {
             want: true,
         },
         {
-            name:  "skip downloading DB with recent DownloadedAt",
-            clock: clocktesting.NewFakeClock(time.Date(2019, 10, 1, 0, 0, 0, 0, time.UTC)),
+            name: "skip downloading DB with recent DownloadedAt",
             metadata: metadata.Metadata{
                 Version:    tdb.SchemaVersion,
                 NextUpdate: timeNextUpdateDay1,
@@ -153,8 +120,11 @@ func TestClient_NeedsUpdate(t *testing.T) {
                 require.NoError(t, err)
             }
 
-            client := db.NewClient(cacheDir, true, db.WithClock(tt.clock))
-            needsUpdate, err := client.NeedsUpdate("test", tt.skip)
+            // Set a fake time
+            ctx := clock.With(context.Background(), time.Date(2019, 10, 1, 0, 0, 0, 0, time.UTC))
+
+            client := db.NewClient(cacheDir, true)
+            needsUpdate, err := client.NeedsUpdate(ctx, "test", tt.skip)
 
             switch {
             case tt.wantErr != "":
@@ -170,7 +140,6 @@ func TestClient_NeedsUpdate(t *testing.T) {
 }
 
 func TestClient_Download(t *testing.T) {
-    timeDownloadedAt := clocktesting.NewFakeClock(time.Date(2019, 10, 1, 0, 0, 0, 0, time.UTC))
 
     tests := []struct {
         name    string
@@ -197,39 +166,18 @@ func TestClient_Download(t *testing.T) {
 
     for _, tt := range tests {
         t.Run(tt.name, func(t *testing.T) {
-            cacheDir := t.TempDir()
+            // Set a fake time
+            ctx := clock.With(context.Background(), time.Date(2019, 10, 1, 0, 0, 0, 0, time.UTC))
 
-            // Mock image
-            img := new(fakei.FakeImage)
-            img.LayersReturns([]v1.Layer{newFakeLayer(t, tt.input)}, nil)
-            img.ManifestReturns(&v1.Manifest{
-                Layers: []v1.Descriptor{
-                    {
-                        MediaType: "application/vnd.aquasec.trivy.db.layer.v1.tar+gzip",
-                        Size:      100,
-                        Digest: v1.Hash{
-                            Algorithm: "sha256",
-                            Hex:       "aec482bc254b5dd025d3eaf5bb35997d3dba783e394e8f91d5a415963151bfb8",
-                        },
-                        Annotations: map[string]string{
-                            "org.opencontainers.image.title": "db.tar.gz",
-                        },
-                    },
-                },
-            }, nil)
-
-            // Mock OCI artifact
-            opt := ftypes.RegistryOptions{
-                Insecure: false,
-            }
-            art, err := oci.NewArtifact("db", true, opt, oci.WithImage(img))
-            require.NoError(t, err)
+            // Fake DB
+            art := dbtest.NewFakeDB(t, tt.input, dbtest.FakeDBOptions{})
 
-            client := db.NewClient(cacheDir, true, db.WithOCIArtifact(art), db.WithClock(timeDownloadedAt))
-            err = client.Download(context.Background(), cacheDir, opt)
+            cacheDir := t.TempDir()
+            client := db.NewClient(cacheDir, true, db.WithOCIArtifact(art))
+            err := client.Download(ctx, cacheDir, ftypes.RegistryOptions{})
             if tt.wantErr != "" {
                 require.Error(t, err)
-                assert.Contains(t, err.Error(), tt.wantErr)
+                assert.ErrorContains(t, err, tt.wantErr)
                 return
             }
             require.NoError(t, err)
diff --git a/pkg/db/mock_operation.go b/pkg/db/mock_operation.go
deleted file mode 100644
index b5a879fb5afc..000000000000
--- a/pkg/db/mock_operation.go
+++ /dev/null
@@ -1,126 +0,0 @@
-// Code generated by mockery v1.0.0. DO NOT EDIT.
-
-package db
-
-import (
-    context "context"
-
-    mock "github.com/stretchr/testify/mock"
-
-    "github.com/aquasecurity/trivy/pkg/fanal/types"
-)
-
-// MockOperation is an autogenerated mock type for the Operation type
-type MockOperation struct {
-    mock.Mock
-}
-
-type OperationDownloadArgs struct {
-    Ctx         context.Context
-    CtxAnything bool
-    Dst         string
-    DstAnything bool
-}
-
-type OperationDownloadReturns struct {
-    Err error
-}
-
-type OperationDownloadExpectation struct {
-    Args    OperationDownloadArgs
-    Returns OperationDownloadReturns
-}
-
-func (_m *MockOperation) ApplyDownloadExpectation(e OperationDownloadExpectation) {
-    var args []interface{}
-    if e.Args.CtxAnything {
-        args = append(args, mock.Anything)
-    } else {
-        args = append(args, e.Args.Ctx)
-    }
-    if e.Args.DstAnything {
-        args = append(args, mock.Anything)
-    } else {
-        args = append(args, e.Args.Dst)
-    }
-    _m.On("Download", args...).Return(e.Returns.Err)
-}
-
-func (_m *MockOperation) ApplyDownloadExpectations(expectations []OperationDownloadExpectation) {
-    for _, e := range expectations {
-        _m.ApplyDownloadExpectation(e)
-    }
-}
-
-// Download provides a mock function with given fields: ctx, dst
-func (_m *MockOperation) Download(ctx context.Context, dst string, opt types.RegistryOptions) error {
-    ret := _m.Called(ctx, dst, opt)
-
-    var r0 error
-    if rf, ok := ret.Get(0).(func(context.Context, string) error); ok {
-        r0 = rf(ctx, dst)
-    } else {
-        r0 = ret.Error(0)
-    }
-
-    return r0
-}
-
-type OperationNeedsUpdateArgs struct {
-    CliVersion         string
-    CliVersionAnything bool
-    Skip               bool
-    SkipAnything       bool
-}
-
-type OperationNeedsUpdateReturns struct {
-    Need bool
-    Err  error
-}
-
-type OperationNeedsUpdateExpectation struct {
-    Args    OperationNeedsUpdateArgs
-    Returns OperationNeedsUpdateReturns
-}
-
-func (_m *MockOperation) ApplyNeedsUpdateExpectation(e OperationNeedsUpdateExpectation) {
-    var args []interface{}
-    if e.Args.CliVersionAnything {
-        args = append(args, mock.Anything)
-    } else {
-        args = append(args, e.Args.CliVersion)
-    }
-    if e.Args.SkipAnything {
-        args = append(args, mock.Anything)
-    } else {
-        args = append(args, e.Args.Skip)
-    }
-    _m.On("NeedsUpdate", args...).Return(e.Returns.Need, e.Returns.Err)
-}
-
-func (_m *MockOperation) ApplyNeedsUpdateExpectations(expectations []OperationNeedsUpdateExpectation) {
-    for _, e := range expectations {
-        _m.ApplyNeedsUpdateExpectation(e)
-    }
-}
-
-// NeedsUpdate provides a mock function with given fields: cliVersion, skip
-func (_m *MockOperation) NeedsUpdate(cliVersion string, skip bool) (bool, error) {
-    ret := _m.Called(cliVersion, skip)
-
-    var r0 bool
-    if rf, ok := ret.Get(0).(func(string, bool) bool); ok {
-        r0 = rf(cliVersion, skip)
-    } else {
-        r0 = ret.Get(0).(bool)
-    }
-
-    var r1 error
-    if rf, ok := ret.Get(1).(func(string, bool) error); ok {
-        r1 = rf(cliVersion, skip)
-    } else {
-        r1 = ret.Error(1)
-    }
-
-    return r0, r1
-}
diff --git a/pkg/dependency/id.go b/pkg/dependency/id.go
index d40289cedc6a..577ed5d0ac41 100644
--- a/pkg/dependency/id.go
+++ b/pkg/dependency/id.go
@@ -1,9 +1,13 @@
 package dependency
 
 import (
+    "fmt"
     "strings"
 
+    "github.com/mitchellh/hashstructure/v2"
+
     "github.com/aquasecurity/trivy/pkg/fanal/types"
+    "github.com/aquasecurity/trivy/pkg/log"
 )
 
 // ID returns a unique ID for the given library.
@@ -25,8 +29,24 @@ func ID(ltype types.LangType, name, version string) string { if !strings.HasPrefix(version, "v") { version = "v" + version } - case types.Jar, types.Pom, types.Gradle: + case types.Jar, types.Pom, types.Gradle, types.Sbt: sep = ":" } return name + sep + version } + +// UID calculates the hash of the package for the unique ID +func UID(filePath string, pkg types.Package) string { + v := map[string]any{ + "filePath": filePath, // To differentiate the hash of the same package but different file path + "pkg": pkg, + } + hash, err := hashstructure.Hash(v, hashstructure.FormatV2, &hashstructure.HashOptions{ + ZeroNil: true, + IgnoreZeroValue: true, + }) + if err != nil { + log.Warn("Failed to calculate the package hash", log.String("pkg", pkg.Name), log.Err(err)) + } + return fmt.Sprintf("%x", hash) +} diff --git a/pkg/dependency/id_test.go b/pkg/dependency/id_test.go index 68e380e6c651..18359f771e7b 100644 --- a/pkg/dependency/id_test.go +++ b/pkg/dependency/id_test.go @@ -47,6 +47,15 @@ func TestID(t *testing.T) { }, want: "test:1.0.0", }, + { + name: "sbt", + args: args{ + ltype: types.Sbt, + name: "test", + version: "1.0.0", + }, + want: "test:1.0.0", + }, { name: "pip", args: args{ diff --git a/pkg/dependency/parser/c/conan/parse.go b/pkg/dependency/parser/c/conan/parse.go index 7661131ec8b9..14da9358fb8a 100644 --- a/pkg/dependency/parser/c/conan/parse.go +++ b/pkg/dependency/parser/c/conan/parse.go @@ -2,11 +2,11 @@ package conan import ( "io" + "slices" "strings" "github.com/liamg/jfather" "github.com/samber/lo" - "golang.org/x/exp/slices" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/dependency" diff --git a/pkg/dependency/parser/golang/mod/parse.go b/pkg/dependency/parser/golang/mod/parse.go index fa5116f19bfa..508da6911521 100644 --- a/pkg/dependency/parser/golang/mod/parse.go +++ b/pkg/dependency/parser/golang/mod/parse.go @@ -7,7 +7,6 @@ import ( "strings" "github.com/samber/lo" - "golang.org/x/exp/maps" "golang.org/x/mod/modfile" "golang.org/x/xerrors" @@ -148,7 +147,7 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]ftypes.Package, []ftypes.Dependenc } } - return maps.Values(pkgs), nil, nil + return lo.Values(pkgs), nil, nil } // Check if the Go version is less than 1.17 diff --git a/pkg/dependency/parser/java/pom/artifact.go b/pkg/dependency/parser/java/pom/artifact.go index a99ff8569357..b2e97efb229b 100644 --- a/pkg/dependency/parser/java/pom/artifact.go +++ b/pkg/dependency/parser/java/pom/artifact.go @@ -4,10 +4,10 @@ import ( "fmt" "os" "regexp" + "slices" "strings" "github.com/samber/lo" - "golang.org/x/exp/slices" ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/log" diff --git a/pkg/dependency/parser/java/pom/metadata.go b/pkg/dependency/parser/java/pom/metadata.go new file mode 100644 index 000000000000..0a35e9e4f556 --- /dev/null +++ b/pkg/dependency/parser/java/pom/metadata.go @@ -0,0 +1,17 @@ +package pom + +type Metadata struct { + GroupId string `xml:"groupId"` + ArtifactId string `xml:"artifactId"` + Versioning Versioning `xml:"versioning"` + Version string `xml:"version"` +} + +type Versioning struct { + SnapshotVersions []SnapshotVersion `xml:"snapshotVersions>snapshotVersion"` +} + +type SnapshotVersion struct { + Extension string `xml:"extension"` + Value string `xml:"value"` +} diff --git a/pkg/dependency/parser/java/pom/parse.go b/pkg/dependency/parser/java/pom/parse.go index bf8df2ad1c0a..e905196b6fff 100644 --- 
a/pkg/dependency/parser/java/pom/parse.go +++ b/pkg/dependency/parser/java/pom/parse.go @@ -9,6 +9,7 @@ import ( "os" "path" "path/filepath" + "slices" "sort" "strings" @@ -48,6 +49,12 @@ func WithReleaseRemoteRepos(repos []string) option { } } +func WithSnapshotRemoteRepos(repos []string) option { + return func(opts *options) { + opts.snapshotRemoteRepos = repos + } +} + type Parser struct { logger *log.Logger rootPath string @@ -648,7 +655,18 @@ func (p *Parser) fetchPOMFromRemoteRepositories(paths []string, snapshot bool) ( // try all remoteRepositories for _, repo := range remoteRepos { - fetched, err := p.fetchPOMFromRemoteRepository(repo, paths) + repoPaths := slices.Clone(paths) // Clone slice to avoid overwriting last element of `paths` + if snapshot { + pomFileName, err := p.fetchPomFileNameFromMavenMetadata(repo, repoPaths) + if err != nil { + return nil, xerrors.Errorf("fetch maven-metadata.xml error: %w", err) + } + // Use file name from `maven-metadata.xml` if it exists + if pomFileName != "" { + repoPaths[len(repoPaths)-1] = pomFileName + } + } + fetched, err := p.fetchPOMFromRemoteRepository(repo, repoPaths) if err != nil { return nil, xerrors.Errorf("fetch repository error: %w", err) } else if fetched == nil { @@ -659,7 +677,7 @@ func (p *Parser) fetchPOMFromRemoteRepositories(paths []string, snapshot bool) ( return nil, xerrors.Errorf("the POM was not found in remote remoteRepositories") } -func (p *Parser) fetchPOMFromRemoteRepository(repo string, paths []string) (*pom, error) { +func (p *Parser) remoteRepoRequest(repo string, paths []string) (*http.Request, error) { repoURL, err := url.Parse(repo) if err != nil { p.logger.Error("URL parse error", log.String("repo", repo)) @@ -670,7 +688,6 @@ func (p *Parser) fetchPOMFromRemoteRepository(repo string, paths []string) (*pom repoURL.Path = path.Join(paths...) logger := p.logger.With(log.String("host", repoURL.Host), log.String("path", repoURL.Path)) - client := &http.Client{} req, err := http.NewRequest("GET", repoURL.String(), http.NoBody) if err != nil { logger.Debug("HTTP request failed") @@ -681,9 +698,54 @@ func (p *Parser) fetchPOMFromRemoteRepository(repo string, paths []string) (*pom req.SetBasicAuth(repoURL.User.Username(), password) } + return req, nil +} + +// fetchPomFileNameFromMavenMetadata fetches `maven-metadata.xml` file to detect file name of pom file. 
+func (p *Parser) fetchPomFileNameFromMavenMetadata(repo string, paths []string) (string, error) { + // Overwrite pom file name to `maven-metadata.xml` + mavenMetadataPaths := slices.Clone(paths[:len(paths)-1]) // Clone slice to avoid shadow overwriting last element of `paths` + mavenMetadataPaths = append(mavenMetadataPaths, "maven-metadata.xml") + + req, err := p.remoteRepoRequest(repo, mavenMetadataPaths) + if err != nil { + return "", xerrors.Errorf("unable to create request for maven-metadata.xml file") + } + + client := &http.Client{} resp, err := client.Do(req) if err != nil || resp.StatusCode != http.StatusOK { - logger.Debug("Failed to fetch") + p.logger.Debug("Failed to fetch", log.String("url", req.URL.String())) + return "", nil + } + defer resp.Body.Close() + + mavenMetadata, err := parseMavenMetadata(resp.Body) + if err != nil { + return "", xerrors.Errorf("failed to parse maven-metadata.xml file: %w", err) + } + + var pomFileName string + for _, sv := range mavenMetadata.Versioning.SnapshotVersions { + if sv.Extension == "pom" { + // mavenMetadataPaths[len(mavenMetadataPaths)-3] is always artifactID + pomFileName = fmt.Sprintf("%s-%s.pom", mavenMetadataPaths[len(mavenMetadataPaths)-3], sv.Value) + } + } + + return pomFileName, nil +} + +func (p *Parser) fetchPOMFromRemoteRepository(repo string, paths []string) (*pom, error) { + req, err := p.remoteRepoRequest(repo, paths) + if err != nil { + return nil, xerrors.Errorf("unable to create request for pom file") + } + + client := &http.Client{} + resp, err := client.Do(req) + if err != nil || resp.StatusCode != http.StatusOK { + p.logger.Debug("Failed to fetch", log.String("url", req.URL.String())) return nil, nil } defer resp.Body.Close() @@ -709,6 +771,16 @@ func parsePom(r io.Reader) (*pomXML, error) { return parsed, nil } +func parseMavenMetadata(r io.Reader) (*Metadata, error) { + parsed := &Metadata{} + decoder := xml.NewDecoder(r) + decoder.CharsetReader = charset.NewReaderLabel + if err := decoder.Decode(parsed); err != nil { + return nil, xerrors.Errorf("xml decode error: %w", err) + } + return parsed, nil +} + func packageID(name, version string) string { return dependency.ID(ftypes.Pom, name, version) } diff --git a/pkg/dependency/parser/java/pom/parse_test.go b/pkg/dependency/parser/java/pom/parse_test.go index 1207f32adcf7..15740d599eb9 100644 --- a/pkg/dependency/parser/java/pom/parse_test.go +++ b/pkg/dependency/parser/java/pom/parse_test.go @@ -143,6 +143,13 @@ func TestPom_Parse(t *testing.T) { }, }, }, + { + ID: "org.example:example-api:2.0.0", + Name: "org.example:example-api", + Version: "2.0.0", + Licenses: []string{"The Apache Software License, Version 2.0"}, + Relationship: ftypes.RelationshipIndirect, + }, }, wantDeps: []ftypes.Dependency{ { @@ -151,6 +158,58 @@ func TestPom_Parse(t *testing.T) { "org.example:example-dependency:1.2.3-SNAPSHOT", }, }, + { + ID: "org.example:example-dependency:1.2.3-SNAPSHOT", + DependsOn: []string{ + "org.example:example-api:2.0.0", + }, + }, + }, + }, + { + name: "snapshot repository with maven-metadata.xml", + inputFile: filepath.Join("testdata", "snapshot", "with-maven-metadata", "pom.xml"), + local: false, + want: []ftypes.Package{ + { + ID: "com.example:happy:1.0.0", + Name: "com.example:happy", + Version: "1.0.0", + Relationship: ftypes.RelationshipRoot, + }, + { + ID: "org.example:example-dependency:2.17.0-SNAPSHOT", + Name: "org.example:example-dependency", + Version: "2.17.0-SNAPSHOT", + Relationship: ftypes.RelationshipDirect, + Locations: ftypes.Locations{ + { + 
StartLine: 14, + EndLine: 18, + }, + }, + }, + { + ID: "org.example:example-api:2.0.0", + Name: "org.example:example-api", + Version: "2.0.0", + Licenses: []string{"The Apache Software License, Version 2.0"}, + Relationship: ftypes.RelationshipIndirect, + }, + }, + wantDeps: []ftypes.Dependency{ + { + ID: "com.example:happy:1.0.0", + DependsOn: []string{ + "org.example:example-dependency:2.17.0-SNAPSHOT", + }, + }, + { + ID: "org.example:example-dependency:2.17.0-SNAPSHOT", + DependsOn: []string{ + "org.example:example-api:2.0.0", + }, + }, }, }, { @@ -1404,7 +1463,7 @@ func TestPom_Parse(t *testing.T) { remoteRepos = []string{ts.URL} } - p := pom.NewParser(tt.inputFile, pom.WithReleaseRemoteRepos(remoteRepos), pom.WithOffline(tt.offline)) + p := pom.NewParser(tt.inputFile, pom.WithReleaseRemoteRepos(remoteRepos), pom.WithSnapshotRemoteRepos(remoteRepos), pom.WithOffline(tt.offline)) gotPkgs, gotDeps, err := p.Parse(f) if tt.wantErr != "" { diff --git a/pkg/dependency/parser/java/pom/testdata/repository/org/example/example-dependency/1.2.3-SNAPSHOT/example-dependency-1.2.3.pom b/pkg/dependency/parser/java/pom/testdata/repository/org/example/example-dependency/1.2.3-SNAPSHOT/example-dependency-1.2.3-SNAPSHOT.pom similarity index 100% rename from pkg/dependency/parser/java/pom/testdata/repository/org/example/example-dependency/1.2.3-SNAPSHOT/example-dependency-1.2.3.pom rename to pkg/dependency/parser/java/pom/testdata/repository/org/example/example-dependency/1.2.3-SNAPSHOT/example-dependency-1.2.3-SNAPSHOT.pom diff --git a/pkg/dependency/parser/java/pom/testdata/repository/org/example/example-dependency/2.17.0-SNAPSHOT/example-dependency-2.17.0-20240312.035235-10.pom b/pkg/dependency/parser/java/pom/testdata/repository/org/example/example-dependency/2.17.0-SNAPSHOT/example-dependency-2.17.0-20240312.035235-10.pom new file mode 100644 index 000000000000..0ecec117f873 --- /dev/null +++ b/pkg/dependency/parser/java/pom/testdata/repository/org/example/example-dependency/2.17.0-SNAPSHOT/example-dependency-2.17.0-20240312.035235-10.pom @@ -0,0 +1,23 @@ + + + + 4.0.0 + + org.example + example-dependency + 2.17.0-SNAPSHOT + + jar + Example API Dependency + The example API + + + + org.example + example-api + 2.0.0 + + + + \ No newline at end of file diff --git a/pkg/dependency/parser/java/pom/testdata/repository/org/example/example-dependency/2.17.0-SNAPSHOT/maven-metadata.xml b/pkg/dependency/parser/java/pom/testdata/repository/org/example/example-dependency/2.17.0-SNAPSHOT/maven-metadata.xml new file mode 100644 index 000000000000..258de9db99c5 --- /dev/null +++ b/pkg/dependency/parser/java/pom/testdata/repository/org/example/example-dependency/2.17.0-SNAPSHOT/maven-metadata.xml @@ -0,0 +1,35 @@ + + org.example + example-dependency + + 20240312035235 + + 20240312.035235 + 10 + + + + sources + jar + 2.17.0-20240312.035235-10 + 20240312035235 + + + module + 2.17.0-20240312.035235-10 + 20240312035235 + + + jar + 2.17.0-20240312.035235-10 + 20240312035235 + + + pom + 2.17.0-20240312.035235-10 + 20240312035235 + + + + 2.17.0-SNAPSHOT + \ No newline at end of file diff --git a/pkg/dependency/parser/java/pom/testdata/snapshot/with-maven-metadata/pom.xml b/pkg/dependency/parser/java/pom/testdata/snapshot/with-maven-metadata/pom.xml new file mode 100644 index 000000000000..7e4c2144d417 --- /dev/null +++ b/pkg/dependency/parser/java/pom/testdata/snapshot/with-maven-metadata/pom.xml @@ -0,0 +1,20 @@ + + 4.0.0 + + com.example + happy + 1.0.0 + + happy + Example + + + + + org.example + example-dependency + 
2.17.0-SNAPSHOT + + + diff --git a/pkg/dependency/parser/julia/manifest/parse.go b/pkg/dependency/parser/julia/manifest/parse.go index 13d1cb208bcb..aa0112a17e67 100644 --- a/pkg/dependency/parser/julia/manifest/parse.go +++ b/pkg/dependency/parser/julia/manifest/parse.go @@ -5,7 +5,7 @@ import ( "sort" "github.com/BurntSushi/toml" - "golang.org/x/exp/maps" + "github.com/samber/lo" "golang.org/x/xerrors" ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" @@ -156,7 +156,7 @@ func decodeDependency(man *primitiveManifest, dep primitiveDependency, metadata var possibleDepsMap map[string]string err = metadata.PrimitiveDecode(dep.Dependencies, &possibleDepsMap) if err == nil { - possibleUuids := maps.Values(possibleDepsMap) + possibleUuids := lo.Values(possibleDepsMap) sort.Strings(possibleUuids) dep.DependsOn = possibleUuids return dep, nil diff --git a/pkg/dependency/parser/nodejs/npm/parse.go b/pkg/dependency/parser/nodejs/npm/parse.go index 6e99cdd1bfcb..05ce6301ff09 100644 --- a/pkg/dependency/parser/nodejs/npm/parse.go +++ b/pkg/dependency/parser/nodejs/npm/parse.go @@ -3,6 +3,7 @@ package npm import ( "fmt" "io" + "maps" "path" "slices" "sort" @@ -10,7 +11,6 @@ import ( "github.com/liamg/jfather" "github.com/samber/lo" - "golang.org/x/exp/maps" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/dependency" @@ -186,7 +186,7 @@ func (p *Parser) parseV2(packages map[string]Package) ([]ftypes.Package, []ftype } - return maps.Values(pkgs), deps + return lo.Values(pkgs), deps } // for local package npm uses links. e.g.: diff --git a/pkg/dependency/parser/nodejs/pnpm/parse.go b/pkg/dependency/parser/nodejs/pnpm/parse.go index bbe6c5a57aab..8ccf9de0ae9a 100644 --- a/pkg/dependency/parser/nodejs/pnpm/parse.go +++ b/pkg/dependency/parser/nodejs/pnpm/parse.go @@ -7,7 +7,6 @@ import ( "strings" "github.com/samber/lo" - "golang.org/x/exp/maps" "golang.org/x/xerrors" "gopkg.in/yaml.v3" @@ -216,7 +215,7 @@ func (p *Parser) parseV9(lockFile LockFile) ([]ftypes.Package, []ftypes.Dependen } } - return maps.Values(resolvedPkgs), maps.Values(resolvedDeps) + return lo.Values(resolvedPkgs), lo.Values(resolvedDeps) } // markRootPkgs sets `Dev` to false for non dev dependency. 
diff --git a/pkg/dependency/parser/php/composer/parse.go b/pkg/dependency/parser/php/composer/parse.go index c95901686ba0..af99ceaf8be2 100644 --- a/pkg/dependency/parser/php/composer/parse.go +++ b/pkg/dependency/parser/php/composer/parse.go @@ -6,7 +6,7 @@ import ( "strings" "github.com/liamg/jfather" - "golang.org/x/exp/maps" + "github.com/samber/lo" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/dependency" @@ -98,7 +98,7 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]ftypes.Package, []ftypes.Dependenc }) } - pkgSlice := maps.Values(pkgs) + pkgSlice := lo.Values(pkgs) sort.Sort(ftypes.Packages(pkgSlice)) sort.Sort(deps) diff --git a/pkg/dependency/parser/ruby/bundler/parse.go b/pkg/dependency/parser/ruby/bundler/parse.go index 89f3a9ab4ab8..12dff78bc7c0 100644 --- a/pkg/dependency/parser/ruby/bundler/parse.go +++ b/pkg/dependency/parser/ruby/bundler/parse.go @@ -5,7 +5,7 @@ import ( "sort" "strings" - "golang.org/x/exp/maps" + "github.com/samber/lo" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/dependency" @@ -103,7 +103,7 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]ftypes.Package, []ftypes.Dependenc return nil, nil, xerrors.Errorf("scan error: %w", err) } - pkgSlice := maps.Values(pkgs) + pkgSlice := lo.Values(pkgs) sort.Sort(ftypes.Packages(pkgSlice)) return pkgSlice, deps, nil } diff --git a/pkg/dependency/parser/sbt/lockfile/parse.go b/pkg/dependency/parser/sbt/lockfile/parse.go new file mode 100644 index 000000000000..3b5b1865903d --- /dev/null +++ b/pkg/dependency/parser/sbt/lockfile/parse.go @@ -0,0 +1,84 @@ +package lockfile + +import ( + "io" + "slices" + "sort" + + "github.com/liamg/jfather" + "golang.org/x/xerrors" + + "github.com/aquasecurity/trivy/pkg/dependency" + ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" + xio "github.com/aquasecurity/trivy/pkg/x/io" +) + +// lockfile format defined at: https://stringbean.github.io/sbt-dependency-lock/file-formats/version-1.html +type sbtLockfile struct { + Version int `json:"lockVersion"` + Dependencies []sbtLockfileDependency `json:"dependencies"` +} + +type sbtLockfileDependency struct { + Organization string `json:"org"` + Name string `json:"name"` + Version string `json:"version"` + Configurations []string `json:"configurations"` + StartLine int + EndLine int +} + +type Parser struct{} + +func NewParser() *Parser { + return &Parser{} +} + +func (Parser) Parse(r xio.ReadSeekerAt) ([]ftypes.Package, []ftypes.Dependency, error) { + var lockfile sbtLockfile + input, err := io.ReadAll(r) + + if err != nil { + return nil, nil, xerrors.Errorf("failed to read sbt lockfile: %w", err) + } + if err := jfather.Unmarshal(input, &lockfile); err != nil { + return nil, nil, xerrors.Errorf("JSON decoding failed: %w", err) + } + + var libraries ftypes.Packages + + for _, dep := range lockfile.Dependencies { + if slices.ContainsFunc(dep.Configurations, isIncludedConfig) { + name := dep.Organization + ":" + dep.Name + libraries = append(libraries, ftypes.Package{ + ID: dependency.ID(ftypes.Sbt, name, dep.Version), + Name: name, + Version: dep.Version, + Locations: []ftypes.Location{ + { + StartLine: dep.StartLine, + EndLine: dep.EndLine, + }, + }, + }) + } + } + + sort.Sort(libraries) + return libraries, nil, nil +} + +// UnmarshalJSONWithMetadata needed to detect start and end lines of deps +func (t *sbtLockfileDependency) UnmarshalJSONWithMetadata(node jfather.Node) error { + if err := node.Decode(&t); err != nil { + return err + } + // 
Decode func will overwrite line numbers if we save them first + t.StartLine = node.Range().Start.Line + t.EndLine = node.Range().End.Line + return nil +} + +func isIncludedConfig(config string) bool { + return config == "compile" || config == "runtime" +} diff --git a/pkg/dependency/parser/sbt/lockfile/parse_test.go b/pkg/dependency/parser/sbt/lockfile/parse_test.go new file mode 100644 index 000000000000..a11e7b8aedb2 --- /dev/null +++ b/pkg/dependency/parser/sbt/lockfile/parse_test.go @@ -0,0 +1,77 @@ +package lockfile + +import ( + "os" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" +) + +func TestParser_Parse(t *testing.T) { + tests := []struct { + name string + inputFile string + want []ftypes.Package + }{ + { + name: "v1 happy path", + inputFile: "testdata/v1_happy.sbt.lock", + want: []ftypes.Package{ + { + ID: "org.apache.commons:commons-lang3:3.9", + Name: "org.apache.commons:commons-lang3", + Version: "3.9", + Locations: []ftypes.Location{ + { + StartLine: 10, + EndLine: 25, + }, + }, + }, + { + ID: "org.scala-lang:scala-library:2.12.10", + Name: "org.scala-lang:scala-library", + Version: "2.12.10", + Locations: []ftypes.Location{ + { + StartLine: 26, + EndLine: 41, + }, + }, + }, + { + ID: "org.typelevel:cats-core_2.12:2.9.0", + Name: "org.typelevel:cats-core_2.12", + Version: "2.9.0", + Locations: []ftypes.Location{ + { + StartLine: 42, + EndLine: 57, + }, + }, + }, + }, + }, + { + name: "empty", + inputFile: "testdata/empty.sbt.lock", + want: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + parser := NewParser() + f, err := os.Open(tt.inputFile) + require.NoError(t, err) + + libs, _, err := parser.Parse(f) + require.NoError(t, err) + + assert.Equal(t, tt.want, libs) + }) + } +} diff --git a/pkg/dependency/parser/sbt/lockfile/testdata/empty.sbt.lock b/pkg/dependency/parser/sbt/lockfile/testdata/empty.sbt.lock new file mode 100644 index 000000000000..6125547882da --- /dev/null +++ b/pkg/dependency/parser/sbt/lockfile/testdata/empty.sbt.lock @@ -0,0 +1,10 @@ +{ + "lockVersion": 1, + "timestamp": "2024-06-05T13:41:10.992Z", + "configurations": [ + "compile", + "runtime", + "test" + ], + "dependencies": [] +} \ No newline at end of file diff --git a/pkg/dependency/parser/sbt/lockfile/testdata/v1_happy.sbt.lock b/pkg/dependency/parser/sbt/lockfile/testdata/v1_happy.sbt.lock new file mode 100644 index 000000000000..0dcba8ba09c6 --- /dev/null +++ b/pkg/dependency/parser/sbt/lockfile/testdata/v1_happy.sbt.lock @@ -0,0 +1,73 @@ +{ + "lockVersion": 1, + "timestamp": "2024-06-05T13:41:10.992Z", + "configurations": [ + "compile", + "runtime", + "test" + ], + "dependencies": [ + { + "org": "org.apache.commons", + "name": "commons-lang3", + "version": "3.9", + "artifacts": [ + { + "name": "commons-lang3.jar", + "hash": "sha1:0122c7cee69b53ed4a7681c03d4ee4c0e2765da5" + } + ], + "configurations": [ + "test", + "compile", + "runtime" + ] + }, + { + "org": "org.scala-lang", + "name": "scala-library", + "version": "2.12.10", + "artifacts": [ + { + "name": "scala-library.jar", + "hash": "sha1:3509860bc2e5b3da001ed45aca94ffbe5694dbda" + } + ], + "configurations": [ + "test", + "compile", + "runtime" + ] + }, + { + "org" : "org.typelevel", + "name" : "cats-core_2.12", + "version" : "2.9.0", + "artifacts" : [ + { + "name" : "cats-core_2.12.jar", + "hash" : "sha1:844f21541d1809008586fbc1172dc02c96476639" + } + ], + "configurations" : [ + 
"compile", + "runtime", + "test" + ] + }, + { + "org" : "org.scalatest", + "name" : "scalatest-core_2.13", + "version" : "3.2.15", + "artifacts" : [ + { + "name" : "scalatest-core_2.13.jar", + "hash" : "sha1:231d1f4049a9fa4bd65c17b806a58180b9f4abe1" + } + ], + "configurations" : [ + "test" + ] + } + ] +} \ No newline at end of file diff --git a/pkg/dependency/parser/swift/cocoapods/parse.go b/pkg/dependency/parser/swift/cocoapods/parse.go index 27438c86e17f..2d946d417447 100644 --- a/pkg/dependency/parser/swift/cocoapods/parse.go +++ b/pkg/dependency/parser/swift/cocoapods/parse.go @@ -4,7 +4,7 @@ import ( "sort" "strings" - "golang.org/x/exp/maps" + "github.com/samber/lo" "golang.org/x/xerrors" "gopkg.in/yaml.v3" @@ -86,7 +86,7 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]ftypes.Package, []ftypes.Dependenc } sort.Sort(deps) - return utils.UniquePackages(maps.Values(parsedDeps)), deps, nil + return utils.UniquePackages(lo.Values(parsedDeps)), deps, nil } func parseDep(dep string) (ftypes.Package, error) { diff --git a/pkg/dependency/parser/utils/utils.go b/pkg/dependency/parser/utils/utils.go index f22e994a7cb0..ce2aff36976b 100644 --- a/pkg/dependency/parser/utils/utils.go +++ b/pkg/dependency/parser/utils/utils.go @@ -2,9 +2,10 @@ package utils import ( "fmt" + "maps" "sort" - "golang.org/x/exp/maps" + "github.com/samber/lo" ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" ) @@ -48,7 +49,7 @@ func UniquePackages(pkgs []ftypes.Package) []ftypes.Package { } } } - pkgSlice := maps.Values(unique) + pkgSlice := lo.Values(unique) sort.Sort(ftypes.Packages(pkgSlice)) return pkgSlice diff --git a/pkg/detector/library/driver.go b/pkg/detector/library/driver.go index f78932b13442..6990d3c7e84d 100644 --- a/pkg/detector/library/driver.go +++ b/pkg/detector/library/driver.go @@ -33,13 +33,13 @@ func NewDriver(libType ftypes.LangType) (Driver, bool) { case ftypes.RustBinary, ftypes.Cargo: ecosystem = vulnerability.Cargo comparer = compare.GenericComparer{} - case ftypes.Composer: + case ftypes.Composer, ftypes.ComposerVendor: ecosystem = vulnerability.Composer comparer = compare.GenericComparer{} case ftypes.GoBinary, ftypes.GoModule: ecosystem = vulnerability.Go comparer = compare.GenericComparer{} - case ftypes.Jar, ftypes.Pom, ftypes.Gradle: + case ftypes.Jar, ftypes.Pom, ftypes.Gradle, ftypes.Sbt: ecosystem = vulnerability.Maven comparer = maven.Comparer{} case ftypes.Npm, ftypes.Yarn, ftypes.Pnpm, ftypes.NodePkg, ftypes.JavaScript: diff --git a/pkg/detector/library/driver_test.go b/pkg/detector/library/driver_test.go index e6722e841e8e..10c3ad304f29 100644 --- a/pkg/detector/library/driver_test.go +++ b/pkg/detector/library/driver_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy-db/pkg/db" dbTypes "github.com/aquasecurity/trivy-db/pkg/types" "github.com/aquasecurity/trivy-db/pkg/vulnsrc/vulnerability" - "github.com/aquasecurity/trivy/pkg/dbtest" + "github.com/aquasecurity/trivy/internal/dbtest" "github.com/aquasecurity/trivy/pkg/detector/library" ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/types" diff --git a/pkg/detector/ospkg/alma/alma_test.go b/pkg/detector/ospkg/alma/alma_test.go index 9c5f3023563a..c736f4b12c74 100644 --- a/pkg/detector/ospkg/alma/alma_test.go +++ b/pkg/detector/ospkg/alma/alma_test.go @@ -11,8 +11,8 @@ import ( "github.com/aquasecurity/trivy-db/pkg/db" dbTypes 
"github.com/aquasecurity/trivy-db/pkg/types" "github.com/aquasecurity/trivy-db/pkg/vulnsrc/vulnerability" + "github.com/aquasecurity/trivy/internal/dbtest" "github.com/aquasecurity/trivy/pkg/clock" - "github.com/aquasecurity/trivy/pkg/dbtest" "github.com/aquasecurity/trivy/pkg/detector/ospkg/alma" ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/types" diff --git a/pkg/detector/ospkg/alpine/alpine_test.go b/pkg/detector/ospkg/alpine/alpine_test.go index 4ea5d59024f8..9dad87c4b50d 100644 --- a/pkg/detector/ospkg/alpine/alpine_test.go +++ b/pkg/detector/ospkg/alpine/alpine_test.go @@ -12,8 +12,8 @@ import ( "github.com/aquasecurity/trivy-db/pkg/db" dbTypes "github.com/aquasecurity/trivy-db/pkg/types" "github.com/aquasecurity/trivy-db/pkg/vulnsrc/vulnerability" + "github.com/aquasecurity/trivy/internal/dbtest" "github.com/aquasecurity/trivy/pkg/clock" - "github.com/aquasecurity/trivy/pkg/dbtest" "github.com/aquasecurity/trivy/pkg/detector/ospkg/alpine" ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/types" diff --git a/pkg/detector/ospkg/amazon/amazon_test.go b/pkg/detector/ospkg/amazon/amazon_test.go index 3ac7b55d3ad0..98adad931959 100644 --- a/pkg/detector/ospkg/amazon/amazon_test.go +++ b/pkg/detector/ospkg/amazon/amazon_test.go @@ -11,8 +11,8 @@ import ( "github.com/aquasecurity/trivy-db/pkg/db" dbTypes "github.com/aquasecurity/trivy-db/pkg/types" "github.com/aquasecurity/trivy-db/pkg/vulnsrc/vulnerability" + "github.com/aquasecurity/trivy/internal/dbtest" "github.com/aquasecurity/trivy/pkg/clock" - "github.com/aquasecurity/trivy/pkg/dbtest" "github.com/aquasecurity/trivy/pkg/detector/ospkg/amazon" ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/types" diff --git a/pkg/detector/ospkg/chainguard/chainguard_test.go b/pkg/detector/ospkg/chainguard/chainguard_test.go index af6e81afcc84..0c49077aee3e 100644 --- a/pkg/detector/ospkg/chainguard/chainguard_test.go +++ b/pkg/detector/ospkg/chainguard/chainguard_test.go @@ -10,7 +10,7 @@ import ( "github.com/aquasecurity/trivy-db/pkg/db" dbTypes "github.com/aquasecurity/trivy-db/pkg/types" "github.com/aquasecurity/trivy-db/pkg/vulnsrc/vulnerability" - "github.com/aquasecurity/trivy/pkg/dbtest" + "github.com/aquasecurity/trivy/internal/dbtest" "github.com/aquasecurity/trivy/pkg/detector/ospkg/chainguard" ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/types" diff --git a/pkg/detector/ospkg/debian/debian_test.go b/pkg/detector/ospkg/debian/debian_test.go index fa0e334eff33..790067c293ff 100644 --- a/pkg/detector/ospkg/debian/debian_test.go +++ b/pkg/detector/ospkg/debian/debian_test.go @@ -12,8 +12,8 @@ import ( "github.com/aquasecurity/trivy-db/pkg/db" dbTypes "github.com/aquasecurity/trivy-db/pkg/types" "github.com/aquasecurity/trivy-db/pkg/vulnsrc/vulnerability" + "github.com/aquasecurity/trivy/internal/dbtest" "github.com/aquasecurity/trivy/pkg/clock" - "github.com/aquasecurity/trivy/pkg/dbtest" "github.com/aquasecurity/trivy/pkg/detector/ospkg/debian" ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" 
"github.com/aquasecurity/trivy/pkg/types" diff --git a/pkg/detector/ospkg/mariner/mariner_test.go b/pkg/detector/ospkg/mariner/mariner_test.go index 7c7410f28234..6e1ee9a37583 100644 --- a/pkg/detector/ospkg/mariner/mariner_test.go +++ b/pkg/detector/ospkg/mariner/mariner_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy-db/pkg/db" dbTypes "github.com/aquasecurity/trivy-db/pkg/types" "github.com/aquasecurity/trivy-db/pkg/vulnsrc/vulnerability" - "github.com/aquasecurity/trivy/pkg/dbtest" + "github.com/aquasecurity/trivy/internal/dbtest" "github.com/aquasecurity/trivy/pkg/detector/ospkg/mariner" ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/types" diff --git a/pkg/detector/ospkg/oracle/oracle_test.go b/pkg/detector/ospkg/oracle/oracle_test.go index 7e5e9b6a7f7b..6fdc73a90e6a 100644 --- a/pkg/detector/ospkg/oracle/oracle_test.go +++ b/pkg/detector/ospkg/oracle/oracle_test.go @@ -11,8 +11,8 @@ import ( "github.com/aquasecurity/trivy-db/pkg/db" dbTypes "github.com/aquasecurity/trivy-db/pkg/types" "github.com/aquasecurity/trivy-db/pkg/vulnsrc/vulnerability" + "github.com/aquasecurity/trivy/internal/dbtest" "github.com/aquasecurity/trivy/pkg/clock" - "github.com/aquasecurity/trivy/pkg/dbtest" ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/types" ) diff --git a/pkg/detector/ospkg/photon/photon_test.go b/pkg/detector/ospkg/photon/photon_test.go index ffa978adc612..8d68ea08680e 100644 --- a/pkg/detector/ospkg/photon/photon_test.go +++ b/pkg/detector/ospkg/photon/photon_test.go @@ -11,8 +11,8 @@ import ( "github.com/aquasecurity/trivy-db/pkg/db" dbTypes "github.com/aquasecurity/trivy-db/pkg/types" "github.com/aquasecurity/trivy-db/pkg/vulnsrc/vulnerability" + "github.com/aquasecurity/trivy/internal/dbtest" "github.com/aquasecurity/trivy/pkg/clock" - "github.com/aquasecurity/trivy/pkg/dbtest" "github.com/aquasecurity/trivy/pkg/detector/ospkg/photon" ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/types" diff --git a/pkg/detector/ospkg/redhat/redhat.go b/pkg/detector/ospkg/redhat/redhat.go index 277fa6203424..d8d9e0052920 100644 --- a/pkg/detector/ospkg/redhat/redhat.go +++ b/pkg/detector/ospkg/redhat/redhat.go @@ -3,13 +3,13 @@ package redhat import ( "context" "fmt" + "slices" "sort" "strings" "time" version "github.com/knqyf263/go-rpm-version" - "golang.org/x/exp/maps" - "golang.org/x/exp/slices" + "github.com/samber/lo" "golang.org/x/xerrors" dbTypes "github.com/aquasecurity/trivy-db/pkg/types" @@ -176,7 +176,7 @@ func (s *Scanner) detect(osVer string, pkg ftypes.Package) ([]types.DetectedVuln } } - vulns := maps.Values(uniqVulns) + vulns := lo.Values(uniqVulns) sort.Slice(vulns, func(i, j int) bool { return vulns[i].VulnerabilityID < vulns[j].VulnerabilityID }) diff --git a/pkg/detector/ospkg/redhat/redhat_test.go b/pkg/detector/ospkg/redhat/redhat_test.go index 17a6a8768554..a1a62ac7666e 100644 --- a/pkg/detector/ospkg/redhat/redhat_test.go +++ b/pkg/detector/ospkg/redhat/redhat_test.go @@ -11,8 +11,8 @@ import ( dbTypes "github.com/aquasecurity/trivy-db/pkg/types" "github.com/aquasecurity/trivy-db/pkg/vulnsrc/vulnerability" + "github.com/aquasecurity/trivy/internal/dbtest" 
"github.com/aquasecurity/trivy/pkg/clock" - "github.com/aquasecurity/trivy/pkg/dbtest" "github.com/aquasecurity/trivy/pkg/detector/ospkg/redhat" ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/log" diff --git a/pkg/detector/ospkg/rocky/rocky_test.go b/pkg/detector/ospkg/rocky/rocky_test.go index e61c69076cce..ce5ad893bfc2 100644 --- a/pkg/detector/ospkg/rocky/rocky_test.go +++ b/pkg/detector/ospkg/rocky/rocky_test.go @@ -11,8 +11,8 @@ import ( "github.com/aquasecurity/trivy-db/pkg/db" dbTypes "github.com/aquasecurity/trivy-db/pkg/types" "github.com/aquasecurity/trivy-db/pkg/vulnsrc/vulnerability" + "github.com/aquasecurity/trivy/internal/dbtest" "github.com/aquasecurity/trivy/pkg/clock" - "github.com/aquasecurity/trivy/pkg/dbtest" "github.com/aquasecurity/trivy/pkg/detector/ospkg/rocky" ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/types" diff --git a/pkg/detector/ospkg/suse/suse.go b/pkg/detector/ospkg/suse/suse.go index a5ccade5c813..eb2fed82cda0 100644 --- a/pkg/detector/ospkg/suse/suse.go +++ b/pkg/detector/ospkg/suse/suse.go @@ -39,9 +39,10 @@ var ( "15.2": time.Date(2021, 12, 31, 23, 59, 59, 0, time.UTC), "15.3": time.Date(2022, 12, 31, 23, 59, 59, 0, time.UTC), "15.4": time.Date(2023, 12, 31, 23, 59, 59, 0, time.UTC), - "15.5": time.Date(2028, 12, 31, 23, 59, 59, 0, time.UTC), + "15.5": time.Date(2024, 12, 31, 23, 59, 59, 0, time.UTC), + "15.6": time.Date(2031, 7, 31, 23, 59, 59, 0, time.UTC), // 6 months after SLES 15 SP7 release - // "15.6": time.Date(2028, 12, 31, 23, 59, 59, 0, time.UTC), + // "15.7": time.Date(2031, 7, 31, 23, 59, 59, 0, time.UTC), } opensuseEolDates = map[string]time.Time{ @@ -55,6 +56,7 @@ var ( "15.3": time.Date(2022, 11, 30, 23, 59, 59, 0, time.UTC), "15.4": time.Date(2023, 11, 30, 23, 59, 59, 0, time.UTC), "15.5": time.Date(2024, 12, 31, 23, 59, 59, 0, time.UTC), + "15.6": time.Date(2025, 12, 31, 23, 59, 59, 0, time.UTC), } ) diff --git a/pkg/detector/ospkg/suse/suse_test.go b/pkg/detector/ospkg/suse/suse_test.go index 663e502e717c..011fc3332b6a 100644 --- a/pkg/detector/ospkg/suse/suse_test.go +++ b/pkg/detector/ospkg/suse/suse_test.go @@ -11,8 +11,8 @@ import ( "github.com/aquasecurity/trivy-db/pkg/db" dbTypes "github.com/aquasecurity/trivy-db/pkg/types" "github.com/aquasecurity/trivy-db/pkg/vulnsrc/vulnerability" + "github.com/aquasecurity/trivy/internal/dbtest" "github.com/aquasecurity/trivy/pkg/clock" - "github.com/aquasecurity/trivy/pkg/dbtest" "github.com/aquasecurity/trivy/pkg/detector/ospkg/suse" ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/types" diff --git a/pkg/detector/ospkg/ubuntu/ubuntu_test.go b/pkg/detector/ospkg/ubuntu/ubuntu_test.go index 750d49d4bd5f..044ea69b1056 100644 --- a/pkg/detector/ospkg/ubuntu/ubuntu_test.go +++ b/pkg/detector/ospkg/ubuntu/ubuntu_test.go @@ -12,8 +12,8 @@ import ( "github.com/aquasecurity/trivy-db/pkg/db" dbTypes "github.com/aquasecurity/trivy-db/pkg/types" "github.com/aquasecurity/trivy-db/pkg/vulnsrc/vulnerability" + "github.com/aquasecurity/trivy/internal/dbtest" "github.com/aquasecurity/trivy/pkg/clock" - "github.com/aquasecurity/trivy/pkg/dbtest" "github.com/aquasecurity/trivy/pkg/detector/ospkg/ubuntu" ftypes 
"github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/types" diff --git a/pkg/detector/ospkg/wolfi/wolfi_test.go b/pkg/detector/ospkg/wolfi/wolfi_test.go index 8df2c2ab0870..019d6627dc7c 100644 --- a/pkg/detector/ospkg/wolfi/wolfi_test.go +++ b/pkg/detector/ospkg/wolfi/wolfi_test.go @@ -10,7 +10,7 @@ import ( "github.com/aquasecurity/trivy-db/pkg/db" dbTypes "github.com/aquasecurity/trivy-db/pkg/types" "github.com/aquasecurity/trivy-db/pkg/vulnsrc/vulnerability" - "github.com/aquasecurity/trivy/pkg/dbtest" + "github.com/aquasecurity/trivy/internal/dbtest" "github.com/aquasecurity/trivy/pkg/detector/ospkg/wolfi" ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/types" diff --git a/pkg/downloader/download.go b/pkg/downloader/download.go index 0c9388248a6d..7190d3d3d0a3 100644 --- a/pkg/downloader/download.go +++ b/pkg/downloader/download.go @@ -2,16 +2,16 @@ package downloader import ( "context" + "maps" "os" getter "github.com/hashicorp/go-getter" - "golang.org/x/exp/maps" "golang.org/x/xerrors" ) // DownloadToTempDir downloads the configured source to a temp dir. -func DownloadToTempDir(ctx context.Context, url string) (string, error) { - tempDir, err := os.MkdirTemp("", "trivy-plugin") +func DownloadToTempDir(ctx context.Context, url string, insecure bool) (string, error) { + tempDir, err := os.MkdirTemp("", "trivy-download") if err != nil { return "", xerrors.Errorf("failed to create a temp dir: %w", err) } @@ -21,7 +21,7 @@ func DownloadToTempDir(ctx context.Context, url string) (string, error) { return "", xerrors.Errorf("unable to get the current dir: %w", err) } - if err = Download(ctx, url, tempDir, pwd); err != nil { + if err = Download(ctx, url, tempDir, pwd, insecure); err != nil { return "", xerrors.Errorf("download error: %w", err) } @@ -29,11 +29,14 @@ func DownloadToTempDir(ctx context.Context, url string) (string, error) { } // Download downloads the configured source to the destination. -func Download(ctx context.Context, src, dst, pwd string) error { +func Download(ctx context.Context, src, dst, pwd string, insecure bool) error { // go-getter doesn't allow the dst directory already exists if the src is directory. _ = os.RemoveAll(dst) var opts []getter.ClientOption + if insecure { + opts = append(opts, getter.WithInsecure()) + } // Clone the global map so that it will not be accessed concurrently. getters := maps.Clone(getter.Getters) @@ -41,6 +44,14 @@ func Download(ctx context.Context, src, dst, pwd string) error { // Overwrite the file getter so that a file will be copied getters["file"] = &getter.FileGetter{Copy: true} + // Since "httpGetter" is a global pointer and the state is shared, + // once it is executed without "WithInsecure()", + // it cannot enable WithInsecure() afterwards because its state is preserved. + // cf. 
https://github.com/hashicorp/go-getter/blob/5a63fd9c0d5b8da8a6805e8c283f46f0dacb30b3/get.go#L63-L65 + httpGetter := &getter.HttpGetter{Netrc: true} + getters["http"] = httpGetter + getters["https"] = httpGetter + // Build the client client := &getter.Client{ Ctx: ctx, diff --git a/pkg/downloader/downloader_test.go b/pkg/downloader/downloader_test.go new file mode 100644 index 000000000000..80e7ef530310 --- /dev/null +++ b/pkg/downloader/downloader_test.go @@ -0,0 +1,61 @@ +package downloader_test + +import ( + "context" + "net/http" + "net/http/httptest" + "os" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy/pkg/downloader" +) + +func TestDownload(t *testing.T) { + // Set up a test server with a self-signed certificate + server := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + _, err := w.Write([]byte("test content")) + require.NoError(t, err) + })) + defer server.Close() + + tests := []struct { + name string + insecure bool + wantErr bool + }{ + { + "Secure (should fail)", + false, + true, + }, + { + "Insecure (should succeed)", + true, + false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Set up the destination path + dst := t.TempDir() + + // Execute the download + err := downloader.Download(context.Background(), server.URL, dst, "", tt.insecure) + + if tt.wantErr { + assert.Error(t, err) + return + } + require.NoError(t, err) + + // Check the content of the downloaded file + content, err := os.ReadFile(dst) + require.NoError(t, err) + assert.Equal(t, "test content", string(content)) + }) + } +} diff --git a/pkg/fanal/analyzer/all/import.go b/pkg/fanal/analyzer/all/import.go index a5b0d05298a1..1849bcebf682 100644 --- a/pkg/fanal/analyzer/all/import.go +++ b/pkg/fanal/analyzer/all/import.go @@ -20,6 +20,7 @@ import ( _ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/java/gradle" _ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/java/jar" _ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/java/pom" + _ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/java/sbt" _ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/julia/pkg" _ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/nodejs/npm" _ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/nodejs/pkg" diff --git a/pkg/fanal/analyzer/analyzer.go b/pkg/fanal/analyzer/analyzer.go index 56bb518f1b73..d6defb45aae5 100644 --- a/pkg/fanal/analyzer/analyzer.go +++ b/pkg/fanal/analyzer/analyzer.go @@ -6,12 +6,12 @@ import ( "io/fs" "os" "regexp" + "slices" "sort" "strings" "sync" "github.com/samber/lo" - "golang.org/x/exp/slices" "golang.org/x/sync/semaphore" "golang.org/x/xerrors" diff --git a/pkg/fanal/analyzer/config_analyzer.go b/pkg/fanal/analyzer/config_analyzer.go index c5682694a5c3..cddff6412e6a 100644 --- a/pkg/fanal/analyzer/config_analyzer.go +++ b/pkg/fanal/analyzer/config_analyzer.go @@ -2,9 +2,9 @@ package analyzer import ( "context" + "slices" v1 "github.com/google/go-containerregistry/pkg/v1" - "golang.org/x/exp/slices" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/fanal/types" diff --git a/pkg/fanal/analyzer/const.go b/pkg/fanal/analyzer/const.go index 99ac3cf4bee5..6e9d0332eb61 100644 --- a/pkg/fanal/analyzer/const.go +++ b/pkg/fanal/analyzer/const.go @@ -49,12 +49,14 @@ const ( 
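For reference, a hedged usage sketch of the widened downloader API from the `pkg/downloader` hunks earlier in this diff; the URL is a placeholder and error handling is minimal:

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/aquasecurity/trivy/pkg/downloader"
)

func main() {
	ctx := context.Background()

	// DownloadToTempDir now takes the insecure flag; passing true skips TLS
	// verification, mirroring the self-signed-certificate case in the test above.
	dir, err := downloader.DownloadToTempDir(ctx, "https://example.com/archive.tar.gz", true)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("downloaded to", dir)
}
```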
TypeCargo Type = "cargo" // PHP - TypeComposer Type = "composer" + TypeComposer Type = "composer" + TypeComposerVendor Type = "composer-vendor" // Java TypeJar Type = "jar" TypePom Type = "pom" TypeGradleLock Type = "gradle-lockfile" + TypeSbtLock Type = "sbt-lockfile" // Node.js TypeNpmPkgLock Type = "npm" @@ -173,6 +175,7 @@ var ( TypeJar, TypePom, TypeGradleLock, + TypeSbtLock, TypeNpmPkgLock, TypeNodePkg, TypeYarn, @@ -210,11 +213,13 @@ var ( TypePom, TypeConanLock, TypeGradleLock, + TypeSbtLock, TypeCocoaPods, TypeSwift, TypePubSpecLock, TypeMixLock, TypeCondaEnv, + TypeComposer, } // TypeIndividualPkgs has all analyzers for individual packages @@ -226,6 +231,7 @@ var ( TypeGoBinary, TypeJar, TypeRustBinary, + TypeComposerVendor, } // TypeConfigFiles has all config file analyzers diff --git a/pkg/fanal/analyzer/imgconf/apk/apk.go b/pkg/fanal/analyzer/imgconf/apk/apk.go index 430dc766d161..794eb9797e5d 100644 --- a/pkg/fanal/analyzer/imgconf/apk/apk.go +++ b/pkg/fanal/analyzer/imgconf/apk/apk.go @@ -13,7 +13,7 @@ import ( "time" v1 "github.com/google/go-containerregistry/pkg/v1" - "golang.org/x/exp/maps" + "github.com/samber/lo" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" @@ -138,7 +138,7 @@ func (a alpineCmdAnalyzer) parseConfig(apkIndexArchive *apkIndex, config *v1.Con } } - return maps.Values(uniqPkgs) + return lo.Values(uniqPkgs) } func (a alpineCmdAnalyzer) parseCommand(command string, envs map[string]string) (pkgs []string) { diff --git a/pkg/fanal/analyzer/language/c/conan/conan.go b/pkg/fanal/analyzer/language/c/conan/conan.go index 50252c2bb603..a32591dae7fe 100644 --- a/pkg/fanal/analyzer/language/c/conan/conan.go +++ b/pkg/fanal/analyzer/language/c/conan/conan.go @@ -11,6 +11,7 @@ import ( "sort" "strings" + "github.com/samber/lo" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/dependency/parser/c/conan" @@ -44,7 +45,8 @@ func newConanLockAnalyzer(_ analyzer.AnalyzerOptions) (analyzer.PostAnalyzer, er func (a conanLockAnalyzer) PostAnalyze(_ context.Context, input analyzer.PostAnalysisInput) (*analyzer.AnalysisResult, error) { required := func(filePath string, d fs.DirEntry) bool { - return a.Required(filePath, nil) + // we need all file got from `a.Required` function (conan.lock files) and from file-patterns. + return true } licenses, err := licensesFromCache() @@ -85,19 +87,13 @@ func (a conanLockAnalyzer) PostAnalyze(_ context.Context, input analyzer.PostAna } func licensesFromCache() (map[string]string, error) { - required := func(filePath string, d fs.DirEntry) bool { - return filepath.Base(filePath) == "conanfile.py" - } - - // cf. https://docs.conan.io/1/mastering/custom_cache.html - cacheDir := os.Getenv("CONAN_USER_HOME") - if cacheDir == "" { - cacheDir, _ = os.UserHomeDir() + cacheDir, err := detectCacheDir() + if err != nil { + return nil, err } - cacheDir = path.Join(cacheDir, ".conan", "data") - if !fsutils.DirExists(cacheDir) { - return nil, xerrors.Errorf("the Conan cache directory (%s) was not found.", cacheDir) + required := func(filePath string, d fs.DirEntry) bool { + return filepath.Base(filePath) == "conanfile.py" } licenses := make(map[string]string) @@ -154,6 +150,36 @@ func detectAttribute(attributeName, line string) string { return "" } +func detectCacheDir() (string, error) { + home, _ := os.UserHomeDir() + dirs := []string{ + // conan v2 uses `CONAN_HOME` env + // cf. 
https://docs.conan.io/2/reference/environment.html#conan-home + // `.conan2` dir is omitted for this env + lo.Ternary(os.Getenv("CONAN_HOME") != "", path.Join(os.Getenv("CONAN_HOME"), "p"), ""), + // conan v1 uses `CONAN_USER_HOME` env + // cf. https://docs.conan.io/en/1.64/reference/env_vars.html#conan-user-home + // `.conan` dir is used for this env + lo.Ternary(os.Getenv("CONAN_USER_HOME") != "", path.Join(os.Getenv("CONAN_USER_HOME"), ".conan", "data"), ""), + // `/.conan2` is default directory for conan v2 + // cf. https://docs.conan.io/2/reference/environment.html#conan-home + path.Join(home, ".conan2", "p"), + // `/.conan` is default directory for conan v1 + // cf. https://docs.conan.io/1/mastering/custom_cache.html + path.Join(home, ".conan", "data"), + } + + for _, dir := range dirs { + if dir != "" { + if fsutils.DirExists(dir) { + return dir, nil + } + } + } + + return "", xerrors.Errorf("the Conan cache directory was not found.") +} + func (a conanLockAnalyzer) Required(filePath string, _ os.FileInfo) bool { // Lock file name can be anything // cf. https://docs.conan.io/1/versioning/lockfiles/introduction.html#locking-dependencies diff --git a/pkg/fanal/analyzer/language/c/conan/conan_test.go b/pkg/fanal/analyzer/language/c/conan/conan_test.go index bfc2054ebe66..7f33af4df669 100644 --- a/pkg/fanal/analyzer/language/c/conan/conan_test.go +++ b/pkg/fanal/analyzer/language/c/conan/conan_test.go @@ -16,11 +16,11 @@ func Test_conanLockAnalyzer_Analyze(t *testing.T) { tests := []struct { name string dir string - cacheDir string + cacheDir map[string]string want *analyzer.AnalysisResult }{ { - name: "happy path", + name: "happy path V1", dir: "testdata/happy", want: &analyzer.AnalysisResult{ Applications: []types.Application{ @@ -62,9 +62,11 @@ func Test_conanLockAnalyzer_Analyze(t *testing.T) { }, }, { - name: "happy path with cache dir", - dir: "testdata/happy", - cacheDir: "testdata/cacheDir", + name: "happy path V1 with cache dir", + dir: "testdata/happy", + cacheDir: map[string]string{ + "CONAN_USER_HOME": "testdata/cacheDir", + }, want: &analyzer.AnalysisResult{ Applications: []types.Application{ { @@ -110,6 +112,92 @@ func Test_conanLockAnalyzer_Analyze(t *testing.T) { }, }, }, + { + name: "happy path V2", + dir: "testdata/happy_v2", + want: &analyzer.AnalysisResult{ + Applications: []types.Application{ + { + Type: types.Conan, + FilePath: "release.lock", + Packages: types.Packages{ + { + ID: "openssl/3.2.2", + Name: "openssl", + Version: "3.2.2", + Relationship: types.RelationshipUnknown, + Locations: []types.Location{ + { + StartLine: 5, + EndLine: 5, + }, + }, + }, + { + ID: "zlib/1.3.1", + Name: "zlib", + Version: "1.3.1", + Relationship: types.RelationshipUnknown, + Locations: []types.Location{ + { + StartLine: 4, + EndLine: 4, + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "happy path V2 with cache dir", + dir: "testdata/happy_v2", + cacheDir: map[string]string{ + "CONAN_HOME": "testdata/cacheDir_v2", + }, + want: &analyzer.AnalysisResult{ + Applications: []types.Application{ + { + Type: types.Conan, + FilePath: "release.lock", + Packages: types.Packages{ + + { + ID: "openssl/3.2.2", + Name: "openssl", + Version: "3.2.2", + Relationship: types.RelationshipUnknown, + Locations: []types.Location{ + { + StartLine: 5, + EndLine: 5, + }, + }, + Licenses: []string{ + "Apache-2.0", + }, + }, + { + ID: "zlib/1.3.1", + Name: "zlib", + Version: "1.3.1", + Relationship: types.RelationshipUnknown, + Locations: []types.Location{ + { + StartLine: 4, + EndLine: 4, + }, + }, + 
Licenses: []string{ + "Zlib", + }, + }, + }, + }, + }, + }, + }, { name: "empty file", dir: "testdata/empty", @@ -119,8 +207,11 @@ func Test_conanLockAnalyzer_Analyze(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - if tt.cacheDir != "" { - t.Setenv("CONAN_USER_HOME", tt.cacheDir) + if len(tt.cacheDir) > 0 { + for env, path := range tt.cacheDir { + t.Setenv(env, path) + break + } } a, err := newConanLockAnalyzer(analyzer.AnalyzerOptions{}) require.NoError(t, err) diff --git a/pkg/fanal/analyzer/language/c/conan/testdata/cacheDir_v2/p/opens464b5c427ce9d/e/conanfile.py b/pkg/fanal/analyzer/language/c/conan/testdata/cacheDir_v2/p/opens464b5c427ce9d/e/conanfile.py new file mode 100644 index 000000000000..ba393a6668fe --- /dev/null +++ b/pkg/fanal/analyzer/language/c/conan/testdata/cacheDir_v2/p/opens464b5c427ce9d/e/conanfile.py @@ -0,0 +1,675 @@ +from conan import ConanFile +from conan.errors import ConanInvalidConfiguration +from conan.tools.apple import fix_apple_shared_install_name, is_apple_os, XCRun +from conan.tools.build import build_jobs +from conan.tools.files import chdir, copy, get, rename, replace_in_file, rmdir, save +from conan.tools.gnu import AutotoolsToolchain +from conan.tools.layout import basic_layout +from conan.tools.microsoft import is_msvc, msvc_runtime_flag, unix_path + +import fnmatch +import os +import textwrap + +required_conan_version = ">=1.57.0" + + +class OpenSSLConan(ConanFile): + name="openssl" + settings = "os", "arch", "compiler", "build_type" + url = "https://github.com/conan-io/conan-center-index" + homepage = "https://github.com/openssl/openssl" + license="Apache-2.0" + topics = ("ssl", "tls", "encryption", "security") + description = "A toolkit for the Transport Layer Security (TLS) and Secure Sockets Layer (SSL) protocols" + options = { + "shared": [True, False], + "fPIC": [True, False], + "enable_weak_ssl_ciphers": [True, False], + "386": [True, False], + "capieng_dialog": [True, False], + "enable_capieng": [True, False], + "no_aria": [True, False], + "no_asm": [True, False], + "no_async": [True, False], + "no_blake2": [True, False], + "no_bf": [True, False], + "no_camellia": [True, False], + "no_chacha": [True, False], + "no_cms": [True, False], + "no_comp": [True, False], + "no_ct": [True, False], + "no_cast": [True, False], + "no_deprecated": [True, False], + "no_des": [True, False], + "no_dgram": [True, False], + "no_dh": [True, False], + "no_dsa": [True, False], + "no_dso": [True, False], + "no_ec": [True, False], + "no_ecdh": [True, False], + "no_ecdsa": [True, False], + "no_engine": [True, False], + "no_filenames": [True, False], + "no_fips": [True, False], + "no_gost": [True, False], + "no_idea": [True, False], + "no_legacy": [True, False], + "no_md2": [True, False], + "no_md4": [True, False], + "no_mdc2": [True, False], + "no_module": [True, False], + "no_ocsp": [True, False], + "no_pinshared": [True, False], + "no_rc2": [True, False], + "no_rc4": [True, False], + "no_rc5": [True, False], + "no_rfc3779": [True, False], + "no_rmd160": [True, False], + "no_sm2": [True, False], + "no_sm3": [True, False], + "no_sm4": [True, False], + "no_srp": [True, False], + "no_srtp": [True, False], + "no_sse2": [True, False], + "no_ssl": [True, False], + "no_stdio": [True, False], + "no_seed": [True, False], + "no_sock": [True, False], + "no_ssl3": [True, False], + "no_threads": [True, False], + "no_tls1": [True, False], + "no_ts": [True, False], + "no_whirlpool": [True, False], + "no_zlib": [True, False], + 
"openssldir": [None, "ANY"], + } + default_options = {key: False for key in options.keys()} + default_options["fPIC"] = True + default_options["no_md2"] = True + default_options["openssldir"] = None + + @property + def _settings_build(self): + return getattr(self, "settings_build", self.settings) + + def config_options(self): + if self.settings.os != "Windows": + self.options.rm_safe("capieng_dialog") + self.options.rm_safe("enable_capieng") + else: + self.options.rm_safe("fPIC") + + if self.settings.os == "Emscripten": + self.options.no_asm = True + self.options.no_threads = True + self.options.no_stdio = True + + def configure(self): + if self.options.shared: + self.options.rm_safe("fPIC") + self.settings.rm_safe("compiler.libcxx") + self.settings.rm_safe("compiler.cppstd") + + def requirements(self): + if not self.options.no_zlib: + self.requires("zlib/1.2.13") + + def build_requirements(self): + if self._settings_build.os == "Windows": + if not self.options.no_asm: + self.tool_requires("nasm/2.15.05") + if self._use_nmake: + self.tool_requires("strawberryperl/5.32.1.1") + else: + self.win_bash = True + if not self.conf.get("tools.microsoft.bash:path", check_type=str): + self.tool_requires("msys2/cci.latest") + + def validate(self): + if self.settings.os == "Emscripten": + if not all((self.options.no_asm, self.options.no_threads, self.options.no_stdio)): + raise ConanInvalidConfiguration("os=Emscripten requires openssl:{no_asm,no_threads,no_stdio}=True") + + if self.settings.os == "iOS" and self.options.shared: + raise ConanInvalidConfiguration("OpenSSL 3 does not support building shared libraries for iOS") + + def layout(self): + basic_layout(self, src_folder="src") + + @property + def _is_clangcl(self): + return self.settings.compiler == "clang" and self.settings.os == "Windows" + + @property + def _is_mingw(self): + return self.settings.os == "Windows" and self.settings.compiler == "gcc" + + @property + def _use_nmake(self): + return self._is_clangcl or is_msvc(self) + + def source(self): + get(self, **self.conan_data["sources"][self.version], + destination=self.source_folder, strip_root=True) + + @property + def _target(self): + target = f"conan-{self.settings.build_type}-{self.settings.os}-{self.settings.arch}-{self.settings.compiler}-{self.settings.compiler.version}" + if self._use_nmake: + target = f"VC-{target}" # VC- prefix is important as it's checked by Configure + if self._is_mingw: + target = f"mingw-{target}" + return target + + @property + def _perlasm_scheme(self): + # right now, we need to tweak this for iOS & Android only, as they inherit from generic targets + if self.settings.os in ("iOS", "watchOS", "tvOS"): + return { + "armv7": "ios32", + "armv7s": "ios32", + "armv8": "ios64", + "armv8_32": "ios64", + "armv8.3": "ios64", + "armv7k": "ios32", + }.get(str(self.settings.arch), None) + elif self.settings.os == "Android": + return { + "armv7": "void", + "armv8": "linux64", + "mips": "o32", + "mips64": "64", + "x86": "android", + "x86_64": "elf", + }.get(str(self.settings.arch), None) + return None + + @property + def _asm_target(self): + if self.settings.os in ("Android", "iOS", "watchOS", "tvOS"): + return { + "x86": "x86_asm" if self.settings.os == "Android" else None, + "x86_64": "x86_64_asm" if self.settings.os == "Android" else None, + "armv5el": "armv4_asm", + "armv5hf": "armv4_asm", + "armv6": "armv4_asm", + "armv7": "armv4_asm", + "armv7hf": "armv4_asm", + "armv7s": "armv4_asm", + "armv7k": "armv4_asm", + "armv8": "aarch64_asm", + "armv8_32": "aarch64_asm", + 
"armv8.3": "aarch64_asm", + "mips": "mips32_asm", + "mips64": "mips64_asm", + "sparc": "sparcv8_asm", + "sparcv9": "sparcv9_asm", + "ia64": "ia64_asm", + "ppc32be": "ppc32_asm", + "ppc32": "ppc32_asm", + "ppc64le": "ppc64_asm", + "ppc64": "ppc64_asm", + "s390": "s390x_asm", + "s390x": "s390x_asm" + }.get(str(self.settings.os), None) + + @property + def _targets(self): + is_cygwin = self.settings.get_safe("os.subsystem") == "cygwin" + return { + "Linux-x86-clang": "linux-x86-clang", + "Linux-x86_64-clang": "linux-x86_64-clang", + "Linux-x86-*": "linux-x86", + "Linux-x86_64-*": "linux-x86_64", + "Linux-armv4-*": "linux-armv4", + "Linux-armv4i-*": "linux-armv4", + "Linux-armv5el-*": "linux-armv4", + "Linux-armv5hf-*": "linux-armv4", + "Linux-armv6-*": "linux-armv4", + "Linux-armv7-*": "linux-armv4", + "Linux-armv7hf-*": "linux-armv4", + "Linux-armv7s-*": "linux-armv4", + "Linux-armv7k-*": "linux-armv4", + "Linux-armv8-*": "linux-aarch64", + "Linux-armv8.3-*": "linux-aarch64", + "Linux-armv8-32-*": "linux-arm64ilp32", + "Linux-mips-*": "linux-mips32", + "Linux-mips64-*": "linux-mips64", + "Linux-ppc32-*": "linux-ppc32", + "Linux-ppc32le-*": "linux-pcc32", + "Linux-ppc32be-*": "linux-ppc32", + "Linux-ppc64-*": "linux-ppc64", + "Linux-ppc64le-*": "linux-ppc64le", + "Linux-pcc64be-*": "linux-pcc64", + "Linux-s390x-*": "linux64-s390x", + "Linux-e2k-*": "linux-generic64", + "Linux-sparc-*": "linux-sparcv8", + "Linux-sparcv9-*": "linux64-sparcv9", + "Linux-*-*": "linux-generic32", + "Macos-x86-*": "darwin-i386-cc", + "Macos-x86_64-*": "darwin64-x86_64-cc", + "Macos-ppc32-*": "darwin-ppc-cc", + "Macos-ppc32be-*": "darwin-ppc-cc", + "Macos-ppc64-*": "darwin64-ppc-cc", + "Macos-ppc64be-*": "darwin64-ppc-cc", + "Macos-armv8-*": "darwin64-arm64-cc", + "Macos-*-*": "darwin-common", + "iOS-x86_64-*": "darwin64-x86_64-cc", + "iOS-*-*": "iphoneos-cross", + "watchOS-*-*": "iphoneos-cross", + "tvOS-*-*": "iphoneos-cross", + # Android targets are very broken, see https://github.com/openssl/openssl/issues/7398 + "Android-armv7-*": "linux-generic32", + "Android-armv7hf-*": "linux-generic32", + "Android-armv8-*": "linux-generic64", + "Android-x86-*": "linux-x86-clang", + "Android-x86_64-*": "linux-x86_64-clang", + "Android-mips-*": "linux-generic32", + "Android-mips64-*": "linux-generic64", + "Android-*-*": "linux-generic32", + "Windows-x86-gcc": "Cygwin-x86" if is_cygwin else "mingw", + "Windows-x86_64-gcc": "Cygwin-x86_64" if is_cygwin else "mingw64", + "Windows-*-gcc": "Cygwin-common" if is_cygwin else "mingw-common", + "Windows-ia64-Visual Studio": "VC-WIN64I", # Itanium + "Windows-x86-Visual Studio": "VC-WIN32", + "Windows-x86_64-Visual Studio": "VC-WIN64A", + "Windows-armv7-Visual Studio": "VC-WIN32-ARM", + "Windows-armv8-Visual Studio": "VC-WIN64-ARM", + "Windows-*-Visual Studio": "VC-noCE-common", + "Windows-ia64-clang": "VC-WIN64I", # Itanium + "Windows-x86-clang": "VC-WIN32", + "Windows-x86_64-clang": "VC-WIN64A", + "Windows-armv7-clang": "VC-WIN32-ARM", + "Windows-armv8-clang": "VC-WIN64-ARM", + "Windows-*-clang": "VC-noCE-common", + "WindowsStore-x86-*": "VC-WIN32-UWP", + "WindowsStore-x86_64-*": "VC-WIN64A-UWP", + "WindowsStore-armv7-*": "VC-WIN32-ARM-UWP", + "WindowsStore-armv8-*": "VC-WIN64-ARM-UWP", + "WindowsStore-*-*": "VC-WIN32-ONECORE", + "WindowsCE-*-*": "VC-CE", + "SunOS-x86-gcc": "solaris-x86-gcc", + "SunOS-x86_64-gcc": "solaris64-x86_64-gcc", + "SunOS-sparc-gcc": "solaris-sparcv8-gcc", + "SunOS-sparcv9-gcc": "solaris64-sparcv9-gcc", + "SunOS-x86-suncc": "solaris-x86-cc", + 
"SunOS-x86_64-suncc": "solaris64-x86_64-cc", + "SunOS-sparc-suncc": "solaris-sparcv8-cc", + "SunOS-sparcv9-suncc": "solaris64-sparcv9-cc", + "SunOS-*-*": "solaris-common", + "*BSD-x86-*": "BSD-x86", + "*BSD-x86_64-*": "BSD-x86_64", + "*BSD-ia64-*": "BSD-ia64", + "*BSD-sparc-*": "BSD-sparcv8", + "*BSD-sparcv9-*": "BSD-sparcv9", + "*BSD-armv8-*": "BSD-generic64", + "*BSD-mips64-*": "BSD-generic64", + "*BSD-ppc64-*": "BSD-generic64", + "*BSD-ppc64le-*": "BSD-generic64", + "*BSD-ppc64be-*": "BSD-generic64", + "AIX-ppc32-gcc": "aix-gcc", + "AIX-ppc64-gcc": "aix64-gcc", + "AIX-pcc32-*": "aix-cc", + "AIX-ppc64-*": "aix64-cc", + "AIX-*-*": "aix-common", + "*BSD-*-*": "BSD-generic32", + "Emscripten-*-*": "cc", + "Neutrino-*-*": "BASE_unix", + } + + @property + def _ancestor_target(self): + if "CONAN_OPENSSL_CONFIGURATION" in os.environ: + return os.environ["CONAN_OPENSSL_CONFIGURATION"] + compiler = "Visual Studio" if self.settings.compiler == "msvc" else self.settings.compiler + query = f"{self.settings.os}-{self.settings.arch}-{compiler}" + ancestor = next((self._targets[i] for i in self._targets if fnmatch.fnmatch(query, i)), None) + if not ancestor: + raise ConanInvalidConfiguration( + f"Unsupported configuration ({self.settings.os}/{self.settings.arch}/{self.settings.compiler}).\n" + f"Please open an issue at {self.url}.\n" + f"Alternatively, set the CONAN_OPENSSL_CONFIGURATION environment variable into your conan profile." + ) + return ancestor + + def _get_default_openssl_dir(self): + if self.settings.os == "Linux": + return "/etc/ssl" + return os.path.join(self.package_folder, "res") + + @property + def _configure_args(self): + openssldir = self.options.openssldir or self._get_default_openssl_dir() + openssldir = unix_path(self, openssldir) if self.win_bash else openssldir + args = [ + '"%s"' % (self._target), + "shared" if self.options.shared else "no-shared", + "--prefix=/", + "--libdir=lib", + "--openssldir=\"%s\"" % openssldir, + "no-unit-test", + "no-threads" if self.options.no_threads else "threads", + "PERL=%s" % self._perl, + "no-tests", + "--debug" if self.settings.build_type == "Debug" else "--release", + ] + + if self.settings.os == "Android": + args.append(" -D__ANDROID_API__=%s" % str(self.settings.os.api_level)) # see NOTES.ANDROID + if self.settings.os == "Emscripten": + args.append("-D__STDC_NO_ATOMICS__=1") + if self.settings.os == "Windows": + if self.options.enable_capieng: + args.append("enable-capieng") + if self.options.capieng_dialog: + args.append("-DOPENSSL_CAPIENG_DIALOG=1") + else: + args.append("-fPIC" if self.options.get_safe("fPIC", True) else "no-pic") + + args.append("no-fips" if self.options.get_safe("no_fips", True) else "enable-fips") + args.append("no-md2" if self.options.get_safe("no_md2", True) else "enable-md2") + + if self.settings.os == "Neutrino": + args.append("no-asm -lsocket -latomic") + + if not self.options.no_zlib: + zlib_info = self.dependencies["zlib"].cpp_info.aggregated_components() + include_path = zlib_info.includedirs[0] + if self.settings.os == "Windows": + lib_path = "%s/%s.lib" % (zlib_info.libdirs[0], zlib_info.libs[0]) + else: + # Just path, linux will find the right file + lib_path = zlib_info.libdirs[0] + if self._settings_build.os == "Windows": + # clang-cl doesn't like backslashes in #define CFLAGS (builldinf.h -> cversion.c) + include_path = include_path.replace("\\", "/") + lib_path = lib_path.replace("\\", "/") + + if self.dependencies["zlib"].options.shared: + args.append("zlib-dynamic") + else: + args.append("zlib") + + 
args.extend([ + '--with-zlib-include="%s"' % include_path, + '--with-zlib-lib="%s"' % lib_path + ]) + + for option_name in self.default_options.keys(): + if self.options.get_safe(option_name, False) and option_name not in ("shared", "fPIC", "openssldir", "capieng_dialog", "enable_capieng", "zlib", "no_fips", "no_md2"): + self.output.info(f"Activated option: {option_name}") + args.append(option_name.replace("_", "-")) + return args + + def generate(self): + tc = AutotoolsToolchain(self) + env = tc.environment() + env.define_path("PERL", self._perl) + if self.settings.compiler == "apple-clang": + xcrun = XCRun(self) + env.define_path("CROSS_SDK", os.path.basename(xcrun.sdk_path)) + env.define_path("CROSS_TOP", os.path.dirname(os.path.dirname(xcrun.sdk_path))) + + self._create_targets(tc.cflags, tc.cxxflags, tc.defines, tc.ldflags) + tc.generate(env) + + def _create_targets(self, cflags, cxxflags, defines, ldflags): + config_template = textwrap.dedent("""\ + {targets} = ( + "{target}" => {{ + inherit_from => {ancestor}, + cflags => add("{cflags}"), + cxxflags => add("{cxxflags}"), + {defines} + lflags => add("{lflags}"), + {shared_target} + {shared_cflag} + {shared_extension} + {perlasm_scheme} + }}, + ); + """) + + perlasm_scheme = "" + if self._perlasm_scheme: + perlasm_scheme = 'perlasm_scheme => "%s",' % self._perlasm_scheme + + defines = " ".join(defines) + defines = 'defines => add("%s"),' % defines if defines else "" + targets = "my %targets" + includes = "" + if self.settings.os == "Windows": + includes = includes.replace("\\", "/") # OpenSSL doesn't like backslashes + + if self._asm_target: + ancestor = '[ "%s", asm("%s") ]' % (self._ancestor_target, self._asm_target) + else: + ancestor = '[ "%s" ]' % self._ancestor_target + shared_cflag = "" + shared_extension = "" + shared_target = "" + if self.settings.os == "Neutrino": + if self.options.shared: + shared_extension = 'shared_extension => ".so.\$(SHLIB_VERSION_NUMBER)",' + shared_target = 'shared_target => "gnu-shared",' + if self.options.get_safe("fPIC", True): + shared_cflag = 'shared_cflag => "-fPIC",' + + if self.settings.os in ["iOS", "tvOS", "watchOS"] and self.conf.get("tools.apple:enable_bitcode", check_type=bool): + cflags.append("-fembed-bitcode") + cxxflags.append("-fembed-bitcode") + + config = config_template.format( + targets=targets, + target=self._target, + ancestor=ancestor, + cflags=" ".join(cflags), + cxxflags=" ".join(cxxflags), + defines=defines, + perlasm_scheme=perlasm_scheme, + shared_target=shared_target, + shared_extension=shared_extension, + shared_cflag=shared_cflag, + lflags=" ".join(ldflags) + ) + self.output.info("using target: %s -> %s" % (self._target, self._ancestor_target)) + self.output.info(config) + + save(self, os.path.join(self.source_folder, "Configurations", "20-conan.conf"), config) + + def _run_make(self, targets=None, parallel=True, install=False): + command = [self._make_program] + if install: + command.append(f"DESTDIR={self.package_folder}") + if targets: + command.extend(targets) + if not self._use_nmake: + command.append(("-j%s" % build_jobs(self)) if parallel else "-j1") + self.run(" ".join(command), env="conanbuild") + + @property + def _perl(self): + if self._use_nmake: + return self.dependencies.build["strawberryperl"].conf_info.get("user.strawberryperl:perl", check_type=str) + return "perl" + + def _make(self): + with chdir(self, self.source_folder): + # workaround for clang-cl not producing .pdb files + if self._is_clangcl: + save(self, "ossl_static.pdb", "") + args = " 
".join(self._configure_args) + + if self._use_nmake: + self._replace_runtime_in_file(os.path.join("Configurations", "10-main.conf")) + + self.run("{perl} ./Configure {args}".format(perl=self._perl, args=args), env="conanbuild") + if self._use_nmake: + # When `--prefix=/`, the scripts derive `\` without escaping, which + # causes issues on Windows + replace_in_file(self, "Makefile", "INSTALLTOP_dir=\\", "INSTALLTOP_dir=\\\\") + self._run_make() + + def _make_install(self): + with chdir(self, self.source_folder): + self._run_make(targets=["install_sw"], parallel=False, install=True) + + def build(self): + self._make() + self.run(f"{self._perl} {self.source_folder}/configdata.pm --dump") + + @property + def _make_program(self): + return "nmake" if self._use_nmake else "make" + + def _replace_runtime_in_file(self, filename): + runtime = msvc_runtime_flag(self) + for e in ["MDd", "MTd", "MD", "MT"]: + replace_in_file(self, filename, f"/{e} ", f"/{runtime} ", strict=False) + replace_in_file(self, filename, f"/{e}\"", f"/{runtime}\"", strict=False) + + def package(self): + copy(self, "*LICENSE*", src=self.source_folder, dst=os.path.join(self.package_folder, "licenses")) + self._make_install() + if is_apple_os(self): + fix_apple_shared_install_name(self) + + for root, _, files in os.walk(self.package_folder): + for filename in files: + if fnmatch.fnmatch(filename, "*.pdb"): + os.unlink(os.path.join(self.package_folder, root, filename)) + if self._use_nmake: + if self.settings.build_type == "Debug": + with chdir(self, os.path.join(self.package_folder, "lib")): + rename(self, "libssl.lib", "libssld.lib") + rename(self, "libcrypto.lib", "libcryptod.lib") + + if self.options.shared: + libdir = os.path.join(self.package_folder, "lib") + for file in os.listdir(libdir): + if self._is_mingw and file.endswith(".dll.a"): + continue + if file.endswith(".a"): + os.unlink(os.path.join(libdir, file)) + + if not self.options.no_fips: + provdir = os.path.join(self.source_folder, "providers") + modules_dir = os.path.join(self.package_folder, "lib", "ossl-modules") + if self.settings.os == "Macos": + copy(self, "fips.dylib", src=provdir, dst=modules_dir) + elif self.settings.os == "Windows": + copy(self, "fips.dll", src=provdir, dst=modules_dir) + else: + copy(self, "fips.so", src=provdir, dst=modules_dir) + + rmdir(self, os.path.join(self.package_folder, "lib", "pkgconfig")) + + self._create_cmake_module_variables( + os.path.join(self.package_folder, self._module_file_rel_path) + ) + + def _create_cmake_module_variables(self, module_file): + content = textwrap.dedent("""\ + set(OPENSSL_FOUND TRUE) + if(DEFINED OpenSSL_INCLUDE_DIR) + set(OPENSSL_INCLUDE_DIR ${OpenSSL_INCLUDE_DIR}) + endif() + if(DEFINED OpenSSL_Crypto_LIBS) + set(OPENSSL_CRYPTO_LIBRARY ${OpenSSL_Crypto_LIBS}) + set(OPENSSL_CRYPTO_LIBRARIES ${OpenSSL_Crypto_LIBS} + ${OpenSSL_Crypto_DEPENDENCIES} + ${OpenSSL_Crypto_FRAMEWORKS} + ${OpenSSL_Crypto_SYSTEM_LIBS}) + elseif(DEFINED openssl_OpenSSL_Crypto_LIBS_%(config)s) + set(OPENSSL_CRYPTO_LIBRARY ${openssl_OpenSSL_Crypto_LIBS_%(config)s}) + set(OPENSSL_CRYPTO_LIBRARIES ${openssl_OpenSSL_Crypto_LIBS_%(config)s} + ${openssl_OpenSSL_Crypto_DEPENDENCIES_%(config)s} + ${openssl_OpenSSL_Crypto_FRAMEWORKS_%(config)s} + ${openssl_OpenSSL_Crypto_SYSTEM_LIBS_%(config)s}) + endif() + if(DEFINED OpenSSL_SSL_LIBS) + set(OPENSSL_SSL_LIBRARY ${OpenSSL_SSL_LIBS}) + set(OPENSSL_SSL_LIBRARIES ${OpenSSL_SSL_LIBS} + ${OpenSSL_SSL_DEPENDENCIES} + ${OpenSSL_SSL_FRAMEWORKS} + ${OpenSSL_SSL_SYSTEM_LIBS}) + elseif(DEFINED 
openssl_OpenSSL_SSL_LIBS_%(config)s) + set(OPENSSL_SSL_LIBRARY ${openssl_OpenSSL_SSL_LIBS_%(config)s}) + set(OPENSSL_SSL_LIBRARIES ${openssl_OpenSSL_SSL_LIBS_%(config)s} + ${openssl_OpenSSL_SSL_DEPENDENCIES_%(config)s} + ${openssl_OpenSSL_SSL_FRAMEWORKS_%(config)s} + ${openssl_OpenSSL_SSL_SYSTEM_LIBS_%(config)s}) + endif() + if(DEFINED OpenSSL_LIBRARIES) + set(OPENSSL_LIBRARIES ${OpenSSL_LIBRARIES}) + endif() + if(DEFINED OpenSSL_VERSION) + set(OPENSSL_VERSION ${OpenSSL_VERSION}) + endif() + """% {"config":str(self.settings.build_type).upper()}) + save(self, module_file, content) + + @property + def _module_subfolder(self): + return os.path.join("lib", "cmake") + + @property + def _module_file_rel_path(self): + return os.path.join(self._module_subfolder, + "conan-official-{}-variables.cmake".format(self.name)) + + def package_info(self): + self.cpp_info.set_property("cmake_file_name", "OpenSSL") + self.cpp_info.set_property("cmake_find_mode", "both") + self.cpp_info.set_property("pkg_config_name", "openssl") + self.cpp_info.set_property("cmake_build_modules", [self._module_file_rel_path]) + self.cpp_info.names["cmake_find_package"] = "OpenSSL" + self.cpp_info.names["cmake_find_package_multi"] = "OpenSSL" + self.cpp_info.components["ssl"].builddirs.append(self._module_subfolder) + self.cpp_info.components["ssl"].build_modules["cmake_find_package"] = [self._module_file_rel_path] + self.cpp_info.components["ssl"].set_property("cmake_build_modules", [self._module_file_rel_path]) + self.cpp_info.components["crypto"].builddirs.append(self._module_subfolder) + self.cpp_info.components["crypto"].build_modules["cmake_find_package"] = [self._module_file_rel_path] + self.cpp_info.components["crypto"].set_property("cmake_build_modules", [self._module_file_rel_path]) + + if self._use_nmake: + libsuffix = "d" if self.settings.build_type == "Debug" else "" + self.cpp_info.components["ssl"].libs = ["libssl" + libsuffix] + self.cpp_info.components["crypto"].libs = ["libcrypto" + libsuffix] + else: + self.cpp_info.components["ssl"].libs = ["ssl"] + self.cpp_info.components["crypto"].libs = ["crypto"] + + self.cpp_info.components["ssl"].requires = ["crypto"] + + if not self.options.no_zlib: + self.cpp_info.components["crypto"].requires.append("zlib::zlib") + + if self.settings.os == "Windows": + self.cpp_info.components["crypto"].system_libs.extend(["crypt32", "ws2_32", "advapi32", "user32", "bcrypt"]) + elif self.settings.os == "Linux": + self.cpp_info.components["crypto"].system_libs.extend(["dl", "rt"]) + self.cpp_info.components["ssl"].system_libs.append("dl") + if not self.options.no_threads: + self.cpp_info.components["crypto"].system_libs.append("pthread") + self.cpp_info.components["ssl"].system_libs.append("pthread") + elif self.settings.os == "Neutrino": + self.cpp_info.components["crypto"].system_libs.append("atomic") + self.cpp_info.components["ssl"].system_libs.append("atomic") + + self.cpp_info.components["crypto"].set_property("cmake_target_name", "OpenSSL::Crypto") + self.cpp_info.components["crypto"].set_property("pkg_config_name", "libcrypto") + self.cpp_info.components["ssl"].set_property("cmake_target_name", "OpenSSL::SSL") + self.cpp_info.components["ssl"].set_property("pkg_config_name", "libssl") + self.cpp_info.components["crypto"].names["cmake_find_package"] = "Crypto" + self.cpp_info.components["crypto"].names["cmake_find_package_multi"] = "Crypto" + self.cpp_info.components["ssl"].names["cmake_find_package"] = "SSL" + self.cpp_info.components["ssl"].names["cmake_find_package_multi"] 
= "SSL" + + openssl_modules_dir = os.path.join(self.package_folder, "lib", "ossl-modules") + self.runenv_info.define_path("OPENSSL_MODULES", openssl_modules_dir) + + # For legacy 1.x downstream consumers, remove once recipe is 2.0 only: + self.env_info.OPENSSL_MODULES = openssl_modules_dir + diff --git a/pkg/fanal/analyzer/language/c/conan/testdata/cacheDir_v2/p/zlib41bd3946e7341/e/conanfile.py b/pkg/fanal/analyzer/language/c/conan/testdata/cacheDir_v2/p/zlib41bd3946e7341/e/conanfile.py new file mode 100644 index 000000000000..ead39ff73661 --- /dev/null +++ b/pkg/fanal/analyzer/language/c/conan/testdata/cacheDir_v2/p/zlib41bd3946e7341/e/conanfile.py @@ -0,0 +1,110 @@ +from conan import ConanFile +from conan.tools.cmake import CMake, CMakeToolchain, cmake_layout +from conan.tools.files import apply_conandata_patches, export_conandata_patches, get, load, replace_in_file, save +from conan.tools.scm import Version +import os + +required_conan_version = ">=1.53.0" + + +class ZlibConan(ConanFile): + license = "Zlib" + name = "zlib" + package_type = "library" + url = "https://github.com/conan-io/conan-center-index" + homepage = "https://zlib.net" + description = ("A Massively Spiffy Yet Delicately Unobtrusive Compression Library " + "(Also Free, Not to Mention Unencumbered by Patents)") + topics = ("zlib", "compression") + + settings = "os", "arch", "compiler", "build_type" + options = { + "shared": [True, False], + "fPIC": [True, False], + } + default_options = { + "shared": False, + "fPIC": True, + } + + @property + def _is_mingw(self): + return self.settings.os == "Windows" and self.settings.compiler == "gcc" + + def export_sources(self): + export_conandata_patches(self) + + def config_options(self): + if self.settings.os == "Windows": + del self.options.fPIC + + def configure(self): + if self.options.shared: + self.options.rm_safe("fPIC") + self.settings.rm_safe("compiler.libcxx") + self.settings.rm_safe("compiler.cppstd") + + def layout(self): + cmake_layout(self, src_folder="src") + + def source(self): + get(self, **self.conan_data["sources"][self.version], + destination=self.source_folder, strip_root=True) + + def generate(self): + tc = CMakeToolchain(self) + tc.variables["SKIP_INSTALL_ALL"] = False + tc.variables["SKIP_INSTALL_LIBRARIES"] = False + tc.variables["SKIP_INSTALL_HEADERS"] = False + tc.variables["SKIP_INSTALL_FILES"] = True + # Correct for misuse of "${CMAKE_INSTALL_PREFIX}/" in CMakeLists.txt + tc.variables["INSTALL_LIB_DIR"] = "lib" + tc.variables["INSTALL_INC_DIR"] = "include" + tc.variables["ZLIB_BUILD_EXAMPLES"] = False + tc.generate() + + def _patch_sources(self): + apply_conandata_patches(self) + + is_apple_clang12 = self.settings.compiler == "apple-clang" and Version(self.settings.compiler.version) >= "12.0" + if not is_apple_clang12: + for filename in ['zconf.h', 'zconf.h.cmakein', 'zconf.h.in']: + filepath = os.path.join(self.source_folder, filename) + replace_in_file(self, filepath, + '#ifdef HAVE_UNISTD_H ' + '/* may be set to #if 1 by ./configure */', + '#if defined(HAVE_UNISTD_H) && (1-HAVE_UNISTD_H-1 != 0)') + replace_in_file(self, filepath, + '#ifdef HAVE_STDARG_H ' + '/* may be set to #if 1 by ./configure */', + '#if defined(HAVE_STDARG_H) && (1-HAVE_STDARG_H-1 != 0)') + + def build(self): + self._patch_sources() + cmake = CMake(self) + cmake.configure() + cmake.build() + + def _extract_license(self): + tmp = load(self, os.path.join(self.source_folder, "zlib.h")) + license_contents = tmp[2:tmp.find("*/", 1)] + return license_contents + + def 
package(self): + save(self, os.path.join(self.package_folder, "licenses", "LICENSE"), self._extract_license()) + cmake = CMake(self) + cmake.install() + + def package_info(self): + self.cpp_info.set_property("cmake_find_mode", "both") + self.cpp_info.set_property("cmake_file_name", "ZLIB") + self.cpp_info.set_property("cmake_target_name", "ZLIB::ZLIB") + self.cpp_info.set_property("pkg_config_name", "zlib") + if self.settings.os == "Windows" and not self._is_mingw: + libname = "zdll" if self.options.shared else "zlib" + else: + libname = "z" + self.cpp_info.libs = [libname] + + self.cpp_info.names["cmake_find_package"] = "ZLIB" + self.cpp_info.names["cmake_find_package_multi"] = "ZLIB" diff --git a/pkg/fanal/analyzer/language/c/conan/testdata/happy_v2/release.lock b/pkg/fanal/analyzer/language/c/conan/testdata/happy_v2/release.lock new file mode 100644 index 000000000000..c05aa48b0e3e --- /dev/null +++ b/pkg/fanal/analyzer/language/c/conan/testdata/happy_v2/release.lock @@ -0,0 +1,10 @@ +{ + "version": "0.5", + "requires": [ + "zlib/1.3.1#f52e03ae3d251dec704634230cd806a2%1708593606.497", + "openssl/3.2.2#899583c694f9deccec74dbe0bbc65a15%1717540517.968" + ], + "build_requires": [], + "python_requires": [], + "config_requires": [] +} \ No newline at end of file diff --git a/pkg/fanal/analyzer/language/dart/pub/pubspec.go b/pkg/fanal/analyzer/language/dart/pub/pubspec.go index 9f7fc02b26d7..fc1ebc9bc586 100644 --- a/pkg/fanal/analyzer/language/dart/pub/pubspec.go +++ b/pkg/fanal/analyzer/language/dart/pub/pubspec.go @@ -10,7 +10,6 @@ import ( "sort" "github.com/samber/lo" - "golang.org/x/exp/maps" "golang.org/x/xerrors" "gopkg.in/yaml.v3" @@ -166,7 +165,7 @@ func parsePubSpecYaml(r io.Reader) (string, []string, error) { // pubspec.yaml uses version ranges // save only dependencies names - dependsOn := maps.Keys(spec.Dependencies) + dependsOn := lo.Keys(spec.Dependencies) return dependency.ID(types.Pub, spec.Name, spec.Version), dependsOn, nil } diff --git a/pkg/fanal/analyzer/language/dotnet/nuget/nuget.go b/pkg/fanal/analyzer/language/dotnet/nuget/nuget.go index 2e24610719e4..fa0cc486def1 100644 --- a/pkg/fanal/analyzer/language/dotnet/nuget/nuget.go +++ b/pkg/fanal/analyzer/language/dotnet/nuget/nuget.go @@ -7,9 +7,9 @@ import ( "io/fs" "os" "path/filepath" + "slices" "sort" - "golang.org/x/exp/slices" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/dependency/parser/nuget/config" diff --git a/pkg/fanal/analyzer/language/golang/mod/mod.go b/pkg/fanal/analyzer/language/golang/mod/mod.go index f97d9bed5add..96d40ba1c954 100644 --- a/pkg/fanal/analyzer/language/golang/mod/mod.go +++ b/pkg/fanal/analyzer/language/golang/mod/mod.go @@ -10,11 +10,10 @@ import ( "os" "path/filepath" "regexp" + "slices" "unicode" "github.com/samber/lo" - "golang.org/x/exp/maps" - "golang.org/x/exp/slices" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/dependency/parser/golang/mod" @@ -262,7 +261,7 @@ func mergeGoSum(gomod, gosum *types.Application) { uniq[lib.Name] = lib } - gomod.Packages = maps.Values(uniq) + gomod.Packages = lo.Values(uniq) } func findLicense(dir string, classifierConfidenceLevel float64) ([]string, error) { diff --git a/pkg/fanal/analyzer/language/java/sbt/lockfile.go b/pkg/fanal/analyzer/language/java/sbt/lockfile.go new file mode 100644 index 000000000000..4d1d17cb5d34 --- /dev/null +++ b/pkg/fanal/analyzer/language/java/sbt/lockfile.go @@ -0,0 +1,47 @@ +package sbt + +import ( + "context" + "os" + "path/filepath" + + "golang.org/x/xerrors" 
+ + "github.com/aquasecurity/trivy/pkg/dependency/parser/sbt/lockfile" + "github.com/aquasecurity/trivy/pkg/fanal/analyzer" + "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language" + "github.com/aquasecurity/trivy/pkg/fanal/types" +) + +func init() { + analyzer.RegisterAnalyzer(&sbtDependencyLockAnalyzer{}) +} + +const version = 1 + +// sbtDependencyLockAnalyzer analyzes '*.sbt.lock' +type sbtDependencyLockAnalyzer struct{} + +func (a sbtDependencyLockAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) { + parser := lockfile.NewParser() + + res, err := language.Analyze(types.Sbt, input.FilePath, input.Content, parser) + + if err != nil { + return nil, xerrors.Errorf("%s parse error: %w", input.FilePath, err) + } + + return res, nil +} + +func (a sbtDependencyLockAnalyzer) Required(filePath string, _ os.FileInfo) bool { + return types.SbtLock == filepath.Base(filePath) +} + +func (a sbtDependencyLockAnalyzer) Type() analyzer.Type { + return analyzer.TypeSbtLock +} + +func (a sbtDependencyLockAnalyzer) Version() int { + return version +} diff --git a/pkg/fanal/analyzer/language/java/sbt/lockfile_test.go b/pkg/fanal/analyzer/language/java/sbt/lockfile_test.go new file mode 100644 index 000000000000..0469d9156a19 --- /dev/null +++ b/pkg/fanal/analyzer/language/java/sbt/lockfile_test.go @@ -0,0 +1,92 @@ +package sbt + +import ( + "context" + "os" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy/pkg/fanal/analyzer" + "github.com/aquasecurity/trivy/pkg/fanal/types" +) + +func Test_sbtDependencyLockAnalyzer(t *testing.T) { + tests := []struct { + name string + inputFile string + want *analyzer.AnalysisResult + }{ + { + name: "v1 lockfile", + inputFile: "testdata/v1/build.sbt.lock", + want: &analyzer.AnalysisResult{ + Applications: []types.Application{ + { + Type: types.Sbt, + FilePath: "testdata/v1/build.sbt.lock", + Packages: types.Packages{ + { + ID: "org.apache.commons:commons-lang3:3.9", + Name: "org.apache.commons:commons-lang3", + Version: "3.9", + Locations: []types.Location{ + { + StartLine: 10, + EndLine: 25, + }, + }, + }, + { + ID: "org.scala-lang:scala-library:2.12.10", + Name: "org.scala-lang:scala-library", + Version: "2.12.10", + Locations: []types.Location{ + { + StartLine: 26, + EndLine: 41, + }, + }, + }, + { + ID: "org.typelevel:cats-core_2.12:2.9.0", + Name: "org.typelevel:cats-core_2.12", + Version: "2.9.0", + Locations: []types.Location{ + { + StartLine: 42, + EndLine: 57, + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "empty lockfile", + inputFile: "testdata/empty/build.sbt.lock", + want: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + f, err := os.Open(tt.inputFile) + require.NoError(t, err) + + a := sbtDependencyLockAnalyzer{} + ctx := context.Background() + + got, err := a.Analyze(ctx, analyzer.AnalysisInput{ + FilePath: tt.inputFile, + Content: f, + }) + + require.NoError(t, err) + assert.Equal(t, tt.want, got) + }) + } +} diff --git a/pkg/fanal/analyzer/language/java/sbt/testdata/empty/build.sbt.lock b/pkg/fanal/analyzer/language/java/sbt/testdata/empty/build.sbt.lock new file mode 100644 index 000000000000..6125547882da --- /dev/null +++ b/pkg/fanal/analyzer/language/java/sbt/testdata/empty/build.sbt.lock @@ -0,0 +1,10 @@ +{ + "lockVersion": 1, + "timestamp": "2024-06-05T13:41:10.992Z", + "configurations": [ + "compile", + "runtime", 
+ "test" + ], + "dependencies": [] +} \ No newline at end of file diff --git a/pkg/fanal/analyzer/language/java/sbt/testdata/v1/build.sbt.lock b/pkg/fanal/analyzer/language/java/sbt/testdata/v1/build.sbt.lock new file mode 100644 index 000000000000..26b5ef401aeb --- /dev/null +++ b/pkg/fanal/analyzer/language/java/sbt/testdata/v1/build.sbt.lock @@ -0,0 +1,59 @@ +{ + "lockVersion": 1, + "timestamp": "2024-06-05T13:41:10.992Z", + "configurations": [ + "compile", + "runtime", + "test" + ], + "dependencies": [ + { + "org": "org.apache.commons", + "name": "commons-lang3", + "version": "3.9", + "artifacts": [ + { + "name": "commons-lang3.jar", + "hash": "sha1:0122c7cee69b53ed4a7681c03d4ee4c0e2765da5" + } + ], + "configurations": [ + "test", + "compile", + "runtime" + ] + }, + { + "org": "org.scala-lang", + "name": "scala-library", + "version": "2.12.10", + "artifacts": [ + { + "name": "scala-library.jar", + "hash": "sha1:3509860bc2e5b3da001ed45aca94ffbe5694dbda" + } + ], + "configurations": [ + "test", + "compile", + "runtime" + ] + }, + { + "org" : "org.typelevel", + "name" : "cats-core_2.12", + "version" : "2.9.0", + "artifacts" : [ + { + "name" : "cats-core_2.12.jar", + "hash" : "sha1:844f21541d1809008586fbc1172dc02c96476639" + } + ], + "configurations" : [ + "compile", + "runtime", + "test" + ] + } + ] +} \ No newline at end of file diff --git a/pkg/fanal/analyzer/language/julia/pkg/pkg.go b/pkg/fanal/analyzer/language/julia/pkg/pkg.go index c2b9fda035e3..4e69cb43c326 100644 --- a/pkg/fanal/analyzer/language/julia/pkg/pkg.go +++ b/pkg/fanal/analyzer/language/julia/pkg/pkg.go @@ -7,12 +7,11 @@ import ( "io/fs" "os" "path/filepath" + "slices" "sort" "github.com/BurntSushi/toml" "github.com/samber/lo" - "golang.org/x/exp/maps" - "golang.org/x/exp/slices" "golang.org/x/xerrors" julia "github.com/aquasecurity/trivy/pkg/dependency/parser/julia/manifest" @@ -167,7 +166,7 @@ func walkDependencies(directDeps map[string]string, allPackages types.Packages, walkIndirectDependencies(pkg, pkgsByID, visited) } - return maps.Values(visited) + return lo.Values(visited) } // Marks all indirect dependencies as indirect. Starts from `rootPkg`. Visited deps are added to `visited`. diff --git a/pkg/fanal/analyzer/language/nodejs/yarn/yarn.go b/pkg/fanal/analyzer/language/nodejs/yarn/yarn.go index 086f5fe7f615..70d0d1ee8951 100644 --- a/pkg/fanal/analyzer/language/nodejs/yarn/yarn.go +++ b/pkg/fanal/analyzer/language/nodejs/yarn/yarn.go @@ -15,7 +15,6 @@ import ( "github.com/hashicorp/go-multierror" "github.com/samber/lo" - "golang.org/x/exp/maps" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/dependency/parser/nodejs/packagejson" @@ -186,7 +185,7 @@ func (a yarnAnalyzer) analyzeDependencies(fsys fs.FS, dir string, app *types.App // If the same package is found in both prod and dev dependencies, use the one in prod. 
pkgs = lo.Assign(devPkgs, pkgs) - pkgSlice := maps.Values(pkgs) + pkgSlice := lo.Values(pkgs) sort.Sort(types.Packages(pkgSlice)) // Save packages diff --git a/pkg/fanal/analyzer/language/php/composer/composer.go b/pkg/fanal/analyzer/language/php/composer/composer.go index 5e726168a5ae..15d2a2e8ec27 100644 --- a/pkg/fanal/analyzer/language/php/composer/composer.go +++ b/pkg/fanal/analyzer/language/php/composer/composer.go @@ -8,10 +8,10 @@ import ( "io/fs" "os" "path/filepath" + "slices" "sort" "strings" - "golang.org/x/exp/slices" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/dependency/parser/php/composer" @@ -26,7 +26,7 @@ func init() { analyzer.RegisterPostAnalyzer(analyzer.TypeComposer, newComposerAnalyzer) } -const version = 1 +const composerAnalyzerVersion = 1 var requiredFiles = []string{ types.ComposerLock, @@ -96,7 +96,7 @@ func (a composerAnalyzer) Type() analyzer.Type { } func (a composerAnalyzer) Version() int { - return version + return composerAnalyzerVersion } func (a composerAnalyzer) parseComposerLock(path string, r io.Reader) (*types.Application, error) { diff --git a/pkg/fanal/analyzer/language/php/composer/composer_test.go b/pkg/fanal/analyzer/language/php/composer/composer_test.go index ea963d94dcf2..67ed0a0daa6e 100644 --- a/pkg/fanal/analyzer/language/php/composer/composer_test.go +++ b/pkg/fanal/analyzer/language/php/composer/composer_test.go @@ -20,7 +20,7 @@ func Test_composerAnalyzer_PostAnalyze(t *testing.T) { }{ { name: "happy path", - dir: "testdata/happy", + dir: "testdata/composer/happy", want: &analyzer.AnalysisResult{ Applications: []types.Application{ { @@ -63,7 +63,7 @@ func Test_composerAnalyzer_PostAnalyze(t *testing.T) { }, { name: "no composer.json", - dir: "testdata/no-composer-json", + dir: "testdata/composer/no-composer-json", want: &analyzer.AnalysisResult{ Applications: []types.Application{ { @@ -106,7 +106,7 @@ func Test_composerAnalyzer_PostAnalyze(t *testing.T) { }, { name: "wrong composer.json", - dir: "testdata/wrong-composer-json", + dir: "testdata/composer/wrong-composer-json", want: &analyzer.AnalysisResult{ Applications: []types.Application{ { @@ -149,7 +149,7 @@ func Test_composerAnalyzer_PostAnalyze(t *testing.T) { }, { name: "broken composer.lock", - dir: "testdata/sad", + dir: "testdata/composer/sad", want: &analyzer.AnalysisResult{}, }, } diff --git a/pkg/fanal/analyzer/language/php/composer/testdata/composer-vendor/happy/installed.json b/pkg/fanal/analyzer/language/php/composer/testdata/composer-vendor/happy/installed.json new file mode 100644 index 000000000000..e44e60d9050a --- /dev/null +++ b/pkg/fanal/analyzer/language/php/composer/testdata/composer-vendor/happy/installed.json @@ -0,0 +1,131 @@ +{ + "packages": [ + { + "name": "pear/log", + "version": "1.13.3", + "version_normalized": "1.13.3.0", + "source": { + "type": "git", + "url": "https://github.com/pear/Log.git", + "reference": "21af0be11669194d72d88b5ee9d5f176dc75d9a3" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/pear/Log/zipball/21af0be11669194d72d88b5ee9d5f176dc75d9a3", + "reference": "21af0be11669194d72d88b5ee9d5f176dc75d9a3", + "shasum": "" + }, + "require": { + "pear/pear_exception": "1.0.1 || 1.0.2", + "php": ">5.2" + }, + "require-dev": { + "phpunit/phpunit": "*" + }, + "suggest": { + "pear/db": "Install optionally via your project's composer.json" + }, + "time": "2021-05-04T23:51:30+00:00", + "type": "library", + "installation-source": "dist", + "autoload": { + "psr-0": { + "Log": "./" + }, + 
"exclude-from-classmap": [ + "/examples/" + ] + }, + "notification-url": "https://packagist.org/downloads/", + "include-path": [ + "" + ], + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Jon Parise", + "email": "jon@php.net", + "homepage": "http://www.indelible.org", + "role": "Developer" + } + ], + "description": "PEAR Logging Framework", + "homepage": "http://pear.github.io/Log/", + "keywords": [ + "log", + "logging" + ], + "support": { + "issues": "https://github.com/pear/Log/issues", + "source": "https://github.com/pear/Log" + }, + "install-path": "../pear/log" + }, + { + "name": "pear/pear_exception", + "version": "v1.0.2", + "version_normalized": "1.0.2.0", + "source": { + "type": "git", + "url": "https://github.com/pear/PEAR_Exception.git", + "reference": "b14fbe2ddb0b9f94f5b24cf08783d599f776fff0" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/pear/PEAR_Exception/zipball/b14fbe2ddb0b9f94f5b24cf08783d599f776fff0", + "reference": "b14fbe2ddb0b9f94f5b24cf08783d599f776fff0", + "shasum": "" + }, + "require": { + "php": ">=5.2.0" + }, + "require-dev": { + "phpunit/phpunit": "<9" + }, + "time": "2021-03-21T15:43:46+00:00", + "type": "class", + "extra": { + "branch-alias": { + "dev-master": "1.0.x-dev" + } + }, + "installation-source": "dist", + "autoload": { + "classmap": [ + "PEAR/" + ] + }, + "notification-url": "https://packagist.org/downloads/", + "include-path": [ + "." + ], + "license": [ + "BSD-2-Clause" + ], + "authors": [ + { + "name": "Helgi Thormar", + "email": "dufuz@php.net" + }, + { + "name": "Greg Beaver", + "email": "cellog@php.net" + } + ], + "description": "The PEAR Exception base class.", + "homepage": "https://github.com/pear/PEAR_Exception", + "keywords": [ + "exception" + ], + "support": { + "issues": "http://pear.php.net/bugs/search.php?cmd=display&package_name[]=PEAR_Exception", + "source": "https://github.com/pear/PEAR_Exception" + }, + "install-path": "../pear/pear_exception" + } + ], + "dev": true, + "dev-package-names": [] +} diff --git a/pkg/fanal/analyzer/language/php/composer/testdata/sad/composer.lock b/pkg/fanal/analyzer/language/php/composer/testdata/composer-vendor/sad/installed.json similarity index 100% rename from pkg/fanal/analyzer/language/php/composer/testdata/sad/composer.lock rename to pkg/fanal/analyzer/language/php/composer/testdata/composer-vendor/sad/installed.json diff --git a/pkg/fanal/analyzer/language/php/composer/testdata/happy/composer.json b/pkg/fanal/analyzer/language/php/composer/testdata/composer/happy/composer.json similarity index 100% rename from pkg/fanal/analyzer/language/php/composer/testdata/happy/composer.json rename to pkg/fanal/analyzer/language/php/composer/testdata/composer/happy/composer.json diff --git a/pkg/fanal/analyzer/language/php/composer/testdata/happy/composer.lock b/pkg/fanal/analyzer/language/php/composer/testdata/composer/happy/composer.lock similarity index 100% rename from pkg/fanal/analyzer/language/php/composer/testdata/happy/composer.lock rename to pkg/fanal/analyzer/language/php/composer/testdata/composer/happy/composer.lock diff --git a/pkg/fanal/analyzer/language/php/composer/testdata/no-composer-json/composer.lock b/pkg/fanal/analyzer/language/php/composer/testdata/composer/no-composer-json/composer.lock similarity index 100% rename from pkg/fanal/analyzer/language/php/composer/testdata/no-composer-json/composer.lock rename to pkg/fanal/analyzer/language/php/composer/testdata/composer/no-composer-json/composer.lock diff --git 
a/pkg/fanal/analyzer/language/php/composer/testdata/wrong-composer-json/composer.json b/pkg/fanal/analyzer/language/php/composer/testdata/composer/sad/composer.lock similarity index 100% rename from pkg/fanal/analyzer/language/php/composer/testdata/wrong-composer-json/composer.json rename to pkg/fanal/analyzer/language/php/composer/testdata/composer/sad/composer.lock diff --git a/pkg/fanal/analyzer/language/php/composer/testdata/composer/wrong-composer-json/composer.json b/pkg/fanal/analyzer/language/php/composer/testdata/composer/wrong-composer-json/composer.json new file mode 100644 index 000000000000..81750b96f9d8 --- /dev/null +++ b/pkg/fanal/analyzer/language/php/composer/testdata/composer/wrong-composer-json/composer.json @@ -0,0 +1 @@ +{ \ No newline at end of file diff --git a/pkg/fanal/analyzer/language/php/composer/testdata/wrong-composer-json/composer.lock b/pkg/fanal/analyzer/language/php/composer/testdata/composer/wrong-composer-json/composer.lock similarity index 100% rename from pkg/fanal/analyzer/language/php/composer/testdata/wrong-composer-json/composer.lock rename to pkg/fanal/analyzer/language/php/composer/testdata/composer/wrong-composer-json/composer.lock diff --git a/pkg/fanal/analyzer/language/php/composer/vendor.go b/pkg/fanal/analyzer/language/php/composer/vendor.go new file mode 100644 index 000000000000..423de2b7a352 --- /dev/null +++ b/pkg/fanal/analyzer/language/php/composer/vendor.go @@ -0,0 +1,39 @@ +package composer + +import ( + "context" + "os" + "path/filepath" + + "github.com/aquasecurity/trivy/pkg/dependency/parser/php/composer" + "github.com/aquasecurity/trivy/pkg/fanal/analyzer" + "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language" + "github.com/aquasecurity/trivy/pkg/fanal/types" +) + +func init() { + analyzer.RegisterAnalyzer(&composerVendorAnalyzer{}) +} + +const ( + composerInstalledAnalyzerVersion = 1 +) + +// composerVendorAnalyzer analyzes 'installed.json' +type composerVendorAnalyzer struct{} + +func (a composerVendorAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) { + return language.Analyze(types.ComposerVendor, input.FilePath, input.Content, composer.NewParser()) +} + +func (a composerVendorAnalyzer) Required(filePath string, _ os.FileInfo) bool { + return filepath.Base(filePath) == types.ComposerInstalledJson +} + +func (a composerVendorAnalyzer) Type() analyzer.Type { + return analyzer.TypeComposerVendor +} + +func (a composerVendorAnalyzer) Version() int { + return composerInstalledAnalyzerVersion +} diff --git a/pkg/fanal/analyzer/language/php/composer/vendor_test.go b/pkg/fanal/analyzer/language/php/composer/vendor_test.go new file mode 100644 index 000000000000..887c5d404039 --- /dev/null +++ b/pkg/fanal/analyzer/language/php/composer/vendor_test.go @@ -0,0 +1,120 @@ +package composer + +import ( + "os" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy/pkg/fanal/analyzer" + "github.com/aquasecurity/trivy/pkg/fanal/types" +) + +func Test_composerVendorAnalyzer_Analyze(t *testing.T) { + tests := []struct { + name string + inputFile string + want *analyzer.AnalysisResult + wantErr string + }{ + { + name: "happy path", + inputFile: "testdata/composer-vendor/happy/installed.json", + want: &analyzer.AnalysisResult{ + Applications: []types.Application{ + { + Type: types.ComposerVendor, + FilePath: "testdata/composer-vendor/happy/installed.json", + Packages: []types.Package{ + { + ID: 
"pear/log@1.13.3", + Name: "pear/log", + Version: "1.13.3", + Indirect: false, + Relationship: types.RelationshipUnknown, + Licenses: []string{"MIT"}, + Locations: []types.Location{ + { + StartLine: 3, + EndLine: 65, + }, + }, + DependsOn: []string{"pear/pear_exception@v1.0.2"}, + }, + { + ID: "pear/pear_exception@v1.0.2", + Name: "pear/pear_exception", + Version: "v1.0.2", + Indirect: false, + Relationship: types.RelationshipUnknown, + Licenses: []string{"BSD-2-Clause"}, + Locations: []types.Location{ + { + StartLine: 66, + EndLine: 127, + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "sad path", + inputFile: "testdata/composer-vendor/sad/installed.json", + wantErr: "decode error", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + f, err := os.Open(tt.inputFile) + require.NoError(t, err) + defer func() { + err = f.Close() + require.NoError(t, err) + }() + + a := composerVendorAnalyzer{} + got, err := a.Analyze(nil, analyzer.AnalysisInput{ + FilePath: tt.inputFile, + Content: f, + }) + + if tt.wantErr != "" { + require.ErrorContains(t, err, tt.wantErr) + return + } + + require.NoError(t, err) + require.Equal(t, tt.want, got) + }) + } +} + +func Test_composerVendorAnalyzer_Required(t *testing.T) { + tests := []struct { + name string + filePath string + want bool + }{ + { + name: "happy path", + filePath: "app/vendor/composer/installed.json", + want: true, + }, + { + name: "sad path", + filePath: "composer.json", + want: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + a := composerVendorAnalyzer{} + got := a.Required(tt.filePath, nil) + require.Equal(t, tt.want, got) + }) + } +} diff --git a/pkg/fanal/analyzer/language/rust/cargo/cargo.go b/pkg/fanal/analyzer/language/rust/cargo/cargo.go index 88c8a005595e..bd54273552c6 100644 --- a/pkg/fanal/analyzer/language/rust/cargo/cargo.go +++ b/pkg/fanal/analyzer/language/rust/cargo/cargo.go @@ -6,15 +6,15 @@ import ( "fmt" "io" "io/fs" + "maps" "os" "path" "path/filepath" + "slices" "sort" "github.com/BurntSushi/toml" "github.com/samber/lo" - "golang.org/x/exp/maps" - "golang.org/x/exp/slices" "golang.org/x/xerrors" "github.com/aquasecurity/go-version/pkg/semver" @@ -148,7 +148,7 @@ func (a cargoAnalyzer) removeDevDependencies(fsys fs.FS, dir string, app *types. 
a.walkIndirectDependencies(pkg, pkgIDs, pkgs) } - pkgSlice := maps.Values(pkgs) + pkgSlice := lo.Values(pkgs) sort.Sort(types.Packages(pkgSlice)) // Save only prod packages diff --git a/pkg/fanal/analyzer/licensing/license.go b/pkg/fanal/analyzer/licensing/license.go index 42872b1c8474..ceef1d90cecc 100644 --- a/pkg/fanal/analyzer/licensing/license.go +++ b/pkg/fanal/analyzer/licensing/license.go @@ -6,9 +6,9 @@ import ( "math" "os" "path/filepath" + "slices" "strings" - "golang.org/x/exp/slices" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" diff --git a/pkg/fanal/analyzer/os/alpine/alpine.go b/pkg/fanal/analyzer/os/alpine/alpine.go index 0caa5189b8f5..da3d2da00d89 100644 --- a/pkg/fanal/analyzer/os/alpine/alpine.go +++ b/pkg/fanal/analyzer/os/alpine/alpine.go @@ -4,8 +4,8 @@ import ( "bufio" "context" "os" + "slices" - "golang.org/x/exp/slices" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" diff --git a/pkg/fanal/analyzer/os/release/release.go b/pkg/fanal/analyzer/os/release/release.go index d4b959c3a9b3..229c13c932aa 100644 --- a/pkg/fanal/analyzer/os/release/release.go +++ b/pkg/fanal/analyzer/os/release/release.go @@ -4,10 +4,9 @@ import ( "bufio" "context" "os" + "slices" "strings" - "golang.org/x/exp/slices" - "github.com/aquasecurity/trivy/pkg/fanal/analyzer" "github.com/aquasecurity/trivy/pkg/fanal/types" ) diff --git a/pkg/fanal/analyzer/os/ubuntu/esm.go b/pkg/fanal/analyzer/os/ubuntu/esm.go index 9f1dd08f9c6d..d6fa38d30c16 100644 --- a/pkg/fanal/analyzer/os/ubuntu/esm.go +++ b/pkg/fanal/analyzer/os/ubuntu/esm.go @@ -4,8 +4,8 @@ import ( "context" "encoding/json" "os" + "slices" - "golang.org/x/exp/slices" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" diff --git a/pkg/fanal/analyzer/os/ubuntu/ubuntu.go b/pkg/fanal/analyzer/os/ubuntu/ubuntu.go index 2fff3ac8339f..75a24756365d 100644 --- a/pkg/fanal/analyzer/os/ubuntu/ubuntu.go +++ b/pkg/fanal/analyzer/os/ubuntu/ubuntu.go @@ -4,9 +4,9 @@ import ( "bufio" "context" "os" + "slices" "strings" - "golang.org/x/exp/slices" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" diff --git a/pkg/fanal/analyzer/pkg/apk/apk.go b/pkg/fanal/analyzer/pkg/apk/apk.go index bb2007470b1b..9ce13e4b013d 100644 --- a/pkg/fanal/analyzer/pkg/apk/apk.go +++ b/pkg/fanal/analyzer/pkg/apk/apk.go @@ -8,12 +8,12 @@ import ( "fmt" "os" "path" + "slices" "sort" "strings" apkVersion "github.com/knqyf263/go-apk-version" "github.com/samber/lo" - "golang.org/x/exp/slices" "github.com/aquasecurity/trivy/pkg/digest" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" diff --git a/pkg/fanal/analyzer/pkg/dpkg/copyright.go b/pkg/fanal/analyzer/pkg/dpkg/copyright.go index 1f50088f86b7..ac98c2e404c3 100644 --- a/pkg/fanal/analyzer/pkg/dpkg/copyright.go +++ b/pkg/fanal/analyzer/pkg/dpkg/copyright.go @@ -7,10 +7,10 @@ import ( "os" "path" "regexp" + "slices" "strings" "github.com/samber/lo" - "golang.org/x/exp/slices" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" diff --git a/pkg/fanal/analyzer/pkg/dpkg/dpkg.go b/pkg/fanal/analyzer/pkg/dpkg/dpkg.go index a83592e82523..1d8435ecb686 100644 --- a/pkg/fanal/analyzer/pkg/dpkg/dpkg.go +++ b/pkg/fanal/analyzer/pkg/dpkg/dpkg.go @@ -11,12 +11,12 @@ import ( "os" "path/filepath" "regexp" + "slices" "sort" "strings" debVersion "github.com/knqyf263/go-deb-version" "github.com/samber/lo" - 
"golang.org/x/exp/slices" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/digest" diff --git a/pkg/fanal/analyzer/pkg/rpm/rpm.go b/pkg/fanal/analyzer/pkg/rpm/rpm.go index 70d5b9dcd26a..70d4de217418 100644 --- a/pkg/fanal/analyzer/pkg/rpm/rpm.go +++ b/pkg/fanal/analyzer/pkg/rpm/rpm.go @@ -6,12 +6,12 @@ import ( "io" "os" "path/filepath" + "slices" "sort" "strings" rpmdb "github.com/knqyf263/go-rpmdb/pkg" "github.com/samber/lo" - "golang.org/x/exp/slices" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/digest" diff --git a/pkg/fanal/analyzer/pkg/rpm/rpmqa.go b/pkg/fanal/analyzer/pkg/rpm/rpmqa.go index 83b06f16823b..55f4feaa9232 100644 --- a/pkg/fanal/analyzer/pkg/rpm/rpmqa.go +++ b/pkg/fanal/analyzer/pkg/rpm/rpmqa.go @@ -4,9 +4,9 @@ import ( "bufio" "context" "os" + "slices" "strings" - "golang.org/x/exp/slices" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" diff --git a/pkg/fanal/analyzer/repo/apk/apk.go b/pkg/fanal/analyzer/repo/apk/apk.go index 454710d6841b..cdbc23d5e076 100644 --- a/pkg/fanal/analyzer/repo/apk/apk.go +++ b/pkg/fanal/analyzer/repo/apk/apk.go @@ -5,8 +5,8 @@ import ( "context" "os" "regexp" + "slices" - "golang.org/x/exp/slices" "golang.org/x/xerrors" ver "github.com/aquasecurity/go-version/pkg/version" diff --git a/pkg/fanal/analyzer/sbom/sbom_test.go b/pkg/fanal/analyzer/sbom/sbom_test.go index cce12a7c4955..542a7f50addd 100644 --- a/pkg/fanal/analyzer/sbom/sbom_test.go +++ b/pkg/fanal/analyzer/sbom/sbom_test.go @@ -93,8 +93,8 @@ func Test_sbomAnalyzer_Analyze(t *testing.T) { FilePath: "opt/bitnami/elasticsearch", Packages: types.Packages{ { - ID: "Elasticsearch@8.9.1", - Name: "Elasticsearch", + ID: "elasticsearch@8.9.1", + Name: "elasticsearch", Version: "8.9.1", Arch: "arm64", Licenses: []string{"Elastic-2.0"}, @@ -174,8 +174,8 @@ func Test_sbomAnalyzer_Analyze(t *testing.T) { FilePath: "opt/bitnami/postgresql", Packages: types.Packages{ { - ID: "GDAL@3.7.1", - Name: "GDAL", + ID: "gdal@3.7.1", + Name: "gdal", Version: "3.7.1", Licenses: []string{"MIT"}, Identifier: types.PkgIdentifier{ @@ -187,8 +187,8 @@ func Test_sbomAnalyzer_Analyze(t *testing.T) { }, }, { - ID: "GEOS@3.8.3", - Name: "GEOS", + ID: "geos@3.8.3", + Name: "geos", Version: "3.8.3", Licenses: []string{"LGPL-2.1-only"}, Identifier: types.PkgIdentifier{ @@ -200,8 +200,8 @@ func Test_sbomAnalyzer_Analyze(t *testing.T) { }, }, { - ID: "PostgreSQL@15.3.0", - Name: "PostgreSQL", + ID: "postgresql@15.3.0", + Name: "postgresql", Version: "15.3.0", Licenses: []string{"PostgreSQL"}, Identifier: types.PkgIdentifier{ @@ -212,14 +212,14 @@ func Test_sbomAnalyzer_Analyze(t *testing.T) { }, }, DependsOn: []string{ - "GEOS@3.8.3", - "Proj@6.3.2", - "GDAL@3.7.1", + "geos@3.8.3", + "proj@6.3.2", + "gdal@3.7.1", }, }, { - ID: "Proj@6.3.2", - Name: "Proj", + ID: "proj@6.3.2", + Name: "proj", Version: "6.3.2", Licenses: []string{"MIT"}, Identifier: types.PkgIdentifier{ diff --git a/pkg/fanal/analyzer/secret/secret.go b/pkg/fanal/analyzer/secret/secret.go index bbce32af326e..cad32e00ad8f 100644 --- a/pkg/fanal/analyzer/secret/secret.go +++ b/pkg/fanal/analyzer/secret/secret.go @@ -7,10 +7,10 @@ import ( "io" "os" "path/filepath" + "slices" "strings" "github.com/samber/lo" - "golang.org/x/exp/slices" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" @@ -34,10 +34,26 @@ var ( "Pipfile.lock", "Gemfile.lock", } - skipDirs = []string{".git", "node_modules"} + skipDirs = []string{ + ".git", + 
"node_modules", + } skipExts = []string{ - ".jpg", ".png", ".gif", ".doc", ".pdf", ".bin", ".svg", ".socket", ".deb", ".rpm", - ".zip", ".gz", ".gzip", ".tar", ".pyc", + ".jpg", + ".png", + ".gif", + ".doc", + ".pdf", + ".bin", + ".svg", + ".socket", + ".deb", + ".rpm", + ".zip", + ".gz", + ".gzip", + ".tar", + ".pyc", } ) diff --git a/pkg/fanal/applier/applier.go b/pkg/fanal/applier/applier.go index 192c9d2184f2..0ddeef11eba2 100644 --- a/pkg/fanal/applier/applier.go +++ b/pkg/fanal/applier/applier.go @@ -3,8 +3,8 @@ package applier import ( "golang.org/x/xerrors" + "github.com/aquasecurity/trivy/pkg/cache" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" - "github.com/aquasecurity/trivy/pkg/fanal/cache" ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" ) diff --git a/pkg/fanal/applier/applier_test.go b/pkg/fanal/applier/applier_test.go index ac8915f2dad6..b2a992f80012 100644 --- a/pkg/fanal/applier/applier_test.go +++ b/pkg/fanal/applier/applier_test.go @@ -8,8 +8,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/aquasecurity/trivy/pkg/cache" "github.com/aquasecurity/trivy/pkg/fanal/applier" - "github.com/aquasecurity/trivy/pkg/fanal/cache" "github.com/aquasecurity/trivy/pkg/fanal/types" ) diff --git a/pkg/fanal/applier/docker.go b/pkg/fanal/applier/docker.go index d4f88bc851a7..c1c21f236b22 100644 --- a/pkg/fanal/applier/docker.go +++ b/pkg/fanal/applier/docker.go @@ -6,10 +6,10 @@ import ( "time" "github.com/knqyf263/nested" - "github.com/mitchellh/hashstructure/v2" "github.com/package-url/packageurl-go" "github.com/samber/lo" + "github.com/aquasecurity/trivy/pkg/dependency" ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/log" "github.com/aquasecurity/trivy/pkg/purl" @@ -223,7 +223,7 @@ func ApplyLayers(layers []ftypes.BlobInfo) ftypes.ArtifactDetail { if mergedLayer.OS.Family != "" { mergedLayer.Packages[i].Identifier.PURL = newPURL(mergedLayer.OS.Family, types.Metadata{OS: &mergedLayer.OS}, pkg) } - mergedLayer.Packages[i].Identifier.UID = calcPkgUID("", pkg) + mergedLayer.Packages[i].Identifier.UID = dependency.UID("", pkg) // Only debian packages if licenses, ok := dpkgLicenses[pkg.Name]; ok { @@ -244,7 +244,7 @@ func ApplyLayers(layers []ftypes.BlobInfo) ftypes.ArtifactDetail { if pkg.Identifier.PURL == nil { app.Packages[i].Identifier.PURL = newPURL(app.Type, types.Metadata{}, pkg) } - app.Packages[i].Identifier.UID = calcPkgUID(app.FilePath, pkg) + app.Packages[i].Identifier.UID = dependency.UID(app.FilePath, pkg) } } @@ -263,22 +263,6 @@ func newPURL(pkgType ftypes.TargetType, metadata types.Metadata, pkg ftypes.Pack return p.Unwrap() } -// calcPkgUID calculates the hash of the package for the unique ID -func calcPkgUID(filePath string, pkg ftypes.Package) string { - v := map[string]any{ - "filePath": filePath, // To differentiate the hash of the same package but different file path - "pkg": pkg, - } - hash, err := hashstructure.Hash(v, hashstructure.FormatV2, &hashstructure.HashOptions{ - ZeroNil: true, - IgnoreZeroValue: true, - }) - if err != nil { - log.Warn("Failed to calculate the package hash", log.String("pkg", pkg.Name), log.Err(err)) - } - return fmt.Sprintf("%x", hash) -} - // aggregate merges all packages installed by pip/gem/npm/jar/conda into each application func aggregate(detail *ftypes.ArtifactDetail) { var apps 
[]ftypes.Application diff --git a/pkg/fanal/artifact/image/image.go b/pkg/fanal/artifact/image/image.go index 08b61de1b228..b4350b25b866 100644 --- a/pkg/fanal/artifact/image/image.go +++ b/pkg/fanal/artifact/image/image.go @@ -6,17 +6,17 @@ import ( "io" "os" "reflect" + "slices" "strings" "sync" v1 "github.com/google/go-containerregistry/pkg/v1" "github.com/samber/lo" - "golang.org/x/exp/slices" "golang.org/x/xerrors" + "github.com/aquasecurity/trivy/pkg/cache" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" "github.com/aquasecurity/trivy/pkg/fanal/artifact" - "github.com/aquasecurity/trivy/pkg/fanal/cache" "github.com/aquasecurity/trivy/pkg/fanal/handler" "github.com/aquasecurity/trivy/pkg/fanal/image" "github.com/aquasecurity/trivy/pkg/fanal/types" diff --git a/pkg/fanal/artifact/image/image_test.go b/pkg/fanal/artifact/image/image_test.go index 05fc6b229105..cd7fea2df1e2 100644 --- a/pkg/fanal/artifact/image/image_test.go +++ b/pkg/fanal/artifact/image/image_test.go @@ -11,10 +11,10 @@ import ( "github.com/stretchr/testify/require" "golang.org/x/xerrors" + "github.com/aquasecurity/trivy/pkg/cache" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" "github.com/aquasecurity/trivy/pkg/fanal/artifact" image2 "github.com/aquasecurity/trivy/pkg/fanal/artifact/image" - "github.com/aquasecurity/trivy/pkg/fanal/cache" "github.com/aquasecurity/trivy/pkg/fanal/image" "github.com/aquasecurity/trivy/pkg/fanal/types" @@ -352,17 +352,17 @@ func TestArtifact_Inspect(t *testing.T) { missingBlobsExpectation: cache.ArtifactCacheMissingBlobsExpectation{ Args: cache.ArtifactCacheMissingBlobsArgs{ ArtifactID: "sha256:c232b7d8ac8aa08aa767313d0b53084c4380d1c01a213a5971bdb039e6538313", - BlobIDs: []string{"sha256:1fd280c63e1416a2261e76454caa19a5b77c6bddedd48309c9687c4fe72b34c0"}, + BlobIDs: []string{"sha256:d4e6142cda465c55c8adf5b6c3148f3417a2c5582a76f933836738206e01b638"}, }, Returns: cache.ArtifactCacheMissingBlobsReturns{ MissingArtifact: true, - MissingBlobIDs: []string{"sha256:1fd280c63e1416a2261e76454caa19a5b77c6bddedd48309c9687c4fe72b34c0"}, + MissingBlobIDs: []string{"sha256:d4e6142cda465c55c8adf5b6c3148f3417a2c5582a76f933836738206e01b638"}, }, }, putBlobExpectations: []cache.ArtifactCachePutBlobExpectation{ { Args: cache.ArtifactCachePutBlobArgs{ - BlobID: "sha256:1fd280c63e1416a2261e76454caa19a5b77c6bddedd48309c9687c4fe72b34c0", + BlobID: "sha256:d4e6142cda465c55c8adf5b6c3148f3417a2c5582a76f933836738206e01b638", BlobInfo: types.BlobInfo{ SchemaVersion: types.BlobJSONSchemaVersion, Digest: "", @@ -429,7 +429,7 @@ func TestArtifact_Inspect(t *testing.T) { Name: "../../test/testdata/alpine-311.tar.gz", Type: artifact.TypeContainerImage, ID: "sha256:c232b7d8ac8aa08aa767313d0b53084c4380d1c01a213a5971bdb039e6538313", - BlobIDs: []string{"sha256:1fd280c63e1416a2261e76454caa19a5b77c6bddedd48309c9687c4fe72b34c0"}, + BlobIDs: []string{"sha256:d4e6142cda465c55c8adf5b6c3148f3417a2c5582a76f933836738206e01b638"}, ImageMetadata: artifact.ImageMetadata{ ID: "sha256:a187dde48cd289ac374ad8539930628314bc581a481cdb41409c9289419ddb72", DiffIDs: []string{ @@ -488,25 +488,25 @@ func TestArtifact_Inspect(t *testing.T) { Args: cache.ArtifactCacheMissingBlobsArgs{ ArtifactID: "sha256:33f9415ed2cd5a9cef5d5144333619745b9ec0f851f0684dd45fa79c6b26a650", BlobIDs: []string{ - "sha256:dd0a4f4754bf4590327be34f4266f63c92184352afadb72e4c9b162f76224000", - 
"sha256:f9e6a3065bb47f810916e90249076950a4b70785a27d3bcb90406d0ab342fa67", - "sha256:b6be0de11c6090f71dea119f43dd360335643420058e317baffb089f0dff4001", - "sha256:37c561c19b169f5f9832f4b0060bf74ebc8d1c9e01662ad4fa21c394da159440", + "sha256:a3eb0f92862bc742ea1e7ee875dd5623568ee17213ae7d29f05960eb1135fa6d", + "sha256:05b96a707dab6e1fcd9543f0df6a0e4cdf5c7e26272d7f6bc7ed2e1cf23afa9f", + "sha256:677cd3a664e4923227de2c2571c40b9956d99e4775b2e11ce8aa207842123119", + "sha256:e870ba0421bc71c046819f809c8369e98f59a2cde34961fdd429a2102da33c0c", }, }, Returns: cache.ArtifactCacheMissingBlobsReturns{ MissingBlobIDs: []string{ - "sha256:dd0a4f4754bf4590327be34f4266f63c92184352afadb72e4c9b162f76224000", - "sha256:f9e6a3065bb47f810916e90249076950a4b70785a27d3bcb90406d0ab342fa67", - "sha256:b6be0de11c6090f71dea119f43dd360335643420058e317baffb089f0dff4001", - "sha256:37c561c19b169f5f9832f4b0060bf74ebc8d1c9e01662ad4fa21c394da159440", + "sha256:a3eb0f92862bc742ea1e7ee875dd5623568ee17213ae7d29f05960eb1135fa6d", + "sha256:05b96a707dab6e1fcd9543f0df6a0e4cdf5c7e26272d7f6bc7ed2e1cf23afa9f", + "sha256:677cd3a664e4923227de2c2571c40b9956d99e4775b2e11ce8aa207842123119", + "sha256:e870ba0421bc71c046819f809c8369e98f59a2cde34961fdd429a2102da33c0c", }, }, }, putBlobExpectations: []cache.ArtifactCachePutBlobExpectation{ { Args: cache.ArtifactCachePutBlobArgs{ - BlobID: "sha256:dd0a4f4754bf4590327be34f4266f63c92184352afadb72e4c9b162f76224000", + BlobID: "sha256:a3eb0f92862bc742ea1e7ee875dd5623568ee17213ae7d29f05960eb1135fa6d", BlobInfo: types.BlobInfo{ SchemaVersion: types.BlobJSONSchemaVersion, Digest: "", @@ -594,7 +594,7 @@ func TestArtifact_Inspect(t *testing.T) { }, { Args: cache.ArtifactCachePutBlobArgs{ - BlobID: "sha256:f9e6a3065bb47f810916e90249076950a4b70785a27d3bcb90406d0ab342fa67", + BlobID: "sha256:05b96a707dab6e1fcd9543f0df6a0e4cdf5c7e26272d7f6bc7ed2e1cf23afa9f", BlobInfo: types.BlobInfo{ SchemaVersion: types.BlobJSONSchemaVersion, Digest: "", @@ -690,7 +690,7 @@ func TestArtifact_Inspect(t *testing.T) { }, { Args: cache.ArtifactCachePutBlobArgs{ - BlobID: "sha256:b6be0de11c6090f71dea119f43dd360335643420058e317baffb089f0dff4001", + BlobID: "sha256:677cd3a664e4923227de2c2571c40b9956d99e4775b2e11ce8aa207842123119", BlobInfo: types.BlobInfo{ SchemaVersion: types.BlobJSONSchemaVersion, Digest: "", @@ -898,7 +898,7 @@ func TestArtifact_Inspect(t *testing.T) { }, { Args: cache.ArtifactCachePutBlobArgs{ - BlobID: "sha256:37c561c19b169f5f9832f4b0060bf74ebc8d1c9e01662ad4fa21c394da159440", + BlobID: "sha256:e870ba0421bc71c046819f809c8369e98f59a2cde34961fdd429a2102da33c0c", BlobInfo: types.BlobInfo{ SchemaVersion: types.BlobJSONSchemaVersion, Digest: "", @@ -1761,10 +1761,10 @@ func TestArtifact_Inspect(t *testing.T) { Type: artifact.TypeContainerImage, ID: "sha256:33f9415ed2cd5a9cef5d5144333619745b9ec0f851f0684dd45fa79c6b26a650", BlobIDs: []string{ - "sha256:dd0a4f4754bf4590327be34f4266f63c92184352afadb72e4c9b162f76224000", - "sha256:f9e6a3065bb47f810916e90249076950a4b70785a27d3bcb90406d0ab342fa67", - "sha256:b6be0de11c6090f71dea119f43dd360335643420058e317baffb089f0dff4001", - "sha256:37c561c19b169f5f9832f4b0060bf74ebc8d1c9e01662ad4fa21c394da159440", + "sha256:a3eb0f92862bc742ea1e7ee875dd5623568ee17213ae7d29f05960eb1135fa6d", + "sha256:05b96a707dab6e1fcd9543f0df6a0e4cdf5c7e26272d7f6bc7ed2e1cf23afa9f", + "sha256:677cd3a664e4923227de2c2571c40b9956d99e4775b2e11ce8aa207842123119", + "sha256:e870ba0421bc71c046819f809c8369e98f59a2cde34961fdd429a2102da33c0c", }, ImageMetadata: artifact.ImageMetadata{ ID: 
"sha256:58701fd185bda36cab0557bb6438661831267aa4a9e0b54211c4d5317a48aff4", @@ -1858,25 +1858,25 @@ func TestArtifact_Inspect(t *testing.T) { Args: cache.ArtifactCacheMissingBlobsArgs{ ArtifactID: "sha256:33f9415ed2cd5a9cef5d5144333619745b9ec0f851f0684dd45fa79c6b26a650", BlobIDs: []string{ - "sha256:e1187118cdbe8893fc2fd4b345f813d195ee6aaeb4820d4576694199f8c10350", - "sha256:12c266a627dc4014c3ee96936058ba98209056f4ffe0081bb5fca7ff91592cdb", - "sha256:47adac0e28b12338e99dedbd7e8b0ef1f7aaa28e646f637ab2db8908b80704c8", - "sha256:dd1082b33b17401fdc31bcbf60eaaecb9ce29e23956c50db6f34b2cc6cfa13c8", + "sha256:f46989447d5a1357f6b2427b86ca2af827dd380dbd7fbf392d2abf9a5d457323", + "sha256:487a6fb0914825c8fb9f3a0662a608039bd5a8b6488d76b9de2eb1a684e908e1", + "sha256:a23b05a9c95939a0d30d6b4f6c25393473252bde47b2daa03258c27461367509", + "sha256:47226d3c41a3ffd99dacdbcd2b197a7394ee8948270710ee035181427f88dfab", }, }, Returns: cache.ArtifactCacheMissingBlobsReturns{ MissingBlobIDs: []string{ - "sha256:e1187118cdbe8893fc2fd4b345f813d195ee6aaeb4820d4576694199f8c10350", - "sha256:12c266a627dc4014c3ee96936058ba98209056f4ffe0081bb5fca7ff91592cdb", - "sha256:47adac0e28b12338e99dedbd7e8b0ef1f7aaa28e646f637ab2db8908b80704c8", - "sha256:dd1082b33b17401fdc31bcbf60eaaecb9ce29e23956c50db6f34b2cc6cfa13c8", + "sha256:f46989447d5a1357f6b2427b86ca2af827dd380dbd7fbf392d2abf9a5d457323", + "sha256:487a6fb0914825c8fb9f3a0662a608039bd5a8b6488d76b9de2eb1a684e908e1", + "sha256:a23b05a9c95939a0d30d6b4f6c25393473252bde47b2daa03258c27461367509", + "sha256:47226d3c41a3ffd99dacdbcd2b197a7394ee8948270710ee035181427f88dfab", }, }, }, putBlobExpectations: []cache.ArtifactCachePutBlobExpectation{ { Args: cache.ArtifactCachePutBlobArgs{ - BlobID: "sha256:e1187118cdbe8893fc2fd4b345f813d195ee6aaeb4820d4576694199f8c10350", + BlobID: "sha256:f46989447d5a1357f6b2427b86ca2af827dd380dbd7fbf392d2abf9a5d457323", BlobInfo: types.BlobInfo{ SchemaVersion: types.BlobJSONSchemaVersion, Digest: "", @@ -1887,7 +1887,7 @@ func TestArtifact_Inspect(t *testing.T) { }, { Args: cache.ArtifactCachePutBlobArgs{ - BlobID: "sha256:12c266a627dc4014c3ee96936058ba98209056f4ffe0081bb5fca7ff91592cdb", + BlobID: "sha256:487a6fb0914825c8fb9f3a0662a608039bd5a8b6488d76b9de2eb1a684e908e1", BlobInfo: types.BlobInfo{ SchemaVersion: types.BlobJSONSchemaVersion, Digest: "", @@ -1898,7 +1898,7 @@ func TestArtifact_Inspect(t *testing.T) { }, { Args: cache.ArtifactCachePutBlobArgs{ - BlobID: "sha256:47adac0e28b12338e99dedbd7e8b0ef1f7aaa28e646f637ab2db8908b80704c8", + BlobID: "sha256:a23b05a9c95939a0d30d6b4f6c25393473252bde47b2daa03258c27461367509", BlobInfo: types.BlobInfo{ SchemaVersion: types.BlobJSONSchemaVersion, Digest: "", @@ -1910,7 +1910,7 @@ func TestArtifact_Inspect(t *testing.T) { }, { Args: cache.ArtifactCachePutBlobArgs{ - BlobID: "sha256:dd1082b33b17401fdc31bcbf60eaaecb9ce29e23956c50db6f34b2cc6cfa13c8", + BlobID: "sha256:47226d3c41a3ffd99dacdbcd2b197a7394ee8948270710ee035181427f88dfab", BlobInfo: types.BlobInfo{ SchemaVersion: types.BlobJSONSchemaVersion, Digest: "", @@ -1926,10 +1926,10 @@ func TestArtifact_Inspect(t *testing.T) { Type: artifact.TypeContainerImage, ID: "sha256:33f9415ed2cd5a9cef5d5144333619745b9ec0f851f0684dd45fa79c6b26a650", BlobIDs: []string{ - "sha256:e1187118cdbe8893fc2fd4b345f813d195ee6aaeb4820d4576694199f8c10350", - "sha256:12c266a627dc4014c3ee96936058ba98209056f4ffe0081bb5fca7ff91592cdb", - "sha256:47adac0e28b12338e99dedbd7e8b0ef1f7aaa28e646f637ab2db8908b80704c8", - "sha256:dd1082b33b17401fdc31bcbf60eaaecb9ce29e23956c50db6f34b2cc6cfa13c8", + 
"sha256:f46989447d5a1357f6b2427b86ca2af827dd380dbd7fbf392d2abf9a5d457323", + "sha256:487a6fb0914825c8fb9f3a0662a608039bd5a8b6488d76b9de2eb1a684e908e1", + "sha256:a23b05a9c95939a0d30d6b4f6c25393473252bde47b2daa03258c27461367509", + "sha256:47226d3c41a3ffd99dacdbcd2b197a7394ee8948270710ee035181427f88dfab", }, ImageMetadata: artifact.ImageMetadata{ ID: "sha256:58701fd185bda36cab0557bb6438661831267aa4a9e0b54211c4d5317a48aff4", @@ -2012,7 +2012,7 @@ func TestArtifact_Inspect(t *testing.T) { missingBlobsExpectation: cache.ArtifactCacheMissingBlobsExpectation{ Args: cache.ArtifactCacheMissingBlobsArgs{ ArtifactID: "sha256:c232b7d8ac8aa08aa767313d0b53084c4380d1c01a213a5971bdb039e6538313", - BlobIDs: []string{"sha256:1fd280c63e1416a2261e76454caa19a5b77c6bddedd48309c9687c4fe72b34c0"}, + BlobIDs: []string{"sha256:d4e6142cda465c55c8adf5b6c3148f3417a2c5582a76f933836738206e01b638"}, }, Returns: cache.ArtifactCacheMissingBlobsReturns{ Err: xerrors.New("MissingBlobs failed"), @@ -2026,16 +2026,16 @@ func TestArtifact_Inspect(t *testing.T) { missingBlobsExpectation: cache.ArtifactCacheMissingBlobsExpectation{ Args: cache.ArtifactCacheMissingBlobsArgs{ ArtifactID: "sha256:c232b7d8ac8aa08aa767313d0b53084c4380d1c01a213a5971bdb039e6538313", - BlobIDs: []string{"sha256:1fd280c63e1416a2261e76454caa19a5b77c6bddedd48309c9687c4fe72b34c0"}, + BlobIDs: []string{"sha256:d4e6142cda465c55c8adf5b6c3148f3417a2c5582a76f933836738206e01b638"}, }, Returns: cache.ArtifactCacheMissingBlobsReturns{ - MissingBlobIDs: []string{"sha256:1fd280c63e1416a2261e76454caa19a5b77c6bddedd48309c9687c4fe72b34c0"}, + MissingBlobIDs: []string{"sha256:d4e6142cda465c55c8adf5b6c3148f3417a2c5582a76f933836738206e01b638"}, }, }, putBlobExpectations: []cache.ArtifactCachePutBlobExpectation{ { Args: cache.ArtifactCachePutBlobArgs{ - BlobID: "sha256:1fd280c63e1416a2261e76454caa19a5b77c6bddedd48309c9687c4fe72b34c0", + BlobID: "sha256:d4e6142cda465c55c8adf5b6c3148f3417a2c5582a76f933836738206e01b638", BlobInfo: types.BlobInfo{ SchemaVersion: types.BlobJSONSchemaVersion, Digest: "", @@ -2095,18 +2095,18 @@ func TestArtifact_Inspect(t *testing.T) { Args: cache.ArtifactCacheMissingBlobsArgs{ ArtifactID: "sha256:33f9415ed2cd5a9cef5d5144333619745b9ec0f851f0684dd45fa79c6b26a650", BlobIDs: []string{ - "sha256:dd0a4f4754bf4590327be34f4266f63c92184352afadb72e4c9b162f76224000", - "sha256:f9e6a3065bb47f810916e90249076950a4b70785a27d3bcb90406d0ab342fa67", - "sha256:b6be0de11c6090f71dea119f43dd360335643420058e317baffb089f0dff4001", - "sha256:37c561c19b169f5f9832f4b0060bf74ebc8d1c9e01662ad4fa21c394da159440", + "sha256:a3eb0f92862bc742ea1e7ee875dd5623568ee17213ae7d29f05960eb1135fa6d", + "sha256:05b96a707dab6e1fcd9543f0df6a0e4cdf5c7e26272d7f6bc7ed2e1cf23afa9f", + "sha256:677cd3a664e4923227de2c2571c40b9956d99e4775b2e11ce8aa207842123119", + "sha256:e870ba0421bc71c046819f809c8369e98f59a2cde34961fdd429a2102da33c0c", }, }, Returns: cache.ArtifactCacheMissingBlobsReturns{ MissingBlobIDs: []string{ - "sha256:dd0a4f4754bf4590327be34f4266f63c92184352afadb72e4c9b162f76224000", - "sha256:f9e6a3065bb47f810916e90249076950a4b70785a27d3bcb90406d0ab342fa67", - "sha256:b6be0de11c6090f71dea119f43dd360335643420058e317baffb089f0dff4001", - "sha256:37c561c19b169f5f9832f4b0060bf74ebc8d1c9e01662ad4fa21c394da159440", + "sha256:a3eb0f92862bc742ea1e7ee875dd5623568ee17213ae7d29f05960eb1135fa6d", + "sha256:05b96a707dab6e1fcd9543f0df6a0e4cdf5c7e26272d7f6bc7ed2e1cf23afa9f", + "sha256:677cd3a664e4923227de2c2571c40b9956d99e4775b2e11ce8aa207842123119", + 
"sha256:e870ba0421bc71c046819f809c8369e98f59a2cde34961fdd429a2102da33c0c", }, }, }, @@ -2114,7 +2114,7 @@ func TestArtifact_Inspect(t *testing.T) { { Args: cache.ArtifactCachePutBlobArgs{ - BlobID: "sha256:dd0a4f4754bf4590327be34f4266f63c92184352afadb72e4c9b162f76224000", + BlobID: "sha256:a3eb0f92862bc742ea1e7ee875dd5623568ee17213ae7d29f05960eb1135fa6d", BlobInfoAnything: true, }, @@ -2125,7 +2125,7 @@ func TestArtifact_Inspect(t *testing.T) { { Args: cache.ArtifactCachePutBlobArgs{ - BlobID: "sha256:f9e6a3065bb47f810916e90249076950a4b70785a27d3bcb90406d0ab342fa67", + BlobID: "sha256:05b96a707dab6e1fcd9543f0df6a0e4cdf5c7e26272d7f6bc7ed2e1cf23afa9f", BlobInfoAnything: true, }, @@ -2136,7 +2136,7 @@ func TestArtifact_Inspect(t *testing.T) { { Args: cache.ArtifactCachePutBlobArgs{ - BlobID: "sha256:b6be0de11c6090f71dea119f43dd360335643420058e317baffb089f0dff4001", + BlobID: "sha256:677cd3a664e4923227de2c2571c40b9956d99e4775b2e11ce8aa207842123119", BlobInfoAnything: true, }, @@ -2147,7 +2147,7 @@ func TestArtifact_Inspect(t *testing.T) { { Args: cache.ArtifactCachePutBlobArgs{ - BlobID: "sha256:37c561c19b169f5f9832f4b0060bf74ebc8d1c9e01662ad4fa21c394da159440", + BlobID: "sha256:e870ba0421bc71c046819f809c8369e98f59a2cde34961fdd429a2102da33c0c", BlobInfoAnything: true, }, @@ -2164,17 +2164,17 @@ func TestArtifact_Inspect(t *testing.T) { missingBlobsExpectation: cache.ArtifactCacheMissingBlobsExpectation{ Args: cache.ArtifactCacheMissingBlobsArgs{ ArtifactID: "sha256:c232b7d8ac8aa08aa767313d0b53084c4380d1c01a213a5971bdb039e6538313", - BlobIDs: []string{"sha256:1fd280c63e1416a2261e76454caa19a5b77c6bddedd48309c9687c4fe72b34c0"}, + BlobIDs: []string{"sha256:d4e6142cda465c55c8adf5b6c3148f3417a2c5582a76f933836738206e01b638"}, }, Returns: cache.ArtifactCacheMissingBlobsReturns{ MissingArtifact: true, - MissingBlobIDs: []string{"sha256:1fd280c63e1416a2261e76454caa19a5b77c6bddedd48309c9687c4fe72b34c0"}, + MissingBlobIDs: []string{"sha256:d4e6142cda465c55c8adf5b6c3148f3417a2c5582a76f933836738206e01b638"}, }, }, putBlobExpectations: []cache.ArtifactCachePutBlobExpectation{ { Args: cache.ArtifactCachePutBlobArgs{ - BlobID: "sha256:1fd280c63e1416a2261e76454caa19a5b77c6bddedd48309c9687c4fe72b34c0", + BlobID: "sha256:d4e6142cda465c55c8adf5b6c3148f3417a2c5582a76f933836738206e01b638", BlobInfo: types.BlobInfo{ SchemaVersion: types.BlobJSONSchemaVersion, Digest: "", diff --git a/pkg/fanal/artifact/image/remote_sbom.go b/pkg/fanal/artifact/image/remote_sbom.go index 8a386546c07a..37303a9f7b05 100644 --- a/pkg/fanal/artifact/image/remote_sbom.go +++ b/pkg/fanal/artifact/image/remote_sbom.go @@ -6,11 +6,11 @@ import ( "fmt" "os" "path/filepath" + "slices" "github.com/google/go-containerregistry/pkg/name" v1 "github.com/google/go-containerregistry/pkg/v1" "github.com/samber/lo" - "golang.org/x/exp/slices" "golang.org/x/xerrors" sbomatt "github.com/aquasecurity/trivy/pkg/attestation/sbom" diff --git a/pkg/fanal/artifact/image/remote_sbom_test.go b/pkg/fanal/artifact/image/remote_sbom_test.go index 1fd29fe2c69a..29fcc10f52fc 100644 --- a/pkg/fanal/artifact/image/remote_sbom_test.go +++ b/pkg/fanal/artifact/image/remote_sbom_test.go @@ -14,9 +14,9 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/aquasecurity/trivy/pkg/cache" "github.com/aquasecurity/trivy/pkg/fanal/artifact" image2 "github.com/aquasecurity/trivy/pkg/fanal/artifact/image" - "github.com/aquasecurity/trivy/pkg/fanal/cache" 
"github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/log" "github.com/aquasecurity/trivy/pkg/rekortest" diff --git a/pkg/fanal/artifact/local/fs.go b/pkg/fanal/artifact/local/fs.go index b807db2c6733..2f5ef7fe4ecd 100644 --- a/pkg/fanal/artifact/local/fs.go +++ b/pkg/fanal/artifact/local/fs.go @@ -14,9 +14,9 @@ import ( "github.com/opencontainers/go-digest" "golang.org/x/xerrors" + "github.com/aquasecurity/trivy/pkg/cache" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" "github.com/aquasecurity/trivy/pkg/fanal/artifact" - "github.com/aquasecurity/trivy/pkg/fanal/cache" "github.com/aquasecurity/trivy/pkg/fanal/handler" "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/fanal/walker" diff --git a/pkg/fanal/artifact/local/fs_test.go b/pkg/fanal/artifact/local/fs_test.go index 2cee794c85b2..d27b5ffb4366 100644 --- a/pkg/fanal/artifact/local/fs_test.go +++ b/pkg/fanal/artifact/local/fs_test.go @@ -10,9 +10,9 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/aquasecurity/trivy/pkg/cache" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" "github.com/aquasecurity/trivy/pkg/fanal/artifact" - "github.com/aquasecurity/trivy/pkg/fanal/cache" "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/fanal/walker" "github.com/aquasecurity/trivy/pkg/misconf" diff --git a/pkg/fanal/artifact/repo/git.go b/pkg/fanal/artifact/repo/git.go index 4ce1c990c925..8eb3d9af7f9d 100644 --- a/pkg/fanal/artifact/repo/git.go +++ b/pkg/fanal/artifact/repo/git.go @@ -12,9 +12,9 @@ import ( "github.com/hashicorp/go-multierror" "golang.org/x/xerrors" + "github.com/aquasecurity/trivy/pkg/cache" "github.com/aquasecurity/trivy/pkg/fanal/artifact" "github.com/aquasecurity/trivy/pkg/fanal/artifact/local" - "github.com/aquasecurity/trivy/pkg/fanal/cache" "github.com/aquasecurity/trivy/pkg/fanal/walker" ) diff --git a/pkg/fanal/artifact/repo/git_test.go b/pkg/fanal/artifact/repo/git_test.go index 0e4c8ee39d4b..8de1f3d8864b 100644 --- a/pkg/fanal/artifact/repo/git_test.go +++ b/pkg/fanal/artifact/repo/git_test.go @@ -12,8 +12,8 @@ import ( "github.com/stretchr/testify/require" "github.com/aquasecurity/trivy/internal/gittest" + "github.com/aquasecurity/trivy/pkg/cache" "github.com/aquasecurity/trivy/pkg/fanal/artifact" - "github.com/aquasecurity/trivy/pkg/fanal/cache" "github.com/aquasecurity/trivy/pkg/fanal/walker" _ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/config/all" diff --git a/pkg/fanal/artifact/sbom/sbom.go b/pkg/fanal/artifact/sbom/sbom.go index 979c5c5a8517..a5b646c18889 100644 --- a/pkg/fanal/artifact/sbom/sbom.go +++ b/pkg/fanal/artifact/sbom/sbom.go @@ -11,9 +11,9 @@ import ( "github.com/samber/lo" "golang.org/x/xerrors" + "github.com/aquasecurity/trivy/pkg/cache" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" "github.com/aquasecurity/trivy/pkg/fanal/artifact" - "github.com/aquasecurity/trivy/pkg/fanal/cache" "github.com/aquasecurity/trivy/pkg/fanal/handler" "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/log" diff --git a/pkg/fanal/artifact/sbom/sbom_test.go b/pkg/fanal/artifact/sbom/sbom_test.go index 
37ea39380b43..1dffa52f68c4 100644 --- a/pkg/fanal/artifact/sbom/sbom_test.go +++ b/pkg/fanal/artifact/sbom/sbom_test.go @@ -11,9 +11,9 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/aquasecurity/trivy/pkg/cache" "github.com/aquasecurity/trivy/pkg/fanal/artifact" "github.com/aquasecurity/trivy/pkg/fanal/artifact/sbom" - "github.com/aquasecurity/trivy/pkg/fanal/cache" "github.com/aquasecurity/trivy/pkg/fanal/types" ) diff --git a/pkg/fanal/artifact/vm/ebs.go b/pkg/fanal/artifact/vm/ebs.go index 64e1cc6a6b5a..280236cb371f 100644 --- a/pkg/fanal/artifact/vm/ebs.go +++ b/pkg/fanal/artifact/vm/ebs.go @@ -8,9 +8,9 @@ import ( ebsfile "github.com/masahiro331/go-ebs-file" "golang.org/x/xerrors" + "github.com/aquasecurity/trivy/pkg/cache" "github.com/aquasecurity/trivy/pkg/cloud/aws/config" "github.com/aquasecurity/trivy/pkg/fanal/artifact" - "github.com/aquasecurity/trivy/pkg/fanal/cache" "github.com/aquasecurity/trivy/pkg/log" ) diff --git a/pkg/fanal/artifact/vm/file.go b/pkg/fanal/artifact/vm/file.go index 7968cf44681b..a3cb262a98c8 100644 --- a/pkg/fanal/artifact/vm/file.go +++ b/pkg/fanal/artifact/vm/file.go @@ -12,8 +12,8 @@ import ( "github.com/opencontainers/go-digest" "golang.org/x/xerrors" + "github.com/aquasecurity/trivy/pkg/cache" "github.com/aquasecurity/trivy/pkg/fanal/artifact" - "github.com/aquasecurity/trivy/pkg/fanal/cache" "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/fanal/vm" "github.com/aquasecurity/trivy/pkg/fanal/vm/disk" diff --git a/pkg/fanal/artifact/vm/vm.go b/pkg/fanal/artifact/vm/vm.go index 5b9aae130f4c..56f7a0f5fa88 100644 --- a/pkg/fanal/artifact/vm/vm.go +++ b/pkg/fanal/artifact/vm/vm.go @@ -10,9 +10,9 @@ import ( "github.com/google/wire" "golang.org/x/xerrors" + "github.com/aquasecurity/trivy/pkg/cache" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" "github.com/aquasecurity/trivy/pkg/fanal/artifact" - "github.com/aquasecurity/trivy/pkg/fanal/cache" "github.com/aquasecurity/trivy/pkg/fanal/handler" "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/fanal/walker" diff --git a/pkg/fanal/artifact/vm/vm_test.go b/pkg/fanal/artifact/vm/vm_test.go index d1becf0f7067..c1331554f5cc 100644 --- a/pkg/fanal/artifact/vm/vm_test.go +++ b/pkg/fanal/artifact/vm/vm_test.go @@ -14,10 +14,10 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/aquasecurity/trivy/pkg/cache" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" "github.com/aquasecurity/trivy/pkg/fanal/artifact" "github.com/aquasecurity/trivy/pkg/fanal/artifact/vm" - "github.com/aquasecurity/trivy/pkg/fanal/cache" "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/fanal/walker" "github.com/aquasecurity/trivy/pkg/misconf" diff --git a/pkg/fanal/cache/s3.go b/pkg/fanal/cache/s3.go deleted file mode 100644 index 0176d2cf80d8..000000000000 --- a/pkg/fanal/cache/s3.go +++ /dev/null @@ -1,180 +0,0 @@ -package cache - -import ( - "bytes" - "context" - "encoding/json" - "fmt" - - "github.com/aws/aws-sdk-go-v2/aws" - "github.com/aws/aws-sdk-go-v2/feature/s3/manager" - 
"github.com/aws/aws-sdk-go-v2/service/s3" - "github.com/hashicorp/go-multierror" - "golang.org/x/xerrors" - - "github.com/aquasecurity/trivy/pkg/fanal/types" -) - -var _ Cache = &S3Cache{} - -type s3API interface { - HeadObject(ctx context.Context, params *s3.HeadObjectInput, optFns ...func(*s3.Options)) (*s3.HeadObjectOutput, error) - PutObject(ctx context.Context, params *s3.PutObjectInput, optFns ...func(*s3.Options)) (*s3.PutObjectOutput, error) - DeleteBucket(ctx context.Context, params *s3.DeleteBucketInput, optFns ...func(*s3.Options)) (*s3.DeleteBucketOutput, error) -} - -type S3Cache struct { - s3Client s3API - downloader *manager.Downloader - bucketName string - prefix string -} - -func NewS3Cache(bucketName, prefix string, api s3API, downloaderAPI *manager.Downloader) S3Cache { - return S3Cache{ - s3Client: api, - downloader: downloaderAPI, - bucketName: bucketName, - prefix: prefix, - } -} - -func (c S3Cache) PutArtifact(artifactID string, artifactConfig types.ArtifactInfo) (err error) { - key := fmt.Sprintf("%s/%s/%s", artifactBucket, c.prefix, artifactID) - if err := c.put(key, artifactConfig); err != nil { - return xerrors.Errorf("unable to store artifact information in cache (%s): %w", artifactID, err) - } - return nil -} - -func (c S3Cache) DeleteBlobs(blobIDs []string) error { - var errs error - for _, blobID := range blobIDs { - key := fmt.Sprintf("%s/%s/%s", blobBucket, c.prefix, blobID) - input := &s3.DeleteBucketInput{Bucket: aws.String(key)} - if _, err := c.s3Client.DeleteBucket(context.TODO(), input); err != nil { - errs = multierror.Append(errs, err) - } - } - return errs -} - -func (c S3Cache) PutBlob(blobID string, blobInfo types.BlobInfo) error { - key := fmt.Sprintf("%s/%s/%s", blobBucket, c.prefix, blobID) - if err := c.put(key, blobInfo); err != nil { - return xerrors.Errorf("unable to store blob information in cache (%s): %w", blobID, err) - } - return nil -} - -func (c S3Cache) put(key string, body any) (err error) { - b, err := json.Marshal(body) - if err != nil { - return err - } - params := &s3.PutObjectInput{ - Bucket: aws.String(c.bucketName), - Key: aws.String(key), - Body: bytes.NewReader(b), - } - _, err = c.s3Client.PutObject(context.TODO(), params) - if err != nil { - return xerrors.Errorf("unable to put object: %w", err) - } - // Index file due S3 caveat read after write consistency - _, err = c.s3Client.PutObject(context.TODO(), &s3.PutObjectInput{ - Bucket: aws.String(c.bucketName), - Key: aws.String(fmt.Sprintf("%s.index", key)), - }) - if err != nil { - return xerrors.Errorf("unable to put index object: %w", err) - } - return nil -} - -func (c S3Cache) GetBlob(blobID string) (types.BlobInfo, error) { - var blobInfo types.BlobInfo - buf := manager.NewWriteAtBuffer([]byte{}) - _, err := c.downloader.Download(context.TODO(), buf, &s3.GetObjectInput{ - Bucket: aws.String(c.bucketName), - Key: aws.String(fmt.Sprintf("%s/%s/%s", blobBucket, c.prefix, blobID)), - }) - if err != nil { - return types.BlobInfo{}, xerrors.Errorf("failed to get blob from the cache: %w", err) - } - err = json.Unmarshal(buf.Bytes(), &blobInfo) - if err != nil { - return types.BlobInfo{}, xerrors.Errorf("JSON unmarshal error: %w", err) - } - return blobInfo, nil -} - -func (c S3Cache) GetArtifact(artifactID string) (types.ArtifactInfo, error) { - var info types.ArtifactInfo - buf := manager.NewWriteAtBuffer([]byte{}) - _, err := c.downloader.Download(context.TODO(), buf, &s3.GetObjectInput{ - Bucket: aws.String(c.bucketName), - Key: 
aws.String(fmt.Sprintf("%s/%s/%s", artifactBucket, c.prefix, artifactID)), - }) - if err != nil { - return types.ArtifactInfo{}, xerrors.Errorf("failed to get artifact from the cache: %w", err) - } - err = json.Unmarshal(buf.Bytes(), &info) - if err != nil { - return types.ArtifactInfo{}, xerrors.Errorf("JSON unmarshal error: %w", err) - } - return info, nil -} - -func (c S3Cache) getIndex(key, keyType string) error { - _, err := c.s3Client.HeadObject(context.TODO(), &s3.HeadObjectInput{ - Key: aws.String(fmt.Sprintf("%s/%s/%s.index", keyType, c.prefix, key)), - Bucket: &c.bucketName, - }) - if err != nil { - return xerrors.Errorf("failed to get index from the cache: %w", err) - } - return nil -} - -func (c S3Cache) MissingBlobs(artifactID string, blobIDs []string) (bool, []string, error) { - var missingArtifact bool - var missingBlobIDs []string - for _, blobID := range blobIDs { - err := c.getIndex(blobID, blobBucket) - if err != nil { - // error means cache missed blob info - missingBlobIDs = append(missingBlobIDs, blobID) - continue - } - blobInfo, err := c.GetBlob(blobID) - if err != nil { - return true, missingBlobIDs, xerrors.Errorf("the blob object (%s) doesn't exist in S3 even though the index file exists: %w", blobID, err) - } - if blobInfo.SchemaVersion != types.BlobJSONSchemaVersion { - missingBlobIDs = append(missingBlobIDs, blobID) - } - } - // get artifact info - err := c.getIndex(artifactID, artifactBucket) - // error means cache missed artifact info - if err != nil { - return true, missingBlobIDs, nil - } - artifactInfo, err := c.GetArtifact(artifactID) - if err != nil { - return true, missingBlobIDs, xerrors.Errorf("the artifact object (%s) doesn't exist in S3 even though the index file exists: %w", artifactID, err) - } - if artifactInfo.SchemaVersion != types.ArtifactJSONSchemaVersion { - missingArtifact = true - } - return missingArtifact, missingBlobIDs, nil -} - -func (c S3Cache) Close() error { - return nil -} - -func (c S3Cache) Clear() error { - return nil -} diff --git a/pkg/fanal/cache/s3_test.go b/pkg/fanal/cache/s3_test.go deleted file mode 100644 index ed3da27b974f..000000000000 --- a/pkg/fanal/cache/s3_test.go +++ /dev/null @@ -1,312 +0,0 @@ -package cache - -import ( - "context" - "errors" - "reflect" - "testing" - "time" - - "github.com/aws/aws-sdk-go-v2/feature/s3/manager" - "github.com/aws/aws-sdk-go-v2/service/s3" - "golang.org/x/xerrors" - - "github.com/aquasecurity/trivy/pkg/fanal/types" -) - -type mockS3Client struct { - s3API -} - -const ( - correctHash = "sha256:24df0d4e20c0f42d3703bf1f1db2bdd77346c7956f74f423603d651e8e5ae8a7" -) - -func (m *mockS3Client) PutObject(ctx context.Context, in *s3.PutObjectInput, optFns ...func(*s3.Options)) (*s3.PutObjectOutput, error) { - return &s3.PutObjectOutput{}, nil -} - -func (m *mockS3Client) HeadObject(ctx context.Context, params *s3.HeadObjectInput, optFns ...func(*s3.Options)) (*s3.HeadObjectOutput, error) { - return &s3.HeadObjectOutput{}, nil -} - -func (m *mockS3Client) DeleteBucket(ctx context.Context, in *s3.DeleteBucketInput, optFns ...func(*s3.Options)) (*s3.DeleteBucketOutput, error) { - if in != nil && *in.Bucket == blobBucket+"/prefix/"+correctHash { - return &s3.DeleteBucketOutput{}, nil - } - return nil, errors.New("unknown bucket") -} - -func TestS3Cache_PutBlob(t *testing.T) { - mockSvc := &mockS3Client{} - - type fields struct { - S3 s3API - Downloader *manager.Downloader - BucketName string - Prefix string - } - type args struct { - blobID string - blobInfo 
types.BlobInfo - } - tests := []struct { - name string - fields fields - args args - wantErr bool - }{ - { - name: "happy path", - fields: fields{ - S3: mockSvc, - BucketName: "test", - Prefix: "prefix", - }, - args: args{ - blobID: "sha256:24df0d4e20c0f42d3703bf1f1db2bdd77346c7956f74f423603d651e8e5ae8a7", - blobInfo: types.BlobInfo{ - SchemaVersion: 1, - OS: types.OS{ - Family: "alpine", - Name: "3.10", - }, - }}, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - c := NewS3Cache(tt.fields.BucketName, tt.fields.Prefix, tt.fields.S3, tt.fields.Downloader) - if err := c.PutBlob(tt.args.blobID, tt.args.blobInfo); (err != nil) != tt.wantErr { - t.Errorf("S3Cache.PutBlob() error = %v, wantErr %v", err, tt.wantErr) - } - }) - } -} - -func TestS3Cache_PutArtifact(t *testing.T) { - mockSvc := &mockS3Client{} - - type fields struct { - S3 s3API - Downloader *manager.Downloader - BucketName string - Prefix string - } - type args struct { - artifactID string - artifactConfig types.ArtifactInfo - } - tests := []struct { - name string - fields fields - args args - wantErr bool - }{ - { - name: "happy path", - fields: fields{ - S3: mockSvc, - BucketName: "test", - Prefix: "prefix", - }, - args: args{ - artifactID: "sha256:58701fd185bda36cab0557bb6438661831267aa4a9e0b54211c4d5317a48aff4", - artifactConfig: types.ArtifactInfo{ - SchemaVersion: 1, - Architecture: "amd64", - Created: time.Date(2020, 1, 2, 3, 4, 5, 0, time.UTC), - DockerVersion: "18.06.1-ce", - OS: "linux", - HistoryPackages: []types.Package{ - { - Name: "musl", - Version: "1.2.3", - }, - }, - }}, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - c := NewS3Cache(tt.fields.BucketName, tt.fields.Prefix, tt.fields.S3, tt.fields.Downloader) - if err := c.PutArtifact(tt.args.artifactID, tt.args.artifactConfig); (err != nil) != tt.wantErr { - t.Errorf("S3Cache.PutArtifact() error = %v, wantErr %v", err, tt.wantErr) - } - }) - } -} - -func TestS3Cache_getIndex(t *testing.T) { - mockSvc := &mockS3Client{} - - type fields struct { - S3 s3API - Downloader *manager.Downloader - BucketName string - Prefix string - } - type args struct { - key string - keyType string - } - tests := []struct { - name string - fields fields - args args - wantErr bool - }{ - { - name: "happy path", - fields: fields{ - S3: mockSvc, - BucketName: "test", - Prefix: "prefix", - }, - args: args{ - key: "key", - keyType: "artifactBucket", - }, - wantErr: false, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - c := NewS3Cache(tt.fields.BucketName, tt.fields.Prefix, tt.fields.S3, tt.fields.Downloader) - if err := c.getIndex(tt.args.key, tt.args.keyType); (err != nil) != tt.wantErr { - t.Errorf("S3Cache.getIndex() error = %v, wantErr %v", err, tt.wantErr) - } - }) - } -} - -type mockS3ClientMissingBlobs struct { - s3API -} - -func (m *mockS3ClientMissingBlobs) PutObject(ctx context.Context, in *s3.PutObjectInput, optFns ...func(*s3.Options)) (*s3.PutObjectOutput, error) { - return &s3.PutObjectOutput{}, nil -} - -func (m *mockS3ClientMissingBlobs) HeadObject(ctx context.Context, params *s3.HeadObjectInput, optFns ...func(*s3.Options)) (*s3.HeadObjectOutput, error) { - return &s3.HeadObjectOutput{}, xerrors.Errorf("the object doesn't exist in S3") -} - -func TestS3Cache_MissingBlobs(t *testing.T) { - mockSvc := &mockS3ClientMissingBlobs{} - - type fields struct { - S3 s3API - Downloader *manager.Downloader - BucketName string - Prefix string - } - type args struct { - artifactID string - blobIDs 
[]string - analyzerVersions map[string]int - configAnalyzerVersions map[string]int - } - tests := []struct { - name string - fields fields - args args - want bool - wantStringSlice []string - wantErr bool - }{{ - name: "happy path", - fields: fields{ - S3: mockSvc, - BucketName: "test", - Prefix: "prefix", - }, - args: args{ - artifactID: "sha256:58701fd185bda36cab0557bb6438661831267aa4a9e0b54211c4d5317a48aff4/1", - blobIDs: []string{"sha256:24df0d4e20c0f42d3703bf1f1db2bdd77346c7956f74f423603d651e8e5ae8a7/10011"}, - }, - want: true, - wantStringSlice: []string{"sha256:24df0d4e20c0f42d3703bf1f1db2bdd77346c7956f74f423603d651e8e5ae8a7/10011"}, - wantErr: false, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - c := NewS3Cache(tt.fields.BucketName, tt.fields.Prefix, tt.fields.S3, tt.fields.Downloader) - got, got1, err := c.MissingBlobs(tt.args.artifactID, tt.args.blobIDs) - if (err != nil) != tt.wantErr { - t.Errorf("S3Cache.MissingBlobs() error = %v, wantErr %v", err, tt.wantErr) - return - } - if got != tt.want { - t.Errorf("S3Cache.MissingBlobs() got = %v, want %v", got, tt.want) - } - if !reflect.DeepEqual(got1, tt.wantStringSlice) { - t.Errorf("S3Cache.MissingBlobs() got1 = %v, want %v", got1, tt.wantStringSlice) - } - }) - } -} - -func TestS3Cache_DeleteBlobs(t *testing.T) { - mockSvc := &mockS3Client{} - - type fields struct { - S3 s3API - Downloader *manager.Downloader - BucketName string - Prefix string - } - type args struct { - blobIDs []string - } - tests := []struct { - name string - fields fields - args args - wantErr bool - }{ - { - name: "happy path", - fields: fields{ - S3: mockSvc, - BucketName: "test", - Prefix: "prefix", - }, - args: args{ - blobIDs: []string{correctHash}, - }, - }, - { - name: "delete blob with bad ID", - fields: fields{ - S3: mockSvc, - BucketName: "test", - Prefix: "prefix", - }, - args: args{ - blobIDs: []string{"unde"}, - }, - wantErr: true, - }, - { - name: "delete blobs with bad ID", - fields: fields{ - S3: mockSvc, - BucketName: "test", - Prefix: "prefix", - }, - args: args{ - blobIDs: []string{correctHash}, - }, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - c := NewS3Cache(tt.fields.BucketName, tt.fields.Prefix, tt.fields.S3, tt.fields.Downloader) - if err := c.DeleteBlobs(tt.args.blobIDs); (err != nil) != tt.wantErr { - t.Errorf("S3Cache.PutBlob() error = %v, wantErr %v", err, tt.wantErr) - } - }) - } -} diff --git a/pkg/fanal/handler/handler.go b/pkg/fanal/handler/handler.go index 10ce085b3547..84629b66e242 100644 --- a/pkg/fanal/handler/handler.go +++ b/pkg/fanal/handler/handler.go @@ -2,9 +2,9 @@ package handler import ( "context" + "slices" "sort" - "golang.org/x/exp/slices" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" diff --git a/pkg/fanal/handler/sysfile/filter.go b/pkg/fanal/handler/sysfile/filter.go index 5222049c0d16..cbe1e84f18d4 100644 --- a/pkg/fanal/handler/sysfile/filter.go +++ b/pkg/fanal/handler/sysfile/filter.go @@ -2,10 +2,9 @@ package nodejs import ( "context" + "slices" "strings" - "golang.org/x/exp/slices" - "github.com/aquasecurity/trivy/pkg/fanal/analyzer" "github.com/aquasecurity/trivy/pkg/fanal/artifact" "github.com/aquasecurity/trivy/pkg/fanal/handler" diff --git a/pkg/fanal/handler/unpackaged/unpackaged.go b/pkg/fanal/handler/unpackaged/unpackaged.go index 119cae3e7dad..ed380de49cb0 100644 --- a/pkg/fanal/handler/unpackaged/unpackaged.go +++ b/pkg/fanal/handler/unpackaged/unpackaged.go @@ -4,8 +4,8 @@ 
import ( "bytes" "context" "errors" + "slices" - "golang.org/x/exp/slices" "golang.org/x/xerrors" sbomatt "github.com/aquasecurity/trivy/pkg/attestation/sbom" diff --git a/pkg/fanal/image/daemon/image.go b/pkg/fanal/image/daemon/image.go index 5d80cb93eee4..d3787cc0abc7 100644 --- a/pkg/fanal/image/daemon/image.go +++ b/pkg/fanal/image/daemon/image.go @@ -110,16 +110,25 @@ func (img *image) ConfigFile() (*v1.ConfigFile, error) { return nil, xerrors.Errorf("unable to get diff IDs: %w", err) } - created, err := time.Parse(time.RFC3339Nano, img.inspect.Created) - if err != nil { - return nil, xerrors.Errorf("failed parsing created %s: %w", img.inspect.Created, err) + var created v1.Time + // `Created` field can be empty. Skip parsing to avoid error. + // cf. https://github.com/moby/moby/blob/8e96db1c328d0467b015768e42a62c0f834970bb/api/types/types.go#L76-L77 + if img.inspect.Created != "" { + var t time.Time + t, err = time.Parse(time.RFC3339Nano, img.inspect.Created) + if err != nil { + return nil, xerrors.Errorf("failed parsing created %s: %w", img.inspect.Created, err) + } + created = v1.Time{ + Time: t, + } } return &v1.ConfigFile{ Architecture: img.inspect.Architecture, Author: img.inspect.Author, Container: img.inspect.Container, - Created: v1.Time{Time: created}, + Created: created, DockerVersion: img.inspect.DockerVersion, Config: img.imageConfig(img.inspect.Config), History: img.history, diff --git a/pkg/fanal/secret/scanner.go b/pkg/fanal/secret/scanner.go index 51ac0db707a8..cc022bb82db4 100644 --- a/pkg/fanal/secret/scanner.go +++ b/pkg/fanal/secret/scanner.go @@ -5,12 +5,12 @@ import ( "errors" "os" "regexp" + "slices" "sort" "strings" "sync" "github.com/samber/lo" - "golang.org/x/exp/slices" "golang.org/x/xerrors" "gopkg.in/yaml.v3" diff --git a/pkg/fanal/test/integration/containerd_test.go b/pkg/fanal/test/integration/containerd_test.go index e0fdd7602e32..d16ad3dac059 100644 --- a/pkg/fanal/test/integration/containerd_test.go +++ b/pkg/fanal/test/integration/containerd_test.go @@ -27,11 +27,11 @@ import ( "github.com/testcontainers/testcontainers-go" "github.com/testcontainers/testcontainers-go/wait" + "github.com/aquasecurity/trivy/pkg/cache" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" "github.com/aquasecurity/trivy/pkg/fanal/applier" "github.com/aquasecurity/trivy/pkg/fanal/artifact" aimage "github.com/aquasecurity/trivy/pkg/fanal/artifact/image" - "github.com/aquasecurity/trivy/pkg/fanal/cache" "github.com/aquasecurity/trivy/pkg/fanal/image" "github.com/aquasecurity/trivy/pkg/fanal/types" ) diff --git a/pkg/fanal/test/integration/library_test.go b/pkg/fanal/test/integration/library_test.go index 9e2073185c60..f06a8c3f5c6c 100644 --- a/pkg/fanal/test/integration/library_test.go +++ b/pkg/fanal/test/integration/library_test.go @@ -20,11 +20,11 @@ import ( "github.com/aquasecurity/trivy/pkg/fanal/analyzer" + "github.com/aquasecurity/trivy/pkg/cache" _ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/all" "github.com/aquasecurity/trivy/pkg/fanal/applier" "github.com/aquasecurity/trivy/pkg/fanal/artifact" aimage "github.com/aquasecurity/trivy/pkg/fanal/artifact/image" - "github.com/aquasecurity/trivy/pkg/fanal/cache" _ "github.com/aquasecurity/trivy/pkg/fanal/handler/all" "github.com/aquasecurity/trivy/pkg/fanal/image" "github.com/aquasecurity/trivy/pkg/fanal/types" diff --git 
a/pkg/fanal/test/integration/registry_test.go b/pkg/fanal/test/integration/registry_test.go index 081f9df20d95..5b062e425729 100644 --- a/pkg/fanal/test/integration/registry_test.go +++ b/pkg/fanal/test/integration/registry_test.go @@ -20,12 +20,12 @@ import ( testcontainers "github.com/testcontainers/testcontainers-go" "github.com/testcontainers/testcontainers-go/wait" + "github.com/aquasecurity/trivy/pkg/cache" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" _ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/all" "github.com/aquasecurity/trivy/pkg/fanal/applier" "github.com/aquasecurity/trivy/pkg/fanal/artifact" aimage "github.com/aquasecurity/trivy/pkg/fanal/artifact/image" - "github.com/aquasecurity/trivy/pkg/fanal/cache" "github.com/aquasecurity/trivy/pkg/fanal/image" testdocker "github.com/aquasecurity/trivy/pkg/fanal/test/integration/docker" "github.com/aquasecurity/trivy/pkg/fanal/types" diff --git a/pkg/fanal/types/const.go b/pkg/fanal/types/const.go index 6874b8a40b06..7253404c0be1 100644 --- a/pkg/fanal/types/const.go +++ b/pkg/fanal/types/const.go @@ -43,37 +43,39 @@ const ( // Programming language dependencies const ( - Bundler LangType = "bundler" - GemSpec LangType = "gemspec" - Cargo LangType = "cargo" - Composer LangType = "composer" - Npm LangType = "npm" - NuGet LangType = "nuget" - DotNetCore LangType = "dotnet-core" - PackagesProps LangType = "packages-props" - Pip LangType = "pip" - Pipenv LangType = "pipenv" - Poetry LangType = "poetry" - CondaPkg LangType = "conda-pkg" - CondaEnv LangType = "conda-environment" - PythonPkg LangType = "python-pkg" - NodePkg LangType = "node-pkg" - Yarn LangType = "yarn" - Pnpm LangType = "pnpm" - Jar LangType = "jar" - Pom LangType = "pom" - Gradle LangType = "gradle" - GoBinary LangType = "gobinary" - GoModule LangType = "gomod" - JavaScript LangType = "javascript" - RustBinary LangType = "rustbinary" - Conan LangType = "conan" - Cocoapods LangType = "cocoapods" - Swift LangType = "swift" - Pub LangType = "pub" - Hex LangType = "hex" - Bitnami LangType = "bitnami" - Julia LangType = "julia" + Bundler LangType = "bundler" + GemSpec LangType = "gemspec" + Cargo LangType = "cargo" + Composer LangType = "composer" + ComposerVendor LangType = "composer-vendor" + Npm LangType = "npm" + NuGet LangType = "nuget" + DotNetCore LangType = "dotnet-core" + PackagesProps LangType = "packages-props" + Pip LangType = "pip" + Pipenv LangType = "pipenv" + Poetry LangType = "poetry" + CondaPkg LangType = "conda-pkg" + CondaEnv LangType = "conda-environment" + PythonPkg LangType = "python-pkg" + NodePkg LangType = "node-pkg" + Yarn LangType = "yarn" + Pnpm LangType = "pnpm" + Jar LangType = "jar" + Pom LangType = "pom" + Gradle LangType = "gradle" + Sbt LangType = "sbt" + GoBinary LangType = "gobinary" + GoModule LangType = "gomod" + JavaScript LangType = "javascript" + RustBinary LangType = "rustbinary" + Conan LangType = "conan" + Cocoapods LangType = "cocoapods" + Swift LangType = "swift" + Pub LangType = "pub" + Hex LangType = "hex" + Bitnami LangType = "bitnami" + Julia LangType = "julia" K8sUpstream LangType = "kubernetes" EKS LangType = "eks" // Amazon Elastic Kubernetes Service @@ -114,14 +116,16 @@ const ( GoSum = "go.sum" MavenPom = "pom.xml" + SbtLock = "build.sbt.lock" NpmPkg = "package.json" NpmPkgLock = "package-lock.json" YarnLock = "yarn.lock" PnpmLock = "pnpm-lock.yaml" - ComposerLock = "composer.lock" - ComposerJson = "composer.json" + 
ComposerLock = "composer.lock" + ComposerJson = "composer.json" + ComposerInstalledJson = "installed.json" PyProject = "pyproject.toml" PipRequirements = "requirements.txt" diff --git a/pkg/fanal/types/package.go b/pkg/fanal/types/package.go index 0a281326b35d..a0734651355d 100644 --- a/pkg/fanal/types/package.go +++ b/pkg/fanal/types/package.go @@ -170,7 +170,7 @@ type Package struct { SrcEpoch int `json:",omitempty"` Licenses []string `json:",omitempty"` Maintainer string `json:",omitempty"` - ExternalReferences []ExternalRef `json:"-"` + ExternalReferences []ExternalRef `json:"-" hash:"ignore"` Modularitylabel string `json:",omitempty"` // only for Red Hat based distributions BuildInfo *BuildInfo `json:",omitempty"` // only for Red Hat diff --git a/pkg/fanal/walker/fs_test.go b/pkg/fanal/walker/fs_test.go index 2b7b2117afb1..6eec99571073 100644 --- a/pkg/fanal/walker/fs_test.go +++ b/pkg/fanal/walker/fs_test.go @@ -6,12 +6,12 @@ import ( "os" "path/filepath" "runtime" + "slices" "strings" "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "golang.org/x/exp/slices" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" "github.com/aquasecurity/trivy/pkg/fanal/walker" diff --git a/pkg/fanal/walker/vm.go b/pkg/fanal/walker/vm.go index 6ff32564cc0d..5d7336f1623c 100644 --- a/pkg/fanal/walker/vm.go +++ b/pkg/fanal/walker/vm.go @@ -5,13 +5,13 @@ import ( "io" "io/fs" "path/filepath" + "slices" "strings" "github.com/masahiro331/go-disk" "github.com/masahiro331/go-disk/gpt" "github.com/masahiro331/go-disk/mbr" "github.com/masahiro331/go-disk/types" - "golang.org/x/exp/slices" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/fanal/vm/filesystem" diff --git a/pkg/flag/cache_flags.go b/pkg/flag/cache_flags.go index c259d5b9e963..074953c2ea44 100644 --- a/pkg/flag/cache_flags.go +++ b/pkg/flag/cache_flags.go @@ -1,12 +1,7 @@ package flag import ( - "fmt" - "strings" "time" - - "github.com/samber/lo" - "golang.org/x/xerrors" ) // e.g. config yaml: @@ -19,16 +14,18 @@ import ( // cert: cert.pem // key: key.pem var ( + // Deprecated ClearCacheFlag = Flag[bool]{ Name: "clear-cache", ConfigName: "cache.clear", Usage: "clear image caches without scanning", + Removed: `Use "trivy clean --scan-cache" instead`, } CacheBackendFlag = Flag[string]{ Name: "cache-backend", ConfigName: "cache.backend", Default: "fs", - Usage: "cache backend (e.g. redis://localhost:6379)", + Usage: "[EXPERIMENTAL] cache backend (e.g. 
redis://localhost:6379)", } CacheTTLFlag = Flag[time.Duration]{ Name: "cache-ttl", @@ -70,24 +67,19 @@ type CacheFlagGroup struct { } type CacheOptions struct { - ClearCache bool + ClearCache bool + CacheBackend string CacheTTL time.Duration RedisTLS bool - RedisOptions -} - -// RedisOptions holds the options for redis cache -type RedisOptions struct { - RedisCACert string - RedisCert string - RedisKey string + RedisCACert string + RedisCert string + RedisKey string } // NewCacheFlagGroup returns a default CacheFlagGroup func NewCacheFlagGroup() *CacheFlagGroup { return &CacheFlagGroup{ - ClearCache: ClearCacheFlag.Clone(), CacheBackend: CacheBackendFlag.Clone(), CacheTTL: CacheTTLFlag.Clone(), RedisTLS: RedisTLSFlag.Clone(), @@ -118,43 +110,12 @@ func (fg *CacheFlagGroup) ToOptions() (CacheOptions, error) { return CacheOptions{}, err } - cacheBackend := fg.CacheBackend.Value() - redisOptions := RedisOptions{ - RedisCACert: fg.RedisCACert.Value(), - RedisCert: fg.RedisCert.Value(), - RedisKey: fg.RedisKey.Value(), - } - - // "redis://" or "fs" are allowed for now - // An empty value is also allowed for testability - if !strings.HasPrefix(cacheBackend, "redis://") && - cacheBackend != "fs" && cacheBackend != "" { - return CacheOptions{}, xerrors.Errorf("unsupported cache backend: %s", cacheBackend) - } - // if one of redis option not nil, make sure CA, cert, and key provided - if !lo.IsEmpty(redisOptions) { - if redisOptions.RedisCACert == "" || redisOptions.RedisCert == "" || redisOptions.RedisKey == "" { - return CacheOptions{}, xerrors.Errorf("you must provide Redis CA, cert and key file path when using TLS") - } - } - return CacheOptions{ - ClearCache: fg.ClearCache.Value(), - CacheBackend: cacheBackend, + CacheBackend: fg.CacheBackend.Value(), CacheTTL: fg.CacheTTL.Value(), RedisTLS: fg.RedisTLS.Value(), - RedisOptions: redisOptions, + RedisCACert: fg.RedisCACert.Value(), + RedisCert: fg.RedisCert.Value(), + RedisKey: fg.RedisKey.Value(), }, nil } - -// CacheBackendMasked returns the redis connection string masking credentials -func (o *CacheOptions) CacheBackendMasked() string { - endIndex := strings.Index(o.CacheBackend, "@") - if endIndex == -1 { - return o.CacheBackend - } - - startIndex := strings.Index(o.CacheBackend, "//") - - return fmt.Sprintf("%s****%s", o.CacheBackend[:startIndex+2], o.CacheBackend[endIndex:]) -} diff --git a/pkg/flag/cache_flags_test.go b/pkg/flag/cache_flags_test.go deleted file mode 100644 index c795cdbd6715..000000000000 --- a/pkg/flag/cache_flags_test.go +++ /dev/null @@ -1,160 +0,0 @@ -package flag_test - -import ( - "testing" - "time" - - "github.com/spf13/viper" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - "github.com/aquasecurity/trivy/pkg/flag" -) - -func TestCacheFlagGroup_ToOptions(t *testing.T) { - type fields struct { - ClearCache bool - CacheBackend string - CacheTTL time.Duration - RedisTLS bool - RedisCACert string - RedisCert string - RedisKey string - } - tests := []struct { - name string - fields fields - want flag.CacheOptions - assertion require.ErrorAssertionFunc - }{ - { - name: "fs", - fields: fields{ - CacheBackend: "fs", - }, - want: flag.CacheOptions{ - CacheBackend: "fs", - }, - assertion: require.NoError, - }, - { - name: "redis", - fields: fields{ - CacheBackend: "redis://localhost:6379", - }, - want: flag.CacheOptions{ - CacheBackend: "redis://localhost:6379", - }, - assertion: require.NoError, - }, - { - name: "redis tls", - fields: fields{ - CacheBackend: 
"redis://localhost:6379", - RedisCACert: "ca-cert.pem", - RedisCert: "cert.pem", - RedisKey: "key.pem", - }, - want: flag.CacheOptions{ - CacheBackend: "redis://localhost:6379", - RedisOptions: flag.RedisOptions{ - RedisCACert: "ca-cert.pem", - RedisCert: "cert.pem", - RedisKey: "key.pem", - }, - }, - assertion: require.NoError, - }, - { - name: "redis tls with public certificates", - fields: fields{ - CacheBackend: "redis://localhost:6379", - RedisTLS: true, - }, - want: flag.CacheOptions{ - CacheBackend: "redis://localhost:6379", - RedisTLS: true, - }, - assertion: require.NoError, - }, - { - name: "unknown backend", - fields: fields{ - CacheBackend: "unknown", - }, - assertion: func(t require.TestingT, err error, msgs ...any) { - require.ErrorContains(t, err, "unsupported cache backend") - }, - }, - { - name: "sad redis tls", - fields: fields{ - CacheBackend: "redis://localhost:6379", - RedisCACert: "ca-cert.pem", - }, - assertion: func(t require.TestingT, err error, msgs ...any) { - require.ErrorContains(t, err, "you must provide Redis CA") - }, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - viper.Set(flag.ClearCacheFlag.ConfigName, tt.fields.ClearCache) - viper.Set(flag.CacheBackendFlag.ConfigName, tt.fields.CacheBackend) - viper.Set(flag.CacheTTLFlag.ConfigName, tt.fields.CacheTTL) - viper.Set(flag.RedisTLSFlag.ConfigName, tt.fields.RedisTLS) - viper.Set(flag.RedisCACertFlag.ConfigName, tt.fields.RedisCACert) - viper.Set(flag.RedisCertFlag.ConfigName, tt.fields.RedisCert) - viper.Set(flag.RedisKeyFlag.ConfigName, tt.fields.RedisKey) - - f := &flag.CacheFlagGroup{ - ClearCache: flag.ClearCacheFlag.Clone(), - CacheBackend: flag.CacheBackendFlag.Clone(), - CacheTTL: flag.CacheTTLFlag.Clone(), - RedisTLS: flag.RedisTLSFlag.Clone(), - RedisCACert: flag.RedisCACertFlag.Clone(), - RedisCert: flag.RedisCertFlag.Clone(), - RedisKey: flag.RedisKeyFlag.Clone(), - } - - got, err := f.ToOptions() - tt.assertion(t, err) - assert.Equalf(t, tt.want, got, "ToOptions()") - }) - } -} - -func TestCacheOptions_CacheBackendMasked(t *testing.T) { - type fields struct { - backend string - } - tests := []struct { - name string - fields fields - want string - }{ - { - name: "redis cache backend masked", - fields: fields{ - backend: "redis://root:password@localhost:6379", - }, - want: "redis://****@localhost:6379", - }, - { - name: "redis cache backend masked does nothing", - fields: fields{ - backend: "redis://localhost:6379", - }, - want: "redis://localhost:6379", - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - c := &flag.CacheOptions{ - CacheBackend: tt.fields.backend, - } - - assert.Equal(t, tt.want, c.CacheBackendMasked()) - }) - } -} diff --git a/pkg/flag/clean_flags.go b/pkg/flag/clean_flags.go new file mode 100644 index 000000000000..7a898c38ad63 --- /dev/null +++ b/pkg/flag/clean_flags.go @@ -0,0 +1,84 @@ +package flag + +var ( + CleanAll = Flag[bool]{ + Name: "all", + Shorthand: "a", + ConfigName: "clean.all", + Usage: "remove all caches", + } + CleanScanCache = Flag[bool]{ + Name: "scan-cache", + ConfigName: "clean.scan-cache", + Usage: "remove scan cache (container and VM image analysis results)", + } + CleanVulnerabilityDB = Flag[bool]{ + Name: "vuln-db", + ConfigName: "clean.vuln-db", + Usage: "remove vulnerability database", + } + CleanJavaDB = Flag[bool]{ + Name: "java-db", + ConfigName: "clean.java-db", + Usage: "remove Java database", + } + CleanChecksBundle = Flag[bool]{ + Name: "checks-bundle", + ConfigName: "clean.checks-bundle", 
+ Usage: "remove checks bundle", + } +) + +type CleanFlagGroup struct { + CleanAll *Flag[bool] + CleanVulnerabilityDB *Flag[bool] + CleanJavaDB *Flag[bool] + CleanChecksBundle *Flag[bool] + CleanScanCache *Flag[bool] +} + +type CleanOptions struct { + CleanAll bool + CleanVulnerabilityDB bool + CleanJavaDB bool + CleanChecksBundle bool + CleanScanCache bool +} + +func NewCleanFlagGroup() *CleanFlagGroup { + return &CleanFlagGroup{ + CleanAll: CleanAll.Clone(), + CleanVulnerabilityDB: CleanVulnerabilityDB.Clone(), + CleanJavaDB: CleanJavaDB.Clone(), + CleanChecksBundle: CleanChecksBundle.Clone(), + CleanScanCache: CleanScanCache.Clone(), + } +} + +func (fg *CleanFlagGroup) Name() string { + return "Clean" +} + +func (fg *CleanFlagGroup) Flags() []Flagger { + return []Flagger{ + fg.CleanAll, + fg.CleanVulnerabilityDB, + fg.CleanJavaDB, + fg.CleanChecksBundle, + fg.CleanScanCache, + } +} + +func (fg *CleanFlagGroup) ToOptions() (CleanOptions, error) { + if err := parseFlags(fg); err != nil { + return CleanOptions{}, err + } + + return CleanOptions{ + CleanAll: fg.CleanAll.Value(), + CleanVulnerabilityDB: fg.CleanVulnerabilityDB.Value(), + CleanJavaDB: fg.CleanJavaDB.Value(), + CleanChecksBundle: fg.CleanChecksBundle.Value(), + CleanScanCache: fg.CleanScanCache.Value(), + }, nil +} diff --git a/pkg/flag/cloud_flags.go b/pkg/flag/cloud_flags.go deleted file mode 100644 index fd96c206d496..000000000000 --- a/pkg/flag/cloud_flags.go +++ /dev/null @@ -1,55 +0,0 @@ -package flag - -import "time" - -var ( - cloudUpdateCacheFlag = Flag[bool]{ - Name: "update-cache", - ConfigName: "cloud.update-cache", - Usage: "Update the cache for the applicable cloud provider instead of using cached results.", - } - cloudMaxCacheAgeFlag = Flag[time.Duration]{ - Name: "max-cache-age", - ConfigName: "cloud.max-cache-age", - Default: time.Hour * 24, - Usage: "The maximum age of the cloud cache. 
Cached data will be required from the cloud provider if it is older than this.", - } -) - -type CloudFlagGroup struct { - UpdateCache *Flag[bool] - MaxCacheAge *Flag[time.Duration] -} - -type CloudOptions struct { - MaxCacheAge time.Duration - UpdateCache bool -} - -func NewCloudFlagGroup() *CloudFlagGroup { - return &CloudFlagGroup{ - UpdateCache: cloudUpdateCacheFlag.Clone(), - MaxCacheAge: cloudMaxCacheAgeFlag.Clone(), - } -} - -func (f *CloudFlagGroup) Name() string { - return "Cloud" -} - -func (f *CloudFlagGroup) Flags() []Flagger { - return []Flagger{ - f.UpdateCache, - f.MaxCacheAge, - } -} - -func (f *CloudFlagGroup) ToOptions() (CloudOptions, error) { - if err := parseFlags(f); err != nil { - return CloudOptions{}, err - } - return CloudOptions{ - UpdateCache: f.UpdateCache.Value(), - MaxCacheAge: f.MaxCacheAge.Value(), - }, nil -} diff --git a/pkg/flag/db_flags.go b/pkg/flag/db_flags.go index fd426ae9ccbb..37685b104204 100644 --- a/pkg/flag/db_flags.go +++ b/pkg/flag/db_flags.go @@ -12,10 +12,12 @@ import ( ) var ( + // Deprecated ResetFlag = Flag[bool]{ Name: "reset", ConfigName: "reset", Usage: "remove all caches and database", + Removed: `Use "trivy clean --all" instead.`, } DownloadDBOnlyFlag = Flag[bool]{ Name: "download-db-only", @@ -64,7 +66,7 @@ var ( Name: "light", ConfigName: "db.light", Usage: "deprecated", - Deprecated: true, + Deprecated: `This flag is ignored.`, } ) @@ -90,7 +92,6 @@ type DBOptions struct { NoProgress bool DBRepository name.Reference JavaDBRepository name.Reference - Light bool // deprecated } // NewDBFlagGroup returns a default DBFlagGroup @@ -135,7 +136,6 @@ func (f *DBFlagGroup) ToOptions() (DBOptions, error) { skipJavaDBUpdate := f.SkipJavaDBUpdate.Value() downloadDBOnly := f.DownloadDBOnly.Value() downloadJavaDBOnly := f.DownloadJavaDBOnly.Value() - light := f.Light.Value() if downloadDBOnly && skipDBUpdate { return DBOptions{}, xerrors.New("--skip-db-update and --download-db-only options can not be specified both") @@ -143,9 +143,6 @@ func (f *DBFlagGroup) ToOptions() (DBOptions, error) { if downloadJavaDBOnly && skipJavaDBUpdate { return DBOptions{}, xerrors.New("--skip-java-db-update and --download-java-db-only options can not be specified both") } - if light { - log.Warn("'--light' option is deprecated and will be removed. See also: https://github.com/aquasecurity/trivy/discussions/1649") - } var dbRepository, javaDBRepository name.Reference var err error @@ -179,7 +176,6 @@ func (f *DBFlagGroup) ToOptions() (DBOptions, error) { SkipDBUpdate: skipDBUpdate, DownloadJavaDBOnly: downloadJavaDBOnly, SkipJavaDBUpdate: skipJavaDBUpdate, - Light: light, NoProgress: f.NoProgress.Value(), DBRepository: dbRepository, JavaDBRepository: javaDBRepository, diff --git a/pkg/flag/db_flags_test.go b/pkg/flag/db_flags_test.go index fb6b31effaf4..2bbd5b00198c 100644 --- a/pkg/flag/db_flags_test.go +++ b/pkg/flag/db_flags_test.go @@ -43,23 +43,6 @@ func TestDBFlagGroup_ToOptions(t *testing.T) { }, assertion: require.NoError, }, - { - name: "light", - fields: fields{ - Light: true, - DBRepository: "ghcr.io/aquasecurity/trivy-db", - JavaDBRepository: "ghcr.io/aquasecurity/trivy-java-db", - }, - want: flag.DBOptions{ - Light: true, - DBRepository: name.Tag{}, // All fields are unexported - JavaDBRepository: name.Tag{}, // All fields are unexported - }, - wantLogs: []string{ - "'--light' option is deprecated and will be removed. 
See also: https://github.com/aquasecurity/trivy/discussions/1649", - }, - assertion: require.NoError, - }, { name: "sad", fields: fields{ @@ -88,7 +71,6 @@ func TestDBFlagGroup_ToOptions(t *testing.T) { viper.Set(flag.SkipDBUpdateFlag.ConfigName, tt.fields.SkipDBUpdate) viper.Set(flag.DownloadDBOnlyFlag.ConfigName, tt.fields.DownloadDBOnly) - viper.Set(flag.LightFlag.ConfigName, tt.fields.Light) viper.Set(flag.DBRepositoryFlag.ConfigName, tt.fields.DBRepository) viper.Set(flag.JavaDBRepositoryFlag.ConfigName, tt.fields.JavaDBRepository) @@ -96,7 +78,6 @@ func TestDBFlagGroup_ToOptions(t *testing.T) { f := &flag.DBFlagGroup{ DownloadDBOnly: flag.DownloadDBOnlyFlag.Clone(), SkipDBUpdate: flag.SkipDBUpdateFlag.Clone(), - Light: flag.LightFlag.Clone(), DBRepository: flag.DBRepositoryFlag.Clone(), JavaDBRepository: flag.JavaDBRepositoryFlag.Clone(), } diff --git a/pkg/flag/global_flags.go b/pkg/flag/global_flags.go index aa4851c657f0..ebd79bd5a06c 100644 --- a/pkg/flag/global_flags.go +++ b/pkg/flag/global_flags.go @@ -6,7 +6,8 @@ import ( "github.com/spf13/cobra" - "github.com/aquasecurity/trivy/pkg/utils/fsutils" + "github.com/aquasecurity/trivy/pkg/cache" + "github.com/aquasecurity/trivy/pkg/log" ) var ( @@ -55,7 +56,7 @@ var ( CacheDirFlag = Flag[string]{ Name: "cache-dir", ConfigName: "cache.dir", - Default: fsutils.CacheDir(), + Default: cache.DefaultDir(), Usage: "cache directory", Persistent: true, } @@ -144,6 +145,8 @@ func (f *GlobalFlagGroup) ToOptions() (GlobalOptions, error) { // Keep TRIVY_NON_SSL for backward compatibility insecure := f.Insecure.Value() || os.Getenv("TRIVY_NON_SSL") != "" + log.Debug("Cache dir", log.String("dir", f.CacheDir.Value())) + return GlobalOptions{ ConfigFile: f.ConfigFile.Value(), ShowVersion: f.ShowVersion.Value(), diff --git a/pkg/flag/kubernetes_flags.go b/pkg/flag/kubernetes_flags.go index 2683fa07b13a..6d0d64f4dc56 100644 --- a/pkg/flag/kubernetes_flags.go +++ b/pkg/flag/kubernetes_flags.go @@ -39,7 +39,7 @@ var ( NodeCollectorImageRef = Flag[string]{ Name: "node-collector-imageref", ConfigName: "kubernetes.node-collector.imageref", - Default: "ghcr.io/aquasecurity/node-collector:0.2.1", + Default: "ghcr.io/aquasecurity/node-collector:0.3.1", Usage: "indicate the image reference for the node-collector scan job", } ExcludeOwned = Flag[bool]{ diff --git a/pkg/flag/misconf_flags.go b/pkg/flag/misconf_flags.go index a7f929fc4590..fc7505fec393 100644 --- a/pkg/flag/misconf_flags.go +++ b/pkg/flag/misconf_flags.go @@ -15,10 +15,12 @@ import ( // config-policy: "custom-policy/policy" // policy-namespaces: "user" var ( + // Deprecated ResetChecksBundleFlag = Flag[bool]{ Name: "reset-checks-bundle", ConfigName: "misconfiguration.reset-checks-bundle", Usage: "remove checks bundle", + Removed: `Use "trivy clean --checks-bundle" instead`, Aliases: []Alias{ { Name: "reset-policy-bundle", diff --git a/pkg/flag/options.go b/pkg/flag/options.go index 69b3585226cc..33190fb76fbe 100644 --- a/pkg/flag/options.go +++ b/pkg/flag/options.go @@ -5,6 +5,7 @@ import ( "fmt" "io" "os" + "slices" "strings" "sync" "time" @@ -14,9 +15,9 @@ import ( "github.com/spf13/cobra" "github.com/spf13/pflag" "github.com/spf13/viper" - "golang.org/x/exp/slices" "golang.org/x/xerrors" + "github.com/aquasecurity/trivy/pkg/cache" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/log" @@ -59,7 +60,10 @@ type 
Flag[T FlagType] struct { Persistent bool // Deprecated represents if the flag is deprecated - Deprecated bool + Deprecated string + + // Removed represents if the flag is removed and no longer works + Removed string // Aliases represents aliases Aliases []Alias @@ -107,6 +111,14 @@ func (f *Flag[T]) Parse() error { return xerrors.Errorf(`invalid argument "%s" for "--%s" flag: must be one of %q`, value, f.Name, f.Values) } + if f.Deprecated != "" && f.isSet() { + log.Warnf(`"--%s" is deprecated. %s`, f.Name, f.Deprecated) + } + if f.Removed != "" && f.isSet() { + log.Errorf(`"--%s" was removed. %s`, f.Name, f.Removed) + return xerrors.Errorf(`removed flag ("--%s")`, f.Name) + } + f.value = value return nil } @@ -229,8 +241,8 @@ func (f *Flag[T]) Add(cmd *cobra.Command) { flags.Float64P(f.Name, f.Shorthand, v, f.Usage) } - if f.Deprecated { - flags.MarkHidden(f.Name) // nolint: gosec + if f.Deprecated != "" || f.Removed != "" { + _ = flags.MarkHidden(f.Name) } } @@ -301,7 +313,7 @@ type Flags struct { GlobalFlagGroup *GlobalFlagGroup AWSFlagGroup *AWSFlagGroup CacheFlagGroup *CacheFlagGroup - CloudFlagGroup *CloudFlagGroup + CleanFlagGroup *CleanFlagGroup DBFlagGroup *DBFlagGroup ImageFlagGroup *ImageFlagGroup K8sFlagGroup *K8sFlagGroup @@ -324,7 +336,7 @@ type Options struct { GlobalOptions AWSOptions CacheOptions - CloudOptions + CleanOptions DBOptions ImageOptions K8sOptions @@ -353,8 +365,10 @@ type Options struct { } // Align takes consistency of options -func (o *Options) Align() error { - o.enableSBOM() +func (o *Options) Align(f *Flags) error { + if f.ScanFlagGroup != nil && f.ScanFlagGroup.Scanners != nil { + o.enableSBOM() + } if o.Compliance.Spec.ID != "" { if viper.IsSet(ScannersFlag.ConfigName) { @@ -373,7 +387,7 @@ func (o *Options) Align() error { o.Scanners = scanners o.ImageConfigScanners = nil // TODO: define image-config-scanners in the spec - if o.Compliance.Spec.ID == types.ComplianceDockerCIS { + if o.Compliance.Spec.ID == types.ComplianceDockerCIS160 { o.Scanners = types.Scanners{types.VulnerabilityScanner} o.ImageConfigScanners = types.Scanners{ types.MisconfigScanner, @@ -435,6 +449,28 @@ func (o *Options) FilterOpts() result.FilterOption { } } +// CacheOpts returns options for scan cache +func (o *Options) CacheOpts() cache.Options { + return cache.Options{ + Backend: o.CacheBackend, + CacheDir: o.CacheDir, + RedisCACert: o.RedisCACert, + RedisCert: o.RedisCert, + RedisKey: o.RedisKey, + RedisTLS: o.RedisTLS, + TTL: o.CacheTTL, + } +} + +// RemoteCacheOpts returns options for remote scan cache +func (o *Options) RemoteCacheOpts() cache.RemoteOptions { + return cache.RemoteOptions{ + ServerAddr: o.ServerAddr, + CustomHeaders: o.CustomHeaders, + Insecure: o.Insecure, + } +} + // SetOutputWriter sets an output writer. 
func (o *Options) SetOutputWriter(w io.Writer) { o.outputWriter = w @@ -497,6 +533,9 @@ func (f *Flags) groups() []FlagGroup { if f.CacheFlagGroup != nil { groups = append(groups, f.CacheFlagGroup) } + if f.CleanFlagGroup != nil { + groups = append(groups, f.CleanFlagGroup) + } if f.DBFlagGroup != nil { groups = append(groups, f.DBFlagGroup) } @@ -527,9 +566,6 @@ func (f *Flags) groups() []FlagGroup { if f.RegoFlagGroup != nil { groups = append(groups, f.RegoFlagGroup) } - if f.CloudFlagGroup != nil { - groups = append(groups, f.CloudFlagGroup) - } if f.AWSFlagGroup != nil { groups = append(groups, f.AWSFlagGroup) } @@ -619,17 +655,17 @@ func (f *Flags) ToOptions(args []string) (Options, error) { } } - if f.CloudFlagGroup != nil { - opts.CloudOptions, err = f.CloudFlagGroup.ToOptions() + if f.CacheFlagGroup != nil { + opts.CacheOptions, err = f.CacheFlagGroup.ToOptions() if err != nil { - return Options{}, xerrors.Errorf("cloud flag error: %w", err) + return Options{}, xerrors.Errorf("cache flag error: %w", err) } } - if f.CacheFlagGroup != nil { - opts.CacheOptions, err = f.CacheFlagGroup.ToOptions() + if f.CleanFlagGroup != nil { + opts.CleanOptions, err = f.CleanFlagGroup.ToOptions() if err != nil { - return Options{}, xerrors.Errorf("cache flag error: %w", err) + return Options{}, xerrors.Errorf("clean flag error: %w", err) } } @@ -738,7 +774,7 @@ func (f *Flags) ToOptions(args []string) (Options, error) { } } - if err := opts.Align(); err != nil { + if err := opts.Align(f); err != nil { return Options{}, xerrors.Errorf("align options error: %w", err) } diff --git a/pkg/flag/report_flags.go b/pkg/flag/report_flags.go index d359f1f1b5b6..ce833cc1b13e 100644 --- a/pkg/flag/report_flags.go +++ b/pkg/flag/report_flags.go @@ -1,11 +1,11 @@ package flag import ( + "slices" "strings" "github.com/mattn/go-shellwords" "github.com/samber/lo" - "golang.org/x/exp/slices" "golang.org/x/xerrors" dbTypes "github.com/aquasecurity/trivy-db/pkg/types" diff --git a/pkg/flag/sbom_flags.go b/pkg/flag/sbom_flags.go index 388911abd83e..9af8414ed546 100644 --- a/pkg/flag/sbom_flags.go +++ b/pkg/flag/sbom_flags.go @@ -1,23 +1,17 @@ package flag -import ( - "golang.org/x/xerrors" - - "github.com/aquasecurity/trivy/pkg/log" -) - var ( ArtifactTypeFlag = Flag[string]{ Name: "artifact-type", ConfigName: "sbom.artifact-type", Usage: "deprecated", - Deprecated: true, + Removed: `Use 'trivy image' or other subcommands. See also https://github.com/aquasecurity/trivy/discussions/2407`, } SBOMFormatFlag = Flag[string]{ Name: "sbom-format", ConfigName: "sbom.format", Usage: "deprecated", - Deprecated: true, + Removed: `Use 'trivy image' or other subcommands. See also https://github.com/aquasecurity/trivy/discussions/2407`, } ) @@ -26,8 +20,7 @@ type SBOMFlagGroup struct { SBOMFormat *Flag[string] // deprecated } -type SBOMOptions struct { -} +type SBOMOptions struct{} func NewSBOMFlagGroup() *SBOMFlagGroup { return &SBOMFlagGroup{ @@ -52,14 +45,5 @@ func (f *SBOMFlagGroup) ToOptions() (SBOMOptions, error) { return SBOMOptions{}, err } - artifactType := f.ArtifactType.Value() - sbomFormat := f.SBOMFormat.Value() - - if artifactType != "" || sbomFormat != "" { - log.Error("'trivy sbom' is now for scanning SBOM. 
" + - "See https://github.com/aquasecurity/trivy/discussions/2407 for the detail") - return SBOMOptions{}, xerrors.New("'--artifact-type' and '--sbom-format' are no longer available") - } - return SBOMOptions{}, nil } diff --git a/pkg/flag/scan_flags.go b/pkg/flag/scan_flags.go index 102e16e2fdd4..07f14a08551a 100644 --- a/pkg/flag/scan_flags.go +++ b/pkg/flag/scan_flags.go @@ -73,7 +73,7 @@ var ( ConfigName: "scan.slow", Default: false, Usage: "scan over time with lower CPU and memory utilization", - Deprecated: true, + Deprecated: `Use "--parallel 1" instead.`, } ParallelFlag = Flag[int]{ Name: "parallel", diff --git a/pkg/iac/adapters/cloudformation/aws/ec2/security_group.go b/pkg/iac/adapters/cloudformation/aws/ec2/security_group.go index 6de47f302682..6770062affb2 100644 --- a/pkg/iac/adapters/cloudformation/aws/ec2/security_group.go +++ b/pkg/iac/adapters/cloudformation/aws/ec2/security_group.go @@ -1,7 +1,7 @@ package ec2 import ( - "golang.org/x/exp/maps" + "github.com/samber/lo" "github.com/aquasecurity/trivy/pkg/iac/providers/aws/ec2" "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" @@ -43,7 +43,7 @@ func getSecurityGroups(ctx parser.FileContext) []ec2.SecurityGroup { } if len(mGroups) > 0 { - return maps.Values(mGroups) + return lo.Values(mGroups) } return nil } diff --git a/pkg/iac/adapters/terraform/google/iam/adapt.go b/pkg/iac/adapters/terraform/google/iam/adapt.go index e63f9f272d7d..1dcc8a4cec68 100644 --- a/pkg/iac/adapters/terraform/google/iam/adapt.go +++ b/pkg/iac/adapters/terraform/google/iam/adapt.go @@ -1,7 +1,7 @@ package iam import ( - "golang.org/x/exp/maps" + "github.com/samber/lo" "github.com/aquasecurity/trivy/pkg/iac/providers/google/iam" "github.com/aquasecurity/trivy/pkg/iac/terraform" @@ -36,9 +36,9 @@ func (a *adapter) Adapt() iam.IAM { func (a *adapter) buildIAMOutput() iam.IAM { return iam.IAM{ - Organizations: fromPtrSlice(maps.Values(a.orgs)), - Folders: fromPtrSlice(maps.Values(a.folders)), - Projects: fromPtrSlice(maps.Values(a.projects)), + Organizations: fromPtrSlice(lo.Values(a.orgs)), + Folders: fromPtrSlice(lo.Values(a.folders)), + Projects: fromPtrSlice(lo.Values(a.projects)), WorkloadIdentityPoolProviders: a.workloadIdentityPoolProviders, } } diff --git a/pkg/iac/ignore/parse.go b/pkg/iac/ignore/parse.go index 889848907548..8a7a940c6b5c 100644 --- a/pkg/iac/ignore/parse.go +++ b/pkg/iac/ignore/parse.go @@ -19,11 +19,11 @@ type RuleSectionParser interface { } // Parse parses the configuration file and returns the Rules -func Parse(src, path string, parsers ...RuleSectionParser) Rules { +func Parse(src, path, sourcePrefix string, parsers ...RuleSectionParser) Rules { var rules Rules for i, line := range strings.Split(src, "\n") { line = strings.TrimSpace(line) - rng := types.NewRange(path, i+1, i+1, "", nil) + rng := types.NewRange(path, i+1, i+1, sourcePrefix, nil) lineIgnores := parseLine(line, rng, parsers) for _, lineIgnore := range lineIgnores { rules = append(rules, lineIgnore) diff --git a/pkg/iac/ignore/rule_test.go b/pkg/iac/ignore/rule_test.go index 7cd4d382a410..619d251eb750 100644 --- a/pkg/iac/ignore/rule_test.go +++ b/pkg/iac/ignore/rule_test.go @@ -239,7 +239,7 @@ test #trivy:ignore:rule-4 for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - rules := ignore.Parse(tt.src, filename) + rules := ignore.Parse(tt.src, "", filename) got := rules.Ignore(tt.args.metadata, tt.args.ids, nil) assert.Equal(t, tt.shouldIgnore, got) }) @@ -329,7 +329,7 @@ 
func TestRules_IgnoreWithCustomIgnorer(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - rules := ignore.Parse(tt.src, filename, tt.parser) + rules := ignore.Parse(tt.src, filename, "", tt.parser) got := rules.Ignore(tt.args.metadata, tt.args.ids, tt.args.ignorers) assert.Equal(t, tt.shouldIgnore, got) }) diff --git a/pkg/iac/providers/provider.go b/pkg/iac/providers/provider.go index cef13ee8f205..46dbf19ec43c 100755 --- a/pkg/iac/providers/provider.go +++ b/pkg/iac/providers/provider.go @@ -26,6 +26,13 @@ const ( CloudStackProvider Provider = "cloudstack" ) +func AllProviders() []Provider { + return []Provider{ + AWSProvider, AzureProvider, DigitalOceanProvider, GitHubProvider, GoogleProvider, + KubernetesProvider, OracleProvider, OpenStackProvider, NifcloudProvider, CloudStackProvider, + } +} + func RuleProviderToString(provider Provider) string { return strings.ToUpper(string(provider)) } diff --git a/pkg/iac/rego/metadata.go b/pkg/iac/rego/metadata.go index dd2c1f104fcf..907b8450bdcb 100644 --- a/pkg/iac/rego/metadata.go +++ b/pkg/iac/rego/metadata.go @@ -90,15 +90,7 @@ func (sm *StaticMetadata) Update(meta map[string]any) error { if raw, ok := meta["url"]; ok { sm.References = append(sm.References, fmt.Sprintf("%s", raw)) } - if raw, ok := meta["frameworks"]; ok { - frameworks, ok := raw.(map[string][]string) - if !ok { - return fmt.Errorf("failed to parse framework metadata: not an object") - } - for fw, sections := range frameworks { - sm.Frameworks[framework.Framework(fw)] = sections - } - } + if raw, ok := meta["related_resources"]; ok { switch relatedResources := raw.(type) { case []map[string]any: @@ -112,6 +104,9 @@ func (sm *StaticMetadata) Update(meta map[string]any) error { } } + if err := sm.updateFrameworks(meta); err != nil { + return fmt.Errorf("failed to update frameworks: %w", err) + } sm.updateAliases(meta) var err error @@ -126,6 +121,28 @@ func (sm *StaticMetadata) Update(meta map[string]any) error { return nil } +func (sm *StaticMetadata) updateFrameworks(meta map[string]any) error { + if raw, ok := meta["frameworks"]; ok { + frameworks, ok := raw.(map[string]any) + if !ok { + return fmt.Errorf("frameworks metadata is not an object, got %T", raw) + } + for fw, rawIDs := range frameworks { + ids, ok := rawIDs.([]any) + if !ok { + return fmt.Errorf("framework ids is not an array, got %T", rawIDs) + } + fr := framework.Framework(fw) + for _, id := range ids { + if str, ok := id.(string); ok { + sm.Frameworks[fr] = append(sm.Frameworks[fr], str) + } + } + } + } + return nil +} + func (sm *StaticMetadata) updateAliases(meta map[string]any) { if raw, ok := meta["aliases"]; ok { if aliases, ok := raw.([]any); ok { @@ -172,8 +189,15 @@ func NewEngineMetadata(schema string, meta map[string]any) (*scan.EngineMetadata if val, ok := sMap["bad_examples"].(string); ok { em.BadExamples = []string{val} } - if val, ok := sMap["links"].(string); ok { - em.Links = []string{val} + switch links := sMap["links"].(type) { + case string: + em.Links = []string{links} + case []any: + for _, v := range links { + if str, ok := v.(string); ok { + em.Links = append(em.Links, str) + } + } } if val, ok := sMap["remediation_markdown"].(string); ok { em.RemediationMarkdown = val diff --git a/pkg/iac/rego/metadata_test.go b/pkg/iac/rego/metadata_test.go index 4ef5a7173062..6b4bb9773a92 100644 --- a/pkg/iac/rego/metadata_test.go +++ b/pkg/iac/rego/metadata_test.go @@ -46,8 +46,8 @@ func Test_UpdateStaticMetadata(t *testing.T) { "severity": "s_n", "library": true, 
"url": "r_n", - "frameworks": map[string][]string{ - "all": {"aa"}, + "frameworks": map[string]any{ + "all": []any{"aa"}, }, }, )) @@ -137,7 +137,7 @@ func Test_UpdateStaticMetadata(t *testing.T) { }) } -func Test_getEngineMetadata(t *testing.T) { +func Test_NewEngineMetadata(t *testing.T) { inputSchema := map[string]any{ "terraform": map[string]any{ "good_examples": `resource "aws_cloudtrail" "good_example" { @@ -153,8 +153,11 @@ func Test_getEngineMetadata(t *testing.T) { } } }`, + + "links": "https://avd.aquasec.com/avd/183", }, - "cloud_formation": map[string]any{"good_examples": `--- + "cloud_formation": map[string]any{ + "good_examples": `--- Resources: GoodExample: Type: AWS::CloudTrail::Trail @@ -164,15 +167,19 @@ Resources: S3BucketName: "CloudtrailBucket" S3KeyPrefix: "/trailing" TrailName: "Cloudtrail"`, - }} + "links": []any{"https://avd.aquasec.com/avd/183"}, + }, + } var testCases = []struct { schema string - want string + want *scan.EngineMetadata }{ { schema: "terraform", - want: `resource "aws_cloudtrail" "good_example" { + want: &scan.EngineMetadata{ + GoodExamples: []string{ + `resource "aws_cloudtrail" "good_example" { is_multi_region_trail = true event_selector { @@ -185,9 +192,15 @@ Resources: } } }`, + }, + Links: []string{"https://avd.aquasec.com/avd/183"}, + }, }, - {schema: "cloud_formation", - want: `--- + { + schema: "cloud_formation", + want: &scan.EngineMetadata{ + GoodExamples: []string{ + `--- Resources: GoodExample: Type: AWS::CloudTrail::Trail @@ -196,14 +209,18 @@ Resources: IsMultiRegionTrail: true S3BucketName: "CloudtrailBucket" S3KeyPrefix: "/trailing" - TrailName: "Cloudtrail"`}, + TrailName: "Cloudtrail"`, + }, + Links: []string{"https://avd.aquasec.com/avd/183"}, + }, + }, } for _, tc := range testCases { t.Run(tc.schema, func(t *testing.T) { em, err := NewEngineMetadata(tc.schema, inputSchema) require.NoError(t, err) - assert.Equal(t, tc.want, em.GoodExamples[0]) + assert.Equal(t, tc.want, em) }) } } diff --git a/pkg/iac/rego/scanner.go b/pkg/iac/rego/scanner.go index 723f4c02181b..2e0516761a02 100644 --- a/pkg/iac/rego/scanner.go +++ b/pkg/iac/rego/scanner.go @@ -17,12 +17,30 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/debug" "github.com/aquasecurity/trivy/pkg/iac/framework" + "github.com/aquasecurity/trivy/pkg/iac/providers" "github.com/aquasecurity/trivy/pkg/iac/rego/schemas" "github.com/aquasecurity/trivy/pkg/iac/scan" "github.com/aquasecurity/trivy/pkg/iac/scanners/options" "github.com/aquasecurity/trivy/pkg/iac/types" ) +var checkTypesWithSubtype = map[types.Source]struct{}{ + types.SourceCloud: {}, + types.SourceDefsec: {}, + types.SourceKubernetes: {}, +} + +var supportedProviders = makeSupportedProviders() + +func makeSupportedProviders() map[string]struct{} { + m := make(map[string]struct{}) + for _, p := range providers.AllProviders() { + m[string(p)] = struct{}{} + } + m["kind"] = struct{}{} // kubernetes + return m +} + var _ options.ConfigurableScanner = (*Scanner)(nil) type Scanner struct { @@ -295,12 +313,8 @@ func (s *Scanner) ScanInput(ctx context.Context, inputs ...Input) (scan.Results, } func isPolicyWithSubtype(sourceType types.Source) bool { - for _, s := range []types.Source{types.SourceCloud, types.SourceDefsec, types.SourceKubernetes} { - if sourceType == s { - return true - } - } - return false + _, exists := checkTypesWithSubtype[sourceType] + return exists } func checkSubtype(ii map[string]any, provider string, subTypes []SubType) bool { @@ -311,10 +325,11 
@@ func checkSubtype(ii map[string]any, provider string, subTypes []SubType) bool { for _, st := range subTypes { switch services := ii[provider].(type) { case map[string]any: - for service := range services { - if (service == st.Service) && (st.Provider == provider) { - return true - } + if st.Provider != provider { + continue + } + if _, exists := services[st.Service]; exists { + return true } case string: // k8s - logic can be improved if strings.EqualFold(services, st.Group) || @@ -331,8 +346,7 @@ func isPolicyApplicable(staticMetadata *StaticMetadata, inputs ...Input) bool { for _, input := range inputs { if ii, ok := input.Contents.(map[string]any); ok { for provider := range ii { - // TODO(simar): Add other providers - if !strings.Contains(strings.Join([]string{"kind", "aws", "azure"}, ","), provider) { + if _, exists := supportedProviders[provider]; !exists { continue } diff --git a/pkg/iac/rules/register.go b/pkg/iac/rules/register.go index ab847de2e1dc..e07268255417 100755 --- a/pkg/iac/rules/register.go +++ b/pkg/iac/rules/register.go @@ -5,7 +5,7 @@ import ( "gopkg.in/yaml.v3" - "github.com/aquasecurity/trivy-checks/specs" + "github.com/aquasecurity/trivy-checks/pkg/specs" "github.com/aquasecurity/trivy/pkg/iac/framework" "github.com/aquasecurity/trivy/pkg/iac/scan" dftypes "github.com/aquasecurity/trivy/pkg/iac/types" diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/parse_number_test.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_number_test.go index f0f7532c15a4..d2acf3b0a8d9 100644 --- a/pkg/iac/scanners/azure/arm/parser/armjson/parse_number_test.go +++ b/pkg/iac/scanners/azure/arm/parser/armjson/parse_number_test.go @@ -24,7 +24,7 @@ func Test_Number_IntToFloat(t *testing.T) { metadata := types.NewTestMetadata() err := Unmarshal(example, &output, &metadata) require.NoError(t, err) - assert.Equal(t, 123.0, output) + assert.InEpsilon(t, 123.0, output, 0.0001) } func Test_Number_FloatToFloat(t *testing.T) { @@ -33,7 +33,7 @@ func Test_Number_FloatToFloat(t *testing.T) { metadata := types.NewTestMetadata() err := Unmarshal(example, &output, &metadata) require.NoError(t, err) - assert.Equal(t, 123.456, output) + assert.InEpsilon(t, 123.456, output, 0.0001) } func Test_Number_FloatToInt(t *testing.T) { @@ -42,7 +42,7 @@ func Test_Number_FloatToInt(t *testing.T) { metadata := types.NewTestMetadata() err := Unmarshal(example, &output, &metadata) require.NoError(t, err) - assert.Equal(t, 123, output) + assert.InEpsilon(t, 123, output, 0.0001) } func Test_Number_FloatWithExponent(t *testing.T) { @@ -70,7 +70,7 @@ func Test_Number_FloatWithExponent(t *testing.T) { metadata := types.NewTestMetadata() err := Unmarshal(example, &output, &metadata) require.NoError(t, err) - assert.Equal(t, test.out, output) + assert.InEpsilon(t, test.out, output, 0.0001) }) } diff --git a/pkg/iac/scanners/azure/arm/parser/armjson/parse_object_test.go b/pkg/iac/scanners/azure/arm/parser/armjson/parse_object_test.go index 2971ae73de21..31ea685cef23 100644 --- a/pkg/iac/scanners/azure/arm/parser/armjson/parse_object_test.go +++ b/pkg/iac/scanners/azure/arm/parser/armjson/parse_object_test.go @@ -21,7 +21,7 @@ func Test_Object(t *testing.T) { metadata := types.NewTestMetadata() require.NoError(t, Unmarshal(example, &target, &metadata)) assert.Equal(t, "testing", target.Name) - assert.Equal(t, 3.14, target.Balance) + assert.InEpsilon(t, 3.14, target.Balance, 0.0001) } func Test_ObjectWithPointers(t *testing.T) { @@ -36,7 +36,7 @@ func 
Test_ObjectWithPointers(t *testing.T) { metadata := types.NewTestMetadata() require.NoError(t, Unmarshal(example, &target, &metadata)) assert.Equal(t, "testing", *target.Name) - assert.Equal(t, 3.14, *target.Balance) + assert.InEpsilon(t, 3.14, *target.Balance, 0.0001) } type nestedParent struct { diff --git a/pkg/iac/scanners/azure/value.go b/pkg/iac/scanners/azure/value.go index c511517b0a57..0adc02b84268 100644 --- a/pkg/iac/scanners/azure/value.go +++ b/pkg/iac/scanners/azure/value.go @@ -1,11 +1,10 @@ package azure import ( + "slices" "strings" "time" - "golang.org/x/exp/slices" - armjson2 "github.com/aquasecurity/trivy/pkg/iac/scanners/azure/arm/parser/armjson" "github.com/aquasecurity/trivy/pkg/iac/types" ) @@ -249,7 +248,13 @@ func (v Value) AsBoolValue(defaultValue bool, metadata types.Metadata) types.Boo v.Resolve() if v.Kind == KindString { possibleValue := strings.ToLower(v.rLit.(string)) - if slices.Contains([]string{"true", "1", "yes", "on", "enabled"}, possibleValue) { + if slices.Contains([]string{ + "true", + "1", + "yes", + "on", + "enabled", + }, possibleValue) { return types.Bool(true, metadata) } } diff --git a/pkg/iac/scanners/cloudformation/parser/parser.go b/pkg/iac/scanners/cloudformation/parser/parser.go index 65bf1440432d..5aa760e19882 100644 --- a/pkg/iac/scanners/cloudformation/parser/parser.go +++ b/pkg/iac/scanners/cloudformation/parser/parser.go @@ -171,7 +171,7 @@ func (p *Parser) ParseFile(ctx context.Context, fsys fs.FS, path string) (fctx * if err := yaml.Unmarshal(content, fctx); err != nil { return nil, NewErrInvalidContent(path, err) } - fctx.Ignores = ignore.Parse(string(content), path) + fctx.Ignores = ignore.Parse(string(content), path, "") case JsonSourceFormat: if err := jfather.Unmarshal(content, fctx); err != nil { return nil, NewErrInvalidContent(path, err) diff --git a/pkg/iac/scanners/cloudformation/parser/parser_test.go b/pkg/iac/scanners/cloudformation/parser/parser_test.go index 396e12f1bf57..0d37440e2e1a 100644 --- a/pkg/iac/scanners/cloudformation/parser/parser_test.go +++ b/pkg/iac/scanners/cloudformation/parser/parser_test.go @@ -410,7 +410,7 @@ func TestJsonWithNumbers(t *testing.T) { file := files[0] assert.Equal(t, 1, file.Parameters["SomeIntParam"].Default()) - assert.Equal(t, 1.1, file.Parameters["SomeFloatParam"].Default()) + assert.InEpsilon(t, 1.1, file.Parameters["SomeFloatParam"].Default(), 0.0001) res := file.GetResourcesByType("Test::Resource") assert.NotNil(t, res) diff --git a/pkg/iac/scanners/json/parser/parser_test.go b/pkg/iac/scanners/json/parser/parser_test.go index ed7b87492d96..a47868fad0ed 100644 --- a/pkg/iac/scanners/json/parser/parser_test.go +++ b/pkg/iac/scanners/json/parser/parser_test.go @@ -34,7 +34,7 @@ func Test_Parser(t *testing.T) { y, ok := yRaw.(float64) require.True(t, ok) - assert.Equal(t, 123.0, y) + assert.InEpsilon(t, 123.0, y, 0.0001) zRaw, ok := xMsi["z"] require.True(t, ok) diff --git a/pkg/iac/scanners/terraform/ignore_test.go b/pkg/iac/scanners/terraform/ignore_test.go index 3b0a83428a50..6183469d5296 100644 --- a/pkg/iac/scanners/terraform/ignore_test.go +++ b/pkg/iac/scanners/terraform/ignore_test.go @@ -1,15 +1,19 @@ package terraform import ( + "context" "fmt" "strings" "testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/aquasecurity/trivy/internal/testutil" "github.com/aquasecurity/trivy/pkg/iac/providers" "github.com/aquasecurity/trivy/pkg/iac/rules" 
"github.com/aquasecurity/trivy/pkg/iac/scan" + "github.com/aquasecurity/trivy/pkg/iac/scanners/options" "github.com/aquasecurity/trivy/pkg/iac/severity" "github.com/aquasecurity/trivy/pkg/iac/terraform" ) @@ -748,3 +752,56 @@ func Test_IgnoreInlineByAVDID(t *testing.T) { } } } + +func TestIgnoreRemoteTerraformResource(t *testing.T) { + + fsys := testutil.CreateFS(t, map[string]string{ + "main.tf": `module "bucket" { + source = "git::https://github.com/test/bucket" +}`, + ".terraform/modules/modules.json": `{ + "Modules": [ + { "Key": "", "Source": "", "Dir": "." }, + { + "Key": "bucket", + "Source": "git::https://github.com/test/bucket", + "Dir": ".terraform/modules/bucket" + } + ] +} +`, + ".terraform/modules/bucket/main.tf": ` +# trivy:ignore:test-0001 +resource "aws_s3_bucket" "test" { + bucket = "" +} +`, + }) + + check := `# METADATA +# title: Test +# custom: +# id: test-0001 +# avdid: test-0001 + +package user.test0001 + +deny[res] { + bucket := input.aws.s3.buckets[_] + bucket.name.value == "" + res := result.new("Empty bucket name!", bucket) +}` + + localScanner := New( + options.ScannerWithEmbeddedPolicies(false), + options.ScannerWithEmbeddedLibraries(true), + options.ScannerWithRegoOnly(true), + options.ScannerWithPolicyNamespaces("user"), + options.ScannerWithPolicyReader(strings.NewReader(check)), + ScannerWithDownloadsAllowed(false), + ScannerWithSkipCachedModules(true), + ) + results, err := localScanner.ScanFS(context.TODO(), fsys, ".") + require.NoError(t, err) + assert.Empty(t, results.GetFailed()) +} diff --git a/pkg/iac/scanners/terraform/parser/evaluator.go b/pkg/iac/scanners/terraform/parser/evaluator.go index b93104f442cc..bf3e4f4ffc13 100644 --- a/pkg/iac/scanners/terraform/parser/evaluator.go +++ b/pkg/iac/scanners/terraform/parser/evaluator.go @@ -5,13 +5,13 @@ import ( "errors" "io/fs" "reflect" + "slices" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/ext/typeexpr" "github.com/samber/lo" "github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty/convert" - "golang.org/x/exp/slices" "github.com/aquasecurity/trivy/pkg/iac/debug" "github.com/aquasecurity/trivy/pkg/iac/ignore" @@ -273,7 +273,12 @@ func (e *evaluator) expandDynamicBlock(b *terraform.Block) { } func isBlockSupportsForEachMetaArgument(block *terraform.Block) bool { - return slices.Contains([]string{"module", "resource", "data", "dynamic"}, block.Type()) + return slices.Contains([]string{ + "module", + "resource", + "data", + "dynamic", + }, block.Type()) } func (e *evaluator) expandBlockForEaches(blocks terraform.Blocks, isDynamic bool) terraform.Blocks { @@ -357,7 +362,11 @@ func (e *evaluator) expandBlockForEaches(blocks terraform.Blocks, isDynamic bool } func isBlockSupportsCountMetaArgument(block *terraform.Block) bool { - return slices.Contains([]string{"module", "resource", "data"}, block.Type()) + return slices.Contains([]string{ + "module", + "resource", + "data", + }, block.Type()) } func (e *evaluator) expandBlockCounts(blocks terraform.Blocks) terraform.Blocks { diff --git a/pkg/iac/scanners/terraform/parser/modules_test.go b/pkg/iac/scanners/terraform/parser/modules_test.go index 404a1effcfb1..29dcaa7c39af 100644 --- a/pkg/iac/scanners/terraform/parser/modules_test.go +++ b/pkg/iac/scanners/terraform/parser/modules_test.go @@ -8,7 +8,6 @@ import ( "github.com/samber/lo" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - 
"golang.org/x/exp/maps" "github.com/aquasecurity/trivy/internal/testutil" ) @@ -43,7 +42,10 @@ module "this" { source = "../modules/s3" }`, }, - expected: []string{"code", "code/example"}, + expected: []string{ + "code", + "code/example", + }, }, { name: "without module block", @@ -51,7 +53,10 @@ module "this" { "code/infra1/main.tf": `resource "test" "this" {}`, "code/infra2/main.tf": `resource "test" "this" {}`, }, - expected: []string{"code/infra1", "code/infra2"}, + expected: []string{ + "code/infra1", + "code/infra2", + }, }, } @@ -60,7 +65,7 @@ module "this" { fsys := testutil.CreateFS(t, tt.files) parser := New(fsys, "", OptionStopOnHCLError(true)) - modules := lo.Map(maps.Keys(tt.files), func(p string, _ int) string { + modules := lo.Map(lo.Keys(tt.files), func(p string, _ int) string { return path.Dir(p) }) diff --git a/pkg/iac/scanners/terraform/parser/parser.go b/pkg/iac/scanners/terraform/parser/parser.go index 049be0e02c10..aec5ce0c31d7 100644 --- a/pkg/iac/scanners/terraform/parser/parser.go +++ b/pkg/iac/scanners/terraform/parser/parser.go @@ -301,6 +301,7 @@ func (p *Parser) readBlocks(files []sourceFile) (terraform.Blocks, ignore.Rules, fileIgnores := ignore.Parse( string(file.file.Bytes), file.path, + p.moduleSource, &ignore.StringMatchParser{ SectionKey: "ws", }, diff --git a/pkg/iac/types/compliance.go b/pkg/iac/types/compliance.go index 5928537e132a..42636fffe544 100644 --- a/pkg/iac/types/compliance.go +++ b/pkg/iac/types/compliance.go @@ -7,6 +7,9 @@ type ControlStatus string type SpecCheck struct { ID string `yaml:"id"` } +type Command struct { + ID string `yaml:"id"` +} // ComplianceSpec represent the compliance specification type ComplianceSpec struct { @@ -28,6 +31,7 @@ type Control struct { Name string `yaml:"name"` Description string `yaml:"description,omitempty"` Checks []SpecCheck `yaml:"checks"` + Commands []Command `yaml:"commands"` Severity Severity `yaml:"severity"` DefaultStatus ControlStatus `yaml:"defaultStatus,omitempty"` } diff --git a/pkg/javadb/client.go b/pkg/javadb/client.go index 408456e16500..ca055a0c333f 100644 --- a/pkg/javadb/client.go +++ b/pkg/javadb/client.go @@ -79,7 +79,7 @@ func (u *Updater) Update() error { return xerrors.Errorf("Java DB metadata update error: %w", err) } log.Info("The Java DB is cached for 3 days. 
If you want to update the database more frequently, " + - "the '--reset' flag clears the DB cache.") + `"trivy clean --java-db" command clears the DB cache.`) } return nil @@ -88,7 +88,7 @@ func (u *Updater) Update() error { func Init(cacheDir string, javaDBRepository name.Reference, skip, quiet bool, registryOption ftypes.RegistryOptions) { updater = &Updater{ repo: javaDBRepository, - dbDir: filepath.Join(cacheDir, "java-db"), + dbDir: dbDir(cacheDir), skip: skip, quiet: quiet, registryOption: registryOption, @@ -107,6 +107,14 @@ func Update() error { return err } +func Clear(ctx context.Context, cacheDir string) error { + return os.RemoveAll(dbDir(cacheDir)) +} + +func dbDir(cacheDir string) string { + return filepath.Join(cacheDir, "java-db") +} + type DB struct { driver db.DB } diff --git a/pkg/k8s/commands/cluster.go b/pkg/k8s/commands/cluster.go index 6b169771f1ca..179a089523df 100644 --- a/pkg/k8s/commands/cluster.go +++ b/pkg/k8s/commands/cluster.go @@ -5,9 +5,11 @@ import ( "golang.org/x/xerrors" + trivy_checks "github.com/aquasecurity/trivy-checks" k8sArtifacts "github.com/aquasecurity/trivy-kubernetes/pkg/artifacts" "github.com/aquasecurity/trivy-kubernetes/pkg/k8s" "github.com/aquasecurity/trivy-kubernetes/pkg/trivyk8s" + "github.com/aquasecurity/trivy/pkg/commands/operation" "github.com/aquasecurity/trivy/pkg/flag" "github.com/aquasecurity/trivy/pkg/log" "github.com/aquasecurity/trivy/pkg/types" @@ -35,11 +37,7 @@ func clusterRun(ctx context.Context, opts flag.Options, cluster k8s.Cluster) err trivyk8s.WithExcludeOwned(opts.ExcludeOwned), } if opts.Scanners.AnyEnabled(types.MisconfigScanner) && !opts.DisableNodeCollector { - artifacts, err = trivyk8s.New(cluster, k8sOpts...).ListArtifactAndNodeInfo(ctx, - trivyk8s.WithScanJobNamespace(opts.NodeCollectorNamespace), - trivyk8s.WithIgnoreLabels(opts.ExcludeNodes), - trivyk8s.WithScanJobImageRef(opts.NodeCollectorImageRef), - trivyk8s.WithTolerations(opts.Tolerations)) + artifacts, err = trivyk8s.New(cluster, k8sOpts...).ListArtifactAndNodeInfo(ctx, nodeCollectorOptions(opts)...) if err != nil { return xerrors.Errorf("get k8s artifacts with node info error: %w", err) } @@ -60,3 +58,48 @@ func clusterRun(ctx context.Context, opts flag.Options, cluster k8s.Cluster) err runner := newRunner(opts, cluster.GetCurrentContext()) return runner.run(ctx, artifacts) } + +func nodeCollectorOptions(opts flag.Options) []trivyk8s.NodeCollectorOption { + nodeCollectorOptions := []trivyk8s.NodeCollectorOption{ + trivyk8s.WithScanJobNamespace(opts.NodeCollectorNamespace), + trivyk8s.WithIgnoreLabels(opts.ExcludeNodes), + trivyk8s.WithScanJobImageRef(opts.NodeCollectorImageRef), + trivyk8s.WithTolerations(opts.Tolerations)} + + contentPath, err := operation.InitBuiltinPolicies(context.Background(), + opts.CacheDir, + opts.Quiet, + opts.SkipCheckUpdate, + opts.MisconfOptions.ChecksBundleRepository, + opts.RegistryOpts()) + + if err != nil { + log.Error("Falling back to embedded checks", log.Err(err)) + nodeCollectorOptions = append(nodeCollectorOptions, + []trivyk8s.NodeCollectorOption{ + trivyk8s.WithEmbeddedCommandFileSystem(trivy_checks.EmbeddedK8sCommandsFileSystem), + trivyk8s.WithEmbeddedNodeConfigFilesystem(trivy_checks.EmbeddedConfigCommandsFileSystem), + }...) 
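The `Command` type and the `Commands` field added to `Control` in pkg/iac/types/compliance.go above are what `getComplianceCommands` walks just below to collect node-collector command IDs. A minimal sketch of how such a control unmarshals with `gopkg.in/yaml.v3`, using trimmed-down local copies of the types and a hypothetical control (IDs and names are made up for illustration):

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

// Trimmed-down copies of the spec types, for illustration only.
type Command struct {
	ID string `yaml:"id"`
}

type Control struct {
	ID       string    `yaml:"id"`
	Name     string    `yaml:"name"`
	Severity string    `yaml:"severity"`
	Commands []Command `yaml:"commands"`
}

const control = `
id: 9.9.9
name: Example control (hypothetical)
severity: HIGH
commands:
  - id: CMD-9999
`

func main() {
	var c Control
	if err := yaml.Unmarshal([]byte(control), &c); err != nil {
		panic(err)
	}

	// Collect non-empty command IDs, mirroring what getComplianceCommands does.
	var ids []string
	for _, cmd := range c.Commands {
		if cmd.ID != "" {
			ids = append(ids, cmd.ID)
		}
	}
	fmt.Println(ids) // [CMD-9999]
}
```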
+ } + + complianceCommandsIDs := getComplianceCommands(opts) + nodeCollectorOptions = append(nodeCollectorOptions, []trivyk8s.NodeCollectorOption{ + trivyk8s.WithCommandPaths(contentPath), + trivyk8s.WithSpecCommandIds(complianceCommandsIDs), + }...) + return nodeCollectorOptions +} + +func getComplianceCommands(opts flag.Options) []string { + var commands []string + if opts.Compliance.Spec.ID != "" { + for _, control := range opts.Compliance.Spec.Controls { + for _, command := range control.Commands { + if command.ID != "" { + commands = append(commands, command.ID) + } + } + } + } + return commands +} diff --git a/pkg/k8s/commands/run.go b/pkg/k8s/commands/run.go index 7c34bb4feb2a..6a20d04aee10 100644 --- a/pkg/k8s/commands/run.go +++ b/pkg/k8s/commands/run.go @@ -3,6 +3,7 @@ package commands import ( "context" "errors" + "fmt" "github.com/spf13/viper" "golang.org/x/xerrors" @@ -18,6 +19,7 @@ import ( "github.com/aquasecurity/trivy/pkg/k8s/scanner" "github.com/aquasecurity/trivy/pkg/log" "github.com/aquasecurity/trivy/pkg/types" + "github.com/aquasecurity/trivy/pkg/version/doc" ) // Run runs a k8s scan @@ -39,7 +41,8 @@ func Run(ctx context.Context, args []string, opts flag.Options) error { defer func() { cancel() if errors.Is(err, context.DeadlineExceeded) { - log.WarnContext(ctx, "Provide a higher timeout value, see https://aquasecurity.github.io/trivy/latest/docs/configuration/") + // e.g. https://aquasecurity.github.io/trivy/latest/docs/configuration + log.WarnContext(ctx, fmt.Sprintf("Provide a higher timeout value, see %s", doc.URL("/docs/configuration/", ""))) } }() opts.K8sVersion = cluster.GetClusterVersion() diff --git a/pkg/k8s/inject.go b/pkg/k8s/inject.go index 31ffd2afffa7..e71b11fe57d9 100644 --- a/pkg/k8s/inject.go +++ b/pkg/k8s/inject.go @@ -6,7 +6,7 @@ package k8s import ( "github.com/google/wire" - "github.com/aquasecurity/trivy/pkg/fanal/cache" + "github.com/aquasecurity/trivy/pkg/cache" ) func initializeScanK8s(localArtifactCache cache.LocalArtifactCache) *ScanKubernetes { diff --git a/pkg/k8s/report/report.go b/pkg/k8s/report/report.go index 1db95394514e..e71c218bf864 100644 --- a/pkg/k8s/report/report.go +++ b/pkg/k8s/report/report.go @@ -4,10 +4,10 @@ import ( "errors" "fmt" "io" + "slices" "strings" - "golang.org/x/exp/maps" - "golang.org/x/exp/slices" + "github.com/samber/lo" dbTypes "github.com/aquasecurity/trivy-db/pkg/types" "github.com/aquasecurity/trivy-kubernetes/pkg/artifacts" @@ -114,7 +114,7 @@ func (r Report) consolidate() ConsolidatedReport { index[key] = v } - consolidated.Findings = maps.Values(index) + consolidated.Findings = lo.Values(index) return consolidated } diff --git a/pkg/k8s/report/summary.go b/pkg/k8s/report/summary.go index a637a55cbd0d..9a3d6ee39371 100644 --- a/pkg/k8s/report/summary.go +++ b/pkg/k8s/report/summary.go @@ -3,11 +3,11 @@ package report import ( "fmt" "io" + "slices" "sort" "strconv" "strings" - "golang.org/x/exp/slices" "golang.org/x/xerrors" "github.com/aquasecurity/table" diff --git a/pkg/k8s/scanner/scanner_test.go b/pkg/k8s/scanner/scanner_test.go index c17b0e622ca2..3de4f0429ef5 100644 --- a/pkg/k8s/scanner/scanner_test.go +++ b/pkg/k8s/scanner/scanner_test.go @@ -6,9 +6,9 @@ import ( "testing" "github.com/package-url/packageurl-go" + "github.com/samber/lo" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "golang.org/x/exp/maps" 
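A recurring change in the report.go and summary.go hunks above, and in several files later in the diff, is dropping golang.org/x/exp in favour of the standard library `slices` package (Go 1.21+) and `github.com/samber/lo` for the map helpers. A small sketch of the replacements, with made-up data:

```go
package main

import (
	"fmt"
	"slices" // stdlib since Go 1.21; replaces golang.org/x/exp/slices

	"github.com/samber/lo" // lo.Keys/lo.Values replace golang.org/x/exp/maps
)

func main() {
	scanners := []string{"vuln", "misconfig", "secret"}
	fmt.Println(slices.Contains(scanners, "secret")) // true

	severityCount := map[string]int{"HIGH": 2, "LOW": 5}

	keys := lo.Keys(severityCount)     // ["HIGH" "LOW"] in arbitrary order
	values := lo.Values(severityCount) // [2 5] in arbitrary order
	fmt.Println(keys, values)
}
```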
"github.com/aquasecurity/trivy-kubernetes/pkg/artifacts" cmd "github.com/aquasecurity/trivy/pkg/commands/artifact" @@ -284,7 +284,7 @@ func TestScanner_Scan(t *testing.T) { got, err := scanner.Scan(ctx, tt.artifacts) require.NoError(t, err) - gotComponents := maps.Values(got.BOM.Components()) + gotComponents := lo.Values(got.BOM.Components()) require.Equal(t, len(tt.wantComponents), len(gotComponents)) sort.Slice(gotComponents, func(i, j int) bool { diff --git a/pkg/k8s/wire_gen.go b/pkg/k8s/wire_gen.go index 2b2343a654b7..e6c4f7e0dff7 100644 --- a/pkg/k8s/wire_gen.go +++ b/pkg/k8s/wire_gen.go @@ -8,8 +8,8 @@ package k8s import ( "github.com/aquasecurity/trivy-db/pkg/db" + "github.com/aquasecurity/trivy/pkg/cache" "github.com/aquasecurity/trivy/pkg/fanal/applier" - "github.com/aquasecurity/trivy/pkg/fanal/cache" "github.com/aquasecurity/trivy/pkg/scanner/langpkg" "github.com/aquasecurity/trivy/pkg/scanner/local" "github.com/aquasecurity/trivy/pkg/scanner/ospkg" diff --git a/pkg/licensing/expression/types.go b/pkg/licensing/expression/types.go index f5315f4ffb0f..f344c610ab4d 100644 --- a/pkg/licensing/expression/types.go +++ b/pkg/licensing/expression/types.go @@ -2,8 +2,7 @@ package expression import ( "fmt" - - "golang.org/x/exp/slices" + "slices" "github.com/aquasecurity/trivy/pkg/licensing" ) diff --git a/pkg/licensing/scanner.go b/pkg/licensing/scanner.go index b246a05b3a01..100358a5af6c 100644 --- a/pkg/licensing/scanner.go +++ b/pkg/licensing/scanner.go @@ -1,7 +1,7 @@ package licensing import ( - "golang.org/x/exp/slices" + "slices" dbTypes "github.com/aquasecurity/trivy-db/pkg/types" "github.com/aquasecurity/trivy/pkg/fanal/types" diff --git a/pkg/mapfs/fs.go b/pkg/mapfs/fs.go index c809f394b6fd..4cba59263c37 100644 --- a/pkg/mapfs/fs.go +++ b/pkg/mapfs/fs.go @@ -6,10 +6,10 @@ import ( "io/fs" "os" "path/filepath" + "slices" "strings" "time" - "golang.org/x/exp/slices" "golang.org/x/xerrors" xsync "github.com/aquasecurity/trivy/pkg/x/sync" diff --git a/pkg/module/module.go b/pkg/module/module.go index a37790941f79..69eb35df2c0b 100644 --- a/pkg/module/module.go +++ b/pkg/module/module.go @@ -8,13 +8,13 @@ import ( "os" "path/filepath" "regexp" + "slices" "sync" "github.com/samber/lo" "github.com/tetratelabs/wazero" "github.com/tetratelabs/wazero/api" wasi "github.com/tetratelabs/wazero/imports/wasi_snapshot_preview1" - "golang.org/x/exp/slices" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" diff --git a/pkg/oci/artifact.go b/pkg/oci/artifact.go index bab9ec7065ab..8cd4460ec919 100644 --- a/pkg/oci/artifact.go +++ b/pkg/oci/artifact.go @@ -188,7 +188,8 @@ func (a *Artifact) download(ctx context.Context, layer v1.Layer, fileName, dir s } // Decompress the downloaded file if it is compressed and copy it into the dst - if err = downloader.Download(ctx, f.Name(), dir, dir); err != nil { + // NOTE: it's local copying, the insecure option doesn't matter. 
+ if err = downloader.Download(ctx, f.Name(), dir, dir, false); err != nil { return xerrors.Errorf("download error: %w", err) } diff --git a/pkg/oci/artifact_test.go b/pkg/oci/artifact_test.go index d28dbe55bc1e..ddfe0304cc63 100644 --- a/pkg/oci/artifact_test.go +++ b/pkg/oci/artifact_test.go @@ -16,7 +16,6 @@ import ( ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/oci" - "github.com/aquasecurity/trivy/pkg/utils/fsutils" ) type fakeLayer struct { @@ -97,7 +96,6 @@ func TestArtifact_Download(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { tempDir := t.TempDir() - fsutils.SetCacheDir(tempDir) // Mock image img := new(fakei.FakeImage) diff --git a/pkg/parallel/pipeline_test.go b/pkg/parallel/pipeline_test.go index 60b8cec100ab..4fb008c9e00e 100644 --- a/pkg/parallel/pipeline_test.go +++ b/pkg/parallel/pipeline_test.go @@ -3,7 +3,6 @@ package parallel_test import ( "context" "fmt" - "math" "testing" "github.com/stretchr/testify/assert" @@ -15,13 +14,13 @@ import ( func TestPipeline_Do(t *testing.T) { type field struct { numWorkers int - items []float64 - onItem func(context.Context, float64) (float64, error) + items []int + onItem func(context.Context, int) (int, error) } type testCase struct { name string field field - want float64 + want int wantErr require.ErrorAssertionFunc } tests := []testCase{ @@ -29,7 +28,7 @@ func TestPipeline_Do(t *testing.T) { name: "pow", field: field{ numWorkers: 5, - items: []float64{ + items: []int{ 1, 2, 3, @@ -41,44 +40,44 @@ func TestPipeline_Do(t *testing.T) { 9, 10, }, - onItem: func(_ context.Context, f float64) (float64, error) { - return math.Pow(f, 2), nil + onItem: func(_ context.Context, i int) (int, error) { + return i * i, nil }, }, want: 385, wantErr: require.NoError, }, { - name: "ceil", + name: "double", field: field{ numWorkers: 3, - items: []float64{ - 1.1, - 2.2, - 3.3, - 4.4, - 5.5, - -1.1, - -2.2, - -3.3, + items: []int{ + 1, + 2, + 3, + 4, + 5, + -1, + -2, + -3, }, - onItem: func(_ context.Context, f float64) (float64, error) { - return math.Round(f), nil + onItem: func(_ context.Context, i int) (int, error) { + return i * 2, nil }, }, - want: 10, + want: 18, wantErr: require.NoError, }, { name: "error in series", field: field{ numWorkers: 1, - items: []float64{ + items: []int{ 1, 2, 3, }, - onItem: func(_ context.Context, f float64) (float64, error) { + onItem: func(_ context.Context, _ int) (int, error) { return 0, fmt.Errorf("error") }, }, @@ -88,11 +87,11 @@ func TestPipeline_Do(t *testing.T) { name: "error in parallel", field: field{ numWorkers: 3, - items: []float64{ + items: []int{ 1, 2, }, - onItem: func(_ context.Context, f float64) (float64, error) { + onItem: func(_ context.Context, _ int) (int, error) { return 0, fmt.Errorf("error") }, }, @@ -101,8 +100,8 @@ func TestPipeline_Do(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - var got float64 - p := parallel.NewPipeline(tt.field.numWorkers, false, tt.field.items, tt.field.onItem, func(f float64) error { + var got int + p := parallel.NewPipeline(tt.field.numWorkers, false, tt.field.items, tt.field.onItem, func(f int) error { got += f return nil }) diff --git a/pkg/plugin/index.go b/pkg/plugin/index.go index d0df12e2036f..58beeaa5f9c7 100644 --- a/pkg/plugin/index.go +++ b/pkg/plugin/index.go @@ -32,9 +32,9 @@ type Index struct { } `yaml:"plugins"` } -func (m *Manager) Update(ctx context.Context) error { +func (m *Manager) Update(ctx 
context.Context, opts Options) error { m.logger.InfoContext(ctx, "Updating the plugin index...", log.String("url", m.indexURL)) - if err := downloader.Download(ctx, m.indexURL, filepath.Dir(m.indexPath), ""); err != nil { + if err := downloader.Download(ctx, m.indexURL, filepath.Dir(m.indexPath), "", opts.Insecure); err != nil { return xerrors.Errorf("unable to download the plugin index: %w", err) } return nil @@ -69,10 +69,10 @@ func (m *Manager) Search(ctx context.Context, keyword string) error { // tryIndex returns the repository URL if the plugin name is found in the index. // Otherwise, it returns the input name. -func (m *Manager) tryIndex(ctx context.Context, name string) string { +func (m *Manager) tryIndex(ctx context.Context, name string, opts Options) string { // If the index file does not exist, download it first. if !fsutils.FileExists(m.indexPath) { - if err := m.Update(ctx); err != nil { + if err := m.Update(ctx, opts); err != nil { m.logger.ErrorContext(ctx, "Failed to update the plugin index", log.Err(err)) return name } diff --git a/pkg/plugin/index_test.go b/pkg/plugin/index_test.go index 7e5621d5ca1c..5e3f4cd017bd 100644 --- a/pkg/plugin/index_test.go +++ b/pkg/plugin/index_test.go @@ -13,12 +13,11 @@ import ( "github.com/stretchr/testify/require" "github.com/aquasecurity/trivy/pkg/plugin" - "github.com/aquasecurity/trivy/pkg/utils/fsutils" ) func TestManager_Update(t *testing.T) { tempDir := t.TempDir() - fsutils.SetCacheDir(tempDir) + t.Setenv("XDG_DATA_HOME", tempDir) ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { _, err := w.Write([]byte(`this is index`)) @@ -27,10 +26,10 @@ func TestManager_Update(t *testing.T) { t.Cleanup(ts.Close) manager := plugin.NewManager(plugin.WithIndexURL(ts.URL + "/index.yaml")) - err := manager.Update(context.Background()) + err := manager.Update(context.Background(), plugin.Options{}) require.NoError(t, err) - indexPath := filepath.Join(tempDir, "plugin", "index.yaml") + indexPath := filepath.Join(tempDir, ".trivy", "plugins", "index.yaml") assert.FileExists(t, indexPath) b, err := os.ReadFile(indexPath) @@ -73,7 +72,7 @@ bar A bar plugin } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - fsutils.SetCacheDir(tt.dir) + t.Setenv("XDG_DATA_HOME", tt.dir) var got bytes.Buffer m := plugin.NewManager(plugin.WithWriter(&got)) diff --git a/pkg/plugin/manager.go b/pkg/plugin/manager.go index 949c87525be7..c0f9bf431c87 100644 --- a/pkg/plugin/manager.go +++ b/pkg/plugin/manager.go @@ -58,12 +58,13 @@ type Manager struct { } func NewManager(opts ...ManagerOption) *Manager { + root := filepath.Join(fsutils.HomeDir(), pluginsRelativeDir) m := &Manager{ w: os.Stdout, indexURL: indexURL, logger: log.WithPrefix("plugin"), - pluginRoot: filepath.Join(fsutils.HomeDir(), pluginsRelativeDir), - indexPath: filepath.Join(fsutils.CacheDir(), "plugin", "index.yaml"), + pluginRoot: root, + indexPath: filepath.Join(root, "index.yaml"), } for _, opt := range opts { opt(m) @@ -91,13 +92,13 @@ func Upgrade(ctx context.Context, names []string) error { return defaultManager( func Uninstall(ctx context.Context, name string) error { return defaultManager().Uninstall(ctx, name) } func Information(name string) error { return defaultManager().Information(name) } func List(ctx context.Context) error { return defaultManager().List(ctx) } -func Update(ctx context.Context) error { return defaultManager().Update(ctx) } +func Update(ctx context.Context, opts Options) error { return 
defaultManager().Update(ctx, opts) } func Search(ctx context.Context, keyword string) error { return defaultManager().Search(ctx, keyword) } // Install installs a plugin func (m *Manager) Install(ctx context.Context, arg string, opts Options) (Plugin, error) { input := m.parseArg(ctx, arg) - input.name = m.tryIndex(ctx, input.name) + input.name = m.tryIndex(ctx, input.name, opts) // If the plugin is already installed, it skips installing the plugin. if p, installed := m.isInstalled(ctx, input.name, input.version); installed { @@ -110,7 +111,7 @@ func (m *Manager) Install(ctx context.Context, arg string, opts Options) (Plugin } func (m *Manager) install(ctx context.Context, src string, opts Options) (Plugin, error) { - tempDir, err := downloader.DownloadToTempDir(ctx, src) + tempDir, err := downloader.DownloadToTempDir(ctx, src, opts.Insecure) if err != nil { return Plugin{}, xerrors.Errorf("download failed: %w", err) } diff --git a/pkg/plugin/manager_unix_test.go b/pkg/plugin/manager_unix_test.go index 0250a80d7907..728d3c7cf041 100644 --- a/pkg/plugin/manager_unix_test.go +++ b/pkg/plugin/manager_unix_test.go @@ -200,7 +200,11 @@ func TestManager_Install(t *testing.T) { t.Setenv("XDG_DATA_HOME", dst) // For plugin index - fsutils.SetCacheDir("testdata") + pluginDir := filepath.Join(dst, ".trivy", "plugins") + err := os.MkdirAll(pluginDir, 0755) + require.NoError(t, err) + _, err = fsutils.CopyFile("testdata/.trivy/plugins/index.yaml", filepath.Join(pluginDir, "index.yaml")) + require.NoError(t, err) if tt.installed != nil { setupInstalledPlugin(t, dst, *tt.installed) diff --git a/pkg/plugin/plugin.go b/pkg/plugin/plugin.go index 68a50ae31780..56c33644f854 100644 --- a/pkg/plugin/plugin.go +++ b/pkg/plugin/plugin.go @@ -57,6 +57,7 @@ type Options struct { Args []string Stdin io.Reader // For output plugin Platform ftypes.Platform + Insecure bool } func (p *Plugin) Cmd(ctx context.Context, opts Options) (*exec.Cmd, error) { @@ -154,7 +155,7 @@ func (p *Plugin) install(ctx context.Context, dst, pwd string, opts Options) err p.Installed.Platform = lo.FromPtr(platform.Selector) log.DebugContext(ctx, "Downloading the execution file...", log.String("uri", platform.URI)) - if err = downloader.Download(ctx, platform.URI, dst, pwd); err != nil { + if err = downloader.Download(ctx, platform.URI, dst, pwd, opts.Insecure); err != nil { return xerrors.Errorf("unable to download the execution file (%s): %w", platform.URI, err) } return nil diff --git a/pkg/plugin/testdata/plugin/index.yaml b/pkg/plugin/testdata/.trivy/plugins/index.yaml similarity index 100% rename from pkg/plugin/testdata/plugin/index.yaml rename to pkg/plugin/testdata/.trivy/plugins/index.yaml diff --git a/pkg/policy/policy.go b/pkg/policy/policy.go index 950644c0084d..88af220d3cf6 100644 --- a/pkg/policy/policy.go +++ b/pkg/policy/policy.go @@ -237,7 +237,6 @@ func (c *Client) GetMetadata() (*Metadata, error) { } func (c *Client) Clear() error { - log.Info("Removing check bundle...") if err := os.RemoveAll(c.policyDir); err != nil { return xerrors.Errorf("failed to remove check bundle: %w", err) } diff --git a/pkg/purl/purl.go b/pkg/purl/purl.go index 92ce07be9741..12b27e6290e6 100644 --- a/pkg/purl/purl.go +++ b/pkg/purl/purl.go @@ -442,12 +442,14 @@ func parseJulia(pkgName, pkgUUID string) (string, string, packageurl.Qualifiers) func purlType(t ftypes.TargetType) string { switch t { - case ftypes.Jar, ftypes.Pom, ftypes.Gradle: + case ftypes.Jar, ftypes.Pom, ftypes.Gradle, ftypes.Sbt: return packageurl.TypeMaven case 
ftypes.Bundler, ftypes.GemSpec: return packageurl.TypeGem case ftypes.NuGet, ftypes.DotNetCore, ftypes.PackagesProps: return packageurl.TypeNuget + case ftypes.Composer, ftypes.ComposerVendor: + return packageurl.TypeComposer case ftypes.CondaPkg, ftypes.CondaEnv: return packageurl.TypeConda case ftypes.PythonPkg, ftypes.Pip, ftypes.Pipenv, ftypes.Poetry: @@ -468,13 +470,14 @@ func purlType(t ftypes.TargetType) string { return packageurl.TypePub case ftypes.RustBinary, ftypes.Cargo: return packageurl.TypeCargo - case ftypes.Alpine: + case ftypes.Alpine, ftypes.Chainguard, ftypes.Wolfi: return packageurl.TypeApk case ftypes.Debian, ftypes.Ubuntu: return packageurl.TypeDebian case ftypes.RedHat, ftypes.CentOS, ftypes.Rocky, ftypes.Alma, ftypes.Amazon, ftypes.Fedora, ftypes.Oracle, ftypes.OpenSUSE, - ftypes.OpenSUSELeap, ftypes.OpenSUSETumbleweed, ftypes.SLES, ftypes.Photon: + ftypes.OpenSUSELeap, ftypes.OpenSUSETumbleweed, ftypes.SLES, ftypes.Photon, + ftypes.CBLMariner: return packageurl.TypeRPM case TypeOCI: return packageurl.TypeOCI diff --git a/pkg/purl/purl_test.go b/pkg/purl/purl_test.go index ddcfc98222e6..25e9e7829d7b 100644 --- a/pkg/purl/purl_test.go +++ b/pkg/purl/purl_test.go @@ -51,6 +51,20 @@ func TestNewPackageURL(t *testing.T) { Version: "5.3.14", }, }, + { + name: "sbt package", + typ: ftypes.Sbt, + pkg: ftypes.Package{ + Name: "org.typelevel:cats-core_2.12", + Version: "2.9.0", + }, + want: &purl.PackageURL{ + Type: packageurl.TypeMaven, + Namespace: "org.typelevel", + Name: "cats-core_2.12", + Version: "2.9.0", + }, + }, { name: "yarn package", typ: ftypes.Yarn, diff --git a/pkg/rekor/client.go b/pkg/rekor/client.go index d7b0a35dd781..d748166d6d7f 100644 --- a/pkg/rekor/client.go +++ b/pkg/rekor/client.go @@ -3,6 +3,7 @@ package rekor import ( "context" "net/url" + "slices" httptransport "github.com/go-openapi/runtime/client" "github.com/go-openapi/strfmt" @@ -10,7 +11,6 @@ import ( eclient "github.com/sigstore/rekor/pkg/generated/client/entries" "github.com/sigstore/rekor/pkg/generated/client/index" "github.com/sigstore/rekor/pkg/generated/models" - "golang.org/x/exp/slices" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/log" diff --git a/pkg/report/sarif.go b/pkg/report/sarif.go index ae84b8ff987f..a94dcccb2c9b 100644 --- a/pkg/report/sarif.go +++ b/pkg/report/sarif.go @@ -137,6 +137,7 @@ func (sw *SarifWriter) Write(ctx context.Context, report types.Report) error { "imageName": report.ArtifactName, "repoTags": report.Metadata.RepoTags, "repoDigests": report.Metadata.RepoDigests, + "imageID": report.Metadata.ImageID, } } if sw.Target != "" { diff --git a/pkg/report/sarif_test.go b/pkg/report/sarif_test.go index 14b5b6027a3b..9ce3363cc321 100644 --- a/pkg/report/sarif_test.go +++ b/pkg/report/sarif_test.go @@ -31,6 +31,7 @@ func TestReportWriter_Sarif(t *testing.T) { ArtifactName: "debian:9", ArtifactType: artifact.TypeContainerImage, Metadata: types.Metadata{ + ImageID: "sha256:7640c3f9e75002deb419d5e32738eeff82cf2b3edca3781b4fe1f1f626d11b20", RepoTags: []string{ "debian:9", }, @@ -177,6 +178,7 @@ func TestReportWriter_Sarif(t *testing.T) { PropertyBag: sarif.PropertyBag{ Properties: map[string]any{ "imageName": "debian:9", + "imageID": "sha256:7640c3f9e75002deb419d5e32738eeff82cf2b3edca3781b4fe1f1f626d11b20", "repoDigests": []any{"debian@sha256:a8cc1744bbdd5266678e3e8b3e6387e45c053218438897e86876f2eb104e5534"}, "repoTags": []any{"debian:9"}, }, diff --git a/pkg/report/table/table.go b/pkg/report/table/table.go 
b/pkg/report/table/table.go
index 51ee946e3814..8bfa75922013 100644
--- a/pkg/report/table/table.go
+++ b/pkg/report/table/table.go
@@ -6,10 +6,10 @@ import (
 	"io"
 	"os"
 	"runtime"
+	"slices"
 	"strings"
 	"github.com/fatih/color"
-	"golang.org/x/exp/slices"
 	"github.com/aquasecurity/table"
 	"github.com/aquasecurity/tml"
diff --git a/pkg/report/table/vulnerability.go b/pkg/report/table/vulnerability.go
index 562f667c8498..03435fd92b96 100644
--- a/pkg/report/table/vulnerability.go
+++ b/pkg/report/table/vulnerability.go
@@ -4,14 +4,13 @@ import (
 	"bytes"
 	"fmt"
 	"path/filepath"
+	"slices"
 	"sort"
 	"strings"
 	"sync"
 	"github.com/samber/lo"
 	"github.com/xlab/treeprint"
-	"golang.org/x/exp/maps"
-	"golang.org/x/exp/slices"
 	"github.com/aquasecurity/table"
 	"github.com/aquasecurity/tml"
@@ -288,7 +287,7 @@ func addParents(topItem treeprint.Tree, pkg ftypes.Package, parentMap map[string]
 	}
 	// Omitted
-	rootIDs := lo.Filter(maps.Keys(roots), func(pkgID string, _ int) bool {
+	rootIDs := lo.Filter(lo.Keys(roots), func(pkgID string, _ int) bool {
 		_, ok := seen[pkgID]
 		return !ok
 	})
@@ -338,7 +337,7 @@ func findAncestor(pkgID string, parentMap map[string]ftypes.Packages, seen map[s]
 	}
 	}
 	}
-	return maps.Keys(ancestors)
+	return lo.Keys(ancestors)
 }
 var jarExtensions = []string{
diff --git a/pkg/result/filter.go b/pkg/result/filter.go
index 936ad8272b12..7d4ead524ccc 100644
--- a/pkg/result/filter.go
+++ b/pkg/result/filter.go
@@ -5,12 +5,11 @@ import (
 	"fmt"
 	"os"
 	"path/filepath"
+	"slices"
 	"sort"
 	"github.com/open-policy-agent/opa/rego"
 	"github.com/samber/lo"
-	"golang.org/x/exp/maps"
-	"golang.org/x/exp/slices"
 	"golang.org/x/xerrors"
 	dbTypes "github.com/aquasecurity/trivy-db/pkg/types"
@@ -135,7 +134,7 @@ func filterVulnerabilities(result *types.Result, severities []string, ignoreStat
 	}
 	// Override the detected vulnerabilities
-	result.Vulnerabilities = maps.Values(uniqVulns)
+	result.Vulnerabilities = lo.Values(uniqVulns)
 	if len(result.Vulnerabilities) == 0 {
 		result.Vulnerabilities = nil
 	}
diff --git a/pkg/rpc/server/inject.go b/pkg/rpc/server/inject.go
index 453e4e80a892..4c05df08b15e 100644
--- a/pkg/rpc/server/inject.go
+++ b/pkg/rpc/server/inject.go
@@ -6,7 +6,7 @@ package server
 import (
 	"github.com/google/wire"
-	"github.com/aquasecurity/trivy/pkg/fanal/cache"
+	"github.com/aquasecurity/trivy/pkg/cache"
 )
 func initializeScanServer(localArtifactCache cache.LocalArtifactCache) *ScanServer {
diff --git a/pkg/rpc/server/listen.go b/pkg/rpc/server/listen.go
index 802afe68ae3d..7e2ebc6b8227 100644
--- a/pkg/rpc/server/listen.go
+++ b/pkg/rpc/server/listen.go
@@ -15,8 +15,8 @@ import (
 	"github.com/aquasecurity/trivy-db/pkg/db"
 	"github.com/aquasecurity/trivy-db/pkg/metadata"
+	"github.com/aquasecurity/trivy/pkg/cache"
 	dbc "github.com/aquasecurity/trivy/pkg/db"
-	"github.com/aquasecurity/trivy/pkg/fanal/cache"
 	"github.com/aquasecurity/trivy/pkg/fanal/types"
 	"github.com/aquasecurity/trivy/pkg/log"
 	"github.com/aquasecurity/trivy/pkg/utils/fsutils"
@@ -128,17 +128,17 @@ func withToken(base http.Handler, token, tokenHeader string) http.Handler {
 }
 type dbWorker struct {
-	dbClient dbc.Operation
+	dbClient *dbc.Client
 }
-func newDBWorker(dbClient dbc.Operation) dbWorker {
+func newDBWorker(dbClient *dbc.Client) dbWorker {
 	return dbWorker{dbClient: dbClient}
 }
 func (w dbWorker) update(ctx context.Context, appVersion, cacheDir string, skipDBUpdate bool, dbUpdateWg, requestWg
*sync.WaitGroup, opt types.RegistryOptions) error { log.Debug("Check for DB update...") - needsUpdate, err := w.dbClient.NeedsUpdate(appVersion, skipDBUpdate) + needsUpdate, err := w.dbClient.NeedsUpdate(ctx, appVersion, skipDBUpdate) if err != nil { return xerrors.Errorf("failed to check if db needs an update") } else if !needsUpdate { diff --git a/pkg/rpc/server/listen_test.go b/pkg/rpc/server/listen_test.go index e1ffaba23878..82c8b2669bc4 100644 --- a/pkg/rpc/server/listen_test.go +++ b/pkg/rpc/server/listen_test.go @@ -5,166 +5,102 @@ import ( "encoding/json" "net/http" "net/http/httptest" - "os" "path" "sync" "testing" "time" + "github.com/google/go-containerregistry/pkg/v1/types" "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" - "golang.org/x/xerrors" - "github.com/aquasecurity/trivy-db/pkg/db" + trivydb "github.com/aquasecurity/trivy-db/pkg/db" "github.com/aquasecurity/trivy-db/pkg/metadata" - dbFile "github.com/aquasecurity/trivy/pkg/db" - "github.com/aquasecurity/trivy/pkg/fanal/cache" + "github.com/aquasecurity/trivy/internal/dbtest" + "github.com/aquasecurity/trivy/pkg/cache" + "github.com/aquasecurity/trivy/pkg/clock" + "github.com/aquasecurity/trivy/pkg/db" ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/policy" - "github.com/aquasecurity/trivy/pkg/utils/fsutils" "github.com/aquasecurity/trivy/pkg/version" rpcCache "github.com/aquasecurity/trivy/rpc/cache" ) func Test_dbWorker_update(t *testing.T) { - timeNextUpdate := time.Date(3000, 1, 1, 0, 0, 0, 0, time.UTC) - timeUpdateAt := time.Date(3000, 1, 1, 0, 0, 0, 0, time.UTC) - - type needsUpdateInput struct { - appVersion string - skip bool - } - type needsUpdateOutput struct { - needsUpdate bool - err error - } - type needsUpdate struct { - input needsUpdateInput - output needsUpdateOutput + cachedMetadata := metadata.Metadata{ + Version: db.SchemaVersion, + NextUpdate: time.Date(2020, 10, 2, 0, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2020, 10, 1, 0, 0, 0, 0, time.UTC), + DownloadedAt: time.Date(2020, 10, 1, 1, 0, 0, 0, time.UTC), } - type download struct { - call bool - err error - } - - type args struct { - appVersion string - } tests := []struct { - name string - needsUpdate needsUpdate - download download - args args - want metadata.Metadata - wantErr string + name string + now time.Time + skipUpdate bool + layerMediaType types.MediaType + want metadata.Metadata + wantErr string }{ { - name: "happy path", - needsUpdate: needsUpdate{ - input: needsUpdateInput{ - appVersion: "1", - skip: false, - }, - output: needsUpdateOutput{needsUpdate: true}, - }, - download: download{ - call: true, - }, - args: args{appVersion: "1"}, + name: "update needed", + now: time.Date(2021, 10, 1, 0, 0, 0, 0, time.UTC), + skipUpdate: false, want: metadata.Metadata{ - Version: 1, - NextUpdate: timeNextUpdate, - UpdatedAt: timeUpdateAt, + Version: db.SchemaVersion, + NextUpdate: time.Date(3000, 1, 1, 0, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(3000, 1, 1, 0, 0, 0, 0, time.UTC), + DownloadedAt: time.Date(2021, 10, 1, 0, 0, 0, 0, time.UTC), }, }, { - name: "not update", - needsUpdate: needsUpdate{ - input: needsUpdateInput{ - appVersion: "1", - skip: false, - }, - output: needsUpdateOutput{needsUpdate: false}, - }, - args: args{appVersion: "1"}, + name: "not update needed", + now: time.Date(2019, 10, 
1, 0, 0, 0, 0, time.UTC), + skipUpdate: false, + want: cachedMetadata, }, { - name: "skip update", - needsUpdate: needsUpdate{ - input: needsUpdateInput{ - appVersion: "1", - skip: true, - }, - output: needsUpdateOutput{needsUpdate: false}, - }, - args: args{appVersion: "1"}, + name: "skip update", + now: time.Date(2021, 10, 1, 0, 0, 0, 0, time.UTC), + skipUpdate: true, + want: cachedMetadata, }, { - name: "NeedsUpdate returns an error", - needsUpdate: needsUpdate{ - input: needsUpdateInput{ - appVersion: "1", - skip: false, - }, - output: needsUpdateOutput{err: xerrors.New("fail")}, - }, - args: args{appVersion: "1"}, - wantErr: "failed to check if db needs an update", - }, - { - name: "Download returns an error", - needsUpdate: needsUpdate{ - input: needsUpdateInput{ - appVersion: "1", - skip: false, - }, - output: needsUpdateOutput{needsUpdate: true}, - }, - download: download{ - call: true, - err: xerrors.New("fail"), - }, - args: args{appVersion: "1"}, - wantErr: "failed DB hot update", + name: "Download returns an error", + now: time.Date(2021, 10, 1, 0, 0, 0, 0, time.UTC), + skipUpdate: false, + layerMediaType: types.MediaType("unknown"), + wantErr: "failed DB hot update", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { cacheDir := t.TempDir() - require.NoError(t, db.Init(cacheDir), tt.name) - - mockDBClient := new(dbFile.MockOperation) - mockDBClient.On("NeedsUpdate", - tt.needsUpdate.input.appVersion, tt.needsUpdate.input.skip).Return( - tt.needsUpdate.output.needsUpdate, tt.needsUpdate.output.err) - - defer func() { _ = db.Close() }() + // Initialize the cache + meta := metadata.NewClient(cacheDir) + err := meta.Update(cachedMetadata) + require.NoError(t, err) - if tt.download.call { - mockDBClient.On("Download", mock.Anything, mock.Anything, mock.Anything).Run( - func(args mock.Arguments) { - // fake download: copy testdata/new.db to tmpDir/db/trivy.db - tmpDir := args.String(1) - err := os.MkdirAll(db.Dir(tmpDir), 0744) - require.NoError(t, err) + err = trivydb.Init(cacheDir) + require.NoError(t, err) - _, err = fsutils.CopyFile("testdata/new.db", db.Path(tmpDir)) - require.NoError(t, err) + defer func() { _ = trivydb.Close() }() - // fake download: copy testdata/metadata.json to tmpDir/db/metadata.json - _, err = fsutils.CopyFile("testdata/metadata.json", metadata.Path(tmpDir)) - require.NoError(t, err) - }).Return(tt.download.err) - } + // Set a fake time + ctx := clock.With(context.Background(), tt.now) - w := newDBWorker(mockDBClient) + // Set a fake DB + dbPath := dbtest.ArchiveDir(t, "testdata/newdb") + art := dbtest.NewFakeDB(t, dbPath, dbtest.FakeDBOptions{ + MediaType: tt.layerMediaType, + }) + client := db.NewClient(cacheDir, true, db.WithOCIArtifact(art)) + w := newDBWorker(client) var dbUpdateWg, requestWg sync.WaitGroup - err := w.update(context.Background(), tt.args.appVersion, cacheDir, - tt.needsUpdate.input.skip, &dbUpdateWg, &requestWg, ftypes.RegistryOptions{}) + err = w.update(ctx, "1.2.3", cacheDir, + tt.skipUpdate, &dbUpdateWg, &requestWg, ftypes.RegistryOptions{}) if tt.wantErr != "" { require.Error(t, err, tt.name) assert.Contains(t, err.Error(), tt.wantErr, tt.name) @@ -172,16 +108,10 @@ func Test_dbWorker_update(t *testing.T) { } require.NoError(t, err, tt.name) - if !tt.download.call { - return - } - mc := metadata.NewClient(cacheDir) got, err := mc.Get() require.NoError(t, err, tt.name) assert.Equal(t, tt.want, got, tt.name) - - mockDBClient.AssertExpectations(t) }) } } diff --git a/pkg/rpc/server/server.go 
b/pkg/rpc/server/server.go index 79801b3bd212..eb29683942f5 100644 --- a/pkg/rpc/server/server.go +++ b/pkg/rpc/server/server.go @@ -8,7 +8,7 @@ import ( "golang.org/x/xerrors" "google.golang.org/protobuf/types/known/emptypb" - "github.com/aquasecurity/trivy/pkg/fanal/cache" + "github.com/aquasecurity/trivy/pkg/cache" "github.com/aquasecurity/trivy/pkg/log" "github.com/aquasecurity/trivy/pkg/rpc" "github.com/aquasecurity/trivy/pkg/scanner" diff --git a/pkg/rpc/server/server_test.go b/pkg/rpc/server/server_test.go index 956db45249e3..78db0c06aac3 100644 --- a/pkg/rpc/server/server_test.go +++ b/pkg/rpc/server/server_test.go @@ -15,7 +15,7 @@ import ( dbTypes "github.com/aquasecurity/trivy-db/pkg/types" "github.com/aquasecurity/trivy-db/pkg/utils" "github.com/aquasecurity/trivy-db/pkg/vulnsrc/vulnerability" - "github.com/aquasecurity/trivy/pkg/fanal/cache" + "github.com/aquasecurity/trivy/pkg/cache" ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/scanner" "github.com/aquasecurity/trivy/pkg/types" diff --git a/pkg/rpc/server/testdata/metadata.json b/pkg/rpc/server/testdata/metadata.json deleted file mode 100644 index dfc2957b6295..000000000000 --- a/pkg/rpc/server/testdata/metadata.json +++ /dev/null @@ -1 +0,0 @@ -{"Version":1,"NextUpdate":"3000-01-01T0:00:00.0Z","UpdatedAt":"3000-01-01T0:00:00.0Z"} \ No newline at end of file diff --git a/pkg/rpc/server/testdata/newdb/metadata.json b/pkg/rpc/server/testdata/newdb/metadata.json new file mode 100644 index 000000000000..bb57f21428f4 --- /dev/null +++ b/pkg/rpc/server/testdata/newdb/metadata.json @@ -0,0 +1 @@ +{"Version":2,"NextUpdate":"3000-01-01T0:00:00.0Z","UpdatedAt":"3000-01-01T0:00:00.0Z"} \ No newline at end of file diff --git a/pkg/rpc/server/testdata/new.db b/pkg/rpc/server/testdata/newdb/trivy.db similarity index 100% rename from pkg/rpc/server/testdata/new.db rename to pkg/rpc/server/testdata/newdb/trivy.db diff --git a/pkg/rpc/server/wire_gen.go b/pkg/rpc/server/wire_gen.go index 81f5ba451a72..bcba35941b9e 100644 --- a/pkg/rpc/server/wire_gen.go +++ b/pkg/rpc/server/wire_gen.go @@ -8,8 +8,8 @@ package server import ( "github.com/aquasecurity/trivy-db/pkg/db" + "github.com/aquasecurity/trivy/pkg/cache" "github.com/aquasecurity/trivy/pkg/fanal/applier" - "github.com/aquasecurity/trivy/pkg/fanal/cache" "github.com/aquasecurity/trivy/pkg/scanner/langpkg" "github.com/aquasecurity/trivy/pkg/scanner/local" "github.com/aquasecurity/trivy/pkg/scanner/ospkg" diff --git a/pkg/sbom/core/bom.go b/pkg/sbom/core/bom.go index 1fb3078d0c6b..51875bff8738 100644 --- a/pkg/sbom/core/bom.go +++ b/pkg/sbom/core/bom.go @@ -194,7 +194,6 @@ type Relationship struct { type Vulnerability struct { dtypes.Vulnerability ID string - PkgID string PkgName string InstalledVersion string FixedVersion string diff --git a/pkg/sbom/cyclonedx/marshal.go b/pkg/sbom/cyclonedx/marshal.go index 7f4bb0c3b397..9f5437f2d292 100644 --- a/pkg/sbom/cyclonedx/marshal.go +++ b/pkg/sbom/cyclonedx/marshal.go @@ -3,6 +3,7 @@ package cyclonedx import ( "context" "fmt" + "net/url" "slices" "sort" "strconv" @@ -332,6 +333,10 @@ func (*Marshaler) affects(ref, version string) cdx.Affects { func (*Marshaler) advisories(refs []string) *[]cdx.Advisory { refs = lo.Uniq(refs) advs := lo.FilterMap(refs, func(ref string, _ int) (cdx.Advisory, bool) { + // There are cases when `ref` contains extra 
info + // But we need to use only URL. + // cf. https://github.com/aquasecurity/trivy/issues/6801 + ref = trimNonUrlInfo(ref) return cdx.Advisory{URL: ref}, ref != "" }) @@ -345,6 +350,17 @@ func (*Marshaler) advisories(refs []string) *[]cdx.Advisory { return &advs } +// trimNonUrlInfo returns first valid URL. +func trimNonUrlInfo(ref string) string { + ss := strings.Split(ref, " ") + for _, s := range ss { + if u, err := url.Parse(s); err == nil && u.Scheme != "" && u.Host != "" { + return s + } + } + return "" +} + func (m *Marshaler) marshalVulnerability(bomRef string, vuln core.Vulnerability) *cdx.Vulnerability { v := &cdx.Vulnerability{ ID: vuln.ID, diff --git a/pkg/sbom/cyclonedx/marshal_test.go b/pkg/sbom/cyclonedx/marshal_test.go index d1fc8a455a2a..9dc28a2ab812 100644 --- a/pkg/sbom/cyclonedx/marshal_test.go +++ b/pkg/sbom/cyclonedx/marshal_test.go @@ -24,6 +24,46 @@ import ( "github.com/aquasecurity/trivy/pkg/uuid" ) +var ( + binutilsIdentifier = ftypes.PkgIdentifier{ + UID: "7CC457C23685235A", + PURL: &packageurl.PackageURL{ + Type: packageurl.TypeRPM, + Namespace: "centos", + Name: "binutils", + Version: "2.30-93.el8", + Qualifiers: packageurl.Qualifiers{ + { + Key: "arch", + Value: "aarch64", + }, + { + Key: "distro", + Value: "centos-8.3.2011", + }, + }, + }, + } + + actionpack700Identifier = ftypes.PkgIdentifier{ + UID: "DFF5FF40889105B2", + PURL: &packageurl.PackageURL{ + Type: packageurl.TypeGem, + Name: "actionpack", + Version: "7.0.0", + }, + } + + actionpack701Identifier = ftypes.PkgIdentifier{ + UID: "6B0A6392BAA7D584", + PURL: &packageurl.PackageURL{ + Type: packageurl.TypeGem, + Name: "actionpack", + Version: "7.0.1", + }, + } +) + func TestMarshaler_MarshalReport(t *testing.T) { testSBOM := core.NewBOM(core.Options{GenerateBOMRef: true}) testSBOM.AddComponent(&core.Component{ @@ -74,30 +114,13 @@ func TestMarshaler_MarshalReport(t *testing.T) { Type: ftypes.CentOS, Packages: []ftypes.Package{ { - ID: "binutils@2.30-93.el8", - Name: "binutils", - Version: "2.30", - Release: "93.el8", - Epoch: 0, - Arch: "aarch64", - Identifier: ftypes.PkgIdentifier{ - PURL: &packageurl.PackageURL{ - Type: packageurl.TypeRPM, - Namespace: "centos", - Name: "binutils", - Version: "2.30-93.el8", - Qualifiers: packageurl.Qualifiers{ - { - Key: "arch", - Value: "aarch64", - }, - { - Key: "distro", - Value: "centos-8.3.2011", - }, - }, - }, - }, + ID: "binutils@2.30-93.el8", + Name: "binutils", + Version: "2.30", + Release: "93.el8", + Epoch: 0, + Arch: "aarch64", + Identifier: binutilsIdentifier, SrcName: "binutils", SrcVersion: "2.30", SrcRelease: "93.el8", @@ -124,6 +147,7 @@ func TestMarshaler_MarshalReport(t *testing.T) { Name: "Red Hat OVAL v2", URL: "https://www.redhat.com/security/data/oval/v2/", }, + PkgIdentifier: binutilsIdentifier, Vulnerability: dtypes.Vulnerability{ Title: "binutils: Use-after-free in the error function", Description: "In GNU Binutils 2.31.1, there is a use-after-free in the error function in elfcomm.c when called from the process_archive function in readelf.c via a crafted ELF file.", @@ -158,23 +182,18 @@ func TestMarshaler_MarshalReport(t *testing.T) { Packages: []ftypes.Package{ { // This package conflicts - ID: "actionpack@7.0.0", - Name: "actionpack", - Version: "7.0.0", - Identifier: ftypes.PkgIdentifier{ - PURL: &packageurl.PackageURL{ - Type: packageurl.TypeGem, - Name: "actionpack", - Version: "7.0.0", - }, - }, - Indirect: false, + ID: "actionpack@7.0.0", + Name: "actionpack", + Version: "7.0.0", + Identifier: 
actionpack700Identifier, + Indirect: false, }, { ID: "actioncontroller@7.0.0", Name: "actioncontroller", Version: "7.0.0", Identifier: ftypes.PkgIdentifier{ + UID: "41ED2619CA718170", PURL: &packageurl.PackageURL{ Type: packageurl.TypeGem, Name: "actioncontroller", @@ -195,16 +214,10 @@ func TestMarshaler_MarshalReport(t *testing.T) { Packages: []ftypes.Package{ { // This package conflicts - ID: "actionpack@7.0.0", - Name: "actionpack", - Version: "7.0.0", - Identifier: ftypes.PkgIdentifier{ - PURL: &packageurl.PackageURL{ - Type: packageurl.TypeGem, - Name: "actionpack", - Version: "7.0.0", - }, - }, + ID: "actionpack@7.0.0", + Name: "actionpack", + Version: "7.0.0", + Identifier: actionpack700Identifier, }, }, }, @@ -218,6 +231,7 @@ func TestMarshaler_MarshalReport(t *testing.T) { Name: "Newtonsoft.Json", Version: "9.0.1", Identifier: ftypes.PkgIdentifier{ + UID: "94AB97F672F97AFB", PURL: &packageurl.PackageURL{ Type: packageurl.TypeNuget, Name: "Newtonsoft.Json", @@ -236,6 +250,7 @@ func TestMarshaler_MarshalReport(t *testing.T) { Name: "golang.org/x/crypto", Version: "v0.0.0-20210421170649-83a5a9bb288b", Identifier: ftypes.PkgIdentifier{ + UID: "B7183ED2CF7EB470", PURL: &packageurl.PackageURL{ Type: packageurl.TypeGolang, Namespace: "golang.org/x", @@ -698,6 +713,7 @@ func TestMarshaler_MarshalReport(t *testing.T) { Epoch: 1, Arch: "aarch64", Identifier: ftypes.PkgIdentifier{ + UID: "2FF7A09FA4E6AA2E", PURL: &packageurl.PackageURL{ Type: packageurl.TypeRPM, Namespace: "centos", @@ -738,6 +754,7 @@ func TestMarshaler_MarshalReport(t *testing.T) { Epoch: 0, Arch: "aarch64", Identifier: ftypes.PkgIdentifier{ + UID: "2DCAB94016E57F8E", PURL: &packageurl.PackageURL{ Type: packageurl.TypeRPM, Namespace: "centos", @@ -771,32 +788,20 @@ func TestMarshaler_MarshalReport(t *testing.T) { Type: ftypes.GemSpec, Packages: []ftypes.Package{ { - ID: "actionpack@7.0.0", - Name: "actionpack", - Version: "7.0.0", - Identifier: ftypes.PkgIdentifier{ - PURL: &packageurl.PackageURL{ - Type: packageurl.TypeGem, - Name: "actionpack", - Version: "7.0.0", - }, - }, + ID: "actionpack@7.0.0", + Name: "actionpack", + Version: "7.0.0", + Identifier: actionpack700Identifier, Layer: ftypes.Layer{ DiffID: "sha256:ccb64cf0b7ba2e50741d0b64cae324eb5de3b1e2f580bbf177e721b67df38488", }, FilePath: "tools/project-john/specifications/actionpack.gemspec", }, { - ID: "actionpack@7.0.1", - Name: "actionpack", - Version: "7.0.1", - Identifier: ftypes.PkgIdentifier{ - PURL: &packageurl.PackageURL{ - Type: packageurl.TypeGem, - Name: "actionpack", - Version: "7.0.1", - }, - }, + ID: "actionpack@7.0.1", + Name: "actionpack", + Version: "7.0.1", + Identifier: actionpack701Identifier, Layer: ftypes.Layer{ DiffID: "sha256:ccb64cf0b7ba2e50741d0b64cae324eb5de3b1e2f580bbf177e721b67df38488", }, @@ -805,17 +810,11 @@ func TestMarshaler_MarshalReport(t *testing.T) { }, Vulnerabilities: []types.DetectedVulnerability{ { - VulnerabilityID: "CVE-2022-23633", - PkgID: "actionpack@7.0.0", - PkgName: "actionpack", - PkgPath: "tools/project-john/specifications/actionpack.gemspec", - PkgIdentifier: ftypes.PkgIdentifier{ - PURL: &packageurl.PackageURL{ - Type: packageurl.TypeGem, - Name: "actionpack", - Version: "7.0.0", - }, - }, + VulnerabilityID: "CVE-2022-23633", + PkgID: "actionpack@7.0.0", + PkgName: "actionpack", + PkgPath: "tools/project-john/specifications/actionpack.gemspec", + PkgIdentifier: actionpack700Identifier, InstalledVersion: "7.0.0", FixedVersion: "~> 5.2.6, >= 5.2.6.2, ~> 6.0.4, >= 6.0.4.6, ~> 6.1.4, >= 6.1.4.6, >= 7.0.2.2", 
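The `trimNonUrlInfo` helper added in marshal.go above exists because some advisory references carry extra text around the link (the test references below exercise exactly that), and only a parseable URL should end up in the CycloneDX advisory list. A minimal sketch of the same filtering idea with `net/url`; this is a standalone illustration, not the Trivy function itself:

```go
package main

import (
	"fmt"
	"net/url"
	"strings"
)

// firstURL returns the first whitespace-separated token that parses as an
// absolute URL, or "" if none does; the same idea as trimNonUrlInfo.
func firstURL(ref string) string {
	for _, s := range strings.Fields(ref) {
		if u, err := url.Parse(s); err == nil && u.Scheme != "" && u.Host != "" {
			return s
		}
	}
	return ""
}

func main() {
	fmt.Println(firstURL("see https://example.com/advisory (vendor note)")) // https://example.com/advisory
	fmt.Println(firstURL("no link here"))                                   // empty: dropped from the advisory list
}
```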
SeveritySource: vulnerability.RubySec, @@ -847,25 +846,19 @@ func TestMarshaler_MarshalReport(t *testing.T) { }, }, References: []string{ - "http://www.openwall.com/lists/oss-security/2022/02/11/5", - "https://access.redhat.com/security/cve/CVE-2022-23633", + " extraPrefix http://www.openwall.com/lists/oss-security/2022/02/11/5", + "https://access.redhat.com/security/cve/CVE-2022-23633 (extra suffix)", }, PublishedDate: lo.ToPtr(time.Date(2022, 2, 11, 21, 15, 0, 0, time.UTC)), LastModifiedDate: lo.ToPtr(time.Date(2022, 2, 22, 21, 47, 0, 0, time.UTC)), }, }, { - VulnerabilityID: "CVE-2022-23633", - PkgID: "actionpack@7.0.1", - PkgName: "actionpack", - PkgPath: "tools/project-doe/specifications/actionpack.gemspec", - PkgIdentifier: ftypes.PkgIdentifier{ - PURL: &packageurl.PackageURL{ - Type: packageurl.TypeGem, - Name: "actionpack", - Version: "7.0.1", - }, - }, + VulnerabilityID: "CVE-2022-23633", + PkgID: "actionpack@7.0.1", + PkgName: "actionpack", + PkgPath: "tools/project-doe/specifications/actionpack.gemspec", + PkgIdentifier: actionpack701Identifier, InstalledVersion: "7.0.1", FixedVersion: "~> 5.2.6, >= 5.2.6.2, ~> 6.0.4, >= 6.0.4.6, ~> 6.1.4, >= 6.1.4.6, >= 7.0.2.2", SeveritySource: vulnerability.RubySec, @@ -1241,6 +1234,7 @@ func TestMarshaler_MarshalReport(t *testing.T) { Name: "actioncable", Version: "6.1.4.1", Identifier: ftypes.PkgIdentifier{ + UID: "2E6CF0E3CD6949BD", PURL: &packageurl.PackageURL{ Type: packageurl.TypeGem, Name: "actioncable", @@ -1259,6 +1253,7 @@ func TestMarshaler_MarshalReport(t *testing.T) { Name: "org.springframework:spring-web", Version: "5.3.22", Identifier: ftypes.PkgIdentifier{ + UID: "38DDCC9B589D3124", PURL: &packageurl.PackageURL{ Type: packageurl.TypeMaven, Namespace: "org.springframework", @@ -1280,6 +1275,7 @@ func TestMarshaler_MarshalReport(t *testing.T) { Name: "@babel/helper-string-parser", Version: "7.23.4", Identifier: ftypes.PkgIdentifier{ + UID: "F4C833D7F3FD9ECF", PURL: &packageurl.PackageURL{ Type: packageurl.TypeNPM, Namespace: "@babel", @@ -1458,6 +1454,7 @@ func TestMarshaler_MarshalReport(t *testing.T) { Version: "2.13.4.1", Identifier: ftypes.PkgIdentifier{ BOMRef: "pkg:maven/com.fasterxml.jackson.core/jackson-databind@2.13.4.1?file_path=jackson-databind-2.13.4.1.jar", + UID: "9A5066570222D04C", PURL: &packageurl.PackageURL{ Type: packageurl.TypeMaven, Namespace: "com.fasterxml.jackson.core", @@ -1475,6 +1472,7 @@ func TestMarshaler_MarshalReport(t *testing.T) { PkgPath: "jackson-databind-2.13.4.1.jar", PkgIdentifier: ftypes.PkgIdentifier{ BOMRef: "pkg:maven/com.fasterxml.jackson.core/jackson-databind@2.13.4.1?file_path=jackson-databind-2.13.4.1.jar", + UID: "9A5066570222D04C", PURL: &packageurl.PackageURL{ Type: packageurl.TypeMaven, Namespace: "com.fasterxml.jackson.core", @@ -1641,6 +1639,7 @@ func TestMarshaler_MarshalReport(t *testing.T) { Name: "org.apache.nifi:nifi-dbcp-base", Version: "1.20.0", Identifier: ftypes.PkgIdentifier{ + UID: "6F266C79E57ADC38", PURL: &packageurl.PackageURL{ Type: packageurl.TypeMaven, Namespace: "org.apache.nifi", @@ -1654,6 +1653,7 @@ func TestMarshaler_MarshalReport(t *testing.T) { Name: "org.apache.nifi:nifi-hikari-dbcp-service", Version: "1.20.0", Identifier: ftypes.PkgIdentifier{ + UID: "3EA16F0A4CAB50F9", PURL: &packageurl.PackageURL{ Type: packageurl.TypeMaven, Namespace: "org.apache.nifi", @@ -1670,6 +1670,7 @@ func TestMarshaler_MarshalReport(t *testing.T) { PkgName: "org.apache.nifi:nifi-dbcp-base", PkgPath: "nifi-dbcp-base-1.20.0.jar", PkgIdentifier: ftypes.PkgIdentifier{ + UID: 
"6F266C79E57ADC38", PURL: &packageurl.PackageURL{ Type: packageurl.TypeMaven, Namespace: "org.apache.nifi", @@ -1720,6 +1721,7 @@ func TestMarshaler_MarshalReport(t *testing.T) { PkgName: "org.apache.nifi:nifi-hikari-dbcp-service", PkgPath: "nifi-hikari-dbcp-service-1.20.0.jar", PkgIdentifier: ftypes.PkgIdentifier{ + UID: "3EA16F0A4CAB50F9", PURL: &packageurl.PackageURL{ Type: packageurl.TypeMaven, Namespace: "org.apache.nifi", @@ -1939,6 +1941,7 @@ func TestMarshaler_MarshalReport(t *testing.T) { Name: "ruby-typeprof", Version: "0.20.1", Identifier: ftypes.PkgIdentifier{ + UID: "C861FD5FC7AC663F", PURL: &packageurl.PackageURL{ Type: packageurl.TypeNPM, Name: "ruby-typeprof", diff --git a/pkg/sbom/io/decode.go b/pkg/sbom/io/decode.go index 917684962d20..7544cf215a3e 100644 --- a/pkg/sbom/io/decode.go +++ b/pkg/sbom/io/decode.go @@ -10,7 +10,7 @@ import ( debver "github.com/knqyf263/go-deb-version" rpmver "github.com/knqyf263/go-rpm-version" "github.com/package-url/packageurl-go" - "golang.org/x/exp/maps" + "github.com/samber/lo" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/dependency" @@ -256,10 +256,23 @@ func (m *Decoder) pkgName(pkg *ftypes.Package, c *core.Component) string { return pkg.Name } + // `maven purl type` has no restrictions on using lowercase letters. + // Also, `spdx-maven-plugin` uses `name` instead of `artifactId` for the `package name` field. + // So we need to use `purl` for maven/gradle packages + // See https://github.com/aquasecurity/trivy/issues/7007 for more information. + if p.Type == packageurl.TypeMaven || p.Type == packageurl.TypeGradle { + return pkg.Name + } + + // TODO(backward compatibility): Remove after 03/2025 + // Bitnami used different pkg.Name and the name from PURL. + // For backwards compatibility - we need to use PURL. + // cf. https://github.com/aquasecurity/trivy/issues/6981 + if c.PkgIdentifier.PURL.Type == packageurl.TypeBitnami { + return pkg.Name + } + if c.Group != "" { - if p.Type == packageurl.TypeMaven || p.Type == packageurl.TypeGradle { - return c.Group + ":" + c.Name - } return c.Group + "/" + c.Name } return c.Name @@ -363,7 +376,7 @@ func (m *Decoder) addOrphanPkgs(sbom *types.SBOM) error { } if len(osPkgMap) > 1 { - return xerrors.Errorf("multiple types of OS packages in SBOM are not supported (%q)", maps.Keys(osPkgMap)) + return xerrors.Errorf("multiple types of OS packages in SBOM are not supported (%q)", lo.Keys(osPkgMap)) } // Add OS packages only when OS is detected. diff --git a/pkg/sbom/io/encode.go b/pkg/sbom/io/encode.go index 9672f1648dc6..096abd026b86 100644 --- a/pkg/sbom/io/encode.go +++ b/pkg/sbom/io/encode.go @@ -85,7 +85,8 @@ func (e *Encoder) rootComponent(r types.Report) (*core.Component, error) { root.Type = core.TypeRepository case artifact.TypeCycloneDX, artifact.TypeSPDX: // When we scan SBOM file - if r.BOM != nil { + // If SBOM file doesn't contain root component - use filesystem + if r.BOM != nil && r.BOM.Root() != nil { return r.BOM.Root(), nil } // When we scan a `json` file (meaning a file in `json` format) which was created from the SBOM file. 
@@ -174,25 +175,33 @@ func (e *Encoder) encodePackages(parent *core.Component, result types.Result) { vulns := make(map[string][]core.Vulnerability) for _, vuln := range result.Vulnerabilities { v := e.vulnerability(vuln) - vulns[v.PkgID] = append(vulns[v.PkgID], v) + vulns[vuln.PkgIdentifier.UID] = append(vulns[vuln.PkgIdentifier.UID], v) } // Convert packages into components and add them to the BOM parentRelationship := core.RelationshipContains + + // UID => Package Component components := make(map[string]*core.Component, len(result.Packages)) + // PkgID => Package Component + dependencies := make(map[string]*core.Component, len(result.Packages)) for i, pkg := range result.Packages { pkgID := lo.Ternary(pkg.ID == "", fmt.Sprintf("%s@%s", pkg.Name, pkg.Version), pkg.ID) result.Packages[i].ID = pkgID // Convert packages to components c := e.component(result, pkg) - components[pkgID+pkg.FilePath] = c + components[pkg.Identifier.UID] = c + + // For dependencies: the key "pkgID" might be duplicated in aggregated packages, + // but it doesn't matter as they don't have "DependsOn". + dependencies[pkgID] = c // Add a component e.bom.AddComponent(c) // Add vulnerabilities - if vv := vulns[pkgID]; vv != nil { + if vv := vulns[pkg.Identifier.UID]; vv != nil { e.bom.AddVulnerabilities(c, vv) } @@ -211,7 +220,7 @@ func (e *Encoder) encodePackages(parent *core.Component, result types.Result) { if pkg.Relationship == ftypes.RelationshipRoot { continue } - c := components[pkg.ID+pkg.FilePath] + c := components[pkg.Identifier.UID] // Add a relationship between the parent and the package if needed if e.belongToParent(pkg, parents) { @@ -220,7 +229,7 @@ func (e *Encoder) encodePackages(parent *core.Component, result types.Result) { // Add relationships between the package and its dependencies for _, dep := range pkg.DependsOn { - dependsOn, ok := components[dep] + dependsOn, ok := dependencies[dep] if !ok { continue } @@ -369,7 +378,6 @@ func (*Encoder) vulnerability(vuln types.DetectedVulnerability) core.Vulnerabili return core.Vulnerability{ Vulnerability: vuln.Vulnerability, ID: vuln.VulnerabilityID, - PkgID: lo.Ternary(vuln.PkgID == "", fmt.Sprintf("%s@%s", vuln.PkgName, vuln.InstalledVersion), vuln.PkgID), PkgName: vuln.PkgName, InstalledVersion: vuln.InstalledVersion, FixedVersion: vuln.FixedVersion, diff --git a/pkg/sbom/io/encode_test.go b/pkg/sbom/io/encode_test.go index d165b64c3e80..80783827cee7 100644 --- a/pkg/sbom/io/encode_test.go +++ b/pkg/sbom/io/encode_test.go @@ -55,6 +55,7 @@ func TestEncoder_Encode(t *testing.T) { Name: "libc6", Version: "2.37-15.1", Identifier: ftypes.PkgIdentifier{ + UID: "33654D2C483FC3AD", PURL: &packageurl.PackageURL{ Type: packageurl.TypeDebian, Name: "libc6", @@ -67,6 +68,7 @@ func TestEncoder_Encode(t *testing.T) { Name: "curl", Version: "7.50.3-1", Identifier: ftypes.PkgIdentifier{ + UID: "51BA9E006222819D", PURL: &packageurl.PackageURL{ Type: packageurl.TypeDebian, Name: "curl", @@ -88,6 +90,9 @@ func TestEncoder_Encode(t *testing.T) { Vulnerability: dtypes.Vulnerability{ Severity: "HIGH", }, + PkgIdentifier: ftypes.PkgIdentifier{ + UID: "51BA9E006222819D", + }, }, }, }, @@ -97,20 +102,66 @@ func TestEncoder_Encode(t *testing.T) { Class: types.ClassLangPkg, Packages: []ftypes.Package{ { - ID: "org.apache.xmlgraphics/batik-anim:1.9.1", - Name: "org.apache.xmlgraphics/batik-anim", - Version: "1.9.1", - FilePath: "/app/batik-anim-1.9.1.jar", + ID: "com.fasterxml.jackson.core:jackson-databind:2.13.4", + Name: "com.fasterxml.jackson.core:jackson-databind", + 
Version: "2.13.4", + FilePath: "/foo/jackson-databind-2.13.4.jar", + Identifier: ftypes.PkgIdentifier{ + UID: "A6BD5A2FE5C00E10", + PURL: &packageurl.PackageURL{ + Type: packageurl.TypeMaven, + Namespace: "com.fasterxml.jackson.core", + Name: "jackson-databind", + Version: "2.13.4", + }, + }, + }, + { + ID: "com.fasterxml.jackson.core:jackson-databind:2.13.4", + Name: "com.fasterxml.jackson.core:jackson-databind", + Version: "2.13.4", + FilePath: "/bar/jackson-databind-2.13.4.jar", Identifier: ftypes.PkgIdentifier{ + UID: "64244651208EC759", PURL: &packageurl.PackageURL{ Type: packageurl.TypeMaven, - Namespace: "org.apache.xmlgraphics", - Name: "batik-anim", - Version: "1.9.1", + Namespace: "com.fasterxml.jackson.core", + Name: "jackson-databind", + Version: "2.13.4", }, }, }, }, + Vulnerabilities: []types.DetectedVulnerability{ + { + PkgName: "com.fasterxml.jackson.core:jackson-databind", + PkgID: "com.fasterxml.jackson.core:jackson-databind:2.13.4", + VulnerabilityID: "CVE-2022-42003", + InstalledVersion: "2.13.4", + FixedVersion: "2.12.7.1, 2.13.4.2", + PkgPath: "/foo/jackson-databind-2.13.4.jar", + Vulnerability: dtypes.Vulnerability{ + Severity: "HIGH", + }, + PkgIdentifier: ftypes.PkgIdentifier{ + UID: "A6BD5A2FE5C00E10", + }, + }, + { + PkgName: "com.fasterxml.jackson.core:jackson-databind", + PkgID: "com.fasterxml.jackson.core:jackson-databind:2.13.4", + VulnerabilityID: "CVE-2022-42003", + InstalledVersion: "2.13.4", + FixedVersion: "2.12.7.1, 2.13.4.2", + PkgPath: "/bar/jackson-databind-2.13.4.jar", + Vulnerability: dtypes.Vulnerability{ + Severity: "HIGH", + }, + PkgIdentifier: ftypes.PkgIdentifier{ + UID: "64244651208EC759", + }, + }, + }, }, }, }, @@ -185,6 +236,7 @@ func TestEncoder_Encode(t *testing.T) { }, }, PkgIdentifier: ftypes.PkgIdentifier{ + UID: "33654D2C483FC3AD", PURL: &packageurl.PackageURL{ Type: packageurl.TypeDebian, Name: "libc6", @@ -208,6 +260,7 @@ func TestEncoder_Encode(t *testing.T) { }, }, PkgIdentifier: ftypes.PkgIdentifier{ + UID: "51BA9E006222819D", PURL: &packageurl.PackageURL{ Type: packageurl.TypeDebian, Name: "curl", @@ -218,22 +271,22 @@ func TestEncoder_Encode(t *testing.T) { }, uuid.MustParse("3ff14136-e09f-4df9-80ea-000000000005"): { Type: core.TypeLibrary, - Group: "org.apache.xmlgraphics", - Name: "batik-anim", - Version: "1.9.1", + Group: "com.fasterxml.jackson.core", + Name: "jackson-databind", + Version: "2.13.4", Files: []core.File{ { - Path: "/app/batik-anim-1.9.1.jar", + Path: "/foo/jackson-databind-2.13.4.jar", }, }, Properties: []core.Property{ { Name: core.PropertyFilePath, - Value: "/app/batik-anim-1.9.1.jar", + Value: "/foo/jackson-databind-2.13.4.jar", }, { Name: core.PropertyPkgID, - Value: "org.apache.xmlgraphics/batik-anim:1.9.1", + Value: "com.fasterxml.jackson.core:jackson-databind:2.13.4", }, { Name: core.PropertyPkgType, @@ -241,13 +294,49 @@ func TestEncoder_Encode(t *testing.T) { }, }, PkgIdentifier: ftypes.PkgIdentifier{ + UID: "A6BD5A2FE5C00E10", PURL: &packageurl.PackageURL{ Type: packageurl.TypeMaven, - Namespace: "org.apache.xmlgraphics", - Name: "batik-anim", - Version: "1.9.1", + Namespace: "com.fasterxml.jackson.core", + Name: "jackson-databind", + Version: "2.13.4", }, - BOMRef: "pkg:maven/org.apache.xmlgraphics/batik-anim@1.9.1", + BOMRef: "3ff14136-e09f-4df9-80ea-000000000005", + }, + }, + uuid.MustParse("3ff14136-e09f-4df9-80ea-000000000006"): { + Type: core.TypeLibrary, + Group: "com.fasterxml.jackson.core", + Name: "jackson-databind", + Version: "2.13.4", + Files: []core.File{ + { + Path: 
"/bar/jackson-databind-2.13.4.jar", + }, + }, + Properties: []core.Property{ + { + Name: core.PropertyFilePath, + Value: "/bar/jackson-databind-2.13.4.jar", + }, + { + Name: core.PropertyPkgID, + Value: "com.fasterxml.jackson.core:jackson-databind:2.13.4", + }, + { + Name: core.PropertyPkgType, + Value: "jar", + }, + }, + PkgIdentifier: ftypes.PkgIdentifier{ + UID: "64244651208EC759", + PURL: &packageurl.PackageURL{ + Type: packageurl.TypeMaven, + Namespace: "com.fasterxml.jackson.core", + Name: "jackson-databind", + Version: "2.13.4", + }, + BOMRef: "3ff14136-e09f-4df9-80ea-000000000006", }, }, }, @@ -261,6 +350,10 @@ func TestEncoder_Encode(t *testing.T) { Dependency: uuid.MustParse("3ff14136-e09f-4df9-80ea-000000000005"), Type: core.RelationshipContains, }, + { + Dependency: uuid.MustParse("3ff14136-e09f-4df9-80ea-000000000006"), + Type: core.RelationshipContains, + }, }, uuid.MustParse("3ff14136-e09f-4df9-80ea-000000000002"): { { @@ -280,12 +373,12 @@ func TestEncoder_Encode(t *testing.T) { }, }, uuid.MustParse("3ff14136-e09f-4df9-80ea-000000000005"): nil, + uuid.MustParse("3ff14136-e09f-4df9-80ea-000000000006"): nil, }, wantVulns: map[uuid.UUID][]core.Vulnerability{ uuid.MustParse("3ff14136-e09f-4df9-80ea-000000000004"): { { ID: "CVE-2021-22876", - PkgID: "curl@7.50.3-1", PkgName: "curl", InstalledVersion: "7.50.3-1", FixedVersion: "7.50.3-1+deb9u1", @@ -294,6 +387,28 @@ func TestEncoder_Encode(t *testing.T) { }, }, }, + uuid.MustParse("3ff14136-e09f-4df9-80ea-000000000005"): { + { + ID: "CVE-2022-42003", + PkgName: "com.fasterxml.jackson.core:jackson-databind", + InstalledVersion: "2.13.4", + FixedVersion: "2.12.7.1, 2.13.4.2", + Vulnerability: dtypes.Vulnerability{ + Severity: "HIGH", + }, + }, + }, + uuid.MustParse("3ff14136-e09f-4df9-80ea-000000000006"): { + { + ID: "CVE-2022-42003", + PkgName: "com.fasterxml.jackson.core:jackson-databind", + InstalledVersion: "2.13.4", + FixedVersion: "2.12.7.1, 2.13.4.2", + Vulnerability: dtypes.Vulnerability{ + Severity: "HIGH", + }, + }, + }, }, }, { @@ -312,6 +427,7 @@ func TestEncoder_Encode(t *testing.T) { ID: "github.com/org/root", Name: "github.com/org/root", Identifier: ftypes.PkgIdentifier{ + UID: "03D528806D964D22", PURL: &packageurl.PackageURL{ Type: packageurl.TypeGolang, Namespace: "github.com/org", @@ -328,6 +444,7 @@ func TestEncoder_Encode(t *testing.T) { Name: "github.com/org/direct", Version: "v1.0.0", Identifier: ftypes.PkgIdentifier{ + UID: "A74CADAD4D9805FF", PURL: &packageurl.PackageURL{ Type: packageurl.TypeGolang, Namespace: "github.com/org", @@ -345,6 +462,7 @@ func TestEncoder_Encode(t *testing.T) { Name: "github.com/org/indirect", Version: "2.0.0", Identifier: ftypes.PkgIdentifier{ + UID: "955AB4E7E24AC085", PURL: &packageurl.PackageURL{ Type: packageurl.TypeGolang, Namespace: "github.com/org", @@ -359,6 +477,7 @@ func TestEncoder_Encode(t *testing.T) { Name: "stdlib", Version: "1.22.1", Identifier: ftypes.PkgIdentifier{ + UID: "49728B9674E318A6", PURL: &packageurl.PackageURL{ Type: packageurl.TypeGolang, Name: "stdlib", @@ -418,6 +537,7 @@ func TestEncoder_Encode(t *testing.T) { }, }, PkgIdentifier: ftypes.PkgIdentifier{ + UID: "03D528806D964D22", PURL: &packageurl.PackageURL{ Type: packageurl.TypeGolang, Namespace: "github.com/org", @@ -442,6 +562,7 @@ func TestEncoder_Encode(t *testing.T) { }, }, PkgIdentifier: ftypes.PkgIdentifier{ + UID: "A74CADAD4D9805FF", PURL: &packageurl.PackageURL{ Type: packageurl.TypeGolang, Namespace: "github.com/org", 
@@ -467,6 +588,7 @@ func TestEncoder_Encode(t *testing.T) { }, }, PkgIdentifier: ftypes.PkgIdentifier{ + UID: "955AB4E7E24AC085", PURL: &packageurl.PackageURL{ Type: packageurl.TypeGolang, Namespace: "github.com/org", @@ -492,6 +614,7 @@ func TestEncoder_Encode(t *testing.T) { }, }, PkgIdentifier: ftypes.PkgIdentifier{ + UID: "49728B9674E318A6", PURL: &packageurl.PackageURL{ Type: packageurl.TypeGolang, Name: "stdlib", @@ -552,6 +675,7 @@ func TestEncoder_Encode(t *testing.T) { Name: "org.apache.logging.log4j:log4j-core", Version: "2.23.1", Identifier: ftypes.PkgIdentifier{ + UID: "6C0AE96901617503", PURL: &packageurl.PackageURL{ Type: packageurl.TypeMaven, Namespace: "org.apache.logging.log4j", @@ -581,6 +705,53 @@ func TestEncoder_Encode(t *testing.T) { }, wantVulns: make(map[uuid.UUID][]core.Vulnerability), }, + { + name: "SBOM file without root component", + report: types.Report{ + SchemaVersion: 2, + ArtifactName: "report.cdx.json", + ArtifactType: artifact.TypeCycloneDX, + Results: []types.Result{ + { + Target: "Java", + Type: ftypes.Jar, + Class: types.ClassLangPkg, + Packages: []ftypes.Package{ + { + ID: "org.apache.logging.log4j:log4j-core:2.23.1", + Name: "org.apache.logging.log4j:log4j-core", + Version: "2.23.1", + Identifier: ftypes.PkgIdentifier{ + UID: "6C0AE96901617503", + PURL: &packageurl.PackageURL{ + Type: packageurl.TypeMaven, + Namespace: "org.apache.logging.log4j", + Name: "log4j-core", + Version: "2.23.1", + }, + }, + FilePath: "log4j-core-2.23.1.jar", + }, + }, + }, + }, + BOM: newTestBOM2(t), + }, + wantComponents: map[uuid.UUID]*core.Component{ + uuid.MustParse("3ff14136-e09f-4df9-80ea-000000000001"): fsComponent, + uuid.MustParse("3ff14136-e09f-4df9-80ea-000000000002"): libComponent, + }, + wantRels: map[uuid.UUID][]core.Relationship{ + uuid.MustParse("3ff14136-e09f-4df9-80ea-000000000001"): { + { + Dependency: uuid.MustParse("3ff14136-e09f-4df9-80ea-000000000002"), + Type: core.RelationshipContains, + }, + }, + uuid.MustParse("3ff14136-e09f-4df9-80ea-000000000002"): nil, + }, + wantVulns: make(map[uuid.UUID][]core.Vulnerability), + }, { name: "json file created from SBOM file (BOM is empty)", report: types.Report{ @@ -598,6 +769,7 @@ func TestEncoder_Encode(t *testing.T) { Name: "org.apache.logging.log4j:log4j-core", Version: "2.23.1", Identifier: ftypes.PkgIdentifier{ + UID: "6C0AE96901617503", PURL: &packageurl.PackageURL{ Type: packageurl.TypeMaven, Namespace: "org.apache.logging.log4j", @@ -698,6 +870,7 @@ var ( Group: "org.apache.logging.log4j", Version: "2.23.1", PkgIdentifier: ftypes.PkgIdentifier{ + UID: "6C0AE96901617503", BOMRef: "pkg:maven/org.apache.logging.log4j/log4j-core@2.23.1", PURL: &packageurl.PackageURL{ Type: packageurl.TypeMaven, @@ -734,3 +907,11 @@ func newTestBOM(t *testing.T) *core.BOM { bom.AddComponent(appComponent) return bom } + +// BOM without root component +func newTestBOM2(t *testing.T) *core.BOM { + uuid.SetFakeUUID(t, "2ff14136-e09f-4df9-80ea-%012d") + bom := core.NewBOM(core.Options{}) + bom.AddComponent(libComponent) + return bom +} diff --git a/pkg/sbom/spdx/marshal.go b/pkg/sbom/spdx/marshal.go index 5923a5b34800..1c4a84d60109 100644 --- a/pkg/sbom/spdx/marshal.go +++ b/pkg/sbom/spdx/marshal.go @@ -3,6 +3,7 @@ package spdx import ( "context" "fmt" + "slices" "sort" "strings" "time" @@ -13,7 +14,6 @@ import ( "github.com/spdx/tools-golang/spdx" "github.com/spdx/tools-golang/spdx/v2/common" spdxutils "github.com/spdx/tools-golang/utils" - "golang.org/x/exp/slices" "golang.org/x/xerrors" 
"github.com/aquasecurity/trivy/pkg/clock" diff --git a/pkg/sbom/spdx/marshal_test.go b/pkg/sbom/spdx/marshal_test.go index 02585ceaa418..122e529c14d5 100644 --- a/pkg/sbom/spdx/marshal_test.go +++ b/pkg/sbom/spdx/marshal_test.go @@ -64,6 +64,7 @@ func TestMarshaler_Marshal(t *testing.T) { Epoch: 0, Arch: "aarch64", Identifier: ftypes.PkgIdentifier{ + UID: "F4C10A4371C93487", PURL: &packageurl.PackageURL{ Type: packageurl.TypeRPM, Namespace: "centos", @@ -101,6 +102,7 @@ func TestMarshaler_Marshal(t *testing.T) { Name: "actionpack", Version: "7.0.1", Identifier: ftypes.PkgIdentifier{ + UID: "B1A9DE534F2737AF", PURL: &packageurl.PackageURL{ Type: packageurl.TypeGem, Name: "actionpack", @@ -112,6 +114,7 @@ func TestMarshaler_Marshal(t *testing.T) { Name: "actioncontroller", Version: "7.0.1", Identifier: ftypes.PkgIdentifier{ + UID: "1628B51BD543965D", PURL: &packageurl.PackageURL{ Type: packageurl.TypeGem, Name: "actioncontroller", @@ -130,6 +133,7 @@ func TestMarshaler_Marshal(t *testing.T) { Name: "actionpack", Version: "7.0.1", Identifier: ftypes.PkgIdentifier{ + UID: "92D6B6D3FF6F8FF5", PURL: &packageurl.PackageURL{ Type: packageurl.TypeGem, Name: "actionpack", @@ -387,6 +391,7 @@ func TestMarshaler_Marshal(t *testing.T) { Epoch: 1, Arch: "aarch64", Identifier: ftypes.PkgIdentifier{ + UID: "740219943F17B1DF", PURL: &packageurl.PackageURL{ Type: packageurl.TypeRPM, Namespace: "centos", @@ -427,6 +432,7 @@ func TestMarshaler_Marshal(t *testing.T) { Name: "actionpack", Version: "7.0.1", Identifier: ftypes.PkgIdentifier{ + UID: "E8DB2C6E35F8B990", PURL: &packageurl.PackageURL{ Type: packageurl.TypeGem, Name: "actionpack", @@ -443,6 +449,7 @@ func TestMarshaler_Marshal(t *testing.T) { Name: "actionpack", Version: "7.0.1", Identifier: ftypes.PkgIdentifier{ + UID: "B3E70B2159CFAC50", PURL: &packageurl.PackageURL{ Type: packageurl.TypeGem, Name: "actionpack", @@ -1063,6 +1070,7 @@ func TestMarshaler_Marshal(t *testing.T) { Name: "golang.org/x/crypto", Version: "v0.0.1", Identifier: ftypes.PkgIdentifier{ + UID: "161541A259EF014B", PURL: &packageurl.PackageURL{ Type: packageurl.TypeGolang, Namespace: "golang.org/x", diff --git a/pkg/scanner/local/scan.go b/pkg/scanner/local/scan.go index 9a73ad97dbfa..475fc1540086 100644 --- a/pkg/scanner/local/scan.go +++ b/pkg/scanner/local/scan.go @@ -4,13 +4,13 @@ import ( "context" "errors" "fmt" + "slices" "sort" "strings" "sync" "github.com/google/wire" "github.com/samber/lo" - "golang.org/x/exp/slices" "golang.org/x/xerrors" dbTypes "github.com/aquasecurity/trivy-db/pkg/types" diff --git a/pkg/scanner/local/scan_test.go b/pkg/scanner/local/scan_test.go index 42a189ba5d68..f0d154a3904d 100644 --- a/pkg/scanner/local/scan_test.go +++ b/pkg/scanner/local/scan_test.go @@ -12,7 +12,7 @@ import ( "github.com/aquasecurity/trivy-db/pkg/db" dbTypes "github.com/aquasecurity/trivy-db/pkg/types" - "github.com/aquasecurity/trivy/pkg/dbtest" + "github.com/aquasecurity/trivy/internal/dbtest" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/scanner/langpkg" diff --git a/pkg/scanner/scan.go b/pkg/scanner/scan.go index 7094e38c71fe..f1e4cf68c515 100644 --- a/pkg/scanner/scan.go +++ b/pkg/scanner/scan.go @@ -6,6 +6,7 @@ import ( "github.com/google/wire" "golang.org/x/xerrors" + "github.com/aquasecurity/trivy/pkg/cache" "github.com/aquasecurity/trivy/pkg/clock" 
"github.com/aquasecurity/trivy/pkg/fanal/artifact" aimage "github.com/aquasecurity/trivy/pkg/fanal/artifact/image" @@ -28,6 +29,11 @@ import ( // StandaloneSuperSet is used in the standalone mode var StandaloneSuperSet = wire.NewSet( + // Cache + cache.New, + wire.Bind(new(cache.ArtifactCache), new(cache.Cache)), + wire.Bind(new(cache.LocalArtifactCache), new(cache.Cache)), + local.SuperSet, wire.Bind(new(Driver), new(local.Scanner)), NewScanner, @@ -77,6 +83,10 @@ var StandaloneVMSet = wire.NewSet( // RemoteSuperSet is used in the client mode var RemoteSuperSet = wire.NewSet( + // Cache + cache.NewRemoteCache, + wire.Bind(new(cache.ArtifactCache), new(*cache.RemoteCache)), // No need for LocalArtifactCache + client.NewScanner, wire.Value([]client.Option(nil)), wire.Bind(new(Driver), new(client.Scanner)), diff --git a/pkg/types/report.go b/pkg/types/report.go index baaeaab0a0c3..6937f8ce7960 100644 --- a/pkg/types/report.go +++ b/pkg/types/report.go @@ -53,13 +53,15 @@ const ( ClassLicenseFile ResultClass = "license-file" // For detected licenses in files ClassCustom ResultClass = "custom" - ComplianceK8sNsa = Compliance("k8s-nsa") - ComplianceK8sCIS = Compliance("k8s-cis") - ComplianceK8sPSSBaseline = Compliance("k8s-pss-baseline") - ComplianceK8sPSSRestricted = Compliance("k8s-pss-restricted") - ComplianceAWSCIS12 = Compliance("aws-cis-1.2") - ComplianceAWSCIS14 = Compliance("aws-cis-1.4") - ComplianceDockerCIS = Compliance("docker-cis") + ComplianceK8sNsa10 = Compliance("k8s-nsa-1.0") + ComplianceK8sCIS123 = Compliance("k8s-cis-1.23") + ComplianceK8sPSSBaseline01 = Compliance("k8s-pss-baseline-0.1") + ComplianceK8sPSSRestricted01 = Compliance("k8s-pss-restricted-0.1") + ComplianceAWSCIS12 = Compliance("aws-cis-1.2") + ComplianceAWSCIS14 = Compliance("aws-cis-1.4") + ComplianceDockerCIS160 = Compliance("docker-cis-1.6.0") + ComplianceEksCIS14 = Compliance("eks-cis-1.4") + ComplianceRke2CIS124 = Compliance("rke2-cis-1.24") FormatTable Format = "table" FormatJSON Format = "json" @@ -91,13 +93,15 @@ var ( FormatGitHub, } SupportedCompliances = []string{ - ComplianceK8sNsa, - ComplianceK8sCIS, - ComplianceK8sPSSBaseline, - ComplianceK8sPSSRestricted, + ComplianceK8sNsa10, + ComplianceK8sCIS123, + ComplianceK8sPSSBaseline01, + ComplianceK8sPSSRestricted01, ComplianceAWSCIS12, ComplianceAWSCIS14, - ComplianceDockerCIS, + ComplianceDockerCIS160, + ComplianceEksCIS14, + ComplianceRke2CIS124, } ) diff --git a/pkg/types/target.go b/pkg/types/target.go index 26386b9d2429..bb2dce90fc52 100644 --- a/pkg/types/target.go +++ b/pkg/types/target.go @@ -1,7 +1,7 @@ package types import ( - "golang.org/x/exp/slices" + "slices" ) // VulnType represents vulnerability type diff --git a/pkg/utils/fsutils/fs.go b/pkg/utils/fsutils/fs.go index 6d0502d8cc18..b8efbf1cbc60 100644 --- a/pkg/utils/fsutils/fs.go +++ b/pkg/utils/fsutils/fs.go @@ -7,8 +7,8 @@ import ( "io/fs" "os" "path/filepath" + "slices" - "golang.org/x/exp/slices" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/log" @@ -18,30 +18,6 @@ const ( xdgDataHome = "XDG_DATA_HOME" ) -var cacheDir string - -// defaultCacheDir returns/creates the cache-dir to be used for trivy operations -func defaultCacheDir() string { - tmpDir, err := os.UserCacheDir() - if err != nil { - tmpDir = os.TempDir() - } - return filepath.Join(tmpDir, "trivy") -} - -// CacheDir returns the directory used for caching -func CacheDir() string { - if cacheDir == "" { - return defaultCacheDir() - } - return cacheDir -} - -// SetCacheDir sets the trivy 
cacheDir -func SetCacheDir(dir string) { - cacheDir = dir -} - func HomeDir() string { dataHome := os.Getenv(xdgDataHome) if dataHome != "" { diff --git a/pkg/version/app/version.go b/pkg/version/app/version.go index 8a7013078c9a..d1c7bdbe7d3d 100644 --- a/pkg/version/app/version.go +++ b/pkg/version/app/version.go @@ -1,8 +1,6 @@ package app -var ( - ver = "dev" -) +var ver = "dev" func Version() string { return ver diff --git a/pkg/version/doc/doc.go b/pkg/version/doc/doc.go new file mode 100644 index 000000000000..c02dc1e7655a --- /dev/null +++ b/pkg/version/doc/doc.go @@ -0,0 +1,49 @@ +package doc + +import ( + "fmt" + "net/url" + "path" + "strings" + + "github.com/aquasecurity/go-version/pkg/semver" + "github.com/aquasecurity/trivy/pkg/version/app" +) + +const devVersion = "dev" + +// BaseURL returns the base URL for the versioned documentation +func BaseURL(ver string) *url.URL { + ver = canonicalVersion(ver) + return &url.URL{ + Scheme: "https", + Host: "aquasecurity.github.io", + Path: path.Join("trivy", ver), + } +} + +// URL returns the URL for the versioned documentation with the given path +func URL(rawPath, fragment string) string { + base := BaseURL(app.Version()) + base.Path = path.Join(base.Path, rawPath) + base.Fragment = fragment + return base.String() +} + +func canonicalVersion(ver string) string { + if ver == devVersion { + return ver + } + ver = strings.TrimPrefix(ver, "v") + v, err := semver.Parse(ver) + if err != nil { + return devVersion + } + // Replace pre-release with "dev" + // e.g. v0.34.0-beta1+snapshot-1 + if v.IsPreRelease() || v.Metadata() != "" { + return devVersion + } + // Add "v" prefix and cut a patch number, "0.34.0" => "v0.34" for the URL + return fmt.Sprintf("v%d.%d", v.Major(), v.Minor()) +} diff --git a/pkg/version/doc/doc_test.go b/pkg/version/doc/doc_test.go new file mode 100644 index 000000000000..3107172962eb --- /dev/null +++ b/pkg/version/doc/doc_test.go @@ -0,0 +1,96 @@ +package doc_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy/pkg/version/doc" +) + +func TestBaseURL(t *testing.T) { + tests := []struct { + name string + ver string + want string + }{ + { + name: "dev", + ver: "dev", + want: "https://aquasecurity.github.io/trivy/dev", + }, + { + name: "semver", + ver: "0.52.0", + want: "https://aquasecurity.github.io/trivy/v0.52", + }, + { + name: "with v prefix", + ver: "v0.52.0", + want: "https://aquasecurity.github.io/trivy/v0.52", + }, + { + name: "pre-release", + ver: "0.52.0-beta1", + want: "https://aquasecurity.github.io/trivy/dev", + }, + { + name: "non-semver", + ver: "1", + want: "https://aquasecurity.github.io/trivy/dev", + }, + { + name: "empty", + ver: "", + want: "https://aquasecurity.github.io/trivy/dev", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := doc.BaseURL(tt.ver) + require.Equal(t, tt.want, got.String()) + }) + } +} + +func TestURL(t *testing.T) { + tests := []struct { + name string + rawPath string + fragment string + want string + }{ + { + name: "path without slash", + rawPath: "foo", + want: "https://aquasecurity.github.io/trivy/dev/foo", + }, + { + name: "path with leading slash", + rawPath: "/foo", + want: "https://aquasecurity.github.io/trivy/dev/foo", + }, + { + name: "path with slash", + rawPath: "foo/bar", + want: "https://aquasecurity.github.io/trivy/dev/foo/bar", + }, + { + name: "path with fragment", + rawPath: "foo", + fragment: "bar", + want: 
"https://aquasecurity.github.io/trivy/dev/foo#bar", + }, + { + name: "empty", + rawPath: "", + want: "https://aquasecurity.github.io/trivy/dev", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := doc.URL(tt.rawPath, tt.fragment) + require.Equal(t, tt.want, got) + }) + } +} diff --git a/pkg/vulnerability/vulnerability.go b/pkg/vulnerability/vulnerability.go index 8e27ee4733fc..6c1e35427a64 100644 --- a/pkg/vulnerability/vulnerability.go +++ b/pkg/vulnerability/vulnerability.go @@ -2,14 +2,17 @@ package vulnerability import ( "strings" + "sync" "github.com/google/wire" + "github.com/samber/lo" "github.com/aquasecurity/trivy-db/pkg/db" dbTypes "github.com/aquasecurity/trivy-db/pkg/types" "github.com/aquasecurity/trivy-db/pkg/vulnsrc/vulnerability" "github.com/aquasecurity/trivy/pkg/log" "github.com/aquasecurity/trivy/pkg/types" + "github.com/aquasecurity/trivy/pkg/version/doc" ) var ( @@ -46,6 +49,13 @@ var SuperSet = wire.NewSet( NewClient, ) +// Show warning if we use severity from another vendor +// cf. https://github.com/aquasecurity/trivy/issues/6714 +var onceWarn = sync.OnceFunc(func() { + // e.g. https://aquasecurity.github.io/trivy/latest/docs/scanner/vulnerability/#severity-selection + log.Warnf("Using severities from other vendors for some vulnerabilities. Read %s for details.", doc.URL("/docs/scanner/vulnerability/", "severity-selection")) +}) + // Client manipulates vulnerabilities type Client struct { dbc db.Operation @@ -77,13 +87,10 @@ func (c Client) FillInfo(vulns []types.DetectedVulnerability) { } // Detect the data source - var source dbTypes.SourceID - if vulns[i].DataSource != nil { - source = vulns[i].DataSource.ID - } + dataSource := lo.FromPtr(vulns[i].DataSource) - // Select the severity according to the detected source. - severity, severitySource := c.getVendorSeverity(vulnID, &vuln, source) + // Select the severity according to the detected sourceID. + severity, severitySource := c.getVendorSeverity(vulnID, &vuln, dataSource) // The vendor might provide package-specific severity like Debian. // For example, CVE-2015-2328 in Debian has "unimportant" for mongodb and "low" for pcre3. 
@@ -105,13 +112,13 @@ func (c Client) FillInfo(vulns []types.DetectedVulnerability) { vulns[i].Severity = severity vulns[i].SeveritySource = severitySource - vulns[i].PrimaryURL = c.getPrimaryURL(vulnID, vuln.References, source) + vulns[i].PrimaryURL = c.getPrimaryURL(vulnID, vuln.References, dataSource.ID) } } -func (c Client) getVendorSeverity(vulnID string, vuln *dbTypes.Vulnerability, source dbTypes.SourceID) (string, dbTypes.SourceID) { - if vs, ok := vuln.VendorSeverity[source]; ok { - return vs.String(), source +func (c Client) getVendorSeverity(vulnID string, vuln *dbTypes.Vulnerability, dataSource dbTypes.DataSource) (string, dbTypes.SourceID) { + if vs, ok := vuln.VendorSeverity[dataSource.ID]; ok { + return vs.String(), dataSource.ID } // use severity from GitHub for all GHSA-xxx vulnerabilities @@ -130,6 +137,7 @@ func (c Client) getVendorSeverity(vulnID string, vuln *dbTypes.Vulnerability, so return dbTypes.SeverityUnknown.String(), "" } + onceWarn() return vuln.Severity, "" } diff --git a/pkg/vulnerability/vulnerability_test.go b/pkg/vulnerability/vulnerability_test.go index 11dac691503e..d377ef6e4723 100644 --- a/pkg/vulnerability/vulnerability_test.go +++ b/pkg/vulnerability/vulnerability_test.go @@ -9,7 +9,7 @@ import ( dbTypes "github.com/aquasecurity/trivy-db/pkg/types" "github.com/aquasecurity/trivy-db/pkg/utils" "github.com/aquasecurity/trivy-db/pkg/vulnsrc/vulnerability" - "github.com/aquasecurity/trivy/pkg/dbtest" + "github.com/aquasecurity/trivy/internal/dbtest" "github.com/aquasecurity/trivy/pkg/types" vuln "github.com/aquasecurity/trivy/pkg/vulnerability" ) diff --git a/pkg/x/path/path.go b/pkg/x/path/path.go index 8bf5e335365b..d7c66ecadc8b 100644 --- a/pkg/x/path/path.go +++ b/pkg/x/path/path.go @@ -1,9 +1,8 @@ package path import ( + "slices" "strings" - - "golang.org/x/exp/slices" ) // Contains reports whether the path contains the subpath.
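(Aside, not part of the patch.) The encodePackages change earlier in this diff keys components by `PkgIdentifier.UID` while resolving `DependsOn` through a separate map keyed by `PkgID`. A minimal sketch of that bookkeeping, using a simplified stand-in struct (not Trivy's real type) and the UIDs from the test data above:

```go
package main

import "fmt"

// pkg is an illustrative stand-in for Trivy's package type.
type pkg struct {
	ID       string // PkgID: may repeat when the same package is found at several paths
	UID      string // unique per package instance
	FilePath string
}

func main() {
	pkgs := []pkg{
		{ID: "com.fasterxml.jackson.core:jackson-databind:2.13.4", UID: "A6BD5A2FE5C00E10", FilePath: "/foo/jackson-databind-2.13.4.jar"},
		{ID: "com.fasterxml.jackson.core:jackson-databind:2.13.4", UID: "64244651208EC759", FilePath: "/bar/jackson-databind-2.13.4.jar"},
	}

	// Components keyed by UID keep both JAR copies distinct,
	// so each gets its own BOM entry and its own vulnerabilities.
	components := make(map[string]pkg, len(pkgs))
	// DependsOn entries reference PkgIDs, so a second, PkgID-keyed map is used
	// for dependency lookups; duplicate keys are harmless there because
	// aggregated packages carry no DependsOn entries.
	dependencies := make(map[string]pkg, len(pkgs))
	for _, p := range pkgs {
		components[p.UID] = p
		dependencies[p.ID] = p
	}
	fmt.Println(len(components), len(dependencies)) // 2 1
}
```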
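(Aside, not part of the patch.) Several hunks above swap `golang.org/x/exp/slices` for the standard-library `slices` package that landed in Go 1.21; the call sites stay untouched because the stdlib API mirrors x/exp for the functions used here. A tiny sketch of the drop-in usage with arbitrary example values:

```go
package main

import (
	"fmt"
	"slices" // standard library since Go 1.21, replacing golang.org/x/exp/slices
)

func main() {
	vulnTypes := []string{"os", "library"}

	fmt.Println(slices.Contains(vulnTypes, "library")) // true

	slices.Sort(vulnTypes)
	fmt.Println(vulnTypes) // [library os]
}
```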