diff --git a/cmd/trivy/main.go b/cmd/trivy/main.go index e2c545975315..e3118ae8e97f 100644 --- a/cmd/trivy/main.go +++ b/cmd/trivy/main.go @@ -15,7 +15,7 @@ import ( func main() { if err := run(); err != nil { - log.Fatal(err) + log.Fatal("Fatal error", log.Err(err)) } } diff --git a/magefiles/cloud_actions.go b/magefiles/cloud_actions.go index 045586c8837a..195bdde349e8 100644 --- a/magefiles/cloud_actions.go +++ b/magefiles/cloud_actions.go @@ -206,10 +206,10 @@ func main() { // GenAllowedActions generates the list of valid actions for wildcard support func GenAllowedActions() error { - log.Logger.Info("Start parsing actions") + log.Info("Start parsing actions") startTime := time.Now() defer func() { - log.Logger.Infof("Parsing is completed. Duration %fs\n", time.Since(startTime).Seconds()) + log.Info("Parsing is completed", log.Duration(time.Since(startTime).Seconds())) }() doc, err := htmlquery.LoadURL(serviceActionReferencesURL) diff --git a/magefiles/docs.go b/magefiles/docs.go index b69e813690af..1a59007de229 100644 --- a/magefiles/docs.go +++ b/magefiles/docs.go @@ -24,6 +24,6 @@ func main() { cmd := commands.NewApp() cmd.DisableAutoGenTag = true if err := doc.GenMarkdownTree(cmd, "./docs/docs/references/configuration/cli"); err != nil { - log.Fatal(err) + log.Fatal("Fatal error", log.Err(err)) } } diff --git a/pkg/attestation/sbom/rekor.go b/pkg/attestation/sbom/rekor.go index 7336def6b511..a5872152c8f5 100644 --- a/pkg/attestation/sbom/rekor.go +++ b/pkg/attestation/sbom/rekor.go @@ -39,7 +39,7 @@ func (r *Rekor) RetrieveSBOM(ctx context.Context, digest string) ([]byte, error) return nil, ErrNoSBOMAttestation } - log.Logger.Debugf("Found matching Rekor entries: %s", entryIDs) + log.Debug("Found matching Rekor entries", log.Any("entry_ids", entryIDs)) for _, ids := range lo.Chunk[rekor.EntryID](entryIDs, rekor.MaxGetEntriesLimit) { entries, err := r.client.GetEntries(ctx, ids) diff --git a/pkg/attestation/sbom/rekor_test.go b/pkg/attestation/sbom/rekor_test.go index db97d781568c..a5aee5a71707 100644 --- a/pkg/attestation/sbom/rekor_test.go +++ b/pkg/attestation/sbom/rekor_test.go @@ -31,7 +31,7 @@ func TestRekor_RetrieveSBOM(t *testing.T) { }, } - require.NoError(t, log.InitLogger(false, true)) + log.InitLogger(false, true) for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { ts := rekortest.NewServer(t) diff --git a/pkg/cloud/aws/commands/run.go b/pkg/cloud/aws/commands/run.go index 23406aeafda5..58744e752c79 100644 --- a/pkg/cloud/aws/commands/run.go +++ b/pkg/cloud/aws/commands/run.go @@ -23,7 +23,7 @@ import ( var allSupportedServicesFunc = awsScanner.AllSupportedServices func getAccountIDAndRegion(ctx context.Context, region, endpoint string) (string, string, error) { - log.Logger.Debug("Looking for AWS credentials provider...") + log.DebugContext(ctx, "Looking for AWS credentials provider...") cfg, err := config.LoadDefaultAWSConfig(ctx, region, endpoint) if err != nil { @@ -32,7 +32,7 @@ func getAccountIDAndRegion(ctx context.Context, region, endpoint string) (string svc := sts.NewFromConfig(cfg) - log.Logger.Debug("Looking up AWS caller identity...") + log.DebugContext(ctx, "Looking up AWS caller identity...") result, err := svc.GetCallerIdentity(ctx, &sts.GetCallerIdentityInput{}) if err != nil { return "", "", xerrors.Errorf("failed to discover AWS caller identity: %w", err) @@ -40,7 +40,7 @@ func getAccountIDAndRegion(ctx context.Context, region, endpoint string) (string if result.Account == nil { return "", "", xerrors.Errorf("missing account id for aws 
account") } - log.Logger.Debugf("Verified AWS credentials for account %s!", *result.Account) + log.DebugContext(ctx, "Verified AWS credentials for account!", log.String("account", *result.Account)) return *result.Account, cfg.Region, nil } @@ -85,22 +85,22 @@ func processOptions(ctx context.Context, opt *flag.Options) error { } } - err := filterServices(opt) + err := filterServices(ctx, opt) if err != nil { return err } - log.Logger.Debug("scanning services: ", opt.Services) + log.DebugContext(ctx, "Scanning services", log.Any("services", opt.Services)) return nil } -func filterServices(opt *flag.Options) error { +func filterServices(ctx context.Context, opt *flag.Options) error { switch { case len(opt.Services) == 0 && len(opt.SkipServices) == 0: - log.Logger.Debug("No service(s) specified, scanning all services...") + log.DebugContext(ctx, "No service(s) specified, scanning all services...") opt.Services = allSupportedServicesFunc() case len(opt.SkipServices) > 0: - log.Logger.Debug("excluding services: ", opt.SkipServices) + log.DebugContext(ctx, "Excluding services", log.Any("services", opt.SkipServices)) for _, s := range allSupportedServicesFunc() { if slices.Contains(opt.SkipServices, s) { continue @@ -110,7 +110,8 @@ func filterServices(opt *flag.Options) error { } } case len(opt.Services) > 0: - log.Logger.Debugf("Specific services were requested: [%s]...", strings.Join(opt.Services, ", ")) + log.DebugContext(ctx, "Specific services were requested...", + log.String("services", strings.Join(opt.Services, ", "))) for _, service := range opt.Services { var found bool supported := allSupportedServicesFunc() @@ -132,10 +133,12 @@ func Run(ctx context.Context, opt flag.Options) error { ctx, cancel := context.WithTimeout(ctx, opt.GlobalOptions.Timeout) defer cancel() + ctx = log.WithContextPrefix(ctx, "aws") + var err error defer func() { if errors.Is(err, context.DeadlineExceeded) { - log.Logger.Warn("Increase --timeout value") + log.Warn("Increase --timeout value") } }() @@ -148,14 +151,14 @@ func Run(ctx context.Context, opt flag.Options) error { var aerr errs.AdapterError if errors.As(err, &aerr) { for _, e := range aerr.Errors() { - log.Logger.Warnf("Adapter error: %s", e) + log.WarnContext(ctx, "Adapter error", log.Err(e)) } } else { return xerrors.Errorf("aws scan error: %w", err) } } - log.Logger.Debug("Writing report to output...") + log.DebugContext(ctx, "Writing report to output...") res := results.GetFailed() if opt.MisconfOptions.IncludeNonFailures { diff --git a/pkg/cloud/aws/scanner/scanner.go b/pkg/cloud/aws/scanner/scanner.go index 84b5cf6c640e..d1efe2b78d4a 100644 --- a/pkg/cloud/aws/scanner/scanner.go +++ b/pkg/cloud/aws/scanner/scanner.go @@ -20,10 +20,13 @@ import ( ) type AWSScanner struct { + logger *log.Logger } func NewScanner() *AWSScanner { - return &AWSScanner{} + return &AWSScanner{ + logger: log.WithPrefix("aws"), + } } func (s *AWSScanner) Scan(ctx context.Context, option flag.Options) (scan.Results, bool, error) { @@ -31,7 +34,7 @@ func (s *AWSScanner) Scan(ctx context.Context, option flag.Options) (scan.Result awsCache := cache.New(option.CacheDir, option.MaxCacheAge, option.Account, option.Region) included, missing := awsCache.ListServices(option.Services) - prefixedLogger := &log.PrefixedLogger{Name: "aws"} + prefixedLogger := log.NewWriteLogger(log.WithPrefix("aws")) var scannerOpts []options.ScannerOption if !option.NoProgress { @@ -72,10 +75,10 @@ func (s *AWSScanner) Scan(ctx context.Context, option flag.Options) (scan.Result downloadedPolicyPaths, 
err = operation.InitBuiltinPolicies(context.Background(), option.CacheDir, option.Quiet, option.SkipPolicyUpdate, option.MisconfOptions.PolicyBundleRepository, option.RegistryOpts())
 		if err != nil {
 			if !option.SkipPolicyUpdate {
-				log.Logger.Errorf("Falling back to embedded policies: %s", err)
+				s.logger.Error("Falling back to embedded policies", log.Err(err))
 			}
 		} else {
-			log.Logger.Debug("Policies successfully loaded from disk")
+			s.logger.Debug("Policies successfully loaded from disk")
 			policyPaths = append(policyPaths, downloadedPolicyPaths...)
 			scannerOpts = append(scannerOpts,
 				options.ScannerWithEmbeddedPolicies(false),
@@ -95,7 +98,7 @@ func (s *AWSScanner) Scan(ctx context.Context, option flag.Options) (scan.Result
 	dataFS, dataPaths, err := misconf.CreateDataFS(option.RegoOptions.DataPaths)
 	if err != nil {
-		log.Logger.Errorf("Could not load config data: %s", err)
+		s.logger.Error("Could not load config data", log.Err(err))
 	}
 	scannerOpts = append(scannerOpts,
 		options.ScannerWithDataDirs(dataPaths...),
diff --git a/pkg/commands/app.go b/pkg/commands/app.go
index 41d1d2ff645d..902b92b78087 100644
--- a/pkg/commands/app.go
+++ b/pkg/commands/app.go
@@ -114,7 +114,7 @@ func loadPluginCommands() []*cobra.Command {
 	var commands []*cobra.Command
 	plugins, err := plugin.LoadAll()
 	if err != nil {
-		log.Logger.Debugf("no plugins were loaded")
+		log.Debug("No plugins loaded")
 		return nil
 	}
 	for _, p := range plugins {
@@ -142,12 +142,12 @@ func initConfig(configFile string) error {
 	viper.SetConfigType("yaml")
 	if err := viper.ReadInConfig(); err != nil {
 		if errors.Is(err, os.ErrNotExist) {
-			log.Logger.Debugf("config file %q not found", configFile)
+			log.Debug("Config file not found", log.String("file_path", configFile))
 			return nil
 		}
 		return xerrors.Errorf("config file %q loading error: %s", configFile, err)
 	}
-	log.Logger.Infof("Loaded %s", configFile)
+	log.Info("Loaded", log.String("file_path", configFile))
 	return nil
 }
@@ -196,9 +196,7 @@ func NewRootCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
 		}
 		// Initialize logger
-		if err := log.InitLogger(globalOptions.Debug, globalOptions.Quiet); err != nil {
-			return err
-		}
+		log.InitLogger(globalOptions.Debug, globalOptions.Quiet)
 		return nil
 	},
@@ -570,7 +568,7 @@ func NewClientCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
 			return validateArgs(cmd, args)
 		},
 		RunE: func(cmd *cobra.Command, args []string) error {
-			log.Logger.Warn("'client' subcommand is deprecated now. See https://github.com/aquasecurity/trivy/discussions/2119")
+			log.Warn("'client' subcommand is deprecated now. 
See https://github.com/aquasecurity/trivy/discussions/2119") if err := clientFlags.Bind(cmd); err != nil { return xerrors.Errorf("flag bind error: %w", err) @@ -1040,7 +1038,7 @@ The following services are supported: } if opts.Timeout < time.Hour { opts.Timeout = time.Hour - log.Logger.Debug("Timeout is set to less than 1 hour - upgrading to 1 hour for this command.") + log.Info("Timeout is set to less than 1 hour - upgrading to 1 hour for this command.") } return awscommands.Run(cmd.Context(), opts) }, @@ -1106,7 +1104,7 @@ func NewVMCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command { } if options.Timeout < time.Minute*30 { options.Timeout = time.Minute * 30 - log.Logger.Debug("Timeout is set to less than 30 min - upgrading to 30 min for this command.") + log.Info("Timeout is set to less than 30 min - upgrading to 30 min for this command.") } return artifact.Run(cmd.Context(), options, artifact.TargetVM) }, diff --git a/pkg/commands/artifact/run.go b/pkg/commands/artifact/run.go index c54f0fe2fe75..b156ce44754a 100644 --- a/pkg/commands/artifact/run.go +++ b/pkg/commands/artifact/run.go @@ -360,7 +360,7 @@ func (r *runner) initCache(opts flag.Options) error { if err != nil { return xerrors.Errorf("unable to initialize the cache: %w", err) } - log.Logger.Debugf("cache dir: %s", fsutils.CacheDir()) + log.Debug("Cache dir", log.String("dir", fsutils.CacheDir())) if opts.Reset { defer cacheClient.Close() @@ -400,12 +400,12 @@ func Run(ctx context.Context, opts flag.Options, targetKind TargetKind) (err err defer func() { if errors.Is(err, context.DeadlineExceeded) { - log.Logger.Warn("Increase --timeout value") + log.Warn("Increase --timeout value") } }() if opts.GenerateDefaultConfig { - log.Logger.Info("Writing the default config to trivy-default.yaml...") + log.Info("Writing the default config to trivy-default.yaml...") return viper.SafeWriteConfigAs("trivy-default.yaml") } @@ -484,7 +484,8 @@ func disabledAnalyzers(opts flag.Options) []analyzer.Type { // Filter only enabled misconfiguration scanners ma, err := filterMisconfigAnalyzers(opts.MisconfigScanners, analyzer.TypeConfigFiles) if err != nil { - log.Logger.Errorf("Invalid misconfig scanners specified: %s defaulting to use all misconfig scanners", opts.MisconfigScanners) + log.Error("Invalid misconfiguration scanners specified, defaulting to use all misconfig scanners", + log.Any("scanners", opts.MisconfigScanners)) } else { analyzers = append(analyzers, ma...) 
} @@ -528,7 +529,7 @@ func filterMisconfigAnalyzers(included, all []analyzer.Type) ([]analyzer.Type, e return nil, xerrors.Errorf("invalid misconfiguration scanner specified %s valid scanners: %s", missing, all) } - log.Logger.Debugf("Enabling misconfiguration scanners: %s", included) + log.Debug("Enabling misconfiguration scanners", log.Any("scanners", included)) return lo.Without(all, included...), nil } @@ -569,28 +570,28 @@ func initScannerConfig(opts flag.Options, cacheClient cache.Cache) (ScannerConfi } if len(opts.ImageConfigScanners) != 0 { - log.Logger.Infof("Container image config scanners: %q", opts.ImageConfigScanners) + log.Info("Container image config scanners", log.Any("scanners", opts.ImageConfigScanners)) } if opts.Scanners.Enabled(types.VulnerabilityScanner) { - log.Logger.Info("Vulnerability scanning is enabled") - log.Logger.Debugf("Vulnerability type: %s", scanOptions.VulnType) + log.Info("Vulnerability scanning is enabled") + log.Debug("Vulnerability type", log.Any("type", scanOptions.VulnType)) } // ScannerOption is filled only when config scanning is enabled. var configScannerOptions misconf.ScannerOption if opts.Scanners.Enabled(types.MisconfigScanner) || opts.ImageConfigScanners.Enabled(types.MisconfigScanner) { - log.Logger.Info("Misconfiguration scanning is enabled") + log.Info("Misconfiguration scanning is enabled") var downloadedPolicyPaths []string var disableEmbedded bool downloadedPolicyPaths, err := operation.InitBuiltinPolicies(context.Background(), opts.CacheDir, opts.Quiet, opts.SkipPolicyUpdate, opts.MisconfOptions.PolicyBundleRepository, opts.RegistryOpts()) if err != nil { if !opts.SkipPolicyUpdate { - log.Logger.Errorf("Falling back to embedded policies: %s", err) + log.Error("Falling back to embedded policies", log.Err(err)) } } else { - log.Logger.Debug("Policies successfully loaded from disk") + log.Debug("Policies successfully loaded from disk") disableEmbedded = true } configScannerOptions = misconf.ScannerOption{ @@ -617,18 +618,18 @@ func initScannerConfig(opts flag.Options, cacheClient cache.Cache) (ScannerConfi // Do not load config file for secret scanning if opts.Scanners.Enabled(types.SecretScanner) { ver := canonicalVersion(opts.AppVersion) - log.Logger.Info("Secret scanning is enabled") - log.Logger.Info("If your scanning is slow, please try '--scanners vuln' to disable secret scanning") - log.Logger.Infof("Please see also https://aquasecurity.github.io/trivy/%s/docs/scanner/secret/#recommendation for faster secret detection", ver) + log.Info("Secret scanning is enabled") + log.Info("If your scanning is slow, please try '--scanners vuln' to disable secret scanning") + log.Infof("Please see also https://aquasecurity.github.io/trivy/%s/docs/scanner/secret/#recommendation for faster secret detection", ver) } else { opts.SecretConfigPath = "" } if opts.Scanners.Enabled(types.LicenseScanner) { if opts.LicenseFull { - log.Logger.Info("Full license scanning is enabled") + log.Info("Full license scanning is enabled") } else { - log.Logger.Info("License scanning is enabled") + log.Info("License scanning is enabled") } } diff --git a/pkg/commands/convert/run.go b/pkg/commands/convert/run.go index 9045e54bfa3d..34e799f7a061 100644 --- a/pkg/commands/convert/run.go +++ b/pkg/commands/convert/run.go @@ -39,7 +39,7 @@ func Run(ctx context.Context, opts flag.Options) (err error) { return xerrors.Errorf("unable to filter results: %w", err) } - log.Logger.Debug("Writing report to output...") + log.Debug("Writing report to output...") if err = 
report.Write(ctx, r, opts); err != nil { return xerrors.Errorf("unable to write results: %w", err) } diff --git a/pkg/commands/operation/operation.go b/pkg/commands/operation/operation.go index 8f8561a7c290..2b4e2a7f5ffa 100644 --- a/pkg/commands/operation/operation.go +++ b/pkg/commands/operation/operation.go @@ -42,7 +42,7 @@ type Cache struct { // NewCache is the factory method for Cache func NewCache(c flag.CacheOptions) (Cache, error) { if strings.HasPrefix(c.CacheBackend, "redis://") { - log.Logger.Infof("Redis cache: %s", c.CacheBackendMasked()) + log.Info("Redis cache", log.String("url", c.CacheBackendMasked())) options, err := redis.ParseURL(c.CacheBackend) if err != nil { return Cache{}, err @@ -70,7 +70,7 @@ func NewCache(c flag.CacheOptions) (Cache, error) { } if c.CacheTTL != 0 { - log.Logger.Warn("'--cache-ttl' is only available with Redis cache backend") + log.Warn("'--cache-ttl' is only available with Redis cache backend") } // standalone mode @@ -94,7 +94,7 @@ func (c Cache) Reset() (err error) { // ClearDB clears the DB cache func (c Cache) ClearDB() (err error) { - log.Logger.Info("Removing DB file...") + log.Info("Removing DB file...") if err = os.RemoveAll(fsutils.CacheDir()); err != nil { return xerrors.Errorf("failed to remove the directory (%s) : %w", fsutils.CacheDir(), err) } @@ -103,7 +103,7 @@ func (c Cache) ClearDB() (err error) { // ClearArtifacts clears the artifact cache func (c Cache) ClearArtifacts() error { - log.Logger.Info("Removing artifact caches...") + log.Info("Removing artifact caches...") if err := c.Clear(); err != nil { return xerrors.Errorf("failed to remove the cache: %w", err) } @@ -123,9 +123,8 @@ func DownloadDB(ctx context.Context, appVersion, cacheDir string, dbRepository n } if needsUpdate { - log.Logger.Info("Need to update DB") - log.Logger.Infof("DB Repository: %s", dbRepository) - log.Logger.Info("Downloading DB...") + log.Info("Need to update DB") + log.Info("Downloading DB...", log.String("repository", dbRepository.String())) if err = client.Download(ctx, cacheDir, opt); err != nil { return xerrors.Errorf("failed to download vulnerability DB: %w", err) } @@ -144,8 +143,8 @@ func showDBInfo(cacheDir string) error { if err != nil { return xerrors.Errorf("something wrong with DB: %w", err) } - log.Logger.Debugf("DB Schema: %d, UpdatedAt: %s, NextUpdate: %s, DownloadedAt: %s", - meta.Version, meta.UpdatedAt, meta.NextUpdate, meta.DownloadedAt) + log.Debug("DB info", log.Int("schema", meta.Version), log.Time("updated_at", meta.UpdatedAt), + log.Time("next_update", meta.NextUpdate), log.Time("downloaded_at", meta.DownloadedAt)) return nil } @@ -168,8 +167,8 @@ func InitBuiltinPolicies(ctx context.Context, cacheDir string, quiet, skipUpdate } if needsUpdate { - log.Logger.Info("Need to update the built-in policies") - log.Logger.Info("Downloading the built-in policies...") + log.Info("Need to update the built-in policies") + log.Info("Downloading the built-in policies...") if err = client.DownloadBuiltinPolicies(ctx, registryOpts); err != nil { return nil, xerrors.Errorf("failed to download built-in policies: %w", err) } @@ -179,7 +178,7 @@ func InitBuiltinPolicies(ctx context.Context, cacheDir string, quiet, skipUpdate if err != nil { if skipUpdate { msg := "No downloadable policies were loaded as --skip-policy-update is enabled" - log.Logger.Info(msg) + log.Info(msg) return nil, xerrors.Errorf(msg) } return nil, xerrors.Errorf("policy load error: %w", err) @@ -213,7 +212,8 @@ func Exit(opts flag.Options, failedResults bool) { func 
ExitOnEOL(opts flag.Options, m types.Metadata) { if opts.ExitOnEOL != 0 && m.OS != nil && m.OS.Eosl { - log.Logger.Errorf("Detected EOL OS: %s %s", m.OS.Family, m.OS.Name) + log.Error("Detected EOL OS", log.String("family", string(m.OS.Family)), + log.String("version", m.OS.Name)) os.Exit(opts.ExitOnEOL) } } diff --git a/pkg/commands/server/run.go b/pkg/commands/server/run.go index 03b8f144170a..70788db6a6f3 100644 --- a/pkg/commands/server/run.go +++ b/pkg/commands/server/run.go @@ -16,9 +16,7 @@ import ( // Run runs the scan func Run(ctx context.Context, opts flag.Options) (err error) { - if err = log.InitLogger(opts.Debug, opts.Quiet); err != nil { - return xerrors.Errorf("failed to initialize a logger: %w", err) - } + log.InitLogger(opts.Debug, opts.Quiet) // configure cache dir fsutils.SetCacheDir(opts.CacheDir) @@ -27,7 +25,7 @@ func Run(ctx context.Context, opts flag.Options) (err error) { return xerrors.Errorf("server cache error: %w", err) } defer cache.Close() - log.Logger.Debugf("cache dir: %s", fsutils.CacheDir()) + log.Debug("Cache", log.String("dir", fsutils.CacheDir())) if opts.Reset { return cache.ClearDB() diff --git a/pkg/db/db.go b/pkg/db/db.go index 9ecb281b064e..5ac539f203e8 100644 --- a/pkg/db/db.go +++ b/pkg/db/db.go @@ -96,22 +96,22 @@ func NewClient(cacheDir string, quiet bool, opts ...Option) *Client { func (c *Client) NeedsUpdate(cliVersion string, skip bool) (bool, error) { meta, err := c.metadata.Get() if err != nil { - log.Logger.Debugf("There is no valid metadata file: %s", err) + log.Debug("There is no valid metadata file", log.Err(err)) if skip { - log.Logger.Error("The first run cannot skip downloading DB") + log.Error("The first run cannot skip downloading DB") return false, xerrors.New("--skip-update cannot be specified on the first run") } meta = metadata.Metadata{Version: db.SchemaVersion} } if db.SchemaVersion < meta.Version { - log.Logger.Errorf("Trivy version (%s) is old. Update to the latest version.", cliVersion) + log.Error("The Trivy version is old. Update to the latest version.", log.String("version", cliVersion)) return false, xerrors.Errorf("the version of DB schema doesn't match. Local DB: %d, Expected: %d", meta.Version, db.SchemaVersion) } if skip { - log.Logger.Debug("Skipping DB update...") + log.Debug("Skipping DB update...") if err = c.validate(meta); err != nil { return false, xerrors.Errorf("validate error: %w", err) } @@ -119,7 +119,8 @@ func (c *Client) NeedsUpdate(cliVersion string, skip bool) (bool, error) { } if db.SchemaVersion != meta.Version { - log.Logger.Debugf("The local DB schema version (%d) does not match with supported version schema (%d).", meta.Version, db.SchemaVersion) + log.Debug("The local DB schema version does not match with supported version schema.", + log.Int("local_version", meta.Version), log.Int("supported_version", db.SchemaVersion)) return true, nil } @@ -128,7 +129,7 @@ func (c *Client) NeedsUpdate(cliVersion string, skip bool) (bool, error) { func (c *Client) validate(meta metadata.Metadata) error { if db.SchemaVersion != meta.Version { - log.Logger.Error("The local DB has an old schema version which is not supported by the current version of Trivy CLI. DB needs to be updated.") + log.Error("The local DB has an old schema version which is not supported by the current version of Trivy CLI. DB needs to be updated.") return xerrors.Errorf("--skip-update cannot be specified with the old DB schema. 
Local DB: %d, Expected: %d", meta.Version, db.SchemaVersion) } @@ -137,12 +138,12 @@ func (c *Client) validate(meta metadata.Metadata) error { func (c *Client) isNewDB(meta metadata.Metadata) bool { if c.clock.Now().Before(meta.NextUpdate) { - log.Logger.Debug("DB update was skipped because the local DB is the latest") + log.Debug("DB update was skipped because the local DB is the latest") return true } if c.clock.Now().Before(meta.DownloadedAt.Add(time.Hour)) { - log.Logger.Debug("DB update was skipped because the local DB was downloaded during the last hour") + log.Debug("DB update was skipped because the local DB was downloaded during the last hour") return true } return false @@ -152,7 +153,7 @@ func (c *Client) isNewDB(meta metadata.Metadata) bool { func (c *Client) Download(ctx context.Context, dst string, opt types.RegistryOptions) error { // Remove the metadata file under the cache directory before downloading DB if err := c.metadata.Delete(); err != nil { - log.Logger.Debug("no metadata file") + log.Debug("No metadata file") } art, err := c.initOCIArtifact(opt) @@ -171,7 +172,7 @@ func (c *Client) Download(ctx context.Context, dst string, opt types.RegistryOpt } func (c *Client) updateDownloadedAt(dst string) error { - log.Logger.Debug("Updating database metadata...") + log.Debug("Updating database metadata...") // We have to initialize a metadata client here // since the destination may be different from the cache directory. @@ -201,7 +202,7 @@ func (c *Client) initOCIArtifact(opt types.RegistryOptions) (*oci.Artifact, erro for _, diagnostic := range terr.Errors { // For better user experience if diagnostic.Code == transport.DeniedErrorCode || diagnostic.Code == transport.UnauthorizedErrorCode { - log.Logger.Warn("See https://aquasecurity.github.io/trivy/latest/docs/references/troubleshooting/#db") + log.Warn("See https://aquasecurity.github.io/trivy/latest/docs/references/troubleshooting/#db") break } } diff --git a/pkg/dependency/parser/c/conan/parse.go b/pkg/dependency/parser/c/conan/parse.go index 78e3bdc09636..2020377a4663 100644 --- a/pkg/dependency/parser/c/conan/parse.go +++ b/pkg/dependency/parser/c/conan/parse.go @@ -30,10 +30,14 @@ type Node struct { EndLine int } -type Parser struct{} +type Parser struct { + logger *log.Logger +} func NewParser() types.Parser { - return &Parser{} + return &Parser{ + logger: log.WithPrefix("conan"), + } } func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, error) { @@ -60,7 +64,7 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, } lib, err := parseRef(node) if err != nil { - log.Logger.Debug(err) + p.logger.Debug("Parse ref error", log.Err(err)) continue } diff --git a/pkg/dependency/parser/dotnet/core_deps/parse.go b/pkg/dependency/parser/dotnet/core_deps/parse.go index c4bf533a87df..399c38736779 100644 --- a/pkg/dependency/parser/dotnet/core_deps/parse.go +++ b/pkg/dependency/parser/dotnet/core_deps/parse.go @@ -12,10 +12,14 @@ import ( xio "github.com/aquasecurity/trivy/pkg/x/io" ) -type Parser struct{} +type Parser struct { + logger *log.Logger +} func NewParser() types.Parser { - return &Parser{} + return &Parser{ + logger: log.WithPrefix("dotnet"), + } } func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, error) { @@ -38,7 +42,7 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, split := strings.Split(nameVer, "/") if len(split) != 2 { // Invalid name - log.Logger.Warnf("Cannot parse .NET library 
version from: %s", nameVer) + p.logger.Warn("Cannot parse .NET library version", log.String("library", nameVer)) continue } diff --git a/pkg/dependency/parser/hex/mix/parse.go b/pkg/dependency/parser/hex/mix/parse.go index edc43fd284c6..ed5543ca4507 100644 --- a/pkg/dependency/parser/hex/mix/parse.go +++ b/pkg/dependency/parser/hex/mix/parse.go @@ -14,13 +14,17 @@ import ( ) // Parser is a parser for mix.lock -type Parser struct{} +type Parser struct { + logger *log.Logger +} func NewParser() types.Parser { - return &Parser{} + return &Parser{ + logger: log.WithPrefix("mix"), + } } -func (Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, error) { +func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, error) { var libs []types.Library scanner := bufio.NewScanner(r) var lineNumber int // It is used to save dependency location @@ -43,9 +47,9 @@ func (Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, er // git repository doesn't have dependency version // skip these dependencies if !strings.Contains(ss[0], ":git") { - log.Logger.Warnf("Cannot parse dependency: %s", line) + p.logger.Warn("Cannot parse dependency", log.String("line", line)) } else { - log.Logger.Debugf("Skip git dependencies: %s", name) + p.logger.Debug("Skip git dependencies", log.String("name", name)) } continue } diff --git a/pkg/dependency/parser/java/jar/parse.go b/pkg/dependency/parser/java/jar/parse.go index d5f1f6df0a4d..06f130c6b07c 100644 --- a/pkg/dependency/parser/java/jar/parse.go +++ b/pkg/dependency/parser/java/jar/parse.go @@ -15,7 +15,6 @@ import ( "strings" "github.com/samber/lo" - "go.uber.org/zap" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/dependency/types" @@ -34,6 +33,7 @@ type Client interface { } type Parser struct { + logger *log.Logger rootFilePath string offline bool size int64 @@ -63,6 +63,7 @@ func WithSize(size int64) Option { func NewParser(c Client, opts ...Option) types.Parser { p := &Parser{ + logger: log.WithPrefix("jar"), client: c, } @@ -82,7 +83,7 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, } func (p *Parser) parseArtifact(filePath string, size int64, r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, error) { - log.Logger.Debugw("Parsing Java artifacts...", zap.String("file", filePath)) + p.logger.Debug("Parsing Java artifacts...", log.String("file", filePath)) // Try to extract artifactId and version from the file name // e.g. spring-core-5.3.4-SNAPSHOT.jar => sprint-core, 5.3.4-SNAPSHOT @@ -103,7 +104,7 @@ func (p *Parser) parseArtifact(filePath string, size int64, r xio.ReadSeekerAt) if p.offline { // In offline mode, we will not check if the artifact information is correct. 
if !manifestProps.Valid() { - log.Logger.Debugw("Unable to identify POM in offline mode", zap.String("file", fileName)) + p.logger.Debug("Unable to identify POM in offline mode", log.String("file", fileName)) return libs, nil, nil } return append(libs, manifestProps.Library()), nil, nil @@ -126,7 +127,7 @@ func (p *Parser) parseArtifact(filePath string, size int64, r xio.ReadSeekerAt) return nil, nil, xerrors.Errorf("failed to search by SHA1: %w", err) } - log.Logger.Debugw("No such POM in the central repositories", zap.String("file", fileName)) + p.logger.Debug("No such POM in the central repositories", log.String("file", fileName)) // Return when artifactId or version from the file name are empty if fileProps.ArtifactID == "" || fileProps.Version == "" { @@ -137,8 +138,8 @@ func (p *Parser) parseArtifact(filePath string, size int64, r xio.ReadSeekerAt) // When some artifacts have the same groupIds, it might result in false detection. fileProps.GroupID, err = p.client.SearchByArtifactID(fileProps.ArtifactID, fileProps.Version) if err == nil { - log.Logger.Debugw("POM was determined in a heuristic way", zap.String("file", fileName), - zap.String("artifact", fileProps.String())) + p.logger.Debug("POM was determined in a heuristic way", log.String("file", fileName), + log.String("artifact", fileProps.String())) libs = append(libs, fileProps.Library()) } else if !errors.Is(err, ArtifactNotFoundErr) { return nil, nil, xerrors.Errorf("failed to search by artifact id: %w", err) @@ -182,7 +183,7 @@ func (p *Parser) traverseZip(filePath string, size int64, r xio.ReadSeekerAt, fi case isArtifact(fileInJar.Name): innerLibs, _, err := p.parseInnerJar(fileInJar, filePath) // TODO process inner deps if err != nil { - log.Logger.Debugf("Failed to parse %s: %s", fileInJar.Name, err) + p.logger.Debug("Failed to parse", log.String("file", fileInJar.Name), log.Err(err)) continue } libs = append(libs, innerLibs...) diff --git a/pkg/dependency/parser/java/jar/sonatype/log.go b/pkg/dependency/parser/java/jar/sonatype/log.go index 9d4ef0b7db87..78f7f04824c3 100644 --- a/pkg/dependency/parser/java/jar/sonatype/log.go +++ b/pkg/dependency/parser/java/jar/sonatype/log.go @@ -1,32 +1,30 @@ package sonatype -import "github.com/aquasecurity/trivy/pkg/log" +import ( + "context" + "log/slog" + + "github.com/aquasecurity/trivy/pkg/log" +) // logger implements LeveledLogger // https://github.com/hashicorp/go-retryablehttp/blob/991b9d0a42d13014e3689dd49a94c02be01f4237/client.go#L285-L290 -type logger struct{} - -func (logger) Error(msg string, keysAndValues ...interface{}) { - // Use Debugw to suppress errors on failure - if msg == "request failed" { - log.Logger.Debugw(msg, keysAndValues...) - return - } - log.Logger.Errorw(msg, keysAndValues) +type handler struct { + slog.Handler } -func (logger) Info(msg string, keysAndValues ...interface{}) { - log.Logger.Infow(msg, keysAndValues...) +func newLogger() *log.Logger { + return log.New(&handler{slog.Default().Handler()}).With(log.Prefix("sonatype")) } -func (logger) Debug(msg string, keysAndValues ...interface{}) { - // This message is displayed too much - if msg == "performing request" { - return +func (h *handler) Handle(ctx context.Context, r slog.Record) error { + switch r.Message { + case "request failed": + // Use Debug to suppress errors on failure + r.Level = log.LevelDebug + case "performing request": + // This message is displayed too much + return nil } - log.Logger.Debugw(msg, keysAndValues...) 
-}
-
-func (logger) Warn(msg string, keysAndValues ...interface{}) {
-	log.Logger.Warnw(msg, keysAndValues...)
+	return h.Handler.Handle(ctx, r)
 }
diff --git a/pkg/dependency/parser/java/jar/sonatype/sonatype.go b/pkg/dependency/parser/java/jar/sonatype/sonatype.go
index 63cae20b670f..ebf13f96e42b 100644
--- a/pkg/dependency/parser/java/jar/sonatype/sonatype.go
+++ b/pkg/dependency/parser/java/jar/sonatype/sonatype.go
@@ -57,7 +57,7 @@ func WithHTTPClient(client *http.Client) Option {
 func New(opts ...Option) Sonatype {
 	// for HTTP retry
 	retryClient := retryablehttp.NewClient()
-	retryClient.Logger = logger{}
+	retryClient.Logger = newLogger()
 	retryClient.RetryWaitMin = 20 * time.Second
 	retryClient.RetryWaitMax = 5 * time.Minute
 	retryClient.RetryMax = 5
diff --git a/pkg/dependency/parser/java/pom/artifact.go b/pkg/dependency/parser/java/pom/artifact.go
index 7cbab3b5b651..5f94dbfc3739 100644
--- a/pkg/dependency/parser/java/pom/artifact.go
+++ b/pkg/dependency/parser/java/pom/artifact.go
@@ -158,5 +158,5 @@ func printLoopedPropertiesStack(env string, usedProps []string) {
 	for _, prop := range usedProps {
 		s += fmt.Sprintf("%s -> ", prop)
 	}
-	log.Logger.Warnf("Lopped properties were detected: %s%s", s, env)
+	log.Warn("Looped properties were detected", log.String("prop", s+env))
 }
diff --git a/pkg/dependency/parser/java/pom/parse.go b/pkg/dependency/parser/java/pom/parse.go
index 955f8cfd9e33..18a62bffbf94 100644
--- a/pkg/dependency/parser/java/pom/parse.go
+++ b/pkg/dependency/parser/java/pom/parse.go
@@ -14,7 +14,6 @@ import (
 	multierror "github.com/hashicorp/go-multierror"
 	"github.com/samber/lo"
-	"go.uber.org/zap"
 	"golang.org/x/net/html/charset"
 	"golang.org/x/xerrors"
@@ -50,6 +49,7 @@ func WithRemoteRepos(repos []string) option {
 }
 type parser struct {
+	logger          *log.Logger
 	rootPath        string
 	cache           pomCache
 	localRepository string
@@ -76,6 +76,7 @@ func NewParser(filePath string, opts ...option) types.Parser {
 	}
 	return &parser{
+		logger:          log.WithPrefix("pom"),
 		rootPath:        filepath.Clean(filePath),
 		cache:           newPOMCache(),
 		localRepository: localRepository,
@@ -186,7 +187,8 @@ func (p *parser) parseRoot(root artifact, uniqModules map[string]struct{}) ([]ty
 	for _, relativePath := range result.modules {
 		moduleArtifact, err := p.parseModule(result.filePath, relativePath)
 		if err != nil {
-			log.Logger.Debugf("Unable to parse %q module: %s", result.filePath, err)
+			p.logger.Debug("Unable to parse the module",
+				log.String("file_path", result.filePath), log.Err(err))
 			continue
 		}
@@ -283,10 +285,11 @@ func (p *parser) resolve(art artifact, rootDepManagement []pomDependency) (analy
 		return *result, nil
 	}
-	log.Logger.Debugf("Resolving %s:%s:%s...", art.GroupID, art.ArtifactID, art.Version)
+	p.logger.Debug("Resolving...", log.String("group_id", art.GroupID),
+		log.String("artifact_id", art.ArtifactID), log.String("version", art.Version.String()))
 	pomContent, err := p.tryRepository(art.GroupID, art.ArtifactID, art.Version.String())
 	if err != nil {
-		log.Logger.Debug(err)
+		p.logger.Debug("Repository error", log.Err(err))
 	}
 	result, err := p.analyze(pomContent, analysisOptions{
 		exclusions: art.Exclusions,
@@ -472,10 +475,10 @@ func (p *parser) parseParent(currentPath string, parent pomParent) (analysisResu
 	if target.IsEmpty() && !isProperty(parent.Version) {
 		return analysisResult{}, nil
 	}
-	log.Logger.Debugf("Start parent: %s", target.String())
-	defer func() {
-		log.Logger.Debugf("Exit parent: %s", target.String())
-	}()
+
+	logger := p.logger.With("artifact", target.String())
+	logger.Debug("Start parent")
+	defer logger.Debug("Exit parent")
 	// If the artifact is found in cache, it is returned.
 	if result := p.cache.get(target); result != nil {
@@ -484,7 +487,7 @@ func (p *parser) parseParent(currentPath string, parent pomParent) (analysisResu
 	parentPOM, err := p.retrieveParent(currentPath, parent.RelativePath, target)
 	if err != nil {
-		log.Logger.Debugf("parent POM not found: %s", err)
+		logger.Debug("Parent POM not found", log.Err(err))
 	}
 	result, err := p.analyze(parentPOM, analysisOptions{})
@@ -630,13 +633,13 @@ func (p *parser) loadPOMFromLocalRepository(paths []string) (*pom, error) {
 func (p *parser) fetchPOMFromRemoteRepositories(paths []string) (*pom, error) {
 	// Do not try fetching pom.xml from remote repositories in offline mode
 	if p.offline {
-		log.Logger.Debug("Fetching the remote pom.xml is skipped")
+		p.logger.Debug("Fetching the remote pom.xml is skipped")
 		return nil, xerrors.New("offline mode")
 	}
 	// try all remoteRepositories
 	for _, repo := range p.remoteRepositories {
-		fetched, err := fetchPOMFromRemoteRepository(repo, paths)
+		fetched, err := p.fetchPOMFromRemoteRepository(repo, paths)
 		if err != nil {
 			return nil, xerrors.Errorf("fetch repository error: %w", err)
 		} else if fetched == nil {
@@ -647,20 +650,21 @@ func (p *parser) fetchPOMFromRemoteRepositories(paths []string) (*pom, error) {
 	return nil, xerrors.Errorf("the POM was not found in remote remoteRepositories")
 }
-func fetchPOMFromRemoteRepository(repo string, paths []string) (*pom, error) {
+func (p *parser) fetchPOMFromRemoteRepository(repo string, paths []string) (*pom, error) {
 	repoURL, err := url.Parse(repo)
 	if err != nil {
-		log.Logger.Errorw("URL parse error", zap.String("repo", repo))
+		p.logger.Error("URL parse error", log.String("repo", repo))
 		return nil, nil
 	}
 	paths = append([]string{repoURL.Path}, paths...)
 	repoURL.Path = path.Join(paths...)
+	logger := p.logger.With(log.String("host", repoURL.Host), log.String("path", repoURL.Path))
 	client := &http.Client{}
 	req, err := http.NewRequest("GET", repoURL.String(), http.NoBody)
 	if err != nil {
-		log.Logger.Debugf("Request failed for %s%s", repoURL.Host, repoURL.Path)
+		logger.Debug("HTTP request failed")
 		return nil, nil
 	}
 	if repoURL.User != nil {
@@ -670,7 +674,7 @@ func fetchPOMFromRemoteRepository(repo string, paths []string) (*pom, error) {
 	resp, err := client.Do(req)
 	if err != nil || resp.StatusCode != http.StatusOK {
-		log.Logger.Debugf("Failed to fetch from %s%s", repoURL.Host, repoURL.Path)
+		logger.Debug("Failed to fetch")
 		return nil, nil
 	}
 	defer resp.Body.Close()
diff --git a/pkg/dependency/parser/java/pom/pom.go b/pkg/dependency/parser/java/pom/pom.go
index 8b610cc5925b..89adabe0ff3e 100644
--- a/pkg/dependency/parser/java/pom/pom.go
+++ b/pkg/dependency/parser/java/pom/pom.go
@@ -116,6 +116,7 @@ func (p pom) licenses() []string {
 }
 func (p pom) repositories(servers []Server) []string {
+	logger := log.WithPrefix("pom")
 	var urls []string
 	for _, rep := range p.content.Repositories.Repository {
 		// Add only enabled repositories
@@ -125,7 +126,7 @@ func (p pom) repositories(servers []Server) []string {
 		repoURL, err := url.Parse(rep.URL)
 		if err != nil {
-			log.Logger.Debugf("Unable to parse remote repository url: %s", err)
+			logger.Debug("Unable to parse remote repository url", log.Err(err))
 			continue
 		}
@@ -138,7 +139,7 @@ func (p pom) repositories(servers []Server) []string {
 			}
 		}
-		log.Logger.Debugf("Adding repository %s: %s", rep.ID, rep.URL)
+		logger.Debug("Adding repository", log.String("id", rep.ID), log.String("url", rep.URL))
 		urls = append(urls, repoURL.String())
 	}
 	return urls
diff --git a/pkg/dependency/parser/nodejs/npm/parse.go b/pkg/dependency/parser/nodejs/npm/parse.go
index b74cfa5ce2f5..46b62c609502 100644
--- a/pkg/dependency/parser/nodejs/npm/parse.go
+++ b/pkg/dependency/parser/nodejs/npm/parse.go
@@ -52,10 +52,14 @@ type Package struct {
 	EndLine int
 }
-type Parser struct{}
+type Parser struct {
+	logger *log.Logger
+}
 func NewParser() types.Parser {
-	return &Parser{}
+	return &Parser{
+		logger: log.WithPrefix("npm"),
+	}
 }
 func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, error) {
@@ -85,13 +89,14 @@ func (p *Parser) parseV2(packages map[string]Package) ([]types.Library, []types.
 	// Resolve links first
 	// https://docs.npmjs.com/cli/v9/configuring-npm/package-lock-json#packages
-	resolveLinks(packages)
+	p.resolveLinks(packages)
 	directDeps := make(map[string]struct{})
 	for name, version := range lo.Assign(packages[""].Dependencies, packages[""].OptionalDependencies, packages[""].DevDependencies) {
 		pkgPath := joinPaths(nodeModulesDir, name)
 		if _, ok := packages[pkgPath]; !ok {
-			log.Logger.Debugf("Unable to find the direct dependency: '%s@%s'", name, version)
+			p.logger.Debug("Unable to find the direct dependency",
+				log.String("name", name), log.String("version", version))
 			continue
 		}
 		// Store the package paths of direct dependencies
@@ -107,7 +112,7 @@ func (p *Parser) parseV2(packages map[string]Package) ([]types.Library, []types.
 		// pkg.Name exists when package name != folder name
 		pkgName := pkg.Name
 		if pkgName == "" {
-			pkgName = pkgNameFromPath(pkgPath)
+			pkgName = p.pkgNameFromPath(pkgPath)
 		}
 		pkgID := packageID(pkgName, pkg.Version)
@@ -164,7 +169,8 @@ func (p *Parser) parseV2(packages map[string]Package) ([]types.Library, []types.
for depName, depVersion := range dependencies { depID, err := findDependsOn(pkgPath, depName, packages) if err != nil { - log.Logger.Warnf("Cannot resolve the version: '%s@%s'", depName, depVersion) + p.logger.Debug("Unable to resolve the version", + log.String("name", depName), log.String("version", depVersion)) continue } dependsOn = append(dependsOn, depID) @@ -186,7 +192,7 @@ func (p *Parser) parseV2(packages map[string]Package) ([]types.Library, []types. // function/func1 -> target of package // node_modules/func1 -> link to target // see `package-lock_v3_with_workspace.json` to better understanding -func resolveLinks(packages map[string]Package) { +func (p *Parser) resolveLinks(packages map[string]Package) { links := lo.PickBy(packages, func(_ string, pkg Package) bool { return pkg.Link }) @@ -218,8 +224,8 @@ func resolveLinks(packages map[string]Package) { // Delete the target package delete(packages, pkgPath) - if isWorkspace(pkgPath, workspaces) { - rootPkg.Dependencies[pkgNameFromPath(linkPath)] = pkg.Version + if p.isWorkspace(pkgPath, workspaces) { + rootPkg.Dependencies[p.pkgNameFromPath(linkPath)] = pkg.Version } break } @@ -227,10 +233,11 @@ func resolveLinks(packages map[string]Package) { packages[""] = rootPkg } -func isWorkspace(pkgPath string, workspaces []string) bool { +func (p *Parser) isWorkspace(pkgPath string, workspaces []string) bool { for _, workspace := range workspaces { if match, err := path.Match(workspace, pkgPath); err != nil { - log.Logger.Debugf("unable to parse workspace %q for %s", workspace, pkgPath) + p.logger.Debug("Unable to parse workspace", + log.String("workspace", workspace), log.String("pkg_path", pkgPath)) } else if match { return true } @@ -309,7 +316,8 @@ func (p *Parser) parseV1(dependencies map[string]Dependency, versions map[string } // It should not reach here. - log.Logger.Warnf("Cannot resolve the version: %s@%s", libName, requiredVer) + p.logger.Warn("Unable to resolve the version", + log.String("name", libName), log.String("version", requiredVer)) } if len(dependsOn) > 0 { @@ -330,6 +338,20 @@ func (p *Parser) parseV1(dependencies map[string]Dependency, versions map[string return libs, deps } +func (p *Parser) pkgNameFromPath(pkgPath string) string { + // lock file contains path to dependency in `node_modules`. e.g.: + // node_modules/string-width + // node_modules/string-width/node_modules/strip-ansi + // we renamed to `node_modules` directory prefixes `workspace` when resolving Links + // node_modules/function1 + // node_modules/nested_func/node_modules/debug + if index := strings.LastIndex(pkgPath, nodeModulesDir); index != -1 { + return pkgPath[index+len(nodeModulesDir)+1:] + } + p.logger.Warn("Package path doesn't have `node_modules` prefix", log.String("pkg_path", pkgPath)) + return pkgPath +} + func uniqueDeps(deps []types.Dependency) []types.Dependency { var uniqDeps []types.Dependency unique := make(map[string]struct{}) @@ -357,20 +379,6 @@ func isIndirectLib(pkgPath string, directDeps map[string]struct{}) bool { return true } -func pkgNameFromPath(pkgPath string) string { - // lock file contains path to dependency in `node_modules`. 
e.g.: - // node_modules/string-width - // node_modules/string-width/node_modules/strip-ansi - // we renamed to `node_modules` directory prefixes `workspace` when resolving Links - // node_modules/function1 - // node_modules/nested_func/node_modules/debug - if index := strings.LastIndex(pkgPath, nodeModulesDir); index != -1 { - return pkgPath[index+len(nodeModulesDir)+1:] - } - log.Logger.Warnf("npm %q package path doesn't have `node_modules` prefix", pkgPath) - return pkgPath -} - func joinPaths(paths ...string) string { return strings.Join(paths, "/") } diff --git a/pkg/dependency/parser/nodejs/pnpm/parse.go b/pkg/dependency/parser/nodejs/pnpm/parse.go index 9e93be6a89c1..26012b747c4e 100644 --- a/pkg/dependency/parser/nodejs/pnpm/parse.go +++ b/pkg/dependency/parser/nodejs/pnpm/parse.go @@ -36,10 +36,14 @@ type LockFile struct { Packages map[string]PackageInfo `yaml:"packages,omitempty"` } -type Parser struct{} +type Parser struct { + logger *log.Logger +} -func NewParser() types.Parser { - return &Parser{} +func NewParser() *Parser { + return &Parser{ + logger: log.WithPrefix("pnpm"), + } } func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, error) { @@ -48,7 +52,7 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, return nil, nil, xerrors.Errorf("decode error: %w", err) } - lockVer := parseLockfileVersion(lockFile) + lockVer := p.parseLockfileVersion(lockFile) if lockVer < 0 { return nil, nil, nil } @@ -76,7 +80,7 @@ func (p *Parser) parse(lockVer float64, lockFile LockFile) ([]types.Library, []t version := info.Version if name == "" { - name, version = parsePackage(depPath, lockVer) + name, version = p.parsePackage(depPath, lockVer) } pkgID := packageID(name, version) @@ -103,7 +107,7 @@ func (p *Parser) parse(lockVer float64, lockFile LockFile) ([]types.Library, []t return libs, deps } -func parseLockfileVersion(lockFile LockFile) float64 { +func (p *Parser) parseLockfileVersion(lockFile LockFile) float64 { switch v := lockFile.LockfileVersion.(type) { // v5 case float64: @@ -111,33 +115,29 @@ func parseLockfileVersion(lockFile LockFile) float64 { // v6+ case string: if lockVer, err := strconv.ParseFloat(v, 64); err != nil { - log.Logger.Debugf("Unable to convert the lock file version to float: %s", err) + p.logger.Debug("Unable to convert the lock file version to float", log.Err(err)) return -1 } else { return lockVer } default: - log.Logger.Debugf("Unknown type for the lock file version: %s", lockFile.LockfileVersion) + p.logger.Debug("Unknown type for the lock file version", + log.Any("version", lockFile.LockfileVersion)) return -1 } } -func isIndirectLib(name string, directDeps map[string]interface{}) bool { - _, ok := directDeps[name] - return !ok -} - // cf. https://github.com/pnpm/pnpm/blob/ce61f8d3c29eee46cee38d56ced45aea8a439a53/packages/dependency-path/src/index.ts#L112-L163 -func parsePackage(depPath string, lockFileVersion float64) (string, string) { +func (p *Parser) parsePackage(depPath string, lockFileVersion float64) (string, string) { // The version separator is different between v5 and v6+. versionSep := "@" if lockFileVersion < 6 { versionSep = "/" } - return parseDepPath(depPath, versionSep) + return p.parseDepPath(depPath, versionSep) } -func parseDepPath(depPath, versionSep string) (string, string) { +func (p *Parser) parseDepPath(depPath, versionSep string) (string, string) { // Skip registry // e.g. 
// - "registry.npmjs.org/lodash/4.17.10" => "lodash/4.17.10" @@ -171,12 +171,18 @@ func parseDepPath(depPath, versionSep string) (string, string) { version = version[:idx] } if _, err := semver.Parse(version); err != nil { - log.Logger.Debugf("Skip %q package. %q doesn't match semver: %s", depPath, version, err) + p.logger.Debug("Skip non-semver package", log.String("pkg_path", depPath), + log.String("version", version), log.Err(err)) return "", "" } return name, version } +func isIndirectLib(name string, directDeps map[string]interface{}) bool { + _, ok := directDeps[name] + return !ok +} + func packageID(name, version string) string { return dependency.ID(ftypes.Pnpm, name, version) } diff --git a/pkg/dependency/parser/nodejs/pnpm/parse_test.go b/pkg/dependency/parser/nodejs/pnpm/parse_test.go index 19851a2c21c0..606bc37de54d 100644 --- a/pkg/dependency/parser/nodejs/pnpm/parse_test.go +++ b/pkg/dependency/parser/nodejs/pnpm/parse_test.go @@ -215,7 +215,8 @@ func Test_parsePackage(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - gotName, gotVersion := parsePackage(tt.pkg, tt.lockFileVer) + p := NewParser() + gotName, gotVersion := p.parsePackage(tt.pkg, tt.lockFileVer) assert.Equal(t, tt.wantName, gotName) assert.Equal(t, tt.wantVersion, gotVersion) }) diff --git a/pkg/dependency/parser/nodejs/yarn/parse.go b/pkg/dependency/parser/nodejs/yarn/parse.go index 9b8394eb57c8..d1d195d59641 100644 --- a/pkg/dependency/parser/nodejs/yarn/parse.go +++ b/pkg/dependency/parser/nodejs/yarn/parse.go @@ -142,13 +142,17 @@ func parseResults(patternIDs map[string]string, dependsOn map[string][]string) ( return deps } -type Parser struct{} +type Parser struct { + logger *log.Logger +} func NewParser() types.Parser { - return &Parser{} + return &Parser{ + logger: log.WithPrefix("yarn"), + } } -func scanBlocks(data []byte, atEOF bool) (advance int, token []byte, err error) { +func (p *Parser) scanBlocks(data []byte, atEOF bool) (advance int, token []byte, err error) { if atEOF && len(data) == 0 { return 0, nil, nil } @@ -167,7 +171,7 @@ func scanBlocks(data []byte, atEOF bool) (advance int, token []byte, err error) return 0, nil, nil } -func parseBlock(block []byte, lineNum int) (lib Library, deps []string, newLine int, err error) { +func (p *Parser) parseBlock(block []byte, lineNum int) (lib Library, deps []string, newLine int, err error) { var ( emptyLines int // lib can start with empty lines first skipBlock bool @@ -228,7 +232,7 @@ func parseBlock(block []byte, lineNum int) (lib Library, deps []string, newLine // in case an unsupported protocol is detected // show warning and continue parsing if err != nil { - log.Logger.Warnf("Yarn protocol error: %s", err) + p.logger.Warn("Protocol error", log.Err(err)) return Library{}, nil, scanner.LineNum(lineNum), nil } @@ -275,11 +279,11 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, patternIDs := make(map[string]string) scanner := bufio.NewScanner(r) - scanner.Split(scanBlocks) + scanner.Split(p.scanBlocks) dependsOn := make(map[string][]string) for scanner.Scan() { block := scanner.Bytes() - lib, deps, newLine, err := parseBlock(block, lineNumber) + lib, deps, newLine, err := p.parseBlock(block, lineNumber) lineNumber = newLine + 2 if err != nil { return nil, nil, err diff --git a/pkg/dependency/parser/php/composer/parse.go b/pkg/dependency/parser/php/composer/parse.go index 1fbf4316db9a..a41f998a5eb1 100644 --- a/pkg/dependency/parser/php/composer/parse.go +++ 
b/pkg/dependency/parser/php/composer/parse.go @@ -28,10 +28,14 @@ type packageInfo struct { EndLine int } -type Parser struct{} +type Parser struct { + logger *log.Logger +} func NewParser() types.Parser { - return &Parser{} + return &Parser{ + logger: log.WithPrefix("composer"), + } } func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, error) { @@ -85,7 +89,7 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, dependsOn = append(dependsOn, lib.ID) continue } - log.Logger.Debugf("unable to find version of %s", depName) + p.logger.Debug("Unable to find version", log.String("name", depName)) } sort.Strings(dependsOn) deps = append(deps, types.Dependency{ diff --git a/pkg/dependency/parser/python/packaging/parse.go b/pkg/dependency/parser/python/packaging/parse.go index 41514872fbb7..495e0d4d78ab 100644 --- a/pkg/dependency/parser/python/packaging/parse.go +++ b/pkg/dependency/parser/python/packaging/parse.go @@ -14,15 +14,19 @@ import ( xio "github.com/aquasecurity/trivy/pkg/x/io" ) -type Parser struct{} +type Parser struct { + logger *log.Logger +} func NewParser() types.Parser { - return &Parser{} + return &Parser{ + logger: log.WithPrefix("python"), + } } // Parse parses egg and wheel metadata. // e.g. .egg-info/PKG-INFO and dist-info/METADATA -func (*Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, error) { +func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, error) { rd := textproto.NewReader(bufio.NewReader(r)) h, err := rd.ReadMIMEHeader() if e := textproto.ProtocolError(""); errors.As(err, &e) { @@ -30,7 +34,7 @@ func (*Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, e // cf. https://cs.opensource.google/go/go/+/a6642e67e16b9d769a0c08e486ba08408064df19 // However, our required key/value could have been correctly parsed, // so we continue with the subsequent process. 
- log.Logger.Debugf("MIME protocol error: %s", err) + p.logger.Debug("MIME protocol error", log.Err(err)) } else if err != nil && err != io.EOF { return nil, nil, xerrors.Errorf("read MIME error: %w", err) } @@ -65,7 +69,8 @@ func (*Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, e if l := h.Get("License"); l != "" { if len(licenses) != 0 { - log.Logger.Infof("License acquired from METADATA classifiers may be subject to additional terms for [%s:%s]", name, version) + p.logger.Info("License acquired from METADATA classifiers may be subject to additional terms", + log.String("name", name), log.String("version", version)) } else { license = l } diff --git a/pkg/dependency/parser/python/poetry/parse.go b/pkg/dependency/parser/python/poetry/parse.go index e476b8c18d93..30708cc67add 100644 --- a/pkg/dependency/parser/python/poetry/parse.go +++ b/pkg/dependency/parser/python/poetry/parse.go @@ -29,10 +29,14 @@ type Lockfile struct { } `toml:"package"` } -type Parser struct{} +type Parser struct { + logger *log.Logger +} -func NewParser() types.Parser { - return &Parser{} +func NewParser() *Parser { + return &Parser{ + logger: log.WithPrefix("poetry"), + } } func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, error) { @@ -42,7 +46,7 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, } // Keep all installed versions - libVersions := parseVersions(lockfile) + libVersions := p.parseVersions(lockfile) var libs []types.Library var deps []types.Dependency @@ -58,7 +62,7 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, Version: pkg.Version, }) - dependsOn := parseDependencies(pkg.Dependencies, libVersions) + dependsOn := p.parseDependencies(pkg.Dependencies, libVersions) if len(dependsOn) != 0 { deps = append(deps, types.Dependency{ ID: pkgID, @@ -71,7 +75,7 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, // parseVersions stores all installed versions of libraries for use in dependsOn // as the dependencies of libraries use version range. 
-func parseVersions(lockfile Lockfile) map[string][]string { +func (p *Parser) parseVersions(lockfile Lockfile) map[string][]string { libVersions := make(map[string][]string) for _, pkg := range lockfile.Packages { if pkg.Category == "dev" { @@ -86,11 +90,11 @@ func parseVersions(lockfile Lockfile) map[string][]string { return libVersions } -func parseDependencies(deps map[string]any, libVersions map[string][]string) []string { +func (p *Parser) parseDependencies(deps map[string]any, libVersions map[string][]string) []string { var dependsOn []string for name, versRange := range deps { - if dep, err := parseDependency(name, versRange, libVersions); err != nil { - log.Logger.Debugf("failed to parse poetry dependency: %s", err) + if dep, err := p.parseDependency(name, versRange, libVersions); err != nil { + p.logger.Debug("Failed to parse poetry dependency", log.Err(err)) } else if dep != "" { dependsOn = append(dependsOn, dep) } @@ -101,7 +105,7 @@ func parseDependencies(deps map[string]any, libVersions map[string][]string) []s return dependsOn } -func parseDependency(name string, versRange any, libVersions map[string][]string) (string, error) { +func (p *Parser) parseDependency(name string, versRange any, libVersions map[string][]string) (string, error) { name = normalizePkgName(name) vers, ok := libVersions[name] if !ok { diff --git a/pkg/dependency/parser/python/poetry/parse_test.go b/pkg/dependency/parser/python/poetry/parse_test.go index c02999a8eff8..d7f7adf630eb 100644 --- a/pkg/dependency/parser/python/poetry/parse_test.go +++ b/pkg/dependency/parser/python/poetry/parse_test.go @@ -46,7 +46,7 @@ func TestParser_Parse(t *testing.T) { require.NoError(t, err) defer f.Close() - p := &Parser{} + p := NewParser() gotLibs, gotDeps, err := p.Parse(f) if !tt.wantErr(t, err, fmt.Sprintf("Parse(%v)", tt.file)) { return @@ -116,7 +116,7 @@ func TestParseDependency(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - got, err := parseDependency(tt.packageName, tt.versionRange, tt.libsVersions) + got, err := NewParser().parseDependency(tt.packageName, tt.versionRange, tt.libsVersions) if tt.wantErr != "" { assert.ErrorContains(t, err, tt.wantErr) return diff --git a/pkg/dependency/parser/rust/cargo/parse.go b/pkg/dependency/parser/rust/cargo/parse.go index 282e25152d04..2fd6686224bc 100644 --- a/pkg/dependency/parser/rust/cargo/parse.go +++ b/pkg/dependency/parser/rust/cargo/parse.go @@ -26,10 +26,14 @@ type Lockfile struct { Packages []cargoPkg `toml:"package"` } -type Parser struct{} +type Parser struct { + logger *log.Logger +} func NewParser() types.Parser { - return &Parser{} + return &Parser{ + logger: log.WithPrefix("cargo"), + } } func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, error) { @@ -71,7 +75,7 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, } libs = append(libs, lib) - dep := parseDependencies(pkgID, pkg, pkgs) + dep := p.parseDependencies(pkgID, pkg, pkgs) if dep != nil { deps = append(deps, *dep) } @@ -80,7 +84,7 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, sort.Sort(types.Dependencies(deps)) return libs, deps, nil } -func parseDependencies(pkgId string, pkg cargoPkg, pkgs map[string]cargoPkg) *types.Dependency { +func (p *Parser) parseDependencies(pkgId string, pkg cargoPkg, pkgs map[string]cargoPkg) *types.Dependency { var dependOn []string for _, pkgDep := range pkg.Dependencies { @@ -99,7 +103,7 @@ func parseDependencies(pkgId string, pkg 
cargoPkg, pkgs map[string]cargoPkg) *ty name := fields[0] version, ok := pkgs[name] if !ok { - log.Logger.Debugf("can't find version for %s", name) + p.logger.Debug("Cannot find version", log.String("name", name)) continue } dependOn = append(dependOn, packageID(name, version.Version)) @@ -108,7 +112,7 @@ func parseDependencies(pkgId string, pkg cargoPkg, pkgs map[string]cargoPkg) *ty case 2, 3: dependOn = append(dependOn, packageID(fields[0], fields[1])) default: - log.Logger.Debugf("wrong dependency format for %s", pkgDep) + p.logger.Debug("Wrong dependency format", log.String("dep", pkgDep)) continue } } diff --git a/pkg/dependency/parser/swift/cocoapods/parse.go b/pkg/dependency/parser/swift/cocoapods/parse.go index 7b2a580fd74c..ae71bc09a3d7 100644 --- a/pkg/dependency/parser/swift/cocoapods/parse.go +++ b/pkg/dependency/parser/swift/cocoapods/parse.go @@ -16,17 +16,21 @@ import ( xio "github.com/aquasecurity/trivy/pkg/x/io" ) -type Parser struct{} +type Parser struct { + logger *log.Logger +} func NewParser() types.Parser { - return &Parser{} + return &Parser{ + logger: log.WithPrefix("cocoapods"), + } } type lockFile struct { Pods []any `yaml:"PODS"` // pod can be string or map[string]interface{} } -func (Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, error) { +func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, error) { lock := &lockFile{} decoder := yaml.NewDecoder(r) if err := decoder.Decode(&lock); err != nil { @@ -36,19 +40,19 @@ func (Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, er parsedDeps := make(map[string]types.Library) // dependency name => Library directDeps := make(map[string][]string) // dependency name => slice of child dependency names for _, pod := range lock.Pods { - switch p := pod.(type) { + switch dep := pod.(type) { case string: // dependency with version number - lib, err := parseDep(p) + lib, err := parseDep(dep) if err != nil { - log.Logger.Debug(err) + p.logger.Debug("Dependency parse error", log.Err(err)) continue } parsedDeps[lib.Name] = lib case map[string]interface{}: // dependency with its child dependencies - for dep, childDeps := range p { + for dep, childDeps := range dep { lib, err := parseDep(dep) if err != nil { - log.Logger.Debug(err) + p.logger.Debug("Dependency parse error", log.Err(err)) continue } parsedDeps[lib.Name] = lib diff --git a/pkg/dependency/parser/swift/swift/parse.go b/pkg/dependency/parser/swift/swift/parse.go index daeb8d3ef243..74a507f847fb 100644 --- a/pkg/dependency/parser/swift/swift/parse.go +++ b/pkg/dependency/parser/swift/swift/parse.go @@ -17,13 +17,17 @@ import ( ) // Parser is a parser for Package.resolved files -type Parser struct{} +type Parser struct { + logger *log.Logger +} func NewParser() types.Parser { - return &Parser{} + return &Parser{ + logger: log.WithPrefix("swift"), + } } -func (Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, error) { +func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, error) { var lockFile LockFile input, err := io.ReadAll(r) if err != nil { @@ -43,7 +47,7 @@ func (Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, er // Skip packages for which we cannot resolve the version if pin.State.Version == "" && pin.State.Branch == "" { - log.Logger.Warnf("Unable to resolve %q. Both the version and branch fields are empty.", name) + p.logger.Warn("Unable to resolve. 
Both the version and branch fields are empty.", log.String("name", name)) continue } diff --git a/pkg/detector/library/compare/compare.go b/pkg/detector/library/compare/compare.go index 60829deb8141..9f6f82c26de4 100644 --- a/pkg/detector/library/compare/compare.go +++ b/pkg/detector/library/compare/compare.go @@ -31,7 +31,7 @@ func IsVulnerable(pkgVer string, advisory dbTypes.Advisory, match matchVersion) if len(advisory.VulnerableVersions) != 0 { matched, err = match(pkgVer, strings.Join(advisory.VulnerableVersions, " || ")) if err != nil { - log.Logger.Warn(err) + log.Warn("Version matching error", log.Err(err)) return false } else if !matched { // the version is not vulnerable @@ -48,7 +48,7 @@ func IsVulnerable(pkgVer string, advisory dbTypes.Advisory, match matchVersion) matched, err = match(pkgVer, strings.Join(secureVersions, " || ")) if err != nil { - log.Logger.Warn(err) + log.Warn("Version matching error", log.Err(err)) return false } return !matched diff --git a/pkg/detector/library/driver.go b/pkg/detector/library/driver.go index b2f5b6babc38..e94f2b4db89f 100644 --- a/pkg/detector/library/driver.go +++ b/pkg/detector/library/driver.go @@ -73,7 +73,7 @@ func NewDriver(libType ftypes.LangType) (Driver, bool) { ecosystem = vulnerability.Cocoapods comparer = rubygems.Comparer{} case ftypes.CondaPkg: - log.Logger.Warn("Conda package is supported for SBOM, not for vulnerability scanning") + log.Warn("Conda package is supported for SBOM, not for vulnerability scanning") return Driver{}, false case ftypes.Bitnami: ecosystem = vulnerability.Bitnami @@ -82,7 +82,8 @@ func NewDriver(libType ftypes.LangType) (Driver, bool) { ecosystem = vulnerability.Kubernetes comparer = compare.GenericComparer{} default: - log.Logger.Warnf("The %q library type is not supported for vulnerability scanning", libType) + log.Warn("The library type is not supported for vulnerability scanning", + log.String("type", string(libType))) return Driver{}, false } return Driver{ diff --git a/pkg/detector/ospkg/alma/alma.go b/pkg/detector/ospkg/alma/alma.go index 67465f04a1b0..ba38c89b7236 100644 --- a/pkg/detector/ospkg/alma/alma.go +++ b/pkg/detector/ospkg/alma/alma.go @@ -38,12 +38,10 @@ func NewScanner() *Scanner { } // Detect vulnerabilities in package using AlmaLinux scanner -func (s *Scanner) Detect(osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { - log.Logger.Info("Detecting AlmaLinux vulnerabilities...") - +func (s *Scanner) Detect(ctx context.Context, osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { osVer = osver.Major(osVer) - log.Logger.Debugf("AlmaLinux: os version: %s", osVer) - log.Logger.Debugf("AlmaLinux: the number of packages: %d", len(pkgs)) + log.InfoContext(ctx, "Detecting vulnerabilities...", log.String("os_version", osVer), + log.Int("pkg_num", len(pkgs))) var vulns []types.DetectedVulnerability var skipPkgs []string @@ -79,7 +77,8 @@ func (s *Scanner) Detect(osVer string, _ *ftypes.Repository, pkgs []ftypes.Packa } } if len(skipPkgs) > 0 { - log.Logger.Infof("Skipped detection of these packages: %q because modular packages cannot be detected correctly due to a bug in AlmaLinux. See also: https://bugs.almalinux.org/view.php?id=173", skipPkgs) + log.InfoContext(ctx, "Skipped detection of the packages because modular packages cannot be detected correctly due to a bug in AlmaLinux. 
See also: https://bugs.almalinux.org/view.php?id=173", + log.Any("packages", skipPkgs)) } return vulns, nil diff --git a/pkg/detector/ospkg/alma/alma_test.go b/pkg/detector/ospkg/alma/alma_test.go index bd70079fd189..cd7a318d8f10 100644 --- a/pkg/detector/ospkg/alma/alma_test.go +++ b/pkg/detector/ospkg/alma/alma_test.go @@ -162,7 +162,7 @@ func TestScanner_Detect(t *testing.T) { defer db.Close() s := alma.NewScanner() - got, err := s.Detect(tt.args.osVer, nil, tt.args.pkgs) + got, err := s.Detect(nil, tt.args.osVer, nil, tt.args.pkgs) if tt.wantErr != "" { require.Error(t, err) assert.Contains(t, err.Error(), tt.wantErr) diff --git a/pkg/detector/ospkg/alpine/alpine.go b/pkg/detector/ospkg/alpine/alpine.go index 4be5cf128431..48f7abcfbec6 100644 --- a/pkg/detector/ospkg/alpine/alpine.go +++ b/pkg/detector/ospkg/alpine/alpine.go @@ -64,21 +64,20 @@ func NewScanner() *Scanner { } // Detect vulnerabilities in package using Alpine scanner -func (s *Scanner) Detect(osVer string, repo *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { - log.Logger.Info("Detecting Alpine vulnerabilities...") +func (s *Scanner) Detect(ctx context.Context, osVer string, repo *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { osVer = osver.Minor(osVer) repoRelease := s.repoRelease(repo) - log.Logger.Debugf("alpine: os version: %s", osVer) - log.Logger.Debugf("alpine: package repository: %s", repoRelease) - log.Logger.Debugf("alpine: the number of packages: %d", len(pkgs)) + log.InfoContext(ctx, "Detecting vulnerabilities...", log.String("os_version", osVer), + log.String("repository", repoRelease), log.Int("pkg_num", len(pkgs))) stream := osVer if repoRelease != "" && osVer != repoRelease { // Prefer the repository release. Use OS version only when the repository is not detected. stream = repoRelease if repoRelease != "edge" { // TODO: we should detect the current edge version. - log.Logger.Warnf("Mixing Alpine versions is unsupported, OS: '%s', repository: '%s'", osVer, repoRelease) + log.WarnContext(ctx, "Mixing Alpine versions is unsupported", + log.String("os", osVer), log.String("repository", repoRelease)) } } @@ -95,12 +94,12 @@ func (s *Scanner) Detect(osVer string, repo *ftypes.Repository, pkgs []ftypes.Pa sourceVersion, err := version.NewVersion(utils.FormatSrcVersion(pkg)) if err != nil { - log.Logger.Debugf("failed to parse Alpine Linux installed package version: %s", err) + log.DebugContext(ctx, "Failed to parse the installed package version", log.Err(err)) continue } for _, adv := range advisories { - if !s.isVulnerable(sourceVersion, adv) { + if !s.isVulnerable(ctx, sourceVersion, adv) { continue } vulns = append(vulns, types.DetectedVulnerability{ @@ -119,7 +118,7 @@ func (s *Scanner) Detect(osVer string, repo *ftypes.Repository, pkgs []ftypes.Pa return vulns, nil } -func (s *Scanner) isVulnerable(installedVersion version.Version, adv dbTypes.Advisory) bool { +func (s *Scanner) isVulnerable(ctx context.Context, installedVersion version.Version, adv dbTypes.Advisory) bool { // This logic is for unfixed vulnerabilities, but Trivy DB doesn't have advisories for unfixed vulnerabilities for now // because Alpine just provides potentially vulnerable packages. It will cause a lot of false positives. // This is for Aqua commercial products. @@ -127,7 +126,8 @@ func (s *Scanner) isVulnerable(installedVersion version.Version, adv dbTypes.Adv // AffectedVersion means which version introduced this vulnerability. 
affectedVersion, err := version.NewVersion(adv.AffectedVersion) if err != nil { - log.Logger.Debugf("failed to parse Alpine Linux affected package version: %s", err) + log.DebugContext(ctx, "Failed to parse the affected package version", + log.String("version", adv.AffectedVersion), log.Err(err)) return false } if affectedVersion.GreaterThan(installedVersion) { @@ -144,7 +144,8 @@ func (s *Scanner) isVulnerable(installedVersion version.Version, adv dbTypes.Adv // Compare versions for fixed vulnerabilities fixedVersion, err := version.NewVersion(adv.FixedVersion) if err != nil { - log.Logger.Debugf("failed to parse Alpine Linux fixed version: %s", err) + log.DebugContext(ctx, "Failed to parse the fixed version", + log.String("version", adv.FixedVersion), log.Err(err)) return false } diff --git a/pkg/detector/ospkg/alpine/alpine_test.go b/pkg/detector/ospkg/alpine/alpine_test.go index f420cf5576ab..eba7258e1392 100644 --- a/pkg/detector/ospkg/alpine/alpine_test.go +++ b/pkg/detector/ospkg/alpine/alpine_test.go @@ -251,7 +251,7 @@ func TestScanner_Detect(t *testing.T) { defer db.Close() s := alpine.NewScanner() - got, err := s.Detect(tt.args.osVer, tt.args.repo, tt.args.pkgs) + got, err := s.Detect(nil, tt.args.osVer, tt.args.repo, tt.args.pkgs) if tt.wantErr != "" { require.Error(t, err) assert.Contains(t, err.Error(), tt.wantErr) diff --git a/pkg/detector/ospkg/amazon/amazon.go b/pkg/detector/ospkg/amazon/amazon.go index 0c5a35f13a8f..c1ec250ec45b 100644 --- a/pkg/detector/ospkg/amazon/amazon.go +++ b/pkg/detector/ospkg/amazon/amazon.go @@ -40,17 +40,16 @@ func NewScanner() *Scanner { } // Detect scans the packages using amazon scanner -func (s *Scanner) Detect(osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { - log.Logger.Info("Detecting Amazon Linux vulnerabilities...") - +func (s *Scanner) Detect(ctx context.Context, osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { osVer = strings.Fields(osVer)[0] // The format `2023.xxx.xxxx` can be used. 
osVer = osver.Major(osVer) if osVer != "2" && osVer != "2022" && osVer != "2023" { osVer = "1" } - log.Logger.Debugf("amazon: os version: %s", osVer) - log.Logger.Debugf("amazon: the number of packages: %d", len(pkgs)) + + log.InfoContext(ctx, "Detecting vulnerabilities...", log.String("os_version", osVer), + log.Int("pkg_num", len(pkgs))) var vulns []types.DetectedVulnerability for _, pkg := range pkgs { @@ -66,14 +65,16 @@ func (s *Scanner) Detect(osVer string, _ *ftypes.Repository, pkgs []ftypes.Packa installedVersion, err := version.NewVersion(installed) if err != nil { - log.Logger.Debugf("failed to parse Amazon Linux installed package version: %s", err) + log.DebugContext(ctx, "Failed to parse the installed package version", + log.String("version", installed), log.Err(err)) continue } for _, adv := range advisories { fixedVersion, err := version.NewVersion(adv.FixedVersion) if err != nil { - log.Logger.Debugf("failed to parse Amazon Linux package version: %s", err) + log.DebugContext(ctx, "Failed to parse the fixed version", + log.String("version", adv.FixedVersion), log.Err(err)) continue } diff --git a/pkg/detector/ospkg/amazon/amazon_test.go b/pkg/detector/ospkg/amazon/amazon_test.go index c9c5a3c65840..83100e98c72c 100644 --- a/pkg/detector/ospkg/amazon/amazon_test.go +++ b/pkg/detector/ospkg/amazon/amazon_test.go @@ -177,7 +177,7 @@ func TestScanner_Detect(t *testing.T) { defer db.Close() s := amazon.NewScanner() - got, err := s.Detect(tt.args.osVer, nil, tt.args.pkgs) + got, err := s.Detect(nil, tt.args.osVer, nil, tt.args.pkgs) if tt.wantErr != "" { require.Error(t, err) assert.Contains(t, err.Error(), tt.wantErr) diff --git a/pkg/detector/ospkg/chainguard/chainguard.go b/pkg/detector/ospkg/chainguard/chainguard.go index bba9642c481c..6874c4c93b91 100644 --- a/pkg/detector/ospkg/chainguard/chainguard.go +++ b/pkg/detector/ospkg/chainguard/chainguard.go @@ -27,10 +27,8 @@ func NewScanner() *Scanner { } // Detect vulnerabilities in package using Chainguard scanner -func (s *Scanner) Detect(_ string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { - log.Logger.Info("Detecting Chainguard vulnerabilities...") - - log.Logger.Debugf("chainguard: the number of packages: %d", len(pkgs)) +func (s *Scanner) Detect(ctx context.Context, _ string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { + log.InfoContext(ctx, "Detecting Chainguard vulnerabilities...", log.Int("pkg_num", len(pkgs))) var vulns []types.DetectedVulnerability for _, pkg := range pkgs { @@ -46,12 +44,12 @@ func (s *Scanner) Detect(_ string, _ *ftypes.Repository, pkgs []ftypes.Package) installed := utils.FormatVersion(pkg) installedVersion, err := version.NewVersion(installed) if err != nil { - log.Logger.Debugf("failed to parse Chainguard installed package version: %s", err) + log.DebugContext(ctx, "Failed to parse the installed package version", log.Err(err)) continue } for _, adv := range advisories { - if !s.isVulnerable(installedVersion, adv) { + if !s.isVulnerable(ctx, installedVersion, adv) { continue } vulns = append(vulns, types.DetectedVulnerability{ @@ -70,11 +68,12 @@ func (s *Scanner) Detect(_ string, _ *ftypes.Repository, pkgs []ftypes.Package) return vulns, nil } -func (s *Scanner) isVulnerable(installedVersion version.Version, adv dbTypes.Advisory) bool { +func (s *Scanner) isVulnerable(ctx context.Context, installedVersion version.Version, adv dbTypes.Advisory) bool { // Compare versions for fixed vulnerabilities fixedVersion, err 
:= version.NewVersion(adv.FixedVersion) if err != nil { - log.Logger.Debugf("failed to parse Chainguard fixed version: %s", err) + log.DebugContext(ctx, "Failed to parse the fixed version", + log.String("version", adv.FixedVersion), log.Err(err)) return false } diff --git a/pkg/detector/ospkg/chainguard/chainguard_test.go b/pkg/detector/ospkg/chainguard/chainguard_test.go index 446693ce2170..27758c191636 100644 --- a/pkg/detector/ospkg/chainguard/chainguard_test.go +++ b/pkg/detector/ospkg/chainguard/chainguard_test.go @@ -194,7 +194,7 @@ func TestScanner_Detect(t *testing.T) { defer db.Close() s := chainguard.NewScanner() - got, err := s.Detect("", tt.args.repo, tt.args.pkgs) + got, err := s.Detect(nil, "", tt.args.repo, tt.args.pkgs) if tt.wantErr != "" { require.Error(t, err) assert.Contains(t, err.Error(), tt.wantErr) diff --git a/pkg/detector/ospkg/debian/debian.go b/pkg/detector/ospkg/debian/debian.go index c350f6d2c281..e347146f0747 100644 --- a/pkg/detector/ospkg/debian/debian.go +++ b/pkg/detector/ospkg/debian/debian.go @@ -54,18 +54,17 @@ func NewScanner() *Scanner { } // Detect scans and return vulnerabilities using Debian scanner -func (s *Scanner) Detect(osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { - log.Logger.Info("Detecting Debian vulnerabilities...") - +func (s *Scanner) Detect(ctx context.Context, osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { osVer = osver.Major(osVer) - log.Logger.Debugf("debian: os version: %s", osVer) - log.Logger.Debugf("debian: the number of packages: %d", len(pkgs)) + + log.InfoContext(ctx, "Detecting vulnerabilities...", log.String("os_version", osVer), + log.Int("pkg_num", len(pkgs))) var vulns []types.DetectedVulnerability for _, pkg := range pkgs { sourceVersion, err := version.NewVersion(utils.FormatSrcVersion(pkg)) if err != nil { - log.Logger.Debugf("Debian installed package version error: %s", err) + log.DebugContext(ctx, "Installed package version error", log.Err(err)) continue } @@ -106,7 +105,8 @@ func (s *Scanner) Detect(osVer string, _ *ftypes.Repository, pkgs []ftypes.Packa var fixedVersion version.Version fixedVersion, err = version.NewVersion(adv.FixedVersion) if err != nil { - log.Logger.Debugf("Debian advisory package version error: %s", err) + log.DebugContext(ctx, "Failed to parse the fixed version", + log.String("version", adv.FixedVersion), log.Err(err)) continue } diff --git a/pkg/detector/ospkg/debian/debian_test.go b/pkg/detector/ospkg/debian/debian_test.go index 8c22a386a74d..2f5c2b3595ab 100644 --- a/pkg/detector/ospkg/debian/debian_test.go +++ b/pkg/detector/ospkg/debian/debian_test.go @@ -115,7 +115,7 @@ func TestScanner_Detect(t *testing.T) { defer db.Close() s := debian.NewScanner() - got, err := s.Detect(tt.args.osVer, nil, tt.args.pkgs) + got, err := s.Detect(nil, tt.args.osVer, nil, tt.args.pkgs) if tt.wantErr != "" { require.Error(t, err) assert.Contains(t, err.Error(), tt.wantErr) diff --git a/pkg/detector/ospkg/detect.go b/pkg/detector/ospkg/detect.go index 32ed2ff9c5ab..bbeb8e8649d8 100644 --- a/pkg/detector/ospkg/detect.go +++ b/pkg/detector/ospkg/detect.go @@ -55,12 +55,14 @@ func RegisterDriver(name ftypes.OSType, driver Driver) { // Driver defines operations for OS package scan type Driver interface { - Detect(string, *ftypes.Repository, []ftypes.Package) ([]types.DetectedVulnerability, error) + Detect(context.Context, string, *ftypes.Repository, []ftypes.Package) ([]types.DetectedVulnerability, 
error) IsSupportedVersion(context.Context, ftypes.OSType, string) bool } // Detect detects the vulnerabilities func Detect(ctx context.Context, _, osFamily ftypes.OSType, osName string, repo *ftypes.Repository, _ time.Time, pkgs []ftypes.Package) ([]types.DetectedVulnerability, bool, error) { + ctx = log.WithContextPrefix(ctx, string(osFamily)) + driver, err := newDriver(osFamily) if err != nil { return nil, false, ErrUnsupportedOS @@ -73,7 +75,7 @@ func Detect(ctx context.Context, _, osFamily ftypes.OSType, osName string, repo filteredPkgs := lo.Filter(pkgs, func(pkg ftypes.Package, index int) bool { return pkg.Name != "gpg-pubkey" }) - vulns, err := driver.Detect(osName, repo, filteredPkgs) + vulns, err := driver.Detect(ctx, osName, repo, filteredPkgs) if err != nil { return nil, false, xerrors.Errorf("failed detection: %w", err) } @@ -86,6 +88,6 @@ func newDriver(osFamily ftypes.OSType) (Driver, error) { return driver, nil } - log.Logger.Warnf("unsupported os : %s", osFamily) + log.Warn("Unsupported os", log.String("family", string(osFamily))) return nil, ErrUnsupportedOS } diff --git a/pkg/detector/ospkg/mariner/mariner.go b/pkg/detector/ospkg/mariner/mariner.go index 6e1054151518..ae9d80157381 100644 --- a/pkg/detector/ospkg/mariner/mariner.go +++ b/pkg/detector/ospkg/mariner/mariner.go @@ -27,14 +27,12 @@ func NewScanner() *Scanner { } // Detect vulnerabilities in package using CBL-Mariner scanner -func (s *Scanner) Detect(osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { - log.Logger.Info("Detecting CBL-Mariner vulnerabilities...") - +func (s *Scanner) Detect(ctx context.Context, osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { // e.g. 1.0.20210127 osVer = osver.Minor(osVer) - log.Logger.Debugf("CBL-Mariner: os version: %s", osVer) - log.Logger.Debugf("CBL-Mariner: the number of packages: %d", len(pkgs)) + log.InfoContext(ctx, "Detecting vulnerabilities...", log.String("os_version", osVer), + log.Int("pkg_num", len(pkgs))) var vulns []types.DetectedVulnerability for _, pkg := range pkgs { diff --git a/pkg/detector/ospkg/mariner/mariner_test.go b/pkg/detector/ospkg/mariner/mariner_test.go index 262f211d8401..199ab1dd996a 100644 --- a/pkg/detector/ospkg/mariner/mariner_test.go +++ b/pkg/detector/ospkg/mariner/mariner_test.go @@ -105,8 +105,11 @@ func TestScanner_Detect(t *testing.T) { }, }, { - name: "broken advisory", - fixtures: []string{"testdata/fixtures/invalid.yaml", "testdata/fixtures/data-source.yaml"}, + name: "broken advisory", + fixtures: []string{ + "testdata/fixtures/invalid.yaml", + "testdata/fixtures/data-source.yaml", + }, args: args{ osVer: "1.0", pkgs: []ftypes.Package{ @@ -134,7 +137,7 @@ func TestScanner_Detect(t *testing.T) { defer db.Close() s := mariner.NewScanner() - got, err := s.Detect(tt.args.osVer, nil, tt.args.pkgs) + got, err := s.Detect(nil, tt.args.osVer, nil, tt.args.pkgs) if tt.wantErr != "" { require.Error(t, err) assert.Contains(t, err.Error(), tt.wantErr) diff --git a/pkg/detector/ospkg/oracle/oracle.go b/pkg/detector/ospkg/oracle/oracle.go index 186e0d2734dc..2b234698e38a 100644 --- a/pkg/detector/ospkg/oracle/oracle.go +++ b/pkg/detector/ospkg/oracle/oracle.go @@ -54,12 +54,10 @@ func extractKsplice(v string) string { } // Detect scans and return vulnerability in Oracle scanner -func (s *Scanner) Detect(osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { - log.Logger.Info("Detecting Oracle Linux 
vulnerabilities...") - +func (s *Scanner) Detect(ctx context.Context, osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { osVer = osver.Major(osVer) - log.Logger.Debugf("Oracle Linux: os version: %s", osVer) - log.Logger.Debugf("Oracle Linux: the number of packages: %d", len(pkgs)) + log.InfoContext(ctx, "Detecting vulnerabilities...", log.String("os_version", osVer), + log.Int("pkg_num", len(pkgs))) var vulns []types.DetectedVulnerability for _, pkg := range pkgs { diff --git a/pkg/detector/ospkg/oracle/oracle_test.go b/pkg/detector/ospkg/oracle/oracle_test.go index 40c8f26f2d9a..530639c9ea73 100644 --- a/pkg/detector/ospkg/oracle/oracle_test.go +++ b/pkg/detector/ospkg/oracle/oracle_test.go @@ -250,7 +250,7 @@ func TestScanner_Detect(t *testing.T) { defer db.Close() s := NewScanner() - got, err := s.Detect(tt.args.osVer, nil, tt.args.pkgs) + got, err := s.Detect(nil, tt.args.osVer, nil, tt.args.pkgs) if tt.wantErr != "" { require.NotNil(t, err) assert.Contains(t, err.Error(), tt.wantErr) diff --git a/pkg/detector/ospkg/photon/photon.go b/pkg/detector/ospkg/photon/photon.go index b6c00f3c24f8..135ac0d5b283 100644 --- a/pkg/detector/ospkg/photon/photon.go +++ b/pkg/detector/ospkg/photon/photon.go @@ -39,10 +39,9 @@ func NewScanner() *Scanner { } // Detect scans and returns vulnerabilities using photon scanner -func (s *Scanner) Detect(osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { - log.Logger.Info("Detecting Photon Linux vulnerabilities...") - log.Logger.Debugf("Photon Linux: os version: %s", osVer) - log.Logger.Debugf("Photon Linux: the number of packages: %d", len(pkgs)) +func (s *Scanner) Detect(ctx context.Context, osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { + log.InfoContext(ctx, "Detecting vulnerabilities...", log.String("os_version", osVer), + log.Int("pkg_num", len(pkgs))) var vulns []types.DetectedVulnerability for _, pkg := range pkgs { diff --git a/pkg/detector/ospkg/photon/photon_test.go b/pkg/detector/ospkg/photon/photon_test.go index b81b0fd30d6d..a85957f6c516 100644 --- a/pkg/detector/ospkg/photon/photon_test.go +++ b/pkg/detector/ospkg/photon/photon_test.go @@ -94,7 +94,7 @@ func TestScanner_Detect(t *testing.T) { defer db.Close() s := photon.NewScanner() - got, err := s.Detect(tt.args.osVer, nil, tt.args.pkgs) + got, err := s.Detect(nil, tt.args.osVer, nil, tt.args.pkgs) if tt.wantErr != "" { require.Error(t, err) assert.Contains(t, err.Error(), tt.wantErr) diff --git a/pkg/detector/ospkg/redhat/redhat.go b/pkg/detector/ospkg/redhat/redhat.go index fe7581dbf481..277fa6203424 100644 --- a/pkg/detector/ospkg/redhat/redhat.go +++ b/pkg/detector/ospkg/redhat/redhat.go @@ -77,17 +77,15 @@ func NewScanner() *Scanner { } // Detect scans and returns redhat vulnerabilities -func (s *Scanner) Detect(osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { - log.Logger.Info("Detecting RHEL/CentOS vulnerabilities...") - +func (s *Scanner) Detect(ctx context.Context, osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { osVer = osver.Major(osVer) - log.Logger.Debugf("Red Hat: os version: %s", osVer) - log.Logger.Debugf("Red Hat: the number of packages: %d", len(pkgs)) + log.InfoContext(ctx, "Detecting RHEL/CentOS vulnerabilities...", log.String("os_version", osVer), + log.Int("pkg_num", len(pkgs))) var vulns []types.DetectedVulnerability for 
_, pkg := range pkgs { if !isFromSupportedVendor(pkg) { - log.Logger.Debugf("Skipping %s: unsupported vendor", pkg.Name) + log.DebugContext(ctx, "Skipping the package with unsupported vendor", log.String("package", pkg.Name)) continue } diff --git a/pkg/detector/ospkg/redhat/redhat_test.go b/pkg/detector/ospkg/redhat/redhat_test.go index 3910b87f9cac..fe24220490a8 100644 --- a/pkg/detector/ospkg/redhat/redhat_test.go +++ b/pkg/detector/ospkg/redhat/redhat_test.go @@ -369,7 +369,7 @@ func TestScanner_Detect(t *testing.T) { defer func() { _ = dbtest.Close() }() s := redhat.NewScanner() - got, err := s.Detect(tt.args.osVer, nil, tt.args.pkgs) + got, err := s.Detect(nil, tt.args.osVer, nil, tt.args.pkgs) require.Equal(t, tt.wantErr, err != nil, err) assert.Equal(t, tt.want, got) }) diff --git a/pkg/detector/ospkg/rocky/rocky.go b/pkg/detector/ospkg/rocky/rocky.go index 49aaa4d0a543..51efbdd2e01b 100644 --- a/pkg/detector/ospkg/rocky/rocky.go +++ b/pkg/detector/ospkg/rocky/rocky.go @@ -37,12 +37,10 @@ func NewScanner() *Scanner { } // Detect vulnerabilities in package using Rocky Linux scanner -func (s *Scanner) Detect(osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { - log.Logger.Info("Detecting Rocky Linux vulnerabilities...") - +func (s *Scanner) Detect(ctx context.Context, osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { osVer = osver.Major(osVer) - log.Logger.Debugf("Rocky Linux: os version: %s", osVer) - log.Logger.Debugf("Rocky Linux: the number of packages: %d", len(pkgs)) + log.InfoContext(ctx, "Detecting vulnerabilities...", log.String("os_version", osVer), + log.Int("pkg_num", len(pkgs))) var vulns []types.DetectedVulnerability var skipPkgs []string @@ -79,7 +77,8 @@ func (s *Scanner) Detect(osVer string, _ *ftypes.Repository, pkgs []ftypes.Packa } } if len(skipPkgs) > 0 { - log.Logger.Infof("Skipped detection of these packages: %q because modular packages cannot be detected correctly due to a bug in Rocky Linux Errata. See also: https://forums.rockylinux.org/t/some-errata-missing-in-comparison-with-rhel-and-almalinux/3843", skipPkgs) + log.InfoContext(ctx, "Skipped detection of the packages because modular packages cannot be detected correctly due to a bug in Rocky Linux Errata. 
See also: https://forums.rockylinux.org/t/some-errata-missing-in-comparison-with-rhel-and-almalinux/3843", + log.Any("packages", skipPkgs)) } return vulns, nil diff --git a/pkg/detector/ospkg/rocky/rocky_test.go b/pkg/detector/ospkg/rocky/rocky_test.go index dddba1df850a..91250aa73776 100644 --- a/pkg/detector/ospkg/rocky/rocky_test.go +++ b/pkg/detector/ospkg/rocky/rocky_test.go @@ -122,7 +122,7 @@ func TestScanner_Detect(t *testing.T) { defer db.Close() s := rocky.NewScanner() - got, err := s.Detect(tt.args.osVer, nil, tt.args.pkgs) + got, err := s.Detect(nil, tt.args.osVer, nil, tt.args.pkgs) if tt.wantErr != "" { require.Error(t, err) assert.Contains(t, err.Error(), tt.wantErr) diff --git a/pkg/detector/ospkg/suse/suse.go b/pkg/detector/ospkg/suse/suse.go index 617161df87aa..a5ccade5c813 100644 --- a/pkg/detector/ospkg/suse/suse.go +++ b/pkg/detector/ospkg/suse/suse.go @@ -89,10 +89,9 @@ func NewScanner(t Type) *Scanner { } // Detect scans and returns the vulnerabilities -func (s *Scanner) Detect(osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { - log.Logger.Info("Detecting SUSE vulnerabilities...") - log.Logger.Debugf("SUSE: os version: %s", osVer) - log.Logger.Debugf("SUSE: the number of packages: %d", len(pkgs)) +func (s *Scanner) Detect(ctx context.Context, osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { + log.InfoContext(ctx, "Detecting vulnerabilities...", log.String("os_version", osVer), + log.Int("pkg_num", len(pkgs))) var vulns []types.DetectedVulnerability for _, pkg := range pkgs { diff --git a/pkg/detector/ospkg/suse/suse_test.go b/pkg/detector/ospkg/suse/suse_test.go index 7766863e2ec4..eb3e7d9673b4 100644 --- a/pkg/detector/ospkg/suse/suse_test.go +++ b/pkg/detector/ospkg/suse/suse_test.go @@ -97,7 +97,7 @@ func TestScanner_Detect(t *testing.T) { defer db.Close() s := suse.NewScanner(tt.distribution) - got, err := s.Detect(tt.args.osVer, nil, tt.args.pkgs) + got, err := s.Detect(nil, tt.args.osVer, nil, tt.args.pkgs) if tt.wantErr != "" { require.Error(t, err) assert.Contains(t, err.Error(), tt.wantErr) diff --git a/pkg/detector/ospkg/ubuntu/ubuntu.go b/pkg/detector/ospkg/ubuntu/ubuntu.go index 46948806a64d..7c6453050992 100644 --- a/pkg/detector/ospkg/ubuntu/ubuntu.go +++ b/pkg/detector/ospkg/ubuntu/ubuntu.go @@ -76,10 +76,9 @@ func NewScanner() *Scanner { } // Detect scans and returns the vulnerabilities -func (s *Scanner) Detect(osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { - log.Logger.Info("Detecting Ubuntu vulnerabilities...") - log.Logger.Debugf("ubuntu: os version: %s", osVer) - log.Logger.Debugf("ubuntu: the number of packages: %d", len(pkgs)) +func (s *Scanner) Detect(ctx context.Context, osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { + log.InfoContext(ctx, "Detecting vulnerabilities...", log.String("os_version", osVer), + log.Int("pkg_num", len(pkgs))) var vulns []types.DetectedVulnerability for _, pkg := range pkgs { @@ -91,7 +90,7 @@ func (s *Scanner) Detect(osVer string, _ *ftypes.Repository, pkgs []ftypes.Packa sourceVersion, err := version.NewVersion(utils.FormatSrcVersion(pkg)) if err != nil { - log.Logger.Debugf("failed to parse Ubuntu installed package version: %w", err) + log.DebugContext(ctx, "Failed to parse the installed package version", log.Err(err)) continue } @@ -115,7 +114,8 @@ func (s *Scanner) Detect(osVer string, _ *ftypes.Repository, 
pkgs []ftypes.Packa fixedVersion, err := version.NewVersion(adv.FixedVersion) if err != nil { - log.Logger.Debugf("failed to parse Ubuntu package version: %w", err) + log.DebugContext(ctx, "Failed to parse the fixed version", + log.String("version", adv.FixedVersion), log.Err(err)) continue } diff --git a/pkg/detector/ospkg/ubuntu/ubuntu_test.go b/pkg/detector/ospkg/ubuntu/ubuntu_test.go index a2218e211b77..a95c48bb24ee 100644 --- a/pkg/detector/ospkg/ubuntu/ubuntu_test.go +++ b/pkg/detector/ospkg/ubuntu/ubuntu_test.go @@ -179,7 +179,7 @@ func TestScanner_Detect(t *testing.T) { defer db.Close() s := ubuntu.NewScanner() - got, err := s.Detect(tt.args.osVer, nil, tt.args.pkgs) + got, err := s.Detect(nil, tt.args.osVer, nil, tt.args.pkgs) if tt.wantErr != "" { require.Error(t, err) assert.Contains(t, err.Error(), tt.wantErr) diff --git a/pkg/detector/ospkg/version/version.go b/pkg/detector/ospkg/version/version.go index dc47ffd88409..8ed344f1f3ee 100644 --- a/pkg/detector/ospkg/version/version.go +++ b/pkg/detector/ospkg/version/version.go @@ -31,7 +31,8 @@ func Minor(osVer string) string { func Supported(ctx context.Context, eolDates map[string]time.Time, osFamily ftypes.OSType, osVer string) bool { eol, ok := eolDates[osVer] if !ok { - log.Logger.Warnf("This OS version is not on the EOL list: %s %s", osFamily, osVer) + log.Warn("This OS version is not on the EOL list", + log.String("family", string(osFamily)), log.String("version", osVer)) return true // can be the latest version } return clock.Now(ctx).Before(eol) diff --git a/pkg/detector/ospkg/wolfi/wolfi.go b/pkg/detector/ospkg/wolfi/wolfi.go index 9757fc6aa637..c77c968f993f 100644 --- a/pkg/detector/ospkg/wolfi/wolfi.go +++ b/pkg/detector/ospkg/wolfi/wolfi.go @@ -27,10 +27,8 @@ func NewScanner() *Scanner { } // Detect vulnerabilities in package using Wolfi scanner -func (s *Scanner) Detect(_ string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { - log.Logger.Info("Detecting Wolfi vulnerabilities...") - - log.Logger.Debugf("wolfi: the number of packages: %d", len(pkgs)) +func (s *Scanner) Detect(ctx context.Context, _ string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { + log.InfoContext(ctx, "Detecting vulnerabilities...", log.Int("pkg_num", len(pkgs))) var vulns []types.DetectedVulnerability for _, pkg := range pkgs { @@ -46,12 +44,13 @@ func (s *Scanner) Detect(_ string, _ *ftypes.Repository, pkgs []ftypes.Package) installed := utils.FormatVersion(pkg) installedVersion, err := version.NewVersion(installed) if err != nil { - log.Logger.Debugf("failed to parse Wolfi Linux installed package version: %s", err) + log.DebugContext(ctx, "Failed to parse the installed package version", + log.String("version", installed), log.Err(err)) continue } for _, adv := range advisories { - if !s.isVulnerable(installedVersion, adv) { + if !s.isVulnerable(ctx, installedVersion, adv) { continue } vulns = append(vulns, types.DetectedVulnerability{ @@ -70,11 +69,12 @@ func (s *Scanner) Detect(_ string, _ *ftypes.Repository, pkgs []ftypes.Package) return vulns, nil } -func (s *Scanner) isVulnerable(installedVersion version.Version, adv dbTypes.Advisory) bool { +func (s *Scanner) isVulnerable(ctx context.Context, installedVersion version.Version, adv dbTypes.Advisory) bool { // Compare versions for fixed vulnerabilities fixedVersion, err := version.NewVersion(adv.FixedVersion) if err != nil { - log.Logger.Debugf("failed to parse Wolfi Linux fixed version: %s", err) + 
log.DebugContext(ctx, "Failed to parse the fixed version", + log.String("version", adv.FixedVersion), log.Err(err)) return false } diff --git a/pkg/detector/ospkg/wolfi/wolfi_test.go b/pkg/detector/ospkg/wolfi/wolfi_test.go index 78c1e4818c31..2ef5fb664f55 100644 --- a/pkg/detector/ospkg/wolfi/wolfi_test.go +++ b/pkg/detector/ospkg/wolfi/wolfi_test.go @@ -194,7 +194,7 @@ func TestScanner_Detect(t *testing.T) { defer db.Close() s := wolfi.NewScanner() - got, err := s.Detect("", tt.args.repo, tt.args.pkgs) + got, err := s.Detect(nil, "", tt.args.repo, tt.args.pkgs) if tt.wantErr != "" { require.Error(t, err) assert.Contains(t, err.Error(), tt.wantErr) diff --git a/pkg/fanal/analyzer/analyzer.go b/pkg/fanal/analyzer/analyzer.go index c5f55fd5fa15..9312a90ad283 100644 --- a/pkg/fanal/analyzer/analyzer.go +++ b/pkg/fanal/analyzer/analyzer.go @@ -16,8 +16,8 @@ import ( "golang.org/x/xerrors" fos "github.com/aquasecurity/trivy/pkg/fanal/analyzer/os" - "github.com/aquasecurity/trivy/pkg/fanal/log" "github.com/aquasecurity/trivy/pkg/fanal/types" + "github.com/aquasecurity/trivy/pkg/log" "github.com/aquasecurity/trivy/pkg/misconf" xio "github.com/aquasecurity/trivy/pkg/x/io" ) @@ -92,7 +92,7 @@ const GroupBuiltin Group = "builtin" func RegisterAnalyzer(analyzer analyzer) { if _, ok := analyzers[analyzer.Type()]; ok { - log.Logger.Fatalf("analyzer %s is registered twice", analyzer.Type()) + log.Fatal("Analyzer is registered twice", log.String("type", string(analyzer.Type()))) } analyzers[analyzer.Type()] = analyzer } @@ -101,7 +101,7 @@ type postAnalyzerInitialize func(options AnalyzerOptions) (PostAnalyzer, error) func RegisterPostAnalyzer(t Type, initializer postAnalyzerInitialize) { if _, ok := postAnalyzers[t]; ok { - log.Logger.Fatalf("analyzer %s is registered twice", t) + log.Fatal("Analyzer is registered twice", log.String("type", string(t))) } postAnalyzers[t] = initializer } @@ -120,6 +120,7 @@ type CustomGroup interface { type Opener func() (xio.ReadSeekCloserAt, error) type AnalyzerGroup struct { + logger *log.Logger analyzers []analyzer postAnalyzers []PostAnalyzer filePatterns map[Type][]*regexp.Regexp @@ -318,6 +319,7 @@ func NewAnalyzerGroup(opt AnalyzerOptions) (AnalyzerGroup, error) { } group := AnalyzerGroup{ + logger: log.WithPrefix("analyzer"), filePatterns: make(map[Type][]*regexp.Regexp), } for _, p := range opt.FilePatterns { @@ -411,7 +413,7 @@ func (ag AnalyzerGroup) AnalyzeFile(ctx context.Context, wg *sync.WaitGroup, lim } rc, err := opener() if errors.Is(err, fs.ErrPermission) { - log.Logger.Debugf("Permission error: %s", filePath) + ag.logger.Debug("Permission error", log.String("file_path", filePath)) break } else if err != nil { return xerrors.Errorf("unable to open %s: %w", filePath, err) @@ -435,7 +437,7 @@ func (ag AnalyzerGroup) AnalyzeFile(ctx context.Context, wg *sync.WaitGroup, lim Options: opts, }) if err != nil && !errors.Is(err, fos.AnalyzeOSError) { - log.Logger.Debugf("Analysis error: %s", err) + ag.logger.Debug("Analysis error", log.Err(err)) return } result.Merge(ret) diff --git a/pkg/fanal/analyzer/config_analyzer.go b/pkg/fanal/analyzer/config_analyzer.go index 651b936f1ac7..c5682694a5c3 100644 --- a/pkg/fanal/analyzer/config_analyzer.go +++ b/pkg/fanal/analyzer/config_analyzer.go @@ -114,7 +114,7 @@ func (ag *ConfigAnalyzerGroup) AnalyzeImageConfig(ctx context.Context, targetOS r, err := a.Analyze(ctx, input) if err != nil { - log.Logger.Debugf("Image config analysis error: %s", err) + log.Debug("Image 
config analysis error", log.Err(err)) continue } diff --git a/pkg/fanal/analyzer/imgconf/secret/secret.go b/pkg/fanal/analyzer/imgconf/secret/secret.go index 3d153e5f1b1f..8d59726281b6 100644 --- a/pkg/fanal/analyzer/imgconf/secret/secret.go +++ b/pkg/fanal/analyzer/imgconf/secret/secret.go @@ -52,7 +52,7 @@ func (a *secretAnalyzer) Analyze(_ context.Context, input analyzer.ConfigAnalysi }) if len(result.Findings) == 0 { - log.Logger.Debug("No secrets found in container image config") + log.Debug("No secrets found in container image config") return nil, nil } diff --git a/pkg/fanal/analyzer/language/analyze.go b/pkg/fanal/analyzer/language/analyze.go index d31cbb2dfb0f..8a156bece710 100644 --- a/pkg/fanal/analyzer/language/analyze.go +++ b/pkg/fanal/analyzer/language/analyze.go @@ -84,7 +84,7 @@ func toApplication(fileType types.LangType, filePath, libFilePath string, r xio. // Calculate the file digest when one of `spdx` formats is selected d, err := calculateDigest(r) if err != nil { - log.Logger.Warnf("Unable to get checksum for %s: %s", filePath, err) + log.Warn("Unable to get checksum", log.String("file_path", filePath), log.Err(err)) } deps := make(map[string][]string) diff --git a/pkg/fanal/analyzer/language/dart/pub/pubspec.go b/pkg/fanal/analyzer/language/dart/pub/pubspec.go index ab924cafd191..1981e08a023d 100644 --- a/pkg/fanal/analyzer/language/dart/pub/pubspec.go +++ b/pkg/fanal/analyzer/language/dart/pub/pubspec.go @@ -35,11 +35,13 @@ const ( // pubSpecLockAnalyzer analyzes `pubspec.lock` type pubSpecLockAnalyzer struct { + logger *log.Logger parser godeptypes.Parser } func newPubSpecLockAnalyzer(_ analyzer.AnalyzerOptions) (analyzer.PostAnalyzer, error) { return pubSpecLockAnalyzer{ + logger: log.WithPrefix("pub"), parser: pub.NewParser(), }, nil } @@ -49,9 +51,9 @@ func (a pubSpecLockAnalyzer) PostAnalyze(_ context.Context, input analyzer.PostA // get all DependsOn from cache dir // lib ID -> DependsOn names - allDependsOn, err := findDependsOn() + allDependsOn, err := a.findDependsOn() if err != nil { - log.Logger.Warnf("Unable to parse cache dir: %s", err) + a.logger.Warn("Unable to parse cache dir", log.Err(err)) } required := func(path string, d fs.DirEntry) bool { @@ -98,10 +100,11 @@ func (a pubSpecLockAnalyzer) PostAnalyze(_ context.Context, input analyzer.PostA }, nil } -func findDependsOn() (map[string][]string, error) { +func (a pubSpecLockAnalyzer) findDependsOn() (map[string][]string, error) { dir := cacheDir() if !fsutils.DirExists(dir) { - log.Logger.Debugf("Cache dir (%s) not found. Need 'dart pub get' to fill dependency relationships", dir) + a.logger.Debug("Cache dir not found. 
Need 'dart pub get' to fill dependency relationships", + log.String("dir", dir)) return nil, nil } @@ -113,7 +116,7 @@ func findDependsOn() (map[string][]string, error) { if err := fsutils.WalkDir(os.DirFS(dir), ".", required, func(path string, d fs.DirEntry, r io.Reader) error { id, dependsOn, err := parsePubSpecYaml(r) if err != nil { - log.Logger.Debugf("Unable to parse %q: %s", path, err) + a.logger.Debug("Unable to parse pubspec.yaml", log.String("path", path), log.Err(err)) return nil } if id != "" { diff --git a/pkg/fanal/analyzer/language/dotnet/nuget/nuspec.go b/pkg/fanal/analyzer/language/dotnet/nuget/nuspec.go index b42d1cec38d2..6f25c91e565a 100644 --- a/pkg/fanal/analyzer/language/dotnet/nuget/nuspec.go +++ b/pkg/fanal/analyzer/language/dotnet/nuget/nuspec.go @@ -30,10 +30,13 @@ type License struct { } type nuspecParser struct { + logger *log.Logger packagesDir string // global packages folder - https: //learn.microsoft.com/en-us/nuget/consume-packages/managing-the-global-packages-and-cache-folders } func newNuspecParser() nuspecParser { + logger := log.WithPrefix("nuget") + // cf. https: //learn.microsoft.com/en-us/nuget/consume-packages/managing-the-global-packages-and-cache-folders packagesDir := os.Getenv("NUGET_PACKAGES") if packagesDir == "" { @@ -41,11 +44,12 @@ func newNuspecParser() nuspecParser { } if !fsutils.DirExists(packagesDir) { - log.Logger.Debugf("The nuget packages directory couldn't be found. License search disabled") + logger.Debug("The nuget packages directory couldn't be found. License search disabled") return nuspecParser{} } return nuspecParser{ + logger: logger, packagesDir: packagesDir, } } diff --git a/pkg/fanal/analyzer/language/golang/mod/mod.go b/pkg/fanal/analyzer/language/golang/mod/mod.go index 5aa0ae2293fe..cc9b1b439a95 100644 --- a/pkg/fanal/analyzer/language/golang/mod/mod.go +++ b/pkg/fanal/analyzer/language/golang/mod/mod.go @@ -52,6 +52,8 @@ type gomodAnalyzer struct { leafModParser godeptypes.Parser licenseClassifierConfidenceLevel float64 + + logger *log.Logger } func newGoModAnalyzer(opt analyzer.AnalyzerOptions) (analyzer.PostAnalyzer, error) { @@ -60,6 +62,7 @@ func newGoModAnalyzer(opt analyzer.AnalyzerOptions) (analyzer.PostAnalyzer, erro sumParser: sum.NewParser(), leafModParser: mod.NewParser(false), licenseClassifierConfidenceLevel: opt.LicenseScannerOption.ClassifierConfidenceLevel, + logger: log.WithPrefix("golang"), }, nil } @@ -97,7 +100,7 @@ func (a *gomodAnalyzer) PostAnalyze(_ context.Context, input analyzer.PostAnalys } if err = a.fillAdditionalData(apps); err != nil { - log.Logger.Warnf("Unable to collect additional info: %s", err) + a.logger.Warn("Unable to collect additional info", log.Err(err)) } return &analyzer.AnalysisResult{ @@ -128,7 +131,8 @@ func (a *gomodAnalyzer) fillAdditionalData(apps []types.Application) error { // $GOPATH/pkg/mod modPath := filepath.Join(gopath, "pkg", "mod") if !fsutils.DirExists(modPath) { - log.Logger.Debugf("GOPATH (%s) not found. Need 'go mod download' to fill licenses and dependency relationships", modPath) + a.logger.Debug("GOPATH not found. 
Need 'go mod download' to fill licenses and dependency relationships", + log.String("GOPATH", modPath)) return nil } @@ -185,7 +189,8 @@ func (a *gomodAnalyzer) collectDeps(modDir, pkgID string) (godeptypes.Dependency modPath := filepath.Join(modDir, "go.mod") f, err := os.Open(modPath) if errors.Is(err, fs.ErrNotExist) { - log.Logger.Debugf("Unable to identify dependencies of %s as it doesn't support Go modules", pkgID) + a.logger.Debug("Unable to identify dependencies as it doesn't support Go modules", + log.String("module", pkgID)) return godeptypes.Dependency{}, nil } else if err != nil { return godeptypes.Dependency{}, xerrors.Errorf("file open error: %w", err) diff --git a/pkg/fanal/analyzer/language/java/gradle/lockfile.go b/pkg/fanal/analyzer/language/java/gradle/lockfile.go index 5dddb0b49c3c..2426722c31ca 100644 --- a/pkg/fanal/analyzer/language/java/gradle/lockfile.go +++ b/pkg/fanal/analyzer/language/java/gradle/lockfile.go @@ -32,19 +32,21 @@ const ( // gradleLockAnalyzer analyzes '*gradle.lockfile' type gradleLockAnalyzer struct { + logger *log.Logger parser godeptypes.Parser } func newGradleLockAnalyzer(_ analyzer.AnalyzerOptions) (analyzer.PostAnalyzer, error) { return &gradleLockAnalyzer{ + logger: log.WithPrefix("gradle"), parser: lockfile.NewParser(), }, nil } func (a gradleLockAnalyzer) PostAnalyze(_ context.Context, input analyzer.PostAnalysisInput) (*analyzer.AnalysisResult, error) { - poms, err := parsePoms() + poms, err := a.parsePoms() if err != nil { - log.Logger.Warnf("Unable to get licenses and dependsOn: %s", err) + a.logger.Warn("Unable to get licenses and dependencies", log.Err(err)) } required := func(path string, d fs.DirEntry) bool { diff --git a/pkg/fanal/analyzer/language/java/gradle/pom.go b/pkg/fanal/analyzer/language/java/gradle/pom.go index 638b5c9fd61b..cf24e4716054 100644 --- a/pkg/fanal/analyzer/language/java/gradle/pom.go +++ b/pkg/fanal/analyzer/language/java/gradle/pom.go @@ -12,7 +12,7 @@ import ( "golang.org/x/net/html/charset" "golang.org/x/xerrors" - "github.com/aquasecurity/trivy/pkg/fanal/log" + "github.com/aquasecurity/trivy/pkg/log" "github.com/aquasecurity/trivy/pkg/utils/fsutils" ) @@ -65,8 +65,8 @@ func (props *Properties) UnmarshalXML(d *xml.Decoder, _ xml.StartElement) error return nil } -func parsePoms() (map[string]pomXML, error) { - cacheDir := detectCacheDir() +func (a gradleLockAnalyzer) parsePoms() (map[string]pomXML, error) { + cacheDir := a.detectCacheDir() // Cache dir is not found if cacheDir == "" { return nil, nil @@ -80,7 +80,7 @@ func parsePoms() (map[string]pomXML, error) { err := fsutils.WalkDir(os.DirFS(cacheDir), ".", required, func(path string, _ fs.DirEntry, r io.Reader) error { pom, err := parsePom(r, path) if err != nil { - log.Logger.Debugf("Unable to parse %q: %s", path, err) + a.logger.Debug("Unable to parse pom", log.String("file_path", path), log.Err(err)) return nil } @@ -146,7 +146,7 @@ func (pom *pomXML) resolveDependencyVersions() error { return nil } -func detectCacheDir() string { +func (a gradleLockAnalyzer) detectCacheDir() string { // https://docs.gradle.org/current/userguide/directory_layout.html dir := os.Getenv("GRADLE_USER_HOME") if dir == "" { @@ -159,7 +159,7 @@ func detectCacheDir() string { dir = filepath.Join(dir, "caches") if !fsutils.DirExists(dir) { - log.Logger.Debug("Unable to get licenses and dependsOn. Gradle cache dir doesn't exist.") + a.logger.Debug("Unable to get licenses and dependencies. 
Gradle cache dir doesn't exist.") return "" } return dir diff --git a/pkg/fanal/analyzer/language/nodejs/license/license.go b/pkg/fanal/analyzer/language/nodejs/license/license.go index 7889a2b4e9a8..0a797c558e56 100644 --- a/pkg/fanal/analyzer/language/nodejs/license/license.go +++ b/pkg/fanal/analyzer/language/nodejs/license/license.go @@ -17,12 +17,14 @@ import ( ) type License struct { + logger *log.Logger parser *packagejson.Parser classifierConfidenceLevel float64 } func NewLicense(classifierConfidenceLevel float64) *License { return &License{ + logger: log.WithPrefix("npm"), parser: packagejson.NewParser(), classifierConfidenceLevel: classifierConfidenceLevel, } @@ -42,7 +44,8 @@ func (l *License) Traverse(fsys fs.FS, root string) (map[string][]string, error) return nil } - log.Logger.Debugf("License names are missing in %q, an attempt to find them in the %q file", pkgJSONPath, licenseFileName) + l.logger.Debug("License names are missing, an attempt to find them in the license file", + log.String("file", pkgJSONPath), log.String("license_file", licenseFileName)) licenseFilePath := path.Join(path.Dir(pkgJSONPath), licenseFileName) if findings, err := classifyLicense(licenseFilePath, l.classifierConfidenceLevel, fsys); err != nil { @@ -51,7 +54,8 @@ func (l *License) Traverse(fsys fs.FS, root string) (map[string][]string, error) // License found licenses[pkg.ID] = findings.Names() } else { - log.Logger.Debugf("The license file %q was not found or the license could not be classified", licenseFilePath) + l.logger.Debug("The license file was not found or the license could not be classified", + log.String("license_file", licenseFilePath)) } return nil } diff --git a/pkg/fanal/analyzer/language/nodejs/npm/npm.go b/pkg/fanal/analyzer/language/nodejs/npm/npm.go index c5dd5d26eed0..44123eade970 100644 --- a/pkg/fanal/analyzer/language/nodejs/npm/npm.go +++ b/pkg/fanal/analyzer/language/nodejs/npm/npm.go @@ -32,12 +32,14 @@ const ( ) type npmLibraryAnalyzer struct { + logger *log.Logger lockParser godeptypes.Parser packageParser *packagejson.Parser } func newNpmLibraryAnalyzer(_ analyzer.AnalyzerOptions) (analyzer.PostAnalyzer, error) { return &npmLibraryAnalyzer{ + logger: log.WithPrefix("npm"), lockParser: npm.NewParser(), packageParser: packagejson.NewParser(), }, nil @@ -54,7 +56,7 @@ func (a npmLibraryAnalyzer) PostAnalyze(_ context.Context, input analyzer.PostAn // Find all licenses from package.json files under node_modules dirs licenses, err := a.findLicenses(input.FS, filePath) if err != nil { - log.Logger.Errorf("Unable to collect licenses: %s", err) + a.logger.Error("Unable to collect licenses", log.Err(err)) licenses = make(map[string]string) } @@ -127,7 +129,8 @@ func (a npmLibraryAnalyzer) findLicenses(fsys fs.FS, lockPath string) (map[strin dir := path.Dir(lockPath) root := path.Join(dir, "node_modules") if _, err := fs.Stat(fsys, root); errors.Is(err, fs.ErrNotExist) { - log.Logger.Infof(`To collect the license information of packages in %q, "npm install" needs to be performed beforehand`, lockPath) + a.logger.Info(`To collect the license information of packages, "npm install" needs to be performed beforehand`, + log.String("dir", root)) return nil, nil } diff --git a/pkg/fanal/analyzer/language/nodejs/npm/npm_test.go b/pkg/fanal/analyzer/language/nodejs/npm/npm_test.go index 7635e0266729..9c1cc51a55d8 100644 --- a/pkg/fanal/analyzer/language/nodejs/npm/npm_test.go +++ b/pkg/fanal/analyzer/language/nodejs/npm/npm_test.go @@ -15,7 +15,7 @@ import ( ) func TestMain(m *testing.M) 
{ - _ = log.InitLogger(false, true) + log.InitLogger(false, true) os.Exit(m.Run()) } diff --git a/pkg/fanal/analyzer/language/nodejs/yarn/yarn.go b/pkg/fanal/analyzer/language/nodejs/yarn/yarn.go index 1cbd6b4896aa..3d654662078b 100644 --- a/pkg/fanal/analyzer/language/nodejs/yarn/yarn.go +++ b/pkg/fanal/analyzer/language/nodejs/yarn/yarn.go @@ -42,6 +42,7 @@ const version = 2 var fragmentRegexp = regexp.MustCompile(`(\S+):(@?.*?)(@(.*?)|)$`) type yarnAnalyzer struct { + logger *log.Logger packageJsonParser *packagejson.Parser lockParser godeptypes.Parser comparer npm.Comparer @@ -50,6 +51,7 @@ type yarnAnalyzer struct { func newYarnAnalyzer(opt analyzer.AnalyzerOptions) (analyzer.PostAnalyzer, error) { return &yarnAnalyzer{ + logger: log.WithPrefix("yarn"), packageJsonParser: packagejson.NewParser(), lockParser: yarn.NewParser(), comparer: npm.Comparer{}, @@ -75,12 +77,13 @@ func (a yarnAnalyzer) PostAnalyze(_ context.Context, input analyzer.PostAnalysis licenses, err := a.traverseLicenses(input.FS, filePath) if err != nil { - log.Logger.Debugf("Unable to traverse licenses: %s", err) + a.logger.Debug("Unable to traverse licenses", log.Err(err)) } // Parse package.json alongside yarn.lock to find direct deps and mark dev deps if err = a.analyzeDependencies(input.FS, path.Dir(filePath), app); err != nil { - log.Logger.Warnf("Unable to parse %q to remove dev dependencies: %s", path.Join(path.Dir(filePath), types.NpmPkg), err) + a.logger.Warn("Unable to parse package.json to remove dev dependencies", + log.String("file_path", path.Join(path.Dir(filePath), types.NpmPkg)), log.Err(err)) } // Fill licenses @@ -156,7 +159,7 @@ func (a yarnAnalyzer) analyzeDependencies(fsys fs.FS, dir string, app *types.App packageJsonPath := path.Join(dir, types.NpmPkg) directDeps, directDevDeps, err := a.parsePackageJsonDependencies(fsys, packageJsonPath) if errors.Is(err, fs.ErrNotExist) { - log.Logger.Debugf("Yarn: %s not found", packageJsonPath) + a.logger.Debug("package.json not found", log.String("path", packageJsonPath)) return nil } else if err != nil { return xerrors.Errorf("unable to parse %s: %w", dir, err) diff --git a/pkg/fanal/analyzer/language/php/composer/composer.go b/pkg/fanal/analyzer/language/php/composer/composer.go index d0244514a0e7..6fffecf05a50 100644 --- a/pkg/fanal/analyzer/language/php/composer/composer.go +++ b/pkg/fanal/analyzer/language/php/composer/composer.go @@ -62,7 +62,8 @@ func (a composerAnalyzer) PostAnalyze(_ context.Context, input analyzer.PostAnal // Parse composer.json alongside composer.lock to identify the direct dependencies if err = a.mergeComposerJson(input.FS, filepath.Dir(path), app); err != nil { - log.Logger.Warnf("Unable to parse %q to identify direct dependencies: %s", filepath.Join(filepath.Dir(path), types.ComposerJson), err) + log.Warn("Unable to parse composer.json to identify direct dependencies", + log.String("path", filepath.Join(filepath.Dir(path), types.ComposerJson)), log.Err(err)) } sort.Sort(app.Libraries) apps = append(apps, *app) @@ -109,7 +110,7 @@ func (a composerAnalyzer) mergeComposerJson(fsys fs.FS, dir string, app *types.A p, err := a.parseComposerJson(fsys, path) if errors.Is(err, fs.ErrNotExist) { // Assume all the packages are direct dependencies as it cannot identify them from composer.lock - log.Logger.Debugf("Unable to determine the direct dependencies: %s not found", path) + log.Debug("Unable to determine the direct dependencies, composer.json not found", log.String("path", path)) return nil } else if err != nil { return 
xerrors.Errorf("unable to parse %s: %w", path, err) diff --git a/pkg/fanal/analyzer/language/python/packaging/packaging.go b/pkg/fanal/analyzer/language/python/packaging/packaging.go index c45a4f5aef0f..6f2c508b5404 100644 --- a/pkg/fanal/analyzer/language/python/packaging/packaging.go +++ b/pkg/fanal/analyzer/language/python/packaging/packaging.go @@ -34,6 +34,7 @@ const version = 1 func newPackagingAnalyzer(opt analyzer.AnalyzerOptions) (analyzer.PostAnalyzer, error) { return &packagingAnalyzer{ + logger: log.WithPrefix("python"), pkgParser: packaging.NewParser(), licenseClassifierConfidenceLevel: opt.LicenseScannerOption.ClassifierConfidenceLevel, }, nil @@ -54,6 +55,7 @@ var ( ) type packagingAnalyzer struct { + logger *log.Logger pkgParser godeptypes.Parser licenseClassifierConfidenceLevel float64 } @@ -99,7 +101,7 @@ func (a packagingAnalyzer) PostAnalyze(_ context.Context, input analyzer.PostAna } if err := a.fillAdditionalData(input.FS, app); err != nil { - log.Logger.Warnf("Unable to collect additional info: %s", err) + a.logger.Warn("Unable to collect additional info", log.Err(err)) } apps = append(apps, *app) diff --git a/pkg/fanal/analyzer/language/python/poetry/poetry.go b/pkg/fanal/analyzer/language/python/poetry/poetry.go index 90897e8f12ba..8a68a61439c3 100644 --- a/pkg/fanal/analyzer/language/python/poetry/poetry.go +++ b/pkg/fanal/analyzer/language/python/poetry/poetry.go @@ -27,12 +27,14 @@ func init() { const version = 1 type poetryAnalyzer struct { + logger *log.Logger pyprojectParser *pyproject.Parser lockParser godeptypes.Parser } func newPoetryAnalyzer(_ analyzer.AnalyzerOptions) (analyzer.PostAnalyzer, error) { return &poetryAnalyzer{ + logger: log.WithPrefix("poetry"), pyprojectParser: pyproject.NewParser(), lockParser: poetry.NewParser(), }, nil @@ -56,7 +58,8 @@ func (a poetryAnalyzer) PostAnalyze(_ context.Context, input analyzer.PostAnalys // Parse pyproject.toml alongside poetry.lock to identify the direct dependencies if err = a.mergePyProject(input.FS, filepath.Dir(path), app); err != nil { - log.Logger.Warnf("Unable to parse %q to identify direct dependencies: %s", filepath.Join(filepath.Dir(path), types.PyProject), err) + a.logger.Warn("Unable to parse pyproject.toml to identify direct dependencies", + log.String("path", filepath.Join(filepath.Dir(path), types.PyProject)), log.Err(err)) } apps = append(apps, *app) @@ -94,7 +97,7 @@ func (a poetryAnalyzer) mergePyProject(fsys fs.FS, dir string, app *types.Applic p, err := a.parsePyProject(fsys, path) if errors.Is(err, fs.ErrNotExist) { // Assume all the packages are direct dependencies as it cannot identify them from poetry.lock - log.Logger.Debugf("Poetry: %s not found", path) + a.logger.Debug("pyproject.toml not found", log.String("path", path)) return nil } else if err != nil { return xerrors.Errorf("unable to parse %s: %w", path, err) diff --git a/pkg/fanal/analyzer/language/rust/cargo/cargo.go b/pkg/fanal/analyzer/language/rust/cargo/cargo.go index f487ba0c46e8..ba0654a4942f 100644 --- a/pkg/fanal/analyzer/language/rust/cargo/cargo.go +++ b/pkg/fanal/analyzer/language/rust/cargo/cargo.go @@ -41,12 +41,14 @@ var requiredFiles = []string{ } type cargoAnalyzer struct { + logger *log.Logger lockParser godeptypes.Parser comparer compare.GenericComparer } func newCargoAnalyzer(_ analyzer.AnalyzerOptions) (analyzer.PostAnalyzer, error) { return &cargoAnalyzer{ + logger: log.WithPrefix("cargo"), lockParser: cargo.NewParser(), comparer: compare.GenericComparer{}, }, nil @@ -70,7 +72,8 @@ func (a cargoAnalyzer) 
PostAnalyze(_ context.Context, input analyzer.PostAnalysi // Parse Cargo.toml alongside Cargo.lock to identify the direct dependencies if err = a.removeDevDependencies(input.FS, path.Dir(filePath), app); err != nil { - log.Logger.Warnf("Unable to parse %q to identify direct dependencies: %s", path.Join(path.Dir(filePath), types.CargoToml), err) + a.logger.Warn("Unable to parse Cargo.toml to identify direct dependencies", + log.String("path", path.Join(path.Dir(filePath), types.CargoToml)), log.Err(err)) } sort.Sort(app.Libraries) apps = append(apps, *app) @@ -107,7 +110,7 @@ func (a cargoAnalyzer) removeDevDependencies(fsys fs.FS, dir string, app *types. cargoTOMLPath := path.Join(dir, types.CargoToml) directDeps, err := a.parseRootCargoTOML(fsys, cargoTOMLPath) if errors.Is(err, fs.ErrNotExist) { - log.Logger.Debugf("Cargo: %s not found", cargoTOMLPath) + a.logger.Debug("Cargo.toml not found", log.String("path", cargoTOMLPath)) return nil } else if err != nil { return xerrors.Errorf("unable to parse %s: %w", cargoTOMLPath, err) @@ -128,7 +131,7 @@ func (a cargoAnalyzer) removeDevDependencies(fsys fs.FS, dir string, app *types. } if match, err := a.matchVersion(pkg.Version, constraint); err != nil { - log.Logger.Warnf("Unable to match Cargo version: package: %s, error: %s", pkg.ID, err) + a.logger.Warn("Unable to match Cargo version", log.String("package", pkg.ID), log.Err(err)) continue } else if match { // Mark as a direct dependency @@ -179,7 +182,7 @@ func (a cargoAnalyzer) parseRootCargoTOML(fsys fs.FS, filePath string) (map[stri memberPath := path.Join(path.Dir(filePath), member, types.CargoToml) memberDeps, _, err := parseCargoTOML(fsys, memberPath) if err != nil { - log.Logger.Warnf("Unable to parse %q: %s", memberPath, err) + a.logger.Warn("Unable to parse Cargo.toml", log.String("member_path", memberPath), log.Err(err)) continue } // Member dependencies shouldn't overwrite dependencies from root cargo.toml file diff --git a/pkg/fanal/analyzer/licensing/license.go b/pkg/fanal/analyzer/licensing/license.go index 3e3986d732f8..42872b1c8474 100644 --- a/pkg/fanal/analyzer/licensing/license.go +++ b/pkg/fanal/analyzer/licensing/license.go @@ -12,9 +12,9 @@ import ( "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" - "github.com/aquasecurity/trivy/pkg/fanal/log" "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/licensing" + "github.com/aquasecurity/trivy/pkg/log" xio "github.com/aquasecurity/trivy/pkg/x/io" ) @@ -37,18 +37,48 @@ var ( } acceptedExtensions = []string{ - ".asp", ".aspx", ".bas", ".bat", ".b", ".c", ".cue", ".cgi", ".cs", ".css", ".fish", ".html", ".h", ".ini", - ".java", ".js", ".jsx", ".markdown", ".md", ".py", ".php", ".pl", ".r", ".rb", ".sh", ".sql", ".ts", - ".tsx", ".txt", ".vue", ".zsh", + ".asp", + ".aspx", + ".bas", + ".bat", + ".b", + ".c", + ".cue", + ".cgi", + ".cs", + ".css", + ".fish", + ".html", + ".h", + ".ini", + ".java", + ".js", + ".jsx", + ".markdown", + ".md", + ".py", + ".php", + ".pl", + ".r", + ".rb", + ".sh", + ".sql", + ".ts", + ".tsx", + ".txt", + ".vue", + ".zsh", } acceptedFileNames = []string{ - "license", "licence", "copyright", + "license", + "licence", + "copyright", } ) func init() { - analyzer.RegisterAnalyzer(&licenseFileAnalyzer{}) + analyzer.RegisterAnalyzer(newLicenseFileAnalyzer()) } // licenseFileAnalyzer is an analyzer for file headers and license files @@ -56,8 +86,13 @@ type licenseFileAnalyzer struct {
classifierConfidenceLevel float64 } -func (a licenseFileAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) { - log.Logger.Debugf("License scanning: %s", input.FilePath) +func newLicenseFileAnalyzer() *licenseFileAnalyzer { + return &licenseFileAnalyzer{} +} + +func (a *licenseFileAnalyzer) Analyze(ctx context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) { + ctx = log.WithContextPrefix(ctx, "license") + log.DebugContext(ctx, "License scanning", log.String("file_path", input.FilePath)) // need files to be text based, readable files readable, err := isHumanReadable(input.Content, input.Info.Size()) @@ -81,7 +116,7 @@ func (a *licenseFileAnalyzer) Init(opt analyzer.AnalyzerOptions) error { return nil } -func (a licenseFileAnalyzer) Required(filePath string, _ os.FileInfo) bool { +func (a *licenseFileAnalyzer) Required(filePath string, _ os.FileInfo) bool { for _, skipDir := range skipDirs { if strings.Contains(filePath, skipDir) { return false @@ -116,10 +151,10 @@ func isHumanReadable(content xio.ReadSeekerAt, fileSize int64) (bool, error) { return true, nil } -func (a licenseFileAnalyzer) Type() analyzer.Type { +func (a *licenseFileAnalyzer) Type() analyzer.Type { return analyzer.TypeLicenseFile } -func (a licenseFileAnalyzer) Version() int { +func (a *licenseFileAnalyzer) Version() int { return version } diff --git a/pkg/fanal/analyzer/licensing/license_test.go b/pkg/fanal/analyzer/licensing/license_test.go index fd04029873d9..292b6aea98be 100644 --- a/pkg/fanal/analyzer/licensing/license_test.go +++ b/pkg/fanal/analyzer/licensing/license_test.go @@ -53,7 +53,7 @@ func Test_licenseAnalyzer_Analyze(t *testing.T) { fi, err := f.Stat() require.NoError(t, err) - a := licenseFileAnalyzer{} + a := newLicenseFileAnalyzer() got, err := a.Analyze(context.TODO(), analyzer.AnalysisInput{ FilePath: tt.filePath, Content: f, diff --git a/pkg/fanal/analyzer/pkg/apk/apk.go b/pkg/fanal/analyzer/pkg/apk/apk.go index 5f3b82dbbe76..bb2007470b1b 100644 --- a/pkg/fanal/analyzer/pkg/apk/apk.go +++ b/pkg/fanal/analyzer/pkg/apk/apk.go @@ -23,7 +23,7 @@ import ( ) func init() { - analyzer.RegisterAnalyzer(&alpinePkgAnalyzer{}) + analyzer.RegisterAnalyzer(newAlpinePkgAnalyzer()) } const analyzerVersion = 2 @@ -32,9 +32,12 @@ var requiredFiles = []string{"lib/apk/db/installed"} type alpinePkgAnalyzer struct{} -func (a alpinePkgAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) { +func newAlpinePkgAnalyzer() *alpinePkgAnalyzer { return &alpinePkgAnalyzer{} } + +func (a alpinePkgAnalyzer) Analyze(ctx context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) { + ctx = log.WithContextPrefix(ctx, "apk") scanner := bufio.NewScanner(input.Content) - parsedPkgs, installedFiles := a.parseApkInfo(scanner) + parsedPkgs, installedFiles := a.parseApkInfo(ctx, scanner) return &analyzer.AnalysisResult{ PackageInfos: []types.PackageInfo{ @@ -47,7 +50,7 @@ func (a alpinePkgAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisInp }, nil } -func (a alpinePkgAnalyzer) parseApkInfo(scanner *bufio.Scanner) ([]types.Package, []string) { +func (a alpinePkgAnalyzer) parseApkInfo(ctx context.Context, scanner *bufio.Scanner) ([]types.Package, []string) { var ( pkgs []types.Package pkg types.Package @@ -76,7 +79,8 @@ func (a alpinePkgAnalyzer) parseApkInfo(scanner *bufio.Scanner) ([]types.Package case "V:": version = line[2:] if !apkVersion.Valid(version) { - log.Logger.Warnf("Invalid 
Version Found : OS %s, Package %s, Version %s", "alpine", pkg.Name, version) + log.WarnContext(ctx, "Invalid version found", + log.String("name", pkg.Name), log.String("version", version)) continue } pkg.Version = version @@ -99,7 +103,7 @@ func (a alpinePkgAnalyzer) parseApkInfo(scanner *bufio.Scanner) ([]types.Package case "A:": pkg.Arch = line[2:] case "C:": - d := decodeChecksumLine(line) + d := a.decodeChecksumLine(ctx, line) if d != "" { pkg.Digest = d } @@ -223,9 +227,9 @@ func (a alpinePkgAnalyzer) Version() int { } // decodeChecksumLine decodes checksum line -func decodeChecksumLine(line string) digest.Digest { +func (a alpinePkgAnalyzer) decodeChecksumLine(ctx context.Context, line string) digest.Digest { if len(line) < 2 { - log.Logger.Debugf("Unable to decode checksum line of apk package: %s", line) + log.DebugContext(ctx, "Unable to decode checksum line of apk package", log.String("line", line)) return "" } // https://wiki.alpinelinux.org/wiki/Apk_spec#Package_Checksum_Field @@ -239,7 +243,7 @@ func decodeChecksumLine(line string) digest.Digest { decodedDigestString, err := base64.StdEncoding.DecodeString(d) if err != nil { - log.Logger.Debugf("unable to decode digest: %s", err) + log.DebugContext(ctx, "Unable to decode digest", log.Err(err)) return "" } h := hex.EncodeToString(decodedDigestString) diff --git a/pkg/fanal/analyzer/pkg/apk/apk_test.go b/pkg/fanal/analyzer/pkg/apk/apk_test.go index 948cd1775da0..08a5d302d324 100644 --- a/pkg/fanal/analyzer/pkg/apk/apk_test.go +++ b/pkg/fanal/analyzer/pkg/apk/apk_test.go @@ -2,6 +2,7 @@ package apk import ( "bufio" + "context" "os" "testing" @@ -51,9 +52,12 @@ var pkgs = []types.Package{ SrcName: "alpine-baselayout", SrcVersion: "3.0.3-r0", Licenses: []string{"GPL-2.0"}, - DependsOn: []string{"busybox@1.24.2-r9", "musl@1.1.14-r10"}, - Arch: "x86_64", - Digest: "sha1:a214896150411d72dd1fafdb32d1c6c4855cccfa", + DependsOn: []string{ + "busybox@1.24.2-r9", + "musl@1.1.14-r10", + }, + Arch: "x86_64", + Digest: "sha1:a214896150411d72dd1fafdb32d1c6c4855cccfa", InstalledFiles: []string{ "etc/hosts", "etc/sysctl.conf", @@ -121,9 +125,12 @@ var pkgs = []types.Package{ SrcName: "openssl", SrcVersion: "1.0.2h-r1", Licenses: []string{"openssl"}, - DependsOn: []string{"musl@1.1.14-r10", "zlib@1.2.8-r2"}, - Arch: "x86_64", - Digest: "sha1:65c860ff8f103b664f40ba849a3f5a51c69c8beb", + DependsOn: []string{ + "musl@1.1.14-r10", + "zlib@1.2.8-r2", + }, + Arch: "x86_64", + Digest: "sha1:65c860ff8f103b664f40ba849a3f5a51c69c8beb", InstalledFiles: []string{ "lib/libcrypto.so.1.0.0", "usr/bin/c_rehash", @@ -199,8 +206,12 @@ var pkgs = []types.Package{ Version: "1.1.14-r10", SrcName: "musl", SrcVersion: "1.1.14-r10", - Licenses: []string{"MIT", "BSD-3-Clause", "GPL-2.0"}, - Digest: "sha1:608aa1dd39eff7bc6615d3e5e33383750f8f5ecc", + Licenses: []string{ + "MIT", + "BSD-3-Clause", + "GPL-2.0", + }, + Digest: "sha1:608aa1dd39eff7bc6615d3e5e33383750f8f5ecc", DependsOn: []string{ "musl@1.1.14-r10", "scanelf@1.1.6-r0", @@ -288,8 +299,12 @@ var pkgs = []types.Package{ Version: "2.7.4-r0", SrcName: "ada", SrcVersion: "2.7.4-r0", - Licenses: []string{"Apache-2.0", "MIT", "MPL-2.0"}, - Digest: "sha1:593154f80c440685448e0f52479725d7bc9b678d", + Licenses: []string{ + "Apache-2.0", + "MIT", + "MPL-2.0", + }, + Digest: "sha1:593154f80c440685448e0f52479725d7bc9b678d", DependsOn: []string{ "musl@1.1.14-r10", }, @@ -431,7 +446,7 @@ func TestParseApkInfo(t *testing.T) { require.NoError(t, err) defer f.Close() scanner := bufio.NewScanner(f) - gotPkgs, gotFiles := 
a.parseApkInfo(scanner) + gotPkgs, gotFiles := a.parseApkInfo(context.Background(), scanner) assert.Equal(t, tt.wantPkgs, gotPkgs) assert.Equal(t, tt.wantFiles, gotFiles) diff --git a/pkg/fanal/analyzer/pkg/dpkg/dpkg.go b/pkg/fanal/analyzer/pkg/dpkg/dpkg.go index 74fd4d82ef7b..d73c905fd413 100644 --- a/pkg/fanal/analyzer/pkg/dpkg/dpkg.go +++ b/pkg/fanal/analyzer/pkg/dpkg/dpkg.go @@ -16,7 +16,6 @@ import ( debVersion "github.com/knqyf263/go-deb-version" "github.com/samber/lo" - "go.uber.org/zap" "golang.org/x/exp/slices" "golang.org/x/xerrors" @@ -31,10 +30,14 @@ func init() { analyzer.RegisterPostAnalyzer(analyzer.TypeDpkg, newDpkgAnalyzer) } -type dpkgAnalyzer struct{} +type dpkgAnalyzer struct { + logger *log.Logger +} func newDpkgAnalyzer(_ analyzer.AnalyzerOptions) (analyzer.PostAnalyzer, error) { - return &dpkgAnalyzer{}, nil + return &dpkgAnalyzer{ + logger: log.WithPrefix("dpkg"), + }, nil } const ( @@ -58,7 +61,7 @@ func (a dpkgAnalyzer) PostAnalyze(_ context.Context, input analyzer.PostAnalysis // parse `available` file to get digest for packages digests, err := a.parseDpkgAvailable(input.FS) if err != nil { - log.Logger.Debugf("Unable to parse %q file: %s", availableFile, err) + a.logger.Debug("Unable to parse the available file", log.String("file", availableFile), log.Err(err)) } required := func(path string, d fs.DirEntry) bool { @@ -155,7 +158,7 @@ func (a dpkgAnalyzer) parseDpkgAvailable(fsys fs.FS) (map[string]digest.Digest, for scanner.Scan() { header, err := scanner.Header() if !errors.Is(err, io.EOF) && err != nil { - log.Logger.Warnw("Parse error", zap.String("file", availableFile), zap.Error(err)) + a.logger.Warn("Parse error", log.String("file", availableFile), log.Err(err)) continue } name, version, checksum := header.Get("Package"), header.Get("Version"), header.Get("SHA256") @@ -181,7 +184,7 @@ func (a dpkgAnalyzer) parseDpkgStatus(filePath string, r io.Reader, digests map[ for scanner.Scan() { header, err := scanner.Header() if !errors.Is(err, io.EOF) && err != nil { - log.Logger.Warnw("Parse error", zap.String("file", filePath), zap.Error(err)) + a.logger.Warn("Parse error", log.String("file", filePath), log.Err(err)) continue } @@ -251,8 +254,8 @@ func (a dpkgAnalyzer) parseDpkgPkg(header textproto.MIMEHeader) *types.Package { } if v, err := debVersion.NewVersion(pkg.Version); err != nil { - log.Logger.Warnw("Invalid version", zap.String("OS", "debian"), - zap.String("package", pkg.Name), zap.String("version", pkg.Version)) + a.logger.Warn("Invalid version", log.String("OS", "debian"), + log.String("package", pkg.Name), log.String("version", pkg.Version)) return nil } else { pkg.ID = a.pkgID(pkg.Name, pkg.Version) @@ -262,8 +265,8 @@ func (a dpkgAnalyzer) parseDpkgPkg(header textproto.MIMEHeader) *types.Package { } if v, err := debVersion.NewVersion(pkg.SrcVersion); err != nil { - log.Logger.Warnw("Invalid source version", zap.String("OS", "debian"), - zap.String("package", pkg.Name), zap.String("version", pkg.SrcVersion)) + a.logger.Warn("Invalid source version", log.String("OS", "debian"), + log.String("package", pkg.Name), log.String("version", pkg.SrcVersion)) return nil } else { pkg.SrcVersion = v.Version() diff --git a/pkg/fanal/analyzer/pkg/rpm/rpm.go b/pkg/fanal/analyzer/pkg/rpm/rpm.go index f3a52286e578..70d5b9dcd26a 100644 --- a/pkg/fanal/analyzer/pkg/rpm/rpm.go +++ b/pkg/fanal/analyzer/pkg/rpm/rpm.go @@ -16,13 +16,13 @@ import ( "github.com/aquasecurity/trivy/pkg/digest" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" 
- "github.com/aquasecurity/trivy/pkg/fanal/log" "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/fanal/utils" + "github.com/aquasecurity/trivy/pkg/log" ) func init() { - analyzer.RegisterAnalyzer(&rpmPkgAnalyzer{}) + analyzer.RegisterAnalyzer(newRPMPkgAnalyzer()) } const version = 3 @@ -63,12 +63,17 @@ var osVendors = []string{ type rpmPkgAnalyzer struct{} +func newRPMPkgAnalyzer() *rpmPkgAnalyzer { + return &rpmPkgAnalyzer{} +} + type RPMDB interface { ListPackages() ([]*rpmdb.PackageInfo, error) } -func (a rpmPkgAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) { - parsedPkgs, installedFiles, err := a.parsePkgInfo(input.Content) +func (a rpmPkgAnalyzer) Analyze(ctx context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) { + ctx = log.WithContextPrefix(ctx, "rpm") + parsedPkgs, installedFiles, err := a.parsePkgInfo(ctx, input.Content) if err != nil { return nil, xerrors.Errorf("failed to parse rpmdb: %w", err) } @@ -84,7 +89,7 @@ func (a rpmPkgAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisInput) }, nil } -func (a rpmPkgAnalyzer) parsePkgInfo(rc io.Reader) (types.Packages, []string, error) { +func (a rpmPkgAnalyzer) parsePkgInfo(ctx context.Context, rc io.Reader) (types.Packages, []string, error) { filePath, err := writeToTempFile(rc) if err != nil { return nil, nil, xerrors.Errorf("temp file error: %w", err) @@ -99,10 +104,10 @@ func (a rpmPkgAnalyzer) parsePkgInfo(rc io.Reader) (types.Packages, []string, er } defer db.Close() - return a.listPkgs(db) + return a.listPkgs(ctx, db) } -func (a rpmPkgAnalyzer) listPkgs(db RPMDB) (types.Packages, []string, error) { +func (a rpmPkgAnalyzer) listPkgs(ctx context.Context, db RPMDB) (types.Packages, []string, error) { // equivalent: // new version: rpm -qa --qf "%{NAME} %{EPOCHNUM} %{VERSION} %{RELEASE} %{SOURCERPM} %{ARCH}\n" // old version: rpm -qa --qf "%{NAME} %{EPOCH} %{VERSION} %{RELEASE} %{SOURCERPM} %{ARCH}\n" @@ -126,7 +131,7 @@ func (a rpmPkgAnalyzer) listPkgs(db RPMDB) (types.Packages, []string, error) { // source epoch is not included in SOURCERPM srcName, srcVer, srcRel, err = splitFileName(pkg.SourceRpm) if err != nil { - log.Logger.Debugf("Invalid Source RPM Found: %s", pkg.SourceRpm) + log.DebugContext(ctx, "Invalid Source RPM Found", log.String("sourcerpm", pkg.SourceRpm)) } } diff --git a/pkg/fanal/analyzer/pkg/rpm/rpm_test.go b/pkg/fanal/analyzer/pkg/rpm/rpm_test.go index 2ef2a0b46219..7c1ec8ca77c8 100644 --- a/pkg/fanal/analyzer/pkg/rpm/rpm_test.go +++ b/pkg/fanal/analyzer/pkg/rpm/rpm_test.go @@ -125,13 +125,20 @@ func Test_rpmPkgAnalyzer_listPkgs(t *testing.T) { mock: mock{ packages: []*rpmdb.PackageInfo{ { - Name: "glibc", - Version: "2.17", - Release: "307.el7.1", - Arch: "x86_64", - SourceRpm: "glibc-2.17-317.el7.src.rpm", - DirNames: []string{"/etc", "/lib64"}, - DirIndexes: []int32{0, 0, 1}, + Name: "glibc", + Version: "2.17", + Release: "307.el7.1", + Arch: "x86_64", + SourceRpm: "glibc-2.17-317.el7.src.rpm", + DirNames: []string{ + "/etc", + "/lib64", + }, + DirIndexes: []int32{ + 0, + 0, + 1, + }, BaseNames: []string{ "ld.so.conf", "rpc", @@ -182,7 +189,13 @@ func Test_rpmPkgAnalyzer_listPkgs(t *testing.T) { "/usr/lib/.build-id/aa/", "/usr/share/man/man1/", }, - DirIndexes: []int32{0, 1, 2, 3, 4}, + DirIndexes: []int32{ + 0, + 1, + 2, + 3, + 4, + }, BaseNames: []string{ "curl", ".build-id", @@ -259,8 +272,8 @@ func Test_rpmPkgAnalyzer_listPkgs(t *testing.T) { err: 
tt.mock.err, } - a := rpmPkgAnalyzer{} - gotPkgs, gotFiles, err := a.listPkgs(m) + a := newRPMPkgAnalyzer() + gotPkgs, gotFiles, err := a.listPkgs(context.Background(), m) if tt.wantErr != "" { assert.ErrorContains(t, err, tt.wantErr) return diff --git a/pkg/fanal/analyzer/secret/secret_test.go b/pkg/fanal/analyzer/secret/secret_test.go index 4f499c4d59c3..7cba1d137e8f 100644 --- a/pkg/fanal/analyzer/secret/secret_test.go +++ b/pkg/fanal/analyzer/secret/secret_test.go @@ -111,7 +111,10 @@ func TestSecretAnalyzer(t *testing.T) { Secrets: []types.Secret{ { FilePath: "testdata/secret.txt", - Findings: []types.SecretFinding{wantFinding1, wantFinding2}, + Findings: []types.SecretFinding{ + wantFinding1, + wantFinding2, + }, }, }, }, @@ -124,7 +127,10 @@ func TestSecretAnalyzer(t *testing.T) { Secrets: []types.Secret{ { FilePath: "/testdata/secret.txt", - Findings: []types.SecretFinding{wantFinding1, wantFinding2}, + Findings: []types.SecretFinding{ + wantFinding1, + wantFinding2, + }, }, }, }, @@ -151,7 +157,7 @@ func TestSecretAnalyzer(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - a := &secret.SecretAnalyzer{} + a := secret.SecretAnalyzer{} err := a.Init(analyzer.AnalyzerOptions{ SecretScannerOption: analyzer.SecretScannerOption{ConfigPath: tt.configPath}, }) @@ -161,7 +167,7 @@ func TestSecretAnalyzer(t *testing.T) { fi, err := content.Stat() require.NoError(t, err) - got, err := a.Analyze(context.TODO(), analyzer.AnalysisInput{ + got, err := a.Analyze(context.Background(), analyzer.AnalysisInput{ FilePath: tt.filePath, Dir: tt.dir, Content: content, diff --git a/pkg/fanal/applier/docker.go b/pkg/fanal/applier/docker.go index abcc1ce51958..0e9781edbca6 100644 --- a/pkg/fanal/applier/docker.go +++ b/pkg/fanal/applier/docker.go @@ -253,7 +253,7 @@ func ApplyLayers(layers []ftypes.BlobInfo) ftypes.ArtifactDetail { func newPURL(pkgType ftypes.TargetType, metadata types.Metadata, pkg ftypes.Package) *packageurl.PackageURL { p, err := purl.New(pkgType, metadata, pkg) if err != nil { - log.Logger.Errorf("Failed to create PackageURL: %s", err) + log.Error("Failed to create PackageURL", log.Err(err)) return nil } return p.Unwrap() diff --git a/pkg/fanal/artifact/image/image.go b/pkg/fanal/artifact/image/image.go index 782d13a86097..e22f8d7bb0a2 100644 --- a/pkg/fanal/artifact/image/image.go +++ b/pkg/fanal/artifact/image/image.go @@ -19,14 +19,15 @@ import ( "github.com/aquasecurity/trivy/pkg/fanal/cache" "github.com/aquasecurity/trivy/pkg/fanal/handler" "github.com/aquasecurity/trivy/pkg/fanal/image" - "github.com/aquasecurity/trivy/pkg/fanal/log" "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/fanal/walker" + "github.com/aquasecurity/trivy/pkg/log" "github.com/aquasecurity/trivy/pkg/parallel" "github.com/aquasecurity/trivy/pkg/semaphore" ) type Artifact struct { + logger *log.Logger image types.Image cache cache.ArtifactCache walker walker.LayerTar @@ -60,6 +61,7 @@ func NewArtifact(img types.Image, c cache.ArtifactCache, opt artifact.Option) (a } return Artifact{ + logger: log.WithPrefix("image"), image: img, cache: c, walker: walker.NewLayerTar(opt.SkipFiles, opt.SkipDirs), @@ -76,6 +78,7 @@ func (a Artifact) Inspect(ctx context.Context) (types.ArtifactReference, error) if err != nil { return types.ArtifactReference{}, xerrors.Errorf("unable to get the image ID: %w", err) } + a.logger.Debug("Detected image ID", log.String("image_id", imageID)) configFile, 
err := a.image.ConfigFile() if err != nil { @@ -83,10 +86,7 @@ func (a Artifact) Inspect(ctx context.Context) (types.ArtifactReference, error) } diffIDs := a.diffIDs(configFile) - - // Debug - log.Logger.Debugf("Image ID: %s", imageID) - log.Logger.Debugf("Diff IDs: %v", diffIDs) + a.logger.Debug("Detected diff ID", log.Any("diff_ids", diffIDs)) // Try retrieving a remote SBOM document if res, err := a.retrieveRemoteSBOM(ctx); err == nil { @@ -99,7 +99,7 @@ func (a Artifact) Inspect(ctx context.Context) (types.ArtifactReference, error) // Try to detect base layers. baseDiffIDs := a.guessBaseLayers(diffIDs, configFile) - log.Logger.Debugf("Base Layers: %v", baseDiffIDs) + a.logger.Debug("Detected base layers", log.Any("diff_ids", baseDiffIDs)) // Convert image ID and layer IDs to cache keys imageKey, layerKeys, err := a.calcCacheKeys(imageID, diffIDs) @@ -117,7 +117,7 @@ func (a Artifact) Inspect(ctx context.Context) (types.ArtifactReference, error) missingImageKey := imageKey if missingImage { - log.Logger.Debugf("Missing image ID in cache: %s", imageID) + a.logger.Debug("Missing image ID in cache", log.String("image_id", imageID)) } else { missingImageKey = "" } @@ -239,7 +239,7 @@ func (a Artifact) inspect(ctx context.Context, missingImage string, layerKeys, b } func (a Artifact) inspectLayer(ctx context.Context, layerInfo LayerInfo, disabled []analyzer.Type) (types.BlobInfo, error) { - log.Logger.Debugf("Missing diff ID in cache: %s", layerInfo.DiffID) + a.logger.Debug("Missing diff ID in cache", log.String("diff_id", layerInfo.DiffID)) layerDigest, rc, err := a.uncompressedLayer(layerInfo.DiffID) if err != nil { diff --git a/pkg/fanal/artifact/image/remote_sbom.go b/pkg/fanal/artifact/image/remote_sbom.go index f0c9ae26bfeb..9bb609e64c3b 100644 --- a/pkg/fanal/artifact/image/remote_sbom.go +++ b/pkg/fanal/artifact/image/remote_sbom.go @@ -15,8 +15,8 @@ import ( sbomatt "github.com/aquasecurity/trivy/pkg/attestation/sbom" "github.com/aquasecurity/trivy/pkg/fanal/artifact/sbom" - "github.com/aquasecurity/trivy/pkg/fanal/log" ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" + "github.com/aquasecurity/trivy/pkg/log" "github.com/aquasecurity/trivy/pkg/oci" "github.com/aquasecurity/trivy/pkg/remote" "github.com/aquasecurity/trivy/pkg/types" @@ -42,7 +42,7 @@ func (a Artifact) retrieveRemoteSBOM(ctx context.Context) (ftypes.ArtifactRefere ref, err := inspect(ctx) if errors.Is(err, errNoSBOMFound) { // Try the next SBOM source - log.Logger.Debugf("No SBOM found in the source: %s", sbomSource) + a.logger.Debug("No SBOM found in the source", log.String("source", sbomSource)) continue } else if err != nil { return ftypes.ArtifactReference{}, xerrors.Errorf("SBOM searching error: %w", err) @@ -74,7 +74,8 @@ func (a Artifact) inspectOCIReferrerSBOM(ctx context.Context) (ftypes.ArtifactRe } res, err := a.parseReferrer(ctx, digest.Context().String(), m) if err != nil { - log.Logger.Warnf("Error with SBOM via OCI referrers (%s): %s", m.Digest.String(), err) + a.logger.Warn("Error with SBOM via OCI referrers", + log.String("digest", m.Digest.String()), log.Err(err)) continue } return res, nil @@ -110,7 +111,7 @@ func (a Artifact) parseReferrer(ctx context.Context, repo string, desc v1.Descri } // Found SBOM - log.Logger.Infof("Found SBOM (%s) in the OCI referrers", res.Type) + a.logger.Info("Found SBOM in the OCI referrers", log.String("type", string(res.Type))) return res, nil } @@ -151,7 +152,8 @@ func (a Artifact) 
inspectRekorSBOMAttestation(ctx context.Context) (ftypes.Artif } // Found SBOM - log.Logger.Infof("Found SBOM (%s) in Rekor (%s)", res.Type, a.artifactOption.RekorURL) + a.logger.Info("Found SBOM in Rekor", log.String("type", string(res.Type)), + log.String("url", a.artifactOption.RekorURL)) return res, nil } diff --git a/pkg/fanal/artifact/image/remote_sbom_test.go b/pkg/fanal/artifact/image/remote_sbom_test.go index c9255c2057bd..ef777fe5c641 100644 --- a/pkg/fanal/artifact/image/remote_sbom_test.go +++ b/pkg/fanal/artifact/image/remote_sbom_test.go @@ -141,7 +141,7 @@ func TestArtifact_InspectRekorAttestation(t *testing.T) { }, } - require.NoError(t, log.InitLogger(false, true)) + log.InitLogger(false, true) for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { ts := rekortest.NewServer(t) diff --git a/pkg/fanal/artifact/local/fs.go b/pkg/fanal/artifact/local/fs.go index 8d7409cdfae3..ff72d01a72fd 100644 --- a/pkg/fanal/artifact/local/fs.go +++ b/pkg/fanal/artifact/local/fs.go @@ -61,7 +61,7 @@ func buildPathsToSkip(base string, paths []string) []string { var relativePaths []string absBase, err := filepath.Abs(base) if err != nil { - log.Logger.Warnf("Failed to get an absolute path of %s: %s", base, err) + log.Warn("Failed to get an absolute path", log.String("base", base), log.Err(err)) return nil } for _, path := range paths { @@ -84,12 +84,12 @@ func buildPathsToSkip(base string, paths []string) []string { absSkipPath, err := filepath.Abs(path) if err != nil { - log.Logger.Warnf("Failed to get an absolute path of %s: %s", base, err) + log.Warn("Failed to get an absolute path", log.String("base", base), log.Err(err)) continue } rel, err := filepath.Rel(absBase, absSkipPath) if err != nil { - log.Logger.Warnf("Failed to get a relative path from %s to %s: %s", base, path, err) + log.Warn("Failed to get a relative path", log.String("base", base), log.String("path", path), log.Err(err)) continue } diff --git a/pkg/fanal/artifact/sbom/sbom.go b/pkg/fanal/artifact/sbom/sbom.go index 2be0ce31e6a4..90eed8c89e1b 100644 --- a/pkg/fanal/artifact/sbom/sbom.go +++ b/pkg/fanal/artifact/sbom/sbom.go @@ -49,7 +49,7 @@ func (a Artifact) Inspect(_ context.Context) (types.ArtifactReference, error) { if err != nil { return types.ArtifactReference{}, xerrors.Errorf("failed to detect SBOM format: %w", err) } - log.Logger.Infof("Detected SBOM format: %s", format) + log.Info("Detected SBOM format", log.String("format", string(format))) bom, err := sbom.Decode(f, format) if err != nil { diff --git a/pkg/fanal/artifact/vm/ami.go b/pkg/fanal/artifact/vm/ami.go index 6ce2dbd4ef30..791c41d52896 100644 --- a/pkg/fanal/artifact/vm/ami.go +++ b/pkg/fanal/artifact/vm/ami.go @@ -41,7 +41,7 @@ func newAMI(imageID string, storage Storage, region, endpoint string) (*AMI, err if snapshotID == "" { continue } - log.Logger.Infof("Snapshot %s found", snapshotID) + log.WithPrefix("ami").Info("Snapshot found", log.String("snapshot_id", snapshotID)) ebs, err := newEBS(snapshotID, storage, region, endpoint) if err != nil { return nil, xerrors.Errorf("new EBS error: %w", err) } diff --git a/pkg/fanal/artifact/vm/ebs.go b/pkg/fanal/artifact/vm/ebs.go index 879d5e9b424c..d9881edf193a 100644 --- a/pkg/fanal/artifact/vm/ebs.go +++ b/pkg/fanal/artifact/vm/ebs.go @@ -21,6 +21,7 @@ const storageEBSCacheSize = 128 // EBS represents an artifact for AWS EBS snapshots type EBS struct { Storage + logger *log.Logger snapshotID string ebs ebsfile.EBSAPI } @@ -33,6 +34,7 @@ func newEBS(snapshotID string, vm Storage, region, endpoint string) (*EBS, error return
&EBS{ Storage: vm, + logger: log.WithPrefix("ebs"), snapshotID: snapshotID, ebs: ebs, }, nil @@ -107,7 +109,7 @@ func (a *EBS) calcCacheKey(key string) (string, error) { func (a *EBS) hasCache(cacheKey string) bool { _, missingCacheKeys, err := a.cache.MissingBlobs(cacheKey, []string{cacheKey}) if err != nil { - log.Logger.Debugf("Unable to query missing cache: %s", err) + a.logger.Debug("Unable to query missing cache", log.Err(err)) return false } @@ -116,6 +118,6 @@ func (a *EBS) hasCache(cacheKey string) bool { return true } - log.Logger.Debugf("Missing virtual machine cache: %s", cacheKey) + a.logger.Debug("Missing virtual machine cache", log.String("key", cacheKey)) return false } diff --git a/pkg/fanal/artifact/vm/file.go b/pkg/fanal/artifact/vm/file.go index cecddf57e472..58fd4d46c96a 100644 --- a/pkg/fanal/artifact/vm/file.go +++ b/pkg/fanal/artifact/vm/file.go @@ -49,8 +49,9 @@ func newFile(filePath string, storage Storage) (*ImageFile, error) { return nil, err } - log.Logger.Debugf("VM image not detected: %s", err) - log.Logger.Debugf("Assume raw image") + logger := log.WithPrefix("vm") + logger.Debug("VM image not detected", log.Err(err)) + logger.Debug("Assume raw image") fi, err := f.Stat() if err != nil { return nil, xerrors.Errorf("file stat error: %w", err) diff --git a/pkg/fanal/handler/unpackaged/unpackaged.go b/pkg/fanal/handler/unpackaged/unpackaged.go index 5f450c7923cb..119cae3e7dad 100644 --- a/pkg/fanal/handler/unpackaged/unpackaged.go +++ b/pkg/fanal/handler/unpackaged/unpackaged.go @@ -24,6 +24,7 @@ func init() { const version = 1 type unpackagedHook struct { + logger *log.Logger client sbomatt.Rekor } @@ -33,6 +34,7 @@ func NewUnpackagedHandler(opt artifact.Option) (handler.PostHandler, error) { return nil, xerrors.Errorf("rekor client error: %w", err) } return unpackagedHook{ + logger: log.WithPrefix("unpackaged"), client: c, }, nil } @@ -68,7 +70,7 @@ func (h unpackagedHook) Handle(ctx context.Context, res *analyzer.AnalysisResult } if len(bom.Applications) > 0 { - log.Logger.Infof("Found SBOM attestation in Rekor: %s", filePath) + h.logger.Info("Found SBOM attestation in Rekor", log.String("file_path", filePath)) // Take the first app since this SBOM should contain a single application. app := bom.Applications[0] app.FilePath = filePath // Use the original file path rather than the one in the SBOM. diff --git a/pkg/fanal/handler/unpackaged/unpackaged_test.go b/pkg/fanal/handler/unpackaged/unpackaged_test.go index b466aa83282a..685af042d131 100644 --- a/pkg/fanal/handler/unpackaged/unpackaged_test.go +++ b/pkg/fanal/handler/unpackaged/unpackaged_test.go @@ -74,7 +74,7 @@ func Test_unpackagedHook_Handle(t *testing.T) { }, } - require.NoError(t, log.InitLogger(false, true)) + log.InitLogger(false, true) for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { ts := rekortest.NewServer(t) diff --git a/pkg/fanal/image/daemon/image.go b/pkg/fanal/image/daemon/image.go index 23adf966f84b..5d80cb93eee4 100644 --- a/pkg/fanal/image/daemon/image.go +++ b/pkg/fanal/image/daemon/image.go @@ -132,7 +132,7 @@ func (img *image) ConfigFile() (*v1.ConfigFile, error) { } func (img *image) configFile() (*v1.ConfigFile, error) { - log.Logger.Debug("Saving the container image to a local file to obtain the image config...") + log.Debug("Saving the container image to a local file to obtain the image config...") // Need to fall back into expensive operations like "docker save" // because the config file cannot be generated properly from container engine API for some reason. 
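// --- Illustrative sketch (editor's addition, not part of the patch) ---------------------------
// The hunks above all apply the same migration: printf-style calls on the global zap
// log.Logger become slog-based pkg/log helpers — a prefixed *log.Logger held on the struct,
// structured attributes (log.String / log.Any / log.Err) instead of format verbs, and
// log.WithContextPrefix with the *Context variants where a context.Context is available.
// A minimal, self-contained sketch of those three forms, using only helpers that appear in
// this diff; the concrete messages and attribute values below are examples, not taken from
// the patch, and the default logger is assumed to be initialized elsewhere (e.g. via
// log.InitLogger, as the updated tests do).
package main

import (
	"context"
	"errors"

	"github.com/aquasecurity/trivy/pkg/log"
)

func main() {
	// 1. Prefixed logger held on a struct field (as in cargoAnalyzer, dpkgAnalyzer, Artifact).
	logger := log.WithPrefix("cargo")
	logger.Warn("Unable to parse Cargo.toml to identify direct dependencies",
		log.String("path", "app/Cargo.toml"), log.Err(errors.New("example error")))

	// 2. Package-level default logger with structured attributes instead of format verbs.
	log.Info("Detected SBOM format", log.String("format", "cyclonedx"))

	// 3. Prefix carried via context and consumed by the *Context variants.
	ctx := log.WithContextPrefix(context.Background(), "apk")
	log.DebugContext(ctx, "Unable to decode checksum line of apk package", log.String("line", "C:Q1..."))
}
// -----------------------------------------------------------------------------------------------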
diff --git a/pkg/fanal/image/image.go b/pkg/fanal/image/image.go index fc8e14ab37e1..2a8862a89ab5 100644 --- a/pkg/fanal/image/image.go +++ b/pkg/fanal/image/image.go @@ -42,7 +42,7 @@ func NewContainerImage(ctx context.Context, imageName string, opt types.ImageOpt for _, src := range opt.ImageSources { trySrc, ok := imageSourceFuncs[src] if !ok { - log.Logger.Warnf("Unknown image source: '%s'", src) + log.Warn("Unknown image source", log.String("source", string(src))) continue } diff --git a/pkg/fanal/image/registry/token.go b/pkg/fanal/image/registry/token.go index 1e51b0fd31a4..b959c6cc7bbc 100644 --- a/pkg/fanal/image/registry/token.go +++ b/pkg/fanal/image/registry/token.go @@ -8,8 +8,8 @@ import ( "github.com/aquasecurity/trivy/pkg/fanal/image/registry/azure" "github.com/aquasecurity/trivy/pkg/fanal/image/registry/ecr" "github.com/aquasecurity/trivy/pkg/fanal/image/registry/google" - "github.com/aquasecurity/trivy/pkg/fanal/log" "github.com/aquasecurity/trivy/pkg/fanal/types" + "github.com/aquasecurity/trivy/pkg/log" ) var ( @@ -41,7 +41,7 @@ func GetToken(ctx context.Context, domain string, opt types.RegistryOptions) (au username, password, err := registry.GetCredential(ctx) if err != nil { // only skip check registry if error occurred - log.Logger.Debug(err) + log.Debug("Credential error", log.Err(err)) break } return authn.Basic{ diff --git a/pkg/fanal/log/log.go b/pkg/fanal/log/log.go deleted file mode 100644 index 93344390d03e..000000000000 --- a/pkg/fanal/log/log.go +++ /dev/null @@ -1,17 +0,0 @@ -package log - -import ( - "go.uber.org/zap" -) - -var Logger *zap.SugaredLogger - -func init() { - if logger, err := zap.NewProduction(); err == nil { - Logger = logger.Sugar() - } -} - -func SetLogger(l *zap.SugaredLogger) { - Logger = l -} diff --git a/pkg/fanal/secret/scanner.go b/pkg/fanal/secret/scanner.go index c773b9707ae3..ef10ad45bb0c 100644 --- a/pkg/fanal/secret/scanner.go +++ b/pkg/fanal/secret/scanner.go @@ -14,13 +14,14 @@ import ( "golang.org/x/xerrors" "gopkg.in/yaml.v3" - "github.com/aquasecurity/trivy/pkg/fanal/log" "github.com/aquasecurity/trivy/pkg/fanal/types" + "github.com/aquasecurity/trivy/pkg/log" ) var lineSep = []byte{'\n'} type Scanner struct { + logger *log.Logger *Global } @@ -123,7 +124,8 @@ func (s *Scanner) FindSubmatchLocations(r Rule, content []byte) []Location { var submatchLocations []Location matchsIndices := r.Regex.FindAllSubmatchIndex(content, -1) for _, matchIndices := range matchsIndices { - matchLocation := Location{ // first two indexes are always start and end of the whole match + matchLocation := Location{ + // first two indexes are always start and end of the whole match Start: matchIndices[0], End: matchIndices[1], } @@ -151,7 +153,10 @@ func (r *Rule) getMatchSubgroupsLocations(matchLocs []int) []Location { if name == r.SecretGroupName { startLocIndex := 2 * i endLocIndex := startLocIndex + 1 - locations = append(locations, Location{Start: matchLocs[startLocIndex], End: matchLocs[endLocIndex]}) + locations = append(locations, Location{ + Start: matchLocs[startLocIndex], + End: matchLocs[endLocIndex], + }) } } return locations @@ -270,17 +275,18 @@ func ParseConfig(configPath string) (*Config, error) { return nil, nil } + logger := log.WithPrefix("secret").With("config_path", configPath) f, err := os.Open(configPath) if errors.Is(err, os.ErrNotExist) { // If the specified file doesn't exist, it just uses built-in rules and allow rules. 
- log.Logger.Debugf("No secret config detected: %s", configPath) + logger.Debug("No secret config detected") return nil, nil } else if err != nil { return nil, xerrors.Errorf("file open error %s: %w", configPath, err) } defer f.Close() - log.Logger.Infof("Loading %s for secret scanning...", configPath) + logger.Info("Loading the config file s for secret scanning...") var config Config if err = yaml.NewDecoder(f).Decode(&config); err != nil { @@ -291,12 +297,17 @@ func ParseConfig(configPath string) (*Config, error) { } func NewScanner(config *Config) Scanner { + logger := log.WithPrefix("secret") + // Use the default rules if config == nil { - return Scanner{Global: &Global{ - Rules: builtinRules, - AllowRules: builtinAllowRules, - }} + return Scanner{ + logger: logger, + Global: &Global{ + Rules: builtinRules, + AllowRules: builtinAllowRules, + }, + } } enabledRules := builtinRules @@ -321,11 +332,14 @@ func NewScanner(config *Config) Scanner { return !slices.Contains(config.DisableAllowRuleIDs, v.ID) }) - return Scanner{Global: &Global{ - Rules: rules, - AllowRules: allowRules, - ExcludeBlock: config.ExcludeBlock, - }} + return Scanner{ + logger: logger, + Global: &Global{ + Rules: rules, + AllowRules: allowRules, + ExcludeBlock: config.ExcludeBlock, + }, + } } type ScanArgs struct { @@ -339,9 +353,11 @@ type Match struct { } func (s *Scanner) Scan(args ScanArgs) types.Secret { + logger := s.logger.With("file_path", args.FilePath) + // Global allowed paths if s.AllowPath(args.FilePath) { - log.Logger.Debugf("Skipped secret scanning on %q matching allowed paths", args.FilePath) + logger.Debug("Skipped secret scanning matching allowed paths") return types.Secret{ FilePath: args.FilePath, } @@ -354,15 +370,16 @@ func (s *Scanner) Scan(args ScanArgs) types.Secret { var findings []types.SecretFinding globalExcludedBlocks := newBlocks(args.Content, s.ExcludeBlock.Regexes) for _, rule := range s.Rules { + ruleLogger := logger.With("rule_id", rule.ID) // Check if the file path should be scanned by this rule if !rule.MatchPath(args.FilePath) { - log.Logger.Debugf("Skipped secret scanning on %q as non-compliant to the rule %q", args.FilePath, rule.ID) + ruleLogger.Debug("Skipped secret scanning as non-compliant to the rule") continue } // Check if the file path should be allowed if rule.AllowPath(args.FilePath) { - log.Logger.Debugf("Skipped secret scanning on %q as allowed", args.FilePath) + ruleLogger.Debug("Skipped secret scanning as allowed") continue } diff --git a/pkg/fanal/secret/scanner_test.go b/pkg/fanal/secret/scanner_test.go index 2e43047f9d53..fe73270b9ae7 100644 --- a/pkg/fanal/secret/scanner_test.go +++ b/pkg/fanal/secret/scanner_test.go @@ -6,19 +6,15 @@ import ( "path/filepath" "testing" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "go.uber.org/zap" - "go.uber.org/zap/zapcore" - - "github.com/aquasecurity/trivy/pkg/fanal/log" "github.com/aquasecurity/trivy/pkg/fanal/secret" "github.com/aquasecurity/trivy/pkg/fanal/types" + "github.com/aquasecurity/trivy/pkg/log" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestMain(m *testing.M) { - logger, _ := zap.NewDevelopment(zap.IncreaseLevel(zapcore.FatalLevel)) - log.SetLogger(logger.Sugar()) + log.InitLogger(false, true) os.Exit(m.Run()) } @@ -675,7 +671,10 @@ func TestSecretScanner(t *testing.T) { inputFilePath: filepath.Join("testdata", "secret.txt"), want: types.Secret{ FilePath: 
filepath.Join("testdata", "secret.txt"), - Findings: []types.SecretFinding{wantFinding1, wantFinding2}, + Findings: []types.SecretFinding{ + wantFinding1, + wantFinding2, + }, }, }, { @@ -684,7 +683,11 @@ func TestSecretScanner(t *testing.T) { inputFilePath: filepath.Join("testdata", "aws-secrets.txt"), want: types.Secret{ FilePath: filepath.Join("testdata", "aws-secrets.txt"), - Findings: []types.SecretFinding{wantFinding5, wantFinding10, wantFinding9}, + Findings: []types.SecretFinding{ + wantFinding5, + wantFinding10, + wantFinding9, + }, }, }, { @@ -720,7 +723,10 @@ func TestSecretScanner(t *testing.T) { inputFilePath: filepath.Join("testdata", "docker-secrets.txt"), want: types.Secret{ FilePath: filepath.Join("testdata", "docker-secrets.txt"), - Findings: []types.SecretFinding{wantFindingDockerKey1, wantFindingDockerKey2}, + Findings: []types.SecretFinding{ + wantFindingDockerKey1, + wantFindingDockerKey2, + }, }, }, { @@ -738,7 +744,10 @@ func TestSecretScanner(t *testing.T) { inputFilePath: filepath.Join("testdata", "secret.txt"), want: types.Secret{ FilePath: filepath.Join("testdata", "secret.txt"), - Findings: []types.SecretFinding{wantFinding1, wantFinding2}, + Findings: []types.SecretFinding{ + wantFinding1, + wantFinding2, + }, }, }, { @@ -761,7 +770,10 @@ func TestSecretScanner(t *testing.T) { inputFilePath: filepath.Join("testdata", "secret.md"), want: types.Secret{ FilePath: filepath.Join("testdata", "secret.md"), - Findings: []types.SecretFinding{wantFinding1, wantFinding2}, + Findings: []types.SecretFinding{ + wantFinding1, + wantFinding2, + }, }, }, { @@ -770,7 +782,10 @@ func TestSecretScanner(t *testing.T) { inputFilePath: filepath.Join("testdata", "builtin-rule-secret.txt"), want: types.Secret{ FilePath: filepath.Join("testdata", "builtin-rule-secret.txt"), - Findings: []types.SecretFinding{wantFinding5a, wantFinding6}, + Findings: []types.SecretFinding{ + wantFinding5a, + wantFinding6, + }, }, }, { @@ -877,7 +892,10 @@ func TestSecretScanner(t *testing.T) { inputFilePath: filepath.Join("testdata", "secret.txt"), want: types.Secret{ FilePath: filepath.Join("testdata", "secret.txt"), - Findings: []types.SecretFinding{wantFinding3, wantFinding4}, + Findings: []types.SecretFinding{ + wantFinding3, + wantFinding4, + }, }, }, { diff --git a/pkg/fanal/walker/fs.go b/pkg/fanal/walker/fs.go index 5397c1bdfc10..88a623ed1792 100644 --- a/pkg/fanal/walker/fs.go +++ b/pkg/fanal/walker/fs.go @@ -88,7 +88,7 @@ func (w FS) walkFast(root string, walkFn fastWalkFunc) error { // Multiple goroutines stat the filesystem concurrently. The provided // walkFn must be safe for concurrent use. 
- log.Logger.Debugf("Walk the file tree rooted at '%s' in parallel", root) + log.Debug("Walking the file tree in parallel", log.String("root", root)) if err := swalker.Walk(root, walkFn, errorCallbackOption); err != nil { return xerrors.Errorf("walk error: %w", err) } @@ -96,7 +96,7 @@ func (w FS) walkFast(root string, walkFn fastWalkFunc) error { } func (w FS) walkSlow(root string, walkFn fastWalkFunc) error { - log.Logger.Debugf("Walk the file tree rooted at '%s' in series", root) + log.Debug("Walking the file tree in series", log.String("root", root)) err := filepath.WalkDir(root, func(path string, d fs.DirEntry, err error) error { if err != nil { return w.errCallback(path, err) diff --git a/pkg/fanal/walker/vm.go b/pkg/fanal/walker/vm.go index 8d13e0aee57c..42206fa5efcc 100644 --- a/pkg/fanal/walker/vm.go +++ b/pkg/fanal/walker/vm.go @@ -35,6 +35,7 @@ func AppendPermitDiskName(s ...string) { type VM struct { walker + logger *log.Logger threshold int64 analyzeFn WalkFunc } @@ -42,6 +43,7 @@ type VM struct { func NewVM(skipFiles, skipDirs []string) *VM { threshold := defaultSizeThreshold return &VM{ + logger: log.WithPrefix("vm"), walker: newWalker(skipFiles, skipDirs), threshold: threshold, } @@ -72,7 +74,7 @@ func (w *VM) Walk(vreader *io.SectionReader, root string, fn WalkFunc) error { // Walk each partition if err = w.diskWalk(root, partition); err != nil { - log.Logger.Warnf("Partition error: %s", err.Error()) + w.logger.Warn("Partition error", log.Err(err)) } } return nil @@ -80,7 +82,7 @@ func (w *VM) Walk(vreader *io.SectionReader, root string, fn WalkFunc) error { // Inject disk partitioning processes from externally with diskWalk. func (w *VM) diskWalk(root string, partition types.Partition) error { - log.Logger.Debugf("Found partition: %s", partition.Name()) + w.logger.Debug("Found partition", log.String("name", partition.Name())) sr := partition.GetSectionReader() @@ -89,7 +91,7 @@ func (w *VM) diskWalk(root string, partition types.Partition) error { if err != nil { return xerrors.Errorf("LVM detection error: %w", err) } else if foundLVM { - log.Logger.Errorf("LVM is not supported, skip %s.img", partition.Name()) + w.logger.Error("LVM is not supported, skipping", log.String("name", partition.Name()+".img")) return nil } diff --git a/pkg/fanal/walker/walk.go b/pkg/fanal/walker/walk.go index cebc86ee76d7..7d7d71e702f7 100644 --- a/pkg/fanal/walker/walk.go +++ b/pkg/fanal/walker/walk.go @@ -60,7 +60,7 @@ func (w *walker) shouldSkipFile(filePath string) bool { if err != nil { return false // return early if bad pattern } else if match { - log.Logger.Debugf("Skipping file: %s", filePath) + log.Debug("Skipping file", log.String("file_path", filePath)) return true } } @@ -81,7 +81,7 @@ func (w *walker) shouldSkipDir(dir string) bool { if match, err := doublestar.Match(pattern, dir); err != nil { return false // return early if bad pattern } else if match { - log.Logger.Debugf("Skipping directory: %s", dir) + log.Debug("Skipping directory", log.String("dir", dir)) return true } } diff --git a/pkg/flag/db_flags.go b/pkg/flag/db_flags.go index 58e7809a2152..fd426ae9ccbb 100644 --- a/pkg/flag/db_flags.go +++ b/pkg/flag/db_flags.go @@ -4,7 +4,6 @@ import ( "fmt" "github.com/google/go-containerregistry/pkg/name" - "go.uber.org/zap" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/db" @@ -145,7 +144,7 @@ func (f *DBFlagGroup) ToOptions() (DBOptions, error) { return DBOptions{}, xerrors.New("--skip-java-db-update and --download-java-db-only options can not be 
specified both") } if light { - log.Logger.Warn("'--light' option is deprecated and will be removed. See also: https://github.com/aquasecurity/trivy/discussions/1649") + log.Warn("'--light' option is deprecated and will be removed. See also: https://github.com/aquasecurity/trivy/discussions/1649") } var dbRepository, javaDBRepository name.Reference @@ -157,8 +156,8 @@ func (f *DBFlagGroup) ToOptions() (DBOptions, error) { // Add the schema version if the tag is not specified for backward compatibility. if t, ok := dbRepository.(name.Tag); ok && t.TagStr() == "" { dbRepository = t.Tag(fmt.Sprint(db.SchemaVersion)) - log.Logger.Infow("Adding schema version to the DB repository for backward compatibility", - zap.String("repository", dbRepository.String())) + log.Info("Adding schema version to the DB repository for backward compatibility", + log.String("repository", dbRepository.String())) } } @@ -169,8 +168,8 @@ func (f *DBFlagGroup) ToOptions() (DBOptions, error) { // Add the schema version if the tag is not specified for backward compatibility. if t, ok := javaDBRepository.(name.Tag); ok && t.TagStr() == "" { javaDBRepository = t.Tag(fmt.Sprint(javadb.SchemaVersion)) - log.Logger.Infow("Adding schema version to the Java DB repository for backward compatibility", - zap.String("repository", javaDBRepository.String())) + log.Info("Adding schema version to the Java DB repository for backward compatibility", + log.String("repository", javaDBRepository.String())) } } diff --git a/pkg/flag/db_flags_test.go b/pkg/flag/db_flags_test.go index b53f29135d74..5d121033fd3b 100644 --- a/pkg/flag/db_flags_test.go +++ b/pkg/flag/db_flags_test.go @@ -1,17 +1,14 @@ package flag_test import ( + "github.com/aquasecurity/trivy/pkg/log" "github.com/google/go-containerregistry/pkg/name" "testing" + "github.com/aquasecurity/trivy/pkg/flag" "github.com/spf13/viper" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "go.uber.org/zap" - "go.uber.org/zap/zaptest/observer" - - "github.com/aquasecurity/trivy/pkg/flag" - "github.com/aquasecurity/trivy/pkg/log" ) func TestDBFlagGroup_ToOptions(t *testing.T) { @@ -86,9 +83,7 @@ func TestDBFlagGroup_ToOptions(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - level := zap.WarnLevel - core, obs := observer.New(level) - log.Logger = zap.New(core).Sugar() + out := newLogger(log.LevelWarn) viper.Set(flag.SkipDBUpdateFlag.ConfigName, tt.fields.SkipDBUpdate) viper.Set(flag.DownloadDBOnlyFlag.ConfigName, tt.fields.DownloadDBOnly) @@ -109,11 +104,7 @@ func TestDBFlagGroup_ToOptions(t *testing.T) { assert.EqualExportedValues(t, tt.want, got) // Assert log messages - var gotMessages []string - for _, entry := range obs.AllUntimed() { - gotMessages = append(gotMessages, entry.Message) - } - assert.Equal(t, tt.wantLogs, gotMessages, tt.name) + assert.Equal(t, tt.wantLogs, out.Messages(), tt.name) }) } } diff --git a/pkg/flag/options.go b/pkg/flag/options.go index cfdce46e2240..9448f7c0f2fa 100644 --- a/pkg/flag/options.go +++ b/pkg/flag/options.go @@ -121,7 +121,7 @@ func (f *Flag[T]) parse() any { } v = viper.Get(alias.ConfigName) if v != nil { - log.Logger.Warnf("'%s' in config file is deprecated. Use '%s' instead.", alias.ConfigName, f.ConfigName) + log.Warnf("'%s' in config file is deprecated. 
Use '%s' instead.", alias.ConfigName, f.ConfigName) return v } } @@ -276,7 +276,7 @@ func (f *Flag[T]) BindEnv() error { } if alias.Deprecated { if _, ok := os.LookupEnv(envAlias); ok { - log.Logger.Warnf("'%s' is deprecated. Use '%s' instead.", envAlias, envName) + log.Warnf("'%s' is deprecated. Use '%s' instead.", envAlias, envName) } } } @@ -355,18 +355,18 @@ type Options struct { // Align takes consistency of options func (o *Options) Align() { if o.Format == types.FormatSPDX || o.Format == types.FormatSPDXJSON { - log.Logger.Info(`"--format spdx" and "--format spdx-json" disable security scanning`) + log.Info(`"--format spdx" and "--format spdx-json" disable security scanning`) o.Scanners = nil } // Vulnerability scanning is disabled by default for CycloneDX. if o.Format == types.FormatCycloneDX && !viper.IsSet(ScannersFlag.ConfigName) && len(o.K8sOptions.Components) == 0 { // remove K8sOptions.Components validation check when vuln scan is supported for k8s report with cycloneDX - log.Logger.Info(`"--format cyclonedx" disables security scanning. Specify "--scanners vuln" explicitly if you want to include vulnerabilities in the CycloneDX report.`) + log.Info(`"--format cyclonedx" disables security scanning. Specify "--scanners vuln" explicitly if you want to include vulnerabilities in the CycloneDX report.`) o.Scanners = nil } if o.Format == types.FormatCycloneDX && len(o.K8sOptions.Components) > 0 { - log.Logger.Info(`"k8s with --format cyclonedx" disable security scanning`) + log.Info(`"k8s with --format cyclonedx" disable security scanning`) o.Scanners = nil } } @@ -736,7 +736,7 @@ func (a flagAliases) NormalizeFunc() func(*pflag.FlagSet, string) pflag.Normaliz if alias.deprecated { // NormalizeFunc is called several times alias.once.Do(func() { - log.Logger.Warnf("'--%s' is deprecated. Use '--%s' instead.", name, alias.formalName) + log.Warnf("'--%s' is deprecated. 
Use '--%s' instead.", name, alias.formalName) }) } name = alias.formalName diff --git a/pkg/flag/options_test.go b/pkg/flag/options_test.go index 092e09d7b411..7827b4303b7d 100644 --- a/pkg/flag/options_test.go +++ b/pkg/flag/options_test.go @@ -1,11 +1,15 @@ package flag_test import ( + "bytes" "github.com/aquasecurity/trivy/pkg/flag" + "github.com/aquasecurity/trivy/pkg/log" "github.com/aquasecurity/trivy/pkg/types" "github.com/samber/lo" "github.com/spf13/cobra" "github.com/stretchr/testify/require" + "log/slog" + "strings" "testing" "github.com/spf13/viper" @@ -125,3 +129,26 @@ func setSliceValue[T any](key string, value []T) { viper.Set(key, value) } } + +type Output struct { + b *bytes.Buffer +} + +func (o Output) Messages() []string { + var messages []string + for _, line := range strings.Split(o.b.String(), "\n") { + if line == "" { + continue + } + ss := strings.Split(line, "\t") + messages = append(messages, strings.Join(ss[2:], "\t")) + } + return messages +} + +func newLogger(level slog.Level) Output { + out := bytes.NewBuffer(nil) + logger := log.New(log.NewHandler(out, &log.Options{Level: level})) + log.SetDefault(logger) + return Output{b: out} +} diff --git a/pkg/flag/remote_flags.go b/pkg/flag/remote_flags.go index 9277f2db908f..2348ef649e66 100644 --- a/pkg/flag/remote_flags.go +++ b/pkg/flag/remote_flags.go @@ -110,16 +110,16 @@ func (f *RemoteFlagGroup) ToOptions() (RemoteOptions, error) { if serverAddr == "" && listen == "" { switch { case len(customHeaders) > 0: - log.Logger.Warn(`"--custom-header" can be used only with "--server"`) + log.Warn(`"--custom-header" can be used only with "--server"`) case token != "": - log.Logger.Warn(`"--token" can be used only with "--server"`) + log.Warn(`"--token" can be used only with "--server"`) case tokenHeader != "" && tokenHeader != DefaultTokenHeader: - log.Logger.Warn(`"--token-header" can be used only with "--server"`) + log.Warn(`"--token-header" can be used only with "--server"`) } } if token == "" && tokenHeader != DefaultTokenHeader { - log.Logger.Warn(`"--token-header" should be used with "--token"`) + log.Warn(`"--token-header" should be used with "--token"`) } if token != "" && tokenHeader != "" { diff --git a/pkg/flag/remote_flags_test.go b/pkg/flag/remote_flags_test.go index 4500b0bb5ca9..d6a7a95387db 100644 --- a/pkg/flag/remote_flags_test.go +++ b/pkg/flag/remote_flags_test.go @@ -1,17 +1,14 @@ package flag_test import ( + "github.com/aquasecurity/trivy/pkg/log" "github.com/stretchr/testify/require" "net/http" "testing" + "github.com/aquasecurity/trivy/pkg/flag" "github.com/spf13/viper" "github.com/stretchr/testify/assert" - "go.uber.org/zap" - "go.uber.org/zap/zaptest/observer" - - "github.com/aquasecurity/trivy/pkg/flag" - "github.com/aquasecurity/trivy/pkg/log" ) func TestRemoteFlagGroup_ToOptions(t *testing.T) { @@ -98,9 +95,7 @@ func TestRemoteFlagGroup_ToOptions(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - level := zap.WarnLevel - core, obs := observer.New(level) - log.Logger = zap.New(core).Sugar() + out := newLogger(log.LevelWarn) viper.Set(flag.ServerAddrFlag.ConfigName, tt.fields.Server) viper.Set(flag.ServerCustomHeadersFlag.ConfigName, tt.fields.CustomHeaders) @@ -119,11 +114,7 @@ func TestRemoteFlagGroup_ToOptions(t *testing.T) { assert.Equalf(t, tt.want, got, "ToOptions()") // Assert log messages - var gotMessages []string - for _, entry := range 
obs.AllUntimed() { - gotMessages = append(gotMessages, entry.Message) - } - assert.Equal(t, tt.wantLogs, gotMessages, tt.name) + assert.Equal(t, tt.wantLogs, out.Messages(), tt.name) }) } } diff --git a/pkg/flag/report_flags.go b/pkg/flag/report_flags.go index 94b8c2ff689d..c079e6bb256a 100644 --- a/pkg/flag/report_flags.go +++ b/pkg/flag/report_flags.go @@ -198,29 +198,29 @@ func (f *ReportFlagGroup) ToOptions() (ReportOptions, error) { if template != "" { if format == "" { - log.Logger.Warn("'--template' is ignored because '--format template' is not specified. Use '--template' option with '--format template' option.") + log.Warn("'--template' is ignored because '--format template' is not specified. Use '--template' option with '--format template' option.") } else if format != "template" { - log.Logger.Warnf("'--template' is ignored because '--format %s' is specified. Use '--template' option with '--format template' option.", format) + log.Warnf("'--template' is ignored because '--format %s' is specified. Use '--template' option with '--format template' option.", format) } } else { if format == types.FormatTemplate { - log.Logger.Warn("'--format template' is ignored because '--template' is not specified. Specify '--template' option when you use '--format template'.") + log.Warn("'--format template' is ignored because '--template' is not specified. Specify '--template' option when you use '--format template'.") } } // "--list-all-pkgs" option is unavailable with "--format table". // If user specifies "--list-all-pkgs" with "--format table", we should warn it. if listAllPkgs && format == types.FormatTable { - log.Logger.Warn(`"--list-all-pkgs" cannot be used with "--format table". Try "--format json" or other formats.`) + log.Warn(`"--list-all-pkgs" cannot be used with "--format table". Try "--format json" or other formats.`) } // "--dependency-tree" option is available only with "--format table". if dependencyTree { - log.Logger.Infof(`"--dependency-tree" only shows the dependents of vulnerable packages. ` + + log.Info(`"--dependency-tree" only shows the dependents of vulnerable packages. ` + `Note that it is the reverse of the usual dependency tree, which shows the packages that depend on the vulnerable package. ` + `It supports limited package managers. 
Please see the document for the detail.`) if format != types.FormatTable { - log.Logger.Warn(`"--dependency-tree" can be used only with "--format table".`) + log.Warn(`"--dependency-tree" can be used only with "--format table".`) } } @@ -275,16 +275,16 @@ func loadComplianceTypes(compliance string) (spec.ComplianceSpec, error) { func (f *ReportFlagGroup) forceListAllPkgs(format types.Format, listAllPkgs, dependencyTree bool) bool { if slices.Contains(types.SupportedSBOMFormats, format) && !listAllPkgs { - log.Logger.Debugf("%q automatically enables '--list-all-pkgs'.", types.SupportedSBOMFormats) + log.Debugf("%q automatically enables '--list-all-pkgs'.", types.SupportedSBOMFormats) return true } // We need this flag to insert dependency locations into Sarif('Package' struct contains 'Locations') if format == types.FormatSarif && !listAllPkgs { - log.Logger.Debugf("Sarif format automatically enables '--list-all-pkgs' to get locations") + log.Debug("Sarif format automatically enables '--list-all-pkgs' to get locations") return true } if dependencyTree && !listAllPkgs { - log.Logger.Debugf("'--dependency-tree' enables '--list-all-pkgs'.") + log.Debug("'--dependency-tree' enables '--list-all-pkgs'.") return true } return false @@ -300,6 +300,6 @@ func toSeverity(severity []string) []dbTypes.Severity { sev, _ := dbTypes.NewSeverity(s) return sev }) - log.Logger.Debugf("Severities: %q", severities) + log.Debug("Parsed severities", log.Any("severities", severities)) return severities } diff --git a/pkg/flag/report_flags_test.go b/pkg/flag/report_flags_test.go index 1d230398d5c7..37ba3c81b84f 100644 --- a/pkg/flag/report_flags_test.go +++ b/pkg/flag/report_flags_test.go @@ -1,19 +1,16 @@ package flag_test import ( + "github.com/aquasecurity/trivy/pkg/log" "testing" - "github.com/spf13/viper" - "github.com/stretchr/testify/assert" - "go.uber.org/zap" - "go.uber.org/zap/zaptest/observer" - dbTypes "github.com/aquasecurity/trivy-db/pkg/types" "github.com/aquasecurity/trivy/pkg/compliance/spec" "github.com/aquasecurity/trivy/pkg/flag" iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" - "github.com/aquasecurity/trivy/pkg/log" "github.com/aquasecurity/trivy/pkg/types" + "github.com/spf13/viper" + "github.com/stretchr/testify/assert" ) func TestReportFlagGroup_ToOptions(t *testing.T) { @@ -67,7 +64,7 @@ func TestReportFlagGroup_ToOptions(t *testing.T) { }, wantLogs: []string{ `["cyclonedx" "spdx" "spdx-json" "github"] automatically enables '--list-all-pkgs'.`, - `Severities: ["CRITICAL"]`, + `Parsed severities severities=[CRITICAL]`, }, want: flag.ReportOptions{ Severities: []dbTypes.Severity{ @@ -187,12 +184,11 @@ func TestReportFlagGroup_ToOptions(t *testing.T) { t.Run(tt.name, func(t *testing.T) { t.Cleanup(viper.Reset) - level := zap.WarnLevel + level := log.LevelWarn if tt.fields.debug { - level = zap.DebugLevel + level = log.LevelDebug } - core, obs := observer.New(level) - log.Logger = zap.New(core).Sugar() + out := newLogger(level) setValue(flag.FormatFlag.ConfigName, string(tt.fields.format)) setValue(flag.TemplateFlag.ConfigName, tt.fields.template) @@ -229,11 +225,7 @@ func TestReportFlagGroup_ToOptions(t *testing.T) { assert.Equalf(t, tt.want, got, "ToOptions()") // Assert log messages - var gotMessages []string - for _, entry := range obs.AllUntimed() { - gotMessages = append(gotMessages, entry.Message) - } - assert.Equal(t, tt.wantLogs, gotMessages, tt.name) + assert.Equal(t, tt.wantLogs, 
out.Messages(), tt.name) }) } } diff --git a/pkg/flag/sbom_flags.go b/pkg/flag/sbom_flags.go index f5ab1aff3189..388911abd83e 100644 --- a/pkg/flag/sbom_flags.go +++ b/pkg/flag/sbom_flags.go @@ -56,7 +56,7 @@ func (f *SBOMFlagGroup) ToOptions() (SBOMOptions, error) { sbomFormat := f.SBOMFormat.Value() if artifactType != "" || sbomFormat != "" { - log.Logger.Error("'trivy sbom' is now for scanning SBOM. " + + log.Error("'trivy sbom' is now for scanning SBOM. " + "See https://github.com/aquasecurity/trivy/discussions/2407 for the detail") return SBOMOptions{}, xerrors.New("'--artifact-type' and '--sbom-format' are no longer available") } diff --git a/pkg/flag/scan_flags.go b/pkg/flag/scan_flags.go index aba3961b0243..e2128816e849 100644 --- a/pkg/flag/scan_flags.go +++ b/pkg/flag/scan_flags.go @@ -48,7 +48,7 @@ var ( case "misconf", "misconfiguration": return string(types.MisconfigScanner) case "config": - log.Logger.Warn("'--scanners config' is deprecated. Use '--scanners misconfig' instead. See https://github.com/aquasecurity/trivy/discussions/5586 for the detail.") + log.Warn("'--scanners config' is deprecated. Use '--scanners misconfig' instead. See https://github.com/aquasecurity/trivy/discussions/5586 for the detail.") return string(types.MisconfigScanner) } return s @@ -175,7 +175,7 @@ func (f *ScanFlagGroup) ToOptions(args []string) (ScanOptions, error) { parallel := f.Parallel.Value() if f.Parallel != nil && parallel == 0 { - log.Logger.Infof("Set '--parallel' to the number of CPUs (%d)", runtime.NumCPU()) + log.Info("Set '--parallel' to the number of CPUs", log.Int("parallel", runtime.NumCPU())) parallel = runtime.NumCPU() } diff --git a/pkg/flag/vulnerability_flags.go b/pkg/flag/vulnerability_flags.go index 3989fbfa1c51..f0db9e7b70c3 100644 --- a/pkg/flag/vulnerability_flags.go +++ b/pkg/flag/vulnerability_flags.go @@ -89,7 +89,7 @@ func (f *VulnerabilityFlagGroup) ToOptions() (VulnerabilityOptions, error) { switch { case ignoreUnfixed && len(ignoreStatuses) > 0: - log.Logger.Warn("'--ignore-unfixed' is ignored because '--ignore-status' is specified") + log.Warn("'--ignore-unfixed' is ignored because '--ignore-status' is specified") case ignoreUnfixed: // '--ignore-unfixed' is a shorthand of '--ignore-status'. 
ignoreStatuses = lo.FilterMap(dbTypes.Statuses, func(s string, _ int) (dbTypes.Status, bool) { @@ -102,7 +102,7 @@ func (f *VulnerabilityFlagGroup) ToOptions() (VulnerabilityOptions, error) { case len(ignoreStatuses) == 0: ignoreStatuses = nil } - log.Logger.Debugw("Ignore statuses", "statuses", ignoreStatuses) + log.Debug("Ignore statuses", log.Any("statuses", ignoreStatuses)) return VulnerabilityOptions{ VulnType: f.VulnType.Value(), diff --git a/pkg/flag/vulnerability_flags_test.go b/pkg/flag/vulnerability_flags_test.go index 02ee3c8d9605..4f4490753aff 100644 --- a/pkg/flag/vulnerability_flags_test.go +++ b/pkg/flag/vulnerability_flags_test.go @@ -1,17 +1,14 @@ package flag_test import ( + "github.com/aquasecurity/trivy/pkg/log" "github.com/stretchr/testify/require" "testing" - "github.com/spf13/viper" - "github.com/stretchr/testify/assert" - "go.uber.org/zap" - "go.uber.org/zap/zaptest/observer" - "github.com/aquasecurity/trivy/pkg/flag" - "github.com/aquasecurity/trivy/pkg/log" "github.com/aquasecurity/trivy/pkg/types" + "github.com/spf13/viper" + "github.com/stretchr/testify/assert" ) func TestVulnerabilityFlagGroup_ToOptions(t *testing.T) { @@ -49,10 +46,7 @@ func TestVulnerabilityFlagGroup_ToOptions(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - level := zap.WarnLevel - - core, obs := observer.New(level) - log.Logger = zap.New(core).Sugar() + out := newLogger(log.LevelWarn) viper.Set(flag.VulnTypeFlag.ConfigName, tt.fields.vulnType) @@ -66,11 +60,7 @@ func TestVulnerabilityFlagGroup_ToOptions(t *testing.T) { assert.Equalf(t, tt.want, got, "ToOptions()") // Assert log messages - var gotMessages []string - for _, entry := range obs.AllUntimed() { - gotMessages = append(gotMessages, entry.Message) - } - assert.Equal(t, tt.wantLogs, gotMessages, tt.name) + assert.Equal(t, tt.wantLogs, out.Messages(), tt.name) }) } diff --git a/pkg/iac/ignore/parse.go b/pkg/iac/ignore/parse.go index 075f1f621203..bb2f13603263 100644 --- a/pkg/iac/ignore/parse.go +++ b/pkg/iac/ignore/parse.go @@ -50,7 +50,7 @@ func parseLine(line string, rng types.Range, parsers []RuleSectionParser) []Rule rule, err := parseComment(section, rng, parsers) if err != nil { - log.Logger.Debugf("Failed to parse rule at %s: %s", rng.String(), err.Error()) + log.Debug("Failed to parse rule", log.String("range", rng.String()), log.Err(err)) continue } rules = append(rules, rule) @@ -60,7 +60,10 @@ func parseLine(line string, rng types.Range, parsers []RuleSectionParser) []Rule } func hasIgnoreRulePrefix(s string) (string, bool) { - for _, prefix := range []string{"tfsec:", "trivy:"} { + for _, prefix := range []string{ + "tfsec:", + "trivy:", + } { if after, found := strings.CutPrefix(s, prefix); found { return after, true } @@ -153,10 +156,10 @@ func (s *expiryDateParser) Key() string { func (s *expiryDateParser) Parse(str string) bool { parsed, err := time.Parse("2006-01-02", str) if err != nil { - log.Logger.Debugf("Incorrect time to ignore is specified: %s", str) + log.Debug("Incorrect time to ignore is specified", log.String("time", str)) parsed = time.Time{} } else if time.Now().After(parsed) { - log.Logger.Debug("Ignore rule time has expired for location: %s", s.rng.String()) + log.Debug("Ignore rule time has expired for location", log.String("range", s.rng.String())) } s.expiry = parsed diff --git a/pkg/javadb/client.go b/pkg/javadb/client.go index 86194b263569..408456e16500 100644 --- a/pkg/javadb/client.go +++ 
b/pkg/javadb/client.go @@ -48,15 +48,15 @@ func (u *Updater) Update() error { if !errors.Is(err, os.ErrNotExist) { return xerrors.Errorf("Java DB metadata error: %w", err) } else if u.skip { - log.Logger.Error("The first run cannot skip downloading Java DB") + log.Error("The first run cannot skip downloading Java DB") return xerrors.New("'--skip-java-db-update' cannot be specified on the first run") } } if (meta.Version != SchemaVersion || meta.NextUpdate.Before(time.Now().UTC())) && !u.skip { // Download DB - log.Logger.Infof("Java DB Repository: %s", u.repo) - log.Logger.Info("Downloading the Java DB...") + log.Info("Java DB Repository", log.Any("repository", u.repo)) + log.Info("Downloading the Java DB...") // TODO: support remote options var a *oci.Artifact @@ -78,7 +78,7 @@ func (u *Updater) Update() error { if err = metac.Update(meta); err != nil { return xerrors.Errorf("Java DB metadata update error: %w", err) } - log.Logger.Info("The Java DB is cached for 3 days. If you want to update the database more frequently, " + + log.Info("The Java DB is cached for 3 days. If you want to update the database more frequently, " + "the '--reset' flag clears the DB cache.") } diff --git a/pkg/k8s/commands/cluster.go b/pkg/k8s/commands/cluster.go index bf28f26f5d7f..b4ad9be78347 100644 --- a/pkg/k8s/commands/cluster.go +++ b/pkg/k8s/commands/cluster.go @@ -3,6 +3,7 @@ package commands import ( "context" + "go.uber.org/zap" "golang.org/x/exp/slices" "golang.org/x/xerrors" @@ -10,12 +11,14 @@ import ( "github.com/aquasecurity/trivy-kubernetes/pkg/k8s" "github.com/aquasecurity/trivy-kubernetes/pkg/trivyk8s" "github.com/aquasecurity/trivy/pkg/flag" - "github.com/aquasecurity/trivy/pkg/log" "github.com/aquasecurity/trivy/pkg/types" ) // clusterRun runs scan on kubernetes cluster func clusterRun(ctx context.Context, opts flag.Options, cluster k8s.Cluster) error { + // TODO: replace with log.Logger + logger, _ := zap.NewProduction() + if err := validateReportArguments(opts); err != nil { return err } @@ -23,13 +26,13 @@ func clusterRun(ctx context.Context, opts flag.Options, cluster k8s.Cluster) err var err error switch opts.Format { case types.FormatCycloneDX: - artifacts, err = trivyk8s.New(cluster, log.Logger).ListClusterBomInfo(ctx) + artifacts, err = trivyk8s.New(cluster, logger.Sugar()).ListClusterBomInfo(ctx) if err != nil { return xerrors.Errorf("get k8s artifacts with node info error: %w", err) } case types.FormatJSON, types.FormatTable: if opts.Scanners.AnyEnabled(types.MisconfigScanner) && slices.Contains(opts.Components, "infra") { - artifacts, err = trivyk8s.New(cluster, log.Logger, trivyk8s.WithExcludeOwned(opts.ExcludeOwned)).ListArtifactAndNodeInfo(ctx, + artifacts, err = trivyk8s.New(cluster, logger.Sugar(), trivyk8s.WithExcludeOwned(opts.ExcludeOwned)).ListArtifactAndNodeInfo(ctx, trivyk8s.WithScanJobNamespace(opts.NodeCollectorNamespace), trivyk8s.WithIgnoreLabels(opts.ExcludeNodes), trivyk8s.WithScanJobImageRef(opts.NodeCollectorImageRef), @@ -38,7 +41,7 @@ func clusterRun(ctx context.Context, opts flag.Options, cluster k8s.Cluster) err return xerrors.Errorf("get k8s artifacts with node info error: %w", err) } } else { - artifacts, err = trivyk8s.New(cluster, log.Logger).ListArtifacts(ctx) + artifacts, err = trivyk8s.New(cluster, logger.Sugar()).ListArtifacts(ctx) if err != nil { return xerrors.Errorf("get k8s artifacts error: %w", err) } diff --git a/pkg/k8s/commands/namespace.go b/pkg/k8s/commands/namespace.go index a748bf38dd9d..6d828d4efdf9 
100644 --- a/pkg/k8s/commands/namespace.go +++ b/pkg/k8s/commands/namespace.go @@ -3,24 +3,27 @@ package commands import ( "context" + "go.uber.org/zap" "golang.org/x/xerrors" "github.com/aquasecurity/trivy-kubernetes/pkg/k8s" "github.com/aquasecurity/trivy-kubernetes/pkg/trivyk8s" "github.com/aquasecurity/trivy/pkg/flag" - "github.com/aquasecurity/trivy/pkg/log" ) // namespaceRun runs scan on kubernetes cluster func namespaceRun(ctx context.Context, opts flag.Options, cluster k8s.Cluster) error { + // TODO: replace with slog.Logger + logger, _ := zap.NewProduction() + if err := validateReportArguments(opts); err != nil { return err } var trivyk trivyk8s.TrivyK8S if opts.AllNamespaces { - trivyk = trivyk8s.New(cluster, log.Logger).AllNamespaces() + trivyk = trivyk8s.New(cluster, logger.Sugar()).AllNamespaces() } else { - trivyk = trivyk8s.New(cluster, log.Logger).Namespace(getNamespace(opts, cluster.GetCurrentNamespace())) + trivyk = trivyk8s.New(cluster, logger.Sugar()).Namespace(getNamespace(opts, cluster.GetCurrentNamespace())) } artifacts, err := trivyk.ListArtifacts(ctx) diff --git a/pkg/k8s/commands/resource.go b/pkg/k8s/commands/resource.go index 10557e5a62f0..1662fe25d4d8 100644 --- a/pkg/k8s/commands/resource.go +++ b/pkg/k8s/commands/resource.go @@ -4,13 +4,13 @@ import ( "context" "strings" + "go.uber.org/zap" "golang.org/x/xerrors" "github.com/aquasecurity/trivy-kubernetes/pkg/artifacts" "github.com/aquasecurity/trivy-kubernetes/pkg/k8s" "github.com/aquasecurity/trivy-kubernetes/pkg/trivyk8s" "github.com/aquasecurity/trivy/pkg/flag" - "github.com/aquasecurity/trivy/pkg/log" ) // resourceRun runs scan on kubernetes cluster @@ -24,7 +24,9 @@ func resourceRun(ctx context.Context, args []string, opts flag.Options, cluster var trivyk trivyk8s.TrivyK8S - trivyk = trivyk8s.New(cluster, log.Logger, trivyk8s.WithExcludeOwned(opts.ExcludeOwned)) + // TODO: replace with slog.Logger + logger, _ := zap.NewProduction() + trivyk = trivyk8s.New(cluster, logger.Sugar(), trivyk8s.WithExcludeOwned(opts.ExcludeOwned)) if opts.AllNamespaces { trivyk = trivyk.AllNamespaces() diff --git a/pkg/k8s/commands/run.go b/pkg/k8s/commands/run.go index e9e3510f6bce..567c63fa461e 100644 --- a/pkg/k8s/commands/run.go +++ b/pkg/k8s/commands/run.go @@ -27,6 +27,11 @@ const ( // Run runs a k8s scan func Run(ctx context.Context, args []string, opts flag.Options) error { + ctx, cancel := context.WithTimeout(ctx, opts.Timeout) + defer cancel() + + ctx = log.WithContextPrefix(ctx, "k8s") + cluster, err := k8s.GetCluster( k8s.WithContext(opts.K8sOptions.ClusterContext), k8s.WithKubeConfig(opts.K8sOptions.KubeConfig), @@ -36,12 +41,10 @@ func Run(ctx context.Context, args []string, opts flag.Options) error { if err != nil { return xerrors.Errorf("failed getting k8s cluster: %w", err) } - ctx, cancel := context.WithTimeout(ctx, opts.Timeout) - defer cancel() defer func() { if errors.Is(err, context.DeadlineExceeded) { - log.Logger.Warn("Increase --timeout value") + log.Warn("Increase --timeout value") } }() opts.K8sVersion = cluster.GetClusterVersion() @@ -68,8 +71,8 @@ type runner struct { func newRunner(flagOpts flag.Options, cluster string) *runner { return &runner{ - flagOpts, - cluster, + flagOpts: flagOpts, + cluster: cluster, } } @@ -83,7 +86,7 @@ func (r *runner) run(ctx context.Context, artifacts []*k8sArtifacts.Artifact) er } defer func() { if err := runner.Close(ctx); err != nil { - log.Logger.Errorf("failed to close runner: %s", err) + 
log.ErrorContext(ctx, "Failed to close runner", log.Err(err)) } }() diff --git a/pkg/k8s/report/report.go b/pkg/k8s/report/report.go index 5de332a703bc..0861a8669143 100644 --- a/pkg/k8s/report/report.go +++ b/pkg/k8s/report/report.go @@ -1,6 +1,7 @@ package report import ( + "errors" "fmt" "io" "strings" @@ -260,7 +261,7 @@ func createK8sResource(artifact *artifacts.Artifact, scanResults types.Results) func (r Report) PrintErrors() { for _, resource := range r.Resources { if resource.Error != "" { - log.Logger.Errorf("Error during vulnerabilities or misconfiguration scan: %s", resource.Error) + log.Error("Error during vulnerabilities or misconfiguration scan", log.Err(errors.New(resource.Error))) } } } diff --git a/pkg/k8s/scanner/io.go b/pkg/k8s/scanner/io.go index 09709db6441e..9c32699ddd63 100644 --- a/pkg/k8s/scanner/io.go +++ b/pkg/k8s/scanner/io.go @@ -28,7 +28,7 @@ func createTempFile(artifact *artifacts.Artifact) (string, error) { } defer func() { if err := file.Close(); err != nil { - log.Logger.Errorf("failed to close temp file %s: %s:", file.Name(), err) + log.Error("Failed to close temp file", log.String("path", file.Name()), log.Err(err)) } }() @@ -42,7 +42,7 @@ func createTempFile(artifact *artifacts.Artifact) (string, error) { func removeFile(filename string) { if err := os.Remove(filename); err != nil { - log.Logger.Errorf("failed to remove temp file %s: %s:", filename, err) + log.Error("Failed to remove temp file", log.String("path", filename), log.Err(err)) } } diff --git a/pkg/k8s/scanner/scanner.go b/pkg/k8s/scanner/scanner.go index 55fe4c1e9386..68698ea1d3d2 100644 --- a/pkg/k8s/scanner/scanner.go +++ b/pkg/k8s/scanner/scanner.go @@ -53,21 +53,12 @@ func NewScanner(cluster string, runner cmd.Runner, opts flag.Options) *Scanner { func (s *Scanner) Scan(ctx context.Context, artifactsData []*artifacts.Artifact) (report.Report, error) { // disable logs before scanning - err := log.InitLogger(s.opts.Debug, true) - if err != nil { - return report.Report{}, xerrors.Errorf("logger error: %w", err) - } + log.InitLogger(s.opts.Debug, true) // enable log, this is done in a defer function, // to enable logs even when the function returns earlier // due to an error - defer func() { - err = log.InitLogger(s.opts.Debug, false) - if err != nil { - // we use log.Fatal here because the error was to enable the logger - log.Fatal(xerrors.Errorf("can't enable logger error: %w", err)) - } - }() + defer log.InitLogger(s.opts.Debug, false) if s.opts.Format == types.FormatCycloneDX { kbom, err := s.clusterInfoToReportResources(artifactsData) @@ -139,8 +130,7 @@ func (s *Scanner) Scan(ctx context.Context, artifactsData []*artifacts.Artifact) } p := parallel.NewPipeline(s.opts.Parallel, !s.opts.Quiet, resourceArtifacts, onItem, onResult) - err = p.Do(ctx) - if err != nil { + if err := p.Do(ctx); err != nil { return report.Report{}, err } if s.opts.Scanners.AnyEnabled(types.VulnerabilityScanner) { @@ -168,7 +158,6 @@ func (s *Scanner) scanVulns(ctx context.Context, artifact *artifacts.Artifact, o imageReport, err := s.runner.ScanImage(ctx, opts) if err != nil { - log.Logger.Warnf("failed to scan image %s: %s", image, err) resources = append(resources, report.CreateResource(artifact, imageReport, err)) continue } @@ -196,7 +185,6 @@ func (s *Scanner) scanMisconfigs(ctx context.Context, artifact *artifacts.Artifa // remove config file after scanning removeFile(configFile) if err != nil { - log.Logger.Debugf("failed to scan config %s/%s: %s", artifact.Kind, artifact.Name, err) return 
report.CreateResource(artifact, configReport, err), err } diff --git a/pkg/licensing/classifier.go b/pkg/licensing/classifier.go index 0af770ef987a..74f825f303a7 100644 --- a/pkg/licensing/classifier.go +++ b/pkg/licensing/classifier.go @@ -25,7 +25,7 @@ func initGoogleClassifier() error { // This loading is expensive and should be called only when the license classification is needed. var err error classifierOnce.Do(func() { - log.Logger.Debug("Loading the default license classifier...") + log.Debug("Loading the default license classifier...") cf, err = assets.DefaultClassifier() }) return err diff --git a/pkg/log/context.go b/pkg/log/context.go new file mode 100644 index 000000000000..67a161875d0f --- /dev/null +++ b/pkg/log/context.go @@ -0,0 +1,47 @@ +package log + +import ( + "context" + "log/slog" +) + +// prefixContextKey is the context key for logger. +// It is unexported to prevent collisions with context keys defined in other packages. +type prefixContextKey struct{} + +// WithContextPrefix returns a new context with the given prefix. +func WithContextPrefix(ctx context.Context, prefix string) context.Context { + if prefix == "" { + return ctx + } + return context.WithValue(ctx, prefixContextKey{}, "["+prefix+"] ") +} + +func contextualPrefix(ctx context.Context) string { + if prefix, ok := ctx.Value(prefixContextKey{}).(string); ok { + return prefix + } + return "" +} + +// attrContextKey is the context key for logger. +// It is unexported to prevent collisions with context keys defined in other packages. +type attrContextKey struct{} + +// WithContextAttrs returns a new context with the given attrs. +func WithContextAttrs(ctx context.Context, attrs ...slog.Attr) context.Context { + if len(attrs) == 0 { + return ctx + } + if ctxAttrs := contextualAttrs(ctx); ctxAttrs != nil { + attrs = append(ctxAttrs, attrs...) + } + return context.WithValue(ctx, attrContextKey{}, attrs) +} + +func contextualAttrs(ctx context.Context) []slog.Attr { + if attrs, ok := ctx.Value(attrContextKey{}).([]slog.Attr); ok { + return attrs + } + return nil +} diff --git a/pkg/log/handler.go b/pkg/log/handler.go new file mode 100644 index 000000000000..2e9bbe4a67dd --- /dev/null +++ b/pkg/log/handler.go @@ -0,0 +1,304 @@ +package log + +import ( + "bytes" + "context" + "fmt" + "io" + "log/slog" + "slices" + "strconv" + "sync" + "time" + + "github.com/fatih/color" + "github.com/samber/lo" + "golang.org/x/xerrors" +) + +const ( + errKey = "err" + prefixKey = "prefix" +) + +type ColorHandler struct { + opts Options + prefix string + preformatted []byte // data from WithGroup and WithAttrs + groups []string // groups from WithGroup + mu *sync.Mutex + out io.Writer +} + +type Options struct { + // Level reports the minimum level to log. + // Levels with lower levels are discarded. + // If nil, the Handler uses [slog.LevelInfo]. + Level slog.Leveler +} + +func NewHandler(out io.Writer, opts *Options) *ColorHandler { + h := &ColorHandler{ + out: out, + mu: &sync.Mutex{}, + } + if opts != nil { + h.opts = *opts + } + if h.opts.Level == nil { + h.opts.Level = slog.LevelInfo + } + return h +} + +func (h *ColorHandler) Enabled(_ context.Context, level slog.Level) bool { + return level >= h.opts.Level.Level() +} + +func (h *ColorHandler) WithGroup(name string) slog.Handler { + if name == "" { + return h + } + h2 := *h + // Add an unopened group to h2 without modifying h. 
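+ // Group names become dotted key prefixes in the rendered output; for example (illustrative),
+ // logger.WithGroup("db").Info("msg", "rows", 3) is printed as db.rows=3.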
+ h2.groups = make([]string, len(h.groups)+1) + copy(h2.groups, h.groups) + h2.groups[len(h2.groups)-1] = name + return &h2 +} + +func (h *ColorHandler) WithAttrs(attrs []slog.Attr) slog.Handler { + if len(attrs) == 0 { + return h + } + h2 := *h + + // Force an append to copy the underlying array. + h2.preformatted = slices.Clip(h.preformatted) + + // Pre-format the attributes. + for _, a := range attrs { + if isLogPrefix(a) { + h2.prefix = string(a.Value.Any().(logPrefix)) + continue + } + h2.preformatted = h2.appendAttr(h2.preformatted, a, h.groups) + } + return &h2 +} + +func (h *ColorHandler) appendAttr(buf []byte, a slog.Attr, groups []string) []byte { + // Resolve the Attr's value before doing anything else. + a.Value = a.Value.Resolve() + // Ignore empty Attrs and log prefixes. + if a.Equal(slog.Attr{}) || isLogPrefix(a) { + return buf + } + + var key string + for _, g := range groups { + key += g + "." + } + key += a.Key + + switch a.Value.Kind() { + case slog.KindString: + // Quote string values, to make them easy to parse. + buf = append(buf, key...) + buf = append(buf, '=') + buf = strconv.AppendQuote(buf, a.Value.String()) + case slog.KindTime: + // Write times in a standard way, without the monotonic time. + buf = append(buf, key...) + buf = append(buf, '=') + buf = a.Value.Time().AppendFormat(buf, time.RFC3339Nano) + case slog.KindGroup: + attrs := a.Value.Group() + // Ignore empty groups. + if len(attrs) == 0 { + return buf + } + if a.Key != "" { + groups = append(groups, a.Key) + } + for _, ga := range attrs { + buf = h.appendAttr(buf, ga, groups) + } + buf = bytes.TrimRight(buf, " ") // Trim the trailing space. + default: + buf = append(buf, key...) + buf = append(buf, '=') + if err, ok := a.Value.Any().(error); ok { + buf = strconv.AppendQuote(buf, color.HiRedString(err.Error())) + } else { + buf = append(buf, a.Value.String()...) + } + } + return append(buf, ' ') +} + +func (h *ColorHandler) Handle(ctx context.Context, r slog.Record) error { + bufp := allocBuf() + buf := *bufp + defer func() { + *bufp = buf + freeBuf(bufp) + }() + + buf = h.handle(ctx, buf, r) + + h.mu.Lock() + defer h.mu.Unlock() + + if _, err := h.out.Write(buf); err != nil { + return xerrors.Errorf("failed to write log: %w", err) + } + + return nil +} + +func (h *ColorHandler) handle(ctx context.Context, buf []byte, r slog.Record) []byte { + colorize := color.New() + switch r.Level { + case slog.LevelDebug: + colorize = colorize.Add(color.FgHiBlack) + case slog.LevelInfo: + colorize = colorize.Add(color.FgHiBlue) + case slog.LevelWarn: + colorize = colorize.Add(color.FgHiYellow) + case slog.LevelError: + colorize = colorize.Add(color.FgHiRed) + case LevelFatal: + colorize = colorize.Add(color.FgRed) + } + + // Timestamp + if !r.Time.IsZero() { + buf = append(buf, r.Time.Format(time.RFC3339)...) + buf = append(buf, '\t') + } + + // Level + buf = append(buf, colorize.Sprint(levelString(r.Level))...) + buf = append(buf, '\t') + + // Message + buf = append(buf, h.Prefix(ctx, r)+r.Message...) + if r.Level == LevelFatal { + // Show the error and return early. + format := lo.Ternary(h.opts.Level == slog.LevelDebug, "\n - %+v\n", "\t%v\n") + return fmt.Appendf(buf, format, h.Err(r)) + } + + // Attrs + var preformatted []byte + for _, a := range contextualAttrs(ctx) { + preformatted = h.appendAttr(preformatted, a, h.groups) + } + preformatted = append(preformatted, h.preformatted...) 
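+ // Attrs attached to the context via WithContextAttrs are rendered first,
+ // followed by the handler's preformatted attrs and then the per-record attrs.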
+ + if len(preformatted) > 0 || r.NumAttrs() > 0 { + buf = append(buf, '\t') + } + + if len(preformatted) > 0 { + buf = append(buf, preformatted...) + } + r.Attrs(func(a slog.Attr) bool { + buf = h.appendAttr(buf, a, h.groups) + return true + }) + + // Trim the trailing space. + buf = bytes.TrimRight(buf, " ") + buf = append(buf, '\n') + + return buf +} + +// Err returns the error from the attrs, if any. +func (h *ColorHandler) Err(r slog.Record) error { + return findKey[error](errKey, r) +} + +// Prefix returns the prefix from the attrs, if any. +func (h *ColorHandler) Prefix(ctx context.Context, r slog.Record) string { + if attrPrefix := string(findKey[logPrefix](prefixKey, r)); attrPrefix != "" { + return attrPrefix + } + if ctxPrefix := contextualPrefix(ctx); ctxPrefix != "" { + return ctxPrefix + } + return h.prefix +} + +func findKey[T any](key string, r slog.Record) T { + var v T + r.Attrs(func(a slog.Attr) bool { + if a.Key != key { + return true + } + + var ok bool + if v, ok = a.Value.Any().(T); !ok { + return true + } + return false + }) + return v +} + +var ( + String = slog.String + Int64 = slog.Int64 + Int = slog.Int + Bool = slog.Bool + Time = slog.Time + Duration = slog.Duration + Group = slog.Group + Any = slog.Any +) + +// Err returns an Attr that represents an error. +func Err(err error) slog.Attr { + return slog.Any(errKey, err) +} + +type logPrefix string + +// Prefix returns an Attr that represents a prefix. +func Prefix(prefix string) slog.Attr { + return slog.Any(prefixKey, logPrefix("["+prefix+"] ")) +} + +func isLogPrefix(a slog.Attr) bool { + _, ok := a.Value.Any().(logPrefix) + return ok +} + +func levelString(level slog.Level) string { + if level == LevelFatal { + return "FATAL" + } + return level.String() +} + +var bufPool = sync.Pool{ + New: func() any { + b := make([]byte, 0, 1024) + return &b + }, +} + +func allocBuf() *[]byte { + return bufPool.Get().(*[]byte) +} + +func freeBuf(b *[]byte) { + // To reduce peak allocation, return only smaller buffers to the pool. + const maxBufferSize = 16 << 10 + if cap(*b) <= maxBufferSize { + *b = (*b)[:0] + bufPool.Put(b) + } +} diff --git a/pkg/log/handler_test.go b/pkg/log/handler_test.go new file mode 100644 index 000000000000..4d106535b8d8 --- /dev/null +++ b/pkg/log/handler_test.go @@ -0,0 +1,252 @@ +package log_test + +import ( + "bytes" + "context" + "errors" + "fmt" + "github.com/aquasecurity/trivy/pkg/log" + "github.com/stretchr/testify/assert" + "log/slog" + "os" + "strings" + "testing" + "testing/slogtest" + "time" +) + +func TestColorHandler(t *testing.T) { + var buf bytes.Buffer + logger := slog.New(log.NewHandler(&buf, &log.Options{Level: slog.LevelDebug})) + + // Test logging with different levels, attributes, and groups. 
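+ // Each emitted line is tab-separated: timestamp, level, message, then space-separated key=value attrs.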
+ logger.Debug("debug message", "key1", "value1", "key2", 2) + logger.Info("info message", "key3", true) + logger.Warn("warn message", slog.Group("group1", slog.Int("key4", 42))) + logger.Error("error message", slog.Group("group2", slog.String("key5", "value5"))) + + got := buf.String() + + wantLines := []string{ + `DEBUG debug message key1="value1" key2=2`, + `INFO info message key3=true`, + `WARN warn message group1.key4=42`, + `ERROR error message group2.key5="value5"`, + } + compareLines(t, got, wantLines) +} + +func TestSlog(t *testing.T) { + logger := slog.New(log.NewHandler(os.Stdout, &log.Options{Level: slog.LevelWarn})) + logger.Info("foo") + logger.Warn("warn message", slog.Group("group2", slog.String("key5", "value5"))) + logger.Error("error", slog.Int("key3", 3), slog.Group("group3", slog.String("key4", "value4"))) +} + +func TestWithAttrsAndWithGroup(t *testing.T) { + t.Run("single group", func(t *testing.T) { + var buf bytes.Buffer + baseLogger := log.New(log.NewHandler(&buf, &log.Options{Level: slog.LevelWarn})) + + // Test logging with WithContextAttrs and WithGroup. + logger := baseLogger. + With("key1", "value1"). + WithGroup("group1"). + With("key2", "value2") + + logger.Debug("debug message") + logger.Info("info message", "key3", true) + logger.Warn("warn message", log.Err(errors.New("error"))) + logger.Error("error message", slog.Group("group2", slog.Int("key4", 4))) + + got := buf.String() + wantLines := []string{ + `WARN warn message key1="value1" group1.key2="value2" group1.err="error"`, + `ERROR error message key1="value1" group1.key2="value2" group1.group2.key4=4`, + } + compareLines(t, got, wantLines) + }) + + t.Run("multiple groups", func(t *testing.T) { + var buf bytes.Buffer + baseLogger := log.New(log.NewHandler(&buf, &log.Options{Level: slog.LevelWarn})) + + // Test logging with WithContextAttrs and WithGroup. + logger := baseLogger. + WithGroup("group1"). + With("key1", "value1"). 
+ WithGroup("group2") + + logger.Error("error message", slog.Group("group3", slog.Int("key2", 2))) + + got := buf.String() + wantLines := []string{ + `ERROR error message group1.key1="value1" group1.group2.group3.key2=2`, + } + compareLines(t, got, wantLines) + }) + + t.Run("prefix", func(t *testing.T) { + var buf bytes.Buffer + logger := log.New(log.NewHandler(&buf, &log.Options{Level: slog.LevelWarn})) + logger.Error("error message", log.Prefix("prefix1"), log.String("key1", "value1")) + + wantLines := []string{ + `ERROR [prefix1] error message key1="value1"`, + } + compareLines(t, buf.String(), wantLines) + + buf.Reset() + log.SetDefault(logger) + log.WithPrefix("prefix2").Error("error message", log.String("key1", "value1")) + + wantLines = []string{ + `ERROR [prefix2] error message key1="value1"`, + } + compareLines(t, buf.String(), wantLines) + }) +} + +func TestContext(t *testing.T) { + t.Run("with context prefix", func(t *testing.T) { + var buf bytes.Buffer + baseLogger := log.New(log.NewHandler(&buf, &log.Options{Level: slog.LevelInfo})) + + // Test logging with WithContextPrefix + ctx := context.Background() + ctx = log.WithContextPrefix(ctx, "prefix1") + + logger := baseLogger.With("key1", "value1").WithGroup("group1") + logger.InfoContext(ctx, "info message", "key2", true) + + got := buf.String() + wantLines := []string{ + `INFO [prefix1] info message key1="value1" group1.key2=true`, + } + compareLines(t, got, wantLines) + }) + + t.Run("with context attrs", func(t *testing.T) { + var buf bytes.Buffer + baseLogger := log.New(log.NewHandler(&buf, &log.Options{Level: slog.LevelInfo})) + + // Test logging with WithContextAttrs + ctx := context.Background() + ctx = log.WithContextAttrs(ctx, log.String("key1", "value1")) + + logger := baseLogger.WithGroup("group1") + logger.InfoContext(ctx, "info message", "key2", true) + + got := buf.String() + wantLines := []string{ + `INFO info message group1.key1="value1" group1.key2=true`, + } + compareLines(t, got, wantLines) + }) +} + +func compareLines(t *testing.T, got string, wantLines []string) { + // Strip color codes from the output. + got = stripColorCodes(got) + + // Split the output into lines. + gotLines := strings.Split(got, "\n") + + assert.Len(t, gotLines, len(wantLines)+1) // Expecting log lines and an empty line. + + for i, wantLine := range wantLines { + if i >= len(gotLines) { + break + } + + ss := strings.Split(gotLines[i], "\t") + gotLevel, gotMessage, gotAttrs := ss[1], ss[2], ss[3] + + ss = strings.Split(wantLine, "\t") + wantLevel, wantMessage, wantAttrs := ss[0], ss[1], ss[2] + + assert.Equal(t, wantLevel, gotLevel) + assert.Equal(t, wantMessage, gotMessage) + assert.Equal(t, wantAttrs, gotAttrs) + } + assert.Empty(t, strings.TrimSpace(gotLines[len(gotLines)-1])) // Last line should be empty. +} + +func stripColorCodes(s string) string { + // This is a simplified version that only handles the color codes used in ColorHandler. + s = strings.ReplaceAll(s, "\x1b[90m", "") // FgHiBlack + s = strings.ReplaceAll(s, "\x1b[94m", "") // FgHiBlue + s = strings.ReplaceAll(s, "\x1b[93m", "") // FgHiYellow + s = strings.ReplaceAll(s, "\x1b[91m", "") // FgHiRed + s = strings.ReplaceAll(s, "\x1b[96m", "") // FgHiCyan + s = strings.ReplaceAll(s, "\x1b[95m", "") // FgHiMagenta + s = strings.ReplaceAll(s, "\x1b[97m", "") // FgWhite + s = strings.ReplaceAll(s, "\x1b[0m", "") // Reset + return s +} + +// TODO: slogtest.Run was added in Go 1.22. Waiting for https://github.com/aquasecurity/trivy/pull/6075. 
+func TestSlogtest(t *testing.T) { + var buf bytes.Buffer + newHandler := func(*testing.T) slog.Handler { + buf.Reset() + return log.NewHandler(&buf, &log.Options{Level: slog.LevelDebug}) + } + + results := func(*testing.T) map[string]any { + for _, line := range strings.Split(buf.String(), "\n") { + if len(line) == 0 { + continue + } + m, err := parseLogLine(line) + if err != nil { + t.Fatalf("Failed to parse log line: %v", err) + } + return m + } + return nil + } + + slogtest.Run(t, newHandler, results) +} + +func parseLogLine(line string) (map[string]any, error) { + parts := strings.SplitN(line, "\t", 4) + if len(parts) < 2 { + return nil, fmt.Errorf("invalid log line format: %s", line) + } + + m := make(map[string]any) + if t, err := time.Parse(time.RFC3339, parts[0]); err == nil { + m["time"] = t + parts = parts[1:] + } + m["level"] = parts[0] + m["msg"] = parts[1] + + if len(parts) == 3 { + for _, attr := range strings.Split(parts[2], " ") { + kv := strings.SplitN(attr, "=", 2) + if len(kv) == 2 { + parseAttr(m, kv[0], kv[1]) + } + } + } + + return m, nil +} + +func parseAttr(attrs map[string]any, key, value string) { + parts := strings.Split(key, ".") + currentMap := attrs + for i, part := range parts { + if i == len(parts)-1 { + currentMap[part] = strings.Trim(value, `"`) + } else { + if _, ok := currentMap[part]; !ok { + currentMap[part] = make(map[string]any) + } + currentMap = currentMap[part].(map[string]any) + } + } +} diff --git a/pkg/log/logger.go b/pkg/log/logger.go index 89354def9185..efee7ef8800d 100644 --- a/pkg/log/logger.go +++ b/pkg/log/logger.go @@ -1,129 +1,87 @@ package log import ( + "context" + "fmt" + "io" + "log/slog" "os" - "runtime" "strings" - xlog "github.com/masahiro331/go-xfs-filesystem/log" - "go.uber.org/zap" - "go.uber.org/zap/zapcore" - "golang.org/x/xerrors" - - flog "github.com/aquasecurity/trivy/pkg/fanal/log" + "github.com/samber/lo" ) -var ( - // Logger is the global variable for logging - Logger *zap.SugaredLogger - debugOption bool +const ( + LevelDebug = slog.LevelDebug + LevelInfo = slog.LevelInfo + LevelWarn = slog.LevelWarn + LevelError = slog.LevelError + LevelFatal = slog.Level(12) ) -func init() { - // Set the default logger - Logger, _ = NewLogger(false, false) // nolint: errcheck +// Logger is an alias of slog.Logger +type Logger = slog.Logger + +// New creates a new Logger with the given non-nil Handler. +func New(h slog.Handler) *Logger { + return slog.New(h) } // InitLogger initialize the logger variable -func InitLogger(debug, disable bool) (err error) { - debugOption = debug - Logger, err = NewLogger(debug, disable) - if err != nil { - return xerrors.Errorf("failed to initialize a logger: %w", err) - } - - // Set logger for fanal - flog.SetLogger(Logger) - - // Set logger for go-xfs-filesystem - xlog.SetLogger(Logger) - - return nil - +func InitLogger(debug, disable bool) { + level := lo.Ternary(debug, slog.LevelDebug, slog.LevelInfo) + out := lo.Ternary(disable, io.Discard, io.Writer(os.Stderr)) + slog.SetDefault(New(NewHandler(out, &Options{Level: level}))) } -// NewLogger is the factory method to return the instance of logger -func NewLogger(debug, disable bool) (*zap.SugaredLogger, error) { - // First, define our level-handling logic. 
- errorPriority := zap.LevelEnablerFunc(func(lvl zapcore.Level) bool { - return lvl >= zapcore.ErrorLevel - }) - logPriority := zap.LevelEnablerFunc(func(lvl zapcore.Level) bool { - if debug { - return lvl < zapcore.ErrorLevel - } - // Not enable debug level - return zapcore.DebugLevel < lvl && lvl < zapcore.ErrorLevel - }) - - encoderLevel := zapcore.CapitalColorLevelEncoder - // when running on Windows, don't log with color - if runtime.GOOS == "windows" { - encoderLevel = zapcore.CapitalLevelEncoder - } - - encoderConfig := zapcore.EncoderConfig{ - TimeKey: "Time", - LevelKey: "Level", - NameKey: "Name", - CallerKey: "Caller", - MessageKey: "Msg", - StacktraceKey: "St", - EncodeLevel: encoderLevel, - EncodeTime: zapcore.ISO8601TimeEncoder, - EncodeDuration: zapcore.StringDurationEncoder, - EncodeCaller: zapcore.ShortCallerEncoder, - } - - consoleEncoder := zapcore.NewConsoleEncoder(encoderConfig) - - // High-priority output should also go to standard error, and low-priority - // output should also go to standard out. - consoleLogs := zapcore.Lock(os.Stderr) - consoleErrors := zapcore.Lock(os.Stderr) - if disable { - devNull, err := os.Create(os.DevNull) - if err != nil { - return nil, err - } - // Discard low-priority output - consoleLogs = zapcore.Lock(devNull) - } - - core := zapcore.NewTee( - zapcore.NewCore(consoleEncoder, consoleErrors, errorPriority), - zapcore.NewCore(consoleEncoder, consoleLogs, logPriority), - ) - - opts := []zap.Option{zap.ErrorOutput(zapcore.Lock(os.Stderr))} - if debug { - opts = append(opts, zap.Development()) - } - logger := zap.New(core, opts...) +var ( + // With calls [Logger.With] on the default logger. + With = slog.With + + SetDefault = slog.SetDefault + + Debug = slog.Debug + DebugContext = slog.DebugContext + Info = slog.Info + InfoContext = slog.InfoContext + Warn = slog.Warn + WarnContext = slog.WarnContext + Error = slog.Error + ErrorContext = slog.ErrorContext +) - return logger.Sugar(), nil +// WithPrefix calls [Logger.With] with the prefix on the default logger. +// +// Note: If WithPrefix is called within init() or during global variable +// initialization, it will use the default logger of log/slog package +// before Trivy's logger is set up. In such cases, it's recommended to pass the prefix +// via WithContextPrefix to ensure the correct logger is used. +func WithPrefix(prefix string) *Logger { + return slog.Default().With(Prefix(prefix)) } +func Debugf(format string, args ...any) { slog.Default().Debug(fmt.Sprintf(format, args...)) } +func Infof(format string, args ...any) { slog.Default().Info(fmt.Sprintf(format, args...)) } +func Warnf(format string, args ...any) { slog.Default().Warn(fmt.Sprintf(format, args...)) } +func Errorf(format string, args ...any) { slog.Default().Error(fmt.Sprintf(format, args...)) } + // Fatal for logging fatal errors -func Fatal(err error) { - if debugOption { - Logger.Fatalf("%+v", err) - } - Logger.Fatal(err) +func Fatal(msg string, args ...any) { + slog.Default().Log(context.Background(), LevelFatal, msg, args...) 
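+ // os.Exit terminates the process immediately, so deferred functions do not run after Fatal.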
+ os.Exit(1) } -func String(key, val string) zap.Field { - if key == "" || val == "" { - return zap.Skip() - } - return zap.String(key, val) +// WriteLogger is a wrapper around Logger to implement io.Writer +type WriteLogger struct { + logger *Logger } -type PrefixedLogger struct { - Name string +// NewWriteLogger creates a new WriteLogger +func NewWriteLogger(logger *Logger) *WriteLogger { + return &WriteLogger{logger: logger} } -func (d *PrefixedLogger) Write(p []byte) (n int, err error) { - Logger.Debugf("[%s] %s", d.Name, strings.TrimSpace(string(p))) +func (l *WriteLogger) Write(p []byte) (n int, err error) { + l.logger.Debug(strings.TrimSpace(string(p))) return len(p), nil } diff --git a/pkg/misconf/scanner.go b/pkg/misconf/scanner.go index 6a30c9b69ec4..950ad73cbca6 100644 --- a/pkg/misconf/scanner.go +++ b/pkg/misconf/scanner.go @@ -153,12 +153,12 @@ func (s *Scanner) Scan(ctx context.Context, fsys fs.FS) ([]types.Misconfiguratio return nil, nil } - log.Logger.Debugf("Scanning %s files for misconfigurations...", s.scanner.Name()) + log.Debug("Scanning files for misconfigurations...", log.String("scanner", s.scanner.Name())) results, err := s.scanner.ScanFS(ctx, newfs, ".") if err != nil { var invalidContentError *cfparser.InvalidContentError if errors.As(err, &invalidContentError) { - log.Logger.Errorf("scan %q was broken with InvalidContentError: %v", s.scanner.Name(), err) + log.Error("Scan was broken with InvalidContentError", log.String("scanner", s.scanner.Name()), log.Err(err)) return nil, nil } return nil, xerrors.Errorf("scan config error: %w", err) @@ -237,7 +237,7 @@ func scannerOptions(t detection.FileType, opt ScannerOption) ([]options.ScannerO ) if opt.Debug { - opts = append(opts, options.ScannerWithDebug(&log.PrefixedLogger{Name: "misconf"})) + opts = append(opts, options.ScannerWithDebug(log.NewWriteLogger(log.WithPrefix("misconf")))) } if opt.Trace { diff --git a/pkg/module/command.go b/pkg/module/command.go index 9114aea1ced6..87a87e5d6209 100644 --- a/pkg/module/command.go +++ b/pkg/module/command.go @@ -22,14 +22,14 @@ func Install(ctx context.Context, dir, repo string, quiet bool, opt types.Regist return xerrors.Errorf("repository parse error: %w", err) } - log.Logger.Infof("Installing the module from %s...", repo) + log.Info("Installing the module from the repository...", log.String("repo", repo)) artifact, err := oci.NewArtifact(repo, quiet, opt) if err != nil { return xerrors.Errorf("module initialize error: %w", err) } dst := filepath.Join(dir, ref.Context().Name()) - log.Logger.Debugf("Installing the module to %s...", dst) + log.Debug("Installing the module...", log.String("dst", dst)) if err = artifact.Download(ctx, dst, oci.DownloadOption{MediaType: mediaType}); err != nil { return xerrors.Errorf("module download error: %w", err) @@ -45,7 +45,7 @@ func Uninstall(_ context.Context, dir, repo string) error { return xerrors.Errorf("repository parse error: %w", err) } - log.Logger.Infof("Uninstalling %s ...", repo) + log.Info("Uninstalling the module...", log.String("module", repo)) dst := filepath.Join(dir, ref.Context().Name()) if err = os.RemoveAll(dst); err != nil { return xerrors.Errorf("remove error: %w", err) } diff --git a/pkg/module/module.go b/pkg/module/module.go index 3d670999e1b9..f573c20597c1 100644 --- a/pkg/module/module.go +++ b/pkg/module/module.go @@ -3,6 +3,7 @@ package module import ( "context" "encoding/json" + "fmt" "io/fs" "os" "path/filepath" @@ -44,7 +45,7 @@ func logDebug(_ context.Context, mod api.Module, params []uint64) { buf := 
readMemory(mod.Memory(), offset, size) if buf != nil { - log.Logger.Debug(string(buf)) + log.Debug(string(buf)) } return @@ -56,7 +57,7 @@ func logInfo(_ context.Context, mod api.Module, params []uint64) { buf := readMemory(mod.Memory(), offset, size) if buf != nil { - log.Logger.Info(string(buf)) + log.Info(string(buf)) } return @@ -68,7 +69,7 @@ func logWarn(_ context.Context, mod api.Module, params []uint64) { buf := readMemory(mod.Memory(), offset, size) if buf != nil { - log.Logger.Warn(string(buf)) + log.Warn(string(buf)) } return @@ -80,7 +81,7 @@ func logError(_ context.Context, mod api.Module, params []uint64) { buf := readMemory(mod.Memory(), offset, size) if buf != nil { - log.Logger.Error(string(buf)) + log.Error(string(buf)) } return @@ -89,7 +90,8 @@ func logError(_ context.Context, mod api.Module, params []uint64) { func readMemory(mem api.Memory, offset, size uint32) []byte { buf, ok := mem.Read(offset, size) if !ok { - log.Logger.Errorf("Memory.Read(%d, %d) out of range", offset, size) + log.Error("Memory.Read() out of range", + log.Int("offset", int(offset)), log.Int("size", int(size))) return nil } return buf @@ -129,7 +131,7 @@ func (m *Manager) loadModules(ctx context.Context) error { if os.IsNotExist(err) { return nil } - log.Logger.Debugf("Module dir: %s", m.dir) + log.Debug("Module dir", log.String("dir", m.dir)) err = filepath.Walk(m.dir, func(path string, info fs.FileInfo, err error) error { if err != nil { @@ -143,7 +145,7 @@ func (m *Manager) loadModules(ctx context.Context) error { return xerrors.Errorf("failed to get a relative path: %w", err) } - log.Logger.Infof("Reading %s...", rel) + log.Info("Reading a module...", log.String("path", rel)) wasmCode, err := os.ReadFile(path) if err != nil { return xerrors.Errorf("file read error: %w", err) @@ -159,7 +161,7 @@ func (m *Manager) loadModules(ctx context.Context) error { return nil } - log.Logger.Infof("%s loaded", rel) + log.Info("Module loaded", log.String("path", rel)) m.modules = append(m.modules, p) return nil @@ -341,8 +343,9 @@ func newWASMPlugin(ctx context.Context, ccache wazero.CompilationCache, code []b } if apiVersion != tapi.Version { - log.Logger.Infof("Ignore %s@v%d module due to API version mismatch, got: %d, want: %d", - name, version, apiVersion, tapi.Version) + log.Info("Ignore the module due to API version mismatch", + log.String("module", fmt.Sprintf("%s@v%d", name, version)), + log.Int("got", apiVersion), log.Int("want", tapi.Version)) return nil, nil } @@ -403,13 +406,14 @@ func newWASMPlugin(ctx context.Context, ccache wazero.CompilationCache, code []b } func (m *wasmModule) Register() { - log.Logger.Infof("Registering WASM module: %s@v%d", m.name, m.version) + logger := log.With(log.String("name", m.name), log.Int("version", m.version)) + logger.Info("Registering WASM module") if m.isAnalyzer { - log.Logger.Debugf("Registering custom analyzer in %s@v%d", m.name, m.version) + logger.Debug("Registering custom analyzer") analyzer.RegisterAnalyzer(m) } if m.isPostScanner { - log.Logger.Debugf("Registering custom post scanner in %s@v%d", m.name, m.version) + logger.Debug("Registering custom post scanner") post.RegisterPostScanner(m) } } @@ -441,7 +445,7 @@ func (m *wasmModule) Required(filePath string, _ os.FileInfo) bool { func (m *wasmModule) Analyze(ctx context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) { filePath := "/" + filepath.ToSlash(input.FilePath) - log.Logger.Debugf("Module %s: analyzing %s...", m.name, filePath) + log.Debug("Module analyzing...", 
log.String("module", m.name), log.String("file_path", filePath)) // Wasm module instances are not Goroutine safe, so we take look here since Analyze might be called concurrently. // TODO: This is temporary solution and we could improve the Analyze performance by having module instance pool. diff --git a/pkg/parallel/walk.go b/pkg/parallel/walk.go index 1156560a7212..2797bf98391a 100644 --- a/pkg/parallel/walk.go +++ b/pkg/parallel/walk.go @@ -4,7 +4,6 @@ import ( "context" "io/fs" - "go.uber.org/zap" "golang.org/x/sync/errgroup" "golang.org/x/xerrors" @@ -37,7 +36,7 @@ func WalkDir[T any](ctx context.Context, fsys fs.FS, root string, parallel int, if err != nil { return err } else if info.Size() == 0 { - log.Logger.Debugf("%s is empty, skip this file", path) + log.Debug("Skip the empty file", log.String("file_path", path)) return nil } @@ -106,7 +105,7 @@ func walk[T any](ctx context.Context, fsys fs.FS, path string, c chan T, onFile } res, err := onFile(path, info, rsa) if err != nil { - log.Logger.Debugw("Walk error", zap.String("file_path", path), zap.Error(err)) + log.Debug("Walk error", log.String("file_path", path), log.Err(err)) return nil } diff --git a/pkg/plugin/plugin.go b/pkg/plugin/plugin.go index f96f02a80c00..cbff3f4d01f7 100644 --- a/pkg/plugin/plugin.go +++ b/pkg/plugin/plugin.go @@ -136,7 +136,8 @@ func (p Plugin) selectPlatform() (Platform, error) { selector := platform.Selector if (selector.OS == "" || p.GOOS == selector.OS) && (selector.Arch == "" || p.GOARCH == selector.Arch) { - log.Logger.Debugf("Platform found, os: %s, arch: %s", selector.OS, selector.Arch) + log.Debug("Platform found", + log.String("os", selector.OS), log.String("arch", selector.Arch)) return platform, nil } } @@ -144,13 +145,13 @@ func (p Plugin) selectPlatform() (Platform, error) { } func (p Plugin) install(ctx context.Context, dst, pwd string) error { - log.Logger.Debugf("Installing the plugin to %s...", dst) + log.Debug("Installing the plugin...", log.String("path", dst)) platform, err := p.selectPlatform() if err != nil { return xerrors.Errorf("platform selection error: %w", err) } - log.Logger.Debugf("Downloading the execution file from %s...", platform.URI) + log.Debug("Downloading the execution file...", log.String("uri", platform.URI)) if err = downloader.Download(ctx, platform.URI, dst, pwd); err != nil { return xerrors.Errorf("unable to download the execution file (%s): %w", platform.URI, err) } @@ -181,14 +182,14 @@ func Install(ctx context.Context, url string, force bool) (Plugin, error) { } } - log.Logger.Infof("Installing the plugin from %s...", url) + log.Info("Installing the plugin...", log.String("url", url)) tempDir, err := downloader.DownloadToTempDir(ctx, url) if err != nil { return Plugin{}, xerrors.Errorf("download failed: %w", err) } defer os.RemoveAll(tempDir) - log.Logger.Info("Loading the plugin metadata...") + log.Info("Loading the plugin metadata...") plugin, err := loadMetadata(tempDir) if err != nil { return Plugin{}, xerrors.Errorf("failed to load the plugin metadata: %w", err) @@ -259,16 +260,18 @@ func Update(name string) error { return xerrors.Errorf("plugin load error: %w", err) } - log.Logger.Infof("Updating plugin '%s'", name) + logger := log.With("name", name) + logger.Info("Updating plugin...") updated, err := Install(nil, plugin.Repository, true) if err != nil { return xerrors.Errorf("unable to perform an update installation: %w", err) } if plugin.Version == updated.Version { - log.Logger.Infof("The %s plugin is the latest version. 
[%s]", name, plugin.Version) + logger.Info("The plugin is up-to-date", log.String("version", plugin.Version)) } else { - log.Logger.Infof("Updated '%s' from %s to %s", name, plugin.Version, updated.Version) + logger.Info("Plugin updated", + log.String("from", plugin.Version), log.String("to", updated.Version)) } return nil } @@ -288,7 +291,7 @@ func LoadAll() ([]Plugin, error) { } plugin, err := loadMetadata(filepath.Join(pluginsDir, d.Name())) if err != nil { - log.Logger.Warnf("plugin load error: %s", err) + log.Warn("Plugin load error", log.Err(err)) continue } plugins = append(plugins, plugin) diff --git a/pkg/policy/policy.go b/pkg/policy/policy.go index 9dc802c8207e..6d7aadfc8e3d 100644 --- a/pkg/policy/policy.go +++ b/pkg/policy/policy.go @@ -91,7 +91,7 @@ func NewClient(cacheDir string, quiet bool, policyBundleRepo string, opts ...Opt func (c *Client) populateOCIArtifact(registryOpts types.RegistryOptions) error { if c.artifact == nil { - log.Logger.Debugf("Using URL: %s to load policy bundle", c.policyBundleRepo) + log.Debug("Loading policy bundle", log.String("repository", c.policyBundleRepo)) art, err := oci.NewArtifact(c.policyBundleRepo, c.quiet, registryOpts) if err != nil { return xerrors.Errorf("OCI artifact error: %w", err) @@ -116,7 +116,7 @@ func (c *Client) DownloadBuiltinPolicies(ctx context.Context, registryOpts types if err != nil { return xerrors.Errorf("digest error: %w", err) } - log.Logger.Debugf("Digest of the built-in policies: %s", digest) + log.Debug("Digest of the built-in policies", log.String("digest", digest)) // Update metadata.json with the new digest and the current date if err = c.updateMetadata(digest, c.clock.Now()); err != nil { @@ -222,14 +222,14 @@ func (c *Client) updateMetadata(digest string, now time.Time) error { func (c *Client) GetMetadata() (*Metadata, error) { f, err := os.Open(c.metadataPath()) if err != nil { - log.Logger.Debugf("Failed to open the policy metadata: %s", err) + log.Debug("Failed to open the policy metadata", log.Err(err)) return nil, err } defer f.Close() var meta Metadata if err = json.NewDecoder(f).Decode(&meta); err != nil { - log.Logger.Warnf("Policy metadata decode error: %s", err) + log.Warn("Policy metadata decode error", log.Err(err)) return nil, err } @@ -237,7 +237,7 @@ func (c *Client) GetMetadata() (*Metadata, error) { } func (c *Client) Clear() error { - log.Logger.Info("Removing policy bundle...") + log.Info("Removing policy bundle...") if err := os.RemoveAll(c.policyDir); err != nil { return xerrors.Errorf("failed to remove policy bundle: %w", err) } diff --git a/pkg/rekor/client.go b/pkg/rekor/client.go index a902c3a6f865..d7b0a35dd781 100644 --- a/pkg/rekor/client.go +++ b/pkg/rekor/client.go @@ -37,9 +37,15 @@ type EntryID struct { func NewEntryID(entryID string) (EntryID, error) { switch len(entryID) { case treeIDLen + uuidLen: - return EntryID{TreeID: entryID[:treeIDLen], UUID: entryID[treeIDLen:]}, nil + return EntryID{ + TreeID: entryID[:treeIDLen], + UUID: entryID[treeIDLen:], + }, nil case uuidLen: - return EntryID{TreeID: "", UUID: entryID}, nil + return EntryID{ + TreeID: "", + UUID: entryID, + }, nil default: return EntryID{}, xerrors.New("invalid Entry ID length") } @@ -71,7 +77,7 @@ func NewClient(rekorURL string) (*Client, error) { } func (c *Client) Search(ctx context.Context, hash string) ([]EntryID, error) { - log.Logger.Debugf("Search for %s in Rekor", hash) + log.Debug("Searching index in Rekor", log.String("hash", hash)) params := 
index.NewSearchIndexParamsWithContext(ctx).WithQuery(&models.SearchIndex{Hash: hash}) resp, err := c.Index.SearchIndex(params) if err != nil { diff --git a/pkg/remote/remote.go b/pkg/remote/remote.go index 08924704a7d5..b989df18a35b 100644 --- a/pkg/remote/remote.go +++ b/pkg/remote/remote.go @@ -185,7 +185,7 @@ func resolvePlatform(ref name.Reference, p types.Platform, options []remote.Opti switch d.MediaType { case v1types.OCIManifestSchema1, v1types.DockerManifestSchema2: // We want an index but the registry has an image, not multi-arch. We just ignore "--platform". - log.Logger.Debug("Ignore --platform as the image is not multi-arch") + log.Debug("Ignore `--platform` as the image is not multi-arch") return types.Platform{}, nil case v1types.OCIImageIndex, v1types.DockerManifestList: // These are expected. @@ -201,7 +201,7 @@ func resolvePlatform(ref name.Reference, p types.Platform, options []remote.Opti return types.Platform{}, xerrors.Errorf("remote index manifest error: %w", err) } if len(m.Manifests) == 0 { - log.Logger.Debug("Ignore '--platform' as the image is not multi-arch") + log.Debug("Ignore '--platform' as the image is not multi-arch") return types.Platform{}, nil } if m.Manifests[0].Platform != nil { diff --git a/pkg/report/table/vulnerability.go b/pkg/report/table/vulnerability.go index bdfa9bf1af1d..9b85a1f4aab1 100644 --- a/pkg/report/table/vulnerability.go +++ b/pkg/report/table/vulnerability.go @@ -22,7 +22,7 @@ import ( ) var showSuppressedOnce = sync.OnceFunc(func() { - log.Logger.Info(`Some vulnerabilities have been ignored/suppressed. Use the "--show-suppressed" flag to display them.`) + log.Info(`Some vulnerabilities have been ignored/suppressed. Use the "--show-suppressed" flag to display them.`) }) type vulnerabilityRenderer struct { @@ -111,7 +111,7 @@ func (r *vulnerabilityRenderer) setVulnerabilityRows(tw *table.Table, vulns []ty fileName := filepath.Base(pkgPath) lib = fmt.Sprintf("%s (%s)", v.PkgName, fileName) r.once.Do(func() { - log.Logger.Infof("Table result includes only package filenames. Use '--format json' option to get the full path to the package file.") + log.Info("Table result includes only package filenames. Use '--format json' option to get the full path to the package file.") }) } diff --git a/pkg/report/template.go b/pkg/report/template.go index 7a28a65cfbb2..1ebabdb89cfe 100644 --- a/pkg/report/template.go +++ b/pkg/report/template.go @@ -40,7 +40,7 @@ func NewTemplateWriter(output io.Writer, outputTemplate, appVersion string) (*Te templateFuncMap["escapeXML"] = func(input string) string { escaped := &bytes.Buffer{} if err := xml.EscapeText(escaped, []byte(input)); err != nil { - log.Logger.Error("error while escapeString to XML: %s", err) + log.Error("Error while escapeString to XML", log.Err(err)) return input } return escaped.String() diff --git a/pkg/report/writer.go b/pkg/report/writer.go index 274688591151..d732ec397a1f 100644 --- a/pkg/report/writer.go +++ b/pkg/report/writer.go @@ -69,7 +69,7 @@ func Write(ctx context.Context, report types.Report, option flag.Options) (err e case types.FormatTemplate: // We keep `sarif.tpl` template working for backward compatibility for a while. if strings.HasPrefix(option.Template, "@") && strings.HasSuffix(option.Template, "sarif.tpl") { - log.Logger.Warn("Using `--template sarif.tpl` is deprecated. Please migrate to `--format sarif`. See https://github.com/aquasecurity/trivy/discussions/1571") + log.Warn("Using `--template sarif.tpl` is deprecated. Please migrate to `--format sarif`. 
See https://github.com/aquasecurity/trivy/discussions/1571")
 			writer = &SarifWriter{
 				Output:  output,
 				Version: option.AppVersion,
diff --git a/pkg/result/ignore.go b/pkg/result/ignore.go
index 25f7d03837d7..941680b3086f 100644
--- a/pkg/result/ignore.go
+++ b/pkg/result/ignore.go
@@ -92,7 +92,7 @@ func (f *IgnoreFindings) Match(id, path string, pkg *packageurl.PackageURL) *Ign
 			continue
 		}
-		log.Logger.Debugw("Ignored", log.String("id", id), log.String("path", path))
+		log.Debug("Ignored", log.String("id", id), log.String("target", path))
 		return &finding
 	}
@@ -223,7 +223,7 @@ func parseIgnoreYAML(ignoreFile string) (IgnoreConfig, error) {
 		return IgnoreConfig{}, xerrors.Errorf("file open error: %w", err)
 	}
 	defer f.Close()
-	log.Logger.Debugf("Found an ignore yaml: %s", ignoreFile)
+	log.Debug("Found an ignore yaml", log.String("path", ignoreFile))
 	// Parse the YAML content
 	var ignoreConfig IgnoreConfig
@@ -239,7 +239,7 @@ func parseIgnore(ignoreFile string) (IgnoreFindings, error) {
 		return nil, xerrors.Errorf("file open error: %w", err)
 	}
 	defer f.Close()
-	log.Logger.Debugf("Found an ignore file: %s", ignoreFile)
+	log.Debug("Found an ignore file", log.String("path", ignoreFile))
 	var ignoredFindings IgnoreFindings
 	scanner := bufio.NewScanner(f)
@@ -255,7 +255,7 @@
 		if len(fields) > 1 {
 			exp, err = getExpirationDate(fields)
 			if err != nil {
-				log.Logger.Warnf("Error while parsing expiration date in .trivyignore file: %s", err)
+				log.Warn("Error while parsing expiration date in .trivyignore file", log.Err(err))
 				continue
 			}
 		}
diff --git a/pkg/rpc/client/headers.go b/pkg/rpc/client/headers.go
index 463fb60149a4..0ebac7bf046e 100644
--- a/pkg/rpc/client/headers.go
+++ b/pkg/rpc/client/headers.go
@@ -14,7 +14,7 @@ func WithCustomHeaders(ctx context.Context, customHeaders http.Header) context.C
 	// Attach the headers to a context
 	ctxWithToken, err := twirp.WithHTTPRequestHeaders(ctx, customHeaders)
 	if err != nil {
-		log.Logger.Warnf("twirp error setting headers: %s", err)
+		log.Warn("twirp error setting headers", log.Err(err))
 		return ctx
 	}
 	return ctxWithToken
diff --git a/pkg/rpc/convert.go b/pkg/rpc/convert.go
index 4edec4f1f24e..5e21e4e1ce4c 100644
--- a/pkg/rpc/convert.go
+++ b/pkg/rpc/convert.go
@@ -107,7 +107,7 @@ func ConvertToRPCCustomResources(resources []ftypes.CustomResource) []*common.Cu
 	for _, r := range resources {
 		data, err := structpb.NewValue(r.Data)
 		if err != nil {
-			log.Logger.Warn(err)
+			log.Warn("Custom resource conversion error", log.Err(err))
 		}
 		rpcResources = append(rpcResources, &common.CustomResource{
 			Type: r.Type,
@@ -242,7 +242,7 @@ func ConvertFromRPCPkgIdentifier(pkg *common.PkgIdentifier) ftypes.PkgIdentifier
 	if pkg.Purl != "" {
 		pu, err := packageurl.FromString(pkg.Purl)
 		if err != nil {
-			log.Logger.Error("Failed to parse PURL (%s): %s", pkg.Purl, err)
+			log.Error("Failed to parse PURL", log.String("purl", pkg.Purl), log.Err(err))
 		}
 		pkgID.PURL = &pu
 	}
@@ -267,7 +267,7 @@ func ConvertToRPCVulns(vulns []types.DetectedVulnerability) []*common.Vulnerabil
 	for _, vuln := range vulns {
 		severity, err := dbTypes.NewSeverity(vuln.Severity)
 		if err != nil {
-			log.Logger.Warn(err)
+			log.Warn("Severity error", log.Err(err))
 		}
 		cvssMap := make(map[string]*common.CVSS) // This is needed because protobuf generates a map[string]*CVSS type
 		for vendor, vendorSeverity := range vuln.CVSS {
@@ -336,7 +336,7 @@ func ConvertToRPCMisconfs(misconfs []types.DetectedMisconfiguration) []*common.D
 	for _, m := range misconfs {
 		severity, err := dbTypes.NewSeverity(m.Severity)
 		if err != nil {
-			log.Logger.Warn(err)
+			log.Warn("Severity conversion error", log.Err(err))
 		}
 		rpcMisconfs = append(rpcMisconfs, &common.DetectedMisconfiguration{
@@ -834,7 +834,7 @@ func ConvertToRPCArtifactInfo(imageID string, imageInfo ftypes.ArtifactInfo) *ca
 	t := timestamppb.New(imageInfo.Created)
 	if err := t.CheckValid(); err != nil {
-		log.Logger.Warnf("invalid timestamp: %s", err)
+		log.Warn("Invalid timestamp", log.Err(err))
 	}
 	return &cache.PutArtifactRequest{
@@ -973,7 +973,7 @@ func ConvertToRPCLicenses(licenses []types.DetectedLicense) []*common.DetectedLi
 	for _, l := range licenses {
 		severity, err := dbTypes.NewSeverity(l.Severity)
 		if err != nil {
-			log.Logger.Warn(err)
+			log.Warn("Severity conversion error", log.Err(err))
 		}
 		rpcLicenses = append(rpcLicenses, &common.DetectedLicense{
 			Severity: common.Severity(severity),
diff --git a/pkg/rpc/retry.go b/pkg/rpc/retry.go
index 1c807a04b465..31a7760cb770 100644
--- a/pkg/rpc/retry.go
+++ b/pkg/rpc/retry.go
@@ -32,8 +32,8 @@ func Retry(f func() error) error {
 	b := backoff.WithMaxRetries(backoff.NewExponentialBackOff(), maxRetries)
 	err := backoff.RetryNotify(operation, b, func(err error, _ time.Duration) {
-		log.Logger.Warn(err)
-		log.Logger.Info("Retrying HTTP request...")
+		log.Warn("HTTP error", log.Err(err))
+		log.Info("Retrying HTTP request...")
 	})
 	if err != nil {
 		return err
diff --git a/pkg/rpc/server/listen.go b/pkg/rpc/server/listen.go
index 7433bf20a560..802afe68ae3d 100644
--- a/pkg/rpc/server/listen.go
+++ b/pkg/rpc/server/listen.go
@@ -63,13 +63,13 @@ func (s Server) ListenAndServe(ctx context.Context, serverCache cache.Cache, ski
 		for {
 			time.Sleep(updateInterval)
 			if err := worker.update(ctx, s.appVersion, s.cacheDir, skipDBUpdate, dbUpdateWg, requestWg, s.RegistryOptions); err != nil {
-				log.Logger.Errorf("%+v\n", err)
+				log.Errorf("%+v\n", err)
 			}
 		}
 	}()
 	mux := newServeMux(ctx, serverCache, dbUpdateWg, requestWg, s.token, s.tokenHeader, s.cacheDir)
-	log.Logger.Infof("Listening %s...", s.addr)
+	log.Infof("Listening %s...", s.addr)
 	return http.ListenAndServe(s.addr, mux)
 }
@@ -102,7 +102,7 @@ func newServeMux(ctx context.Context, serverCache cache.Cache, dbUpdateWg, reque
 	mux.HandleFunc("/healthz", func(rw http.ResponseWriter, r *http.Request) {
 		if _, err := rw.Write([]byte("ok")); err != nil {
-			log.Logger.Errorf("health check error: %s", err)
+			log.Error("Health check error", log.Err(err))
 		}
 	})
@@ -110,7 +110,7 @@
 		w.Header().Add("Content-Type", "application/json")
 		if err := json.NewEncoder(w).Encode(version.NewVersionInfo(cacheDir)); err != nil {
-			log.Logger.Errorf("get version error: %s", err)
+			log.Error("Version error", log.Err(err))
 		}
 	})
@@ -137,7 +137,7 @@ func newDBWorker(dbClient dbc.Operation) dbWorker {
 func (w dbWorker) update(ctx context.Context, appVersion, cacheDir string,
 	skipDBUpdate bool, dbUpdateWg, requestWg *sync.WaitGroup, opt types.RegistryOptions) error {
-	log.Logger.Debug("Check for DB update...")
+	log.Debug("Check for DB update...")
 	needsUpdate, err := w.dbClient.NeedsUpdate(appVersion, skipDBUpdate)
 	if err != nil {
 		return xerrors.Errorf("failed to check if db needs an update")
@@ -145,7 +145,7 @@
 		return nil
 	}
-	log.Logger.Info("Updating DB...")
+	log.Info("Updating DB...")
 	if err = w.hotUpdate(ctx, cacheDir, dbUpdateWg, requestWg, opt); err != nil {
 		return xerrors.Errorf("failed DB hot update: %w", err)
 	}
@@ -163,11 +163,11 @@ func (w dbWorker) hotUpdate(ctx context.Context, cacheDir string, dbUpdateWg, re
 		return xerrors.Errorf("failed to download vulnerability DB: %w", err)
 	}
-	log.Logger.Info("Suspending all requests during DB update")
+	log.Info("Suspending all requests during DB update")
 	dbUpdateWg.Add(1)
 	defer dbUpdateWg.Done()
-	log.Logger.Info("Waiting for all requests to be processed before DB update...")
+	log.Info("Waiting for all requests to be processed before DB update...")
 	requestWg.Wait()
 	if err = db.Close(); err != nil {
@@ -184,7 +184,7 @@ func (w dbWorker) hotUpdate(ctx context.Context, cacheDir string, dbUpdateWg, re
 		return xerrors.Errorf("failed to copy the metadata file: %w", err)
 	}
-	log.Logger.Info("Reopening DB...")
+	log.Info("Reopening DB...")
 	if err = db.Init(cacheDir); err != nil {
 		return xerrors.Errorf("failed to open DB: %w", err)
 	}
diff --git a/pkg/rpc/server/server.go b/pkg/rpc/server/server.go
index d4cb91294def..051eab982255 100644
--- a/pkg/rpc/server/server.go
+++ b/pkg/rpc/server/server.go
@@ -37,7 +37,7 @@ func NewScanServer(s scanner.Driver) *ScanServer {
 // Log and return an error
 func teeError(err error) error {
-	log.Logger.Errorf("%+v", err)
+	log.Errorf("%+v", err)
 	return err
 }
diff --git a/pkg/sbom/cyclonedx/marshal.go b/pkg/sbom/cyclonedx/marshal.go
index 684b1b7d235d..9465b790bdd2 100644
--- a/pkg/sbom/cyclonedx/marshal.go
+++ b/pkg/sbom/cyclonedx/marshal.go
@@ -268,7 +268,7 @@ func (*Marshaler) Hashes(files []core.File) *[]cdx.Hash {
 		case digest.MD5:
 			alg = cdx.HashAlgoMD5
 		default:
-			log.Logger.Debugf("Unable to convert %q algorithm to CycloneDX format", d.Algorithm())
+			log.Debug("Unable to convert algorithm to CycloneDX format", log.Any("alg", d.Algorithm()))
 			continue
 		}
@@ -390,7 +390,7 @@ func (*Marshaler) cwes(cweIDs []string) *[]int {
 	for _, cweID := range cweIDs {
 		number, err := strconv.Atoi(strings.TrimPrefix(strings.ToLower(cweID), "cwe-"))
 		if err != nil {
-			log.Logger.Debugf("cwe id parse error: %s", err)
+			log.Debug("CWE-ID parse error", log.Err(err))
 			continue
 		}
 		ret = append(ret, number)
diff --git a/pkg/sbom/cyclonedx/unmarshal.go b/pkg/sbom/cyclonedx/unmarshal.go
index 8821fe8b111a..9450a78a455c 100644
--- a/pkg/sbom/cyclonedx/unmarshal.go
+++ b/pkg/sbom/cyclonedx/unmarshal.go
@@ -9,7 +9,6 @@ import (
 	cdx "github.com/CycloneDX/cyclonedx-go"
 	"github.com/package-url/packageurl-go"
 	"github.com/samber/lo"
-	"go.uber.org/zap"
 	"golang.org/x/xerrors"
 	"github.com/aquasecurity/trivy/pkg/digest"
@@ -35,7 +34,7 @@ func DecodeJSON(r io.Reader) (*cdx.BOM, error) {
 }
 func (b *BOM) UnmarshalJSON(data []byte) error {
-	log.Logger.Debug("Unmarshalling CycloneDX JSON...")
+	log.Debug("Unmarshalling CycloneDX JSON...")
 	if b.BOM == nil {
 		b.BOM = core.NewBOM(core.Options{GenerateBOMRef: true})
 	}
@@ -46,8 +45,8 @@
 	}
 	if !IsTrivySBOM(cdxBOM) {
-		log.Logger.Warnf("Third-party SBOM may lead to inaccurate vulnerability detection")
-		log.Logger.Warnf("Recommend using Trivy to generate SBOMs")
+		log.Warn("Third-party SBOM may lead to inaccurate vulnerability detection")
+		log.Warn("Recommend using Trivy to generate SBOMs")
 	}
 	if err = b.parseBOM(cdxBOM); err != nil {
@@ -108,11 +107,11 @@ func (b *BOM) parseComponents(cdxComponents *[]cdx.Component) map[string]*core.C
 	for _, component := range lo.FromPtr(cdxComponents) {
 		c, err := b.parseComponent(component)
 		if errors.Is(err, ErrUnsupportedType) {
-			log.Logger.Infow("Skipping the component with the unsupported type",
-				zap.String("bom-ref", component.BOMRef), zap.String("type", string(component.Type)))
+			log.Info("Skipping the component with the unsupported type",
+				log.String("bom-ref", component.BOMRef), log.String("type", string(component.Type)))
 			continue
 		} else if err != nil {
-			log.Logger.Warnw("Failed to parse component: %s", zap.Error(err))
+			log.Warn("Failed to parse component", log.Err(err))
 			continue
 		}
@@ -217,7 +216,7 @@ func (b *BOM) unmarshalHashes(hashes *[]cdx.Hash) []digest.Digest {
 		case cdx.HashAlgoMD5:
 			alg = digest.MD5
 		default:
-			log.Logger.Warnf("Unsupported hash algorithm: %s", h.Algorithm)
+			log.Warn("Unsupported hash algorithm", log.String("algorithm", string(h.Algorithm)))
 		}
 		digests = append(digests, digest.NewDigestFromString(alg, h.Value))
 	}
diff --git a/pkg/sbom/io/decode.go b/pkg/sbom/io/decode.go
index af61f41b5a8c..3ebc06d30362 100644
--- a/pkg/sbom/io/decode.go
+++ b/pkg/sbom/io/decode.go
@@ -9,7 +9,6 @@ import (
 	debver "github.com/knqyf263/go-deb-version"
 	rpmver "github.com/knqyf263/go-rpm-version"
 	"github.com/package-url/packageurl-go"
-	"go.uber.org/zap"
 	"golang.org/x/exp/maps"
 	"golang.org/x/xerrors"
@@ -178,15 +177,15 @@ func (m *Decoder) decodeApplication(c *core.Component) *ftypes.Application {
 func (m *Decoder) decodeLibrary(c *core.Component) (*ftypes.Package, error) {
 	p := (*purl.PackageURL)(c.PkgID.PURL)
 	if p == nil {
-		log.Logger.Debugw("Skipping a component without PURL",
-			zap.String("name", c.Name), zap.String("version", c.Version))
+		log.Debug("Skipping a component without PURL",
+			log.String("name", c.Name), log.String("version", c.Version))
 		return nil, ErrPURLEmpty
 	}
 	pkg := p.Package()
 	if p.Class() == types.ClassUnknown {
-		log.Logger.Debugw("Skipping a component with an unsupported type",
-			zap.String("name", c.Name), zap.String("version", c.Version), zap.String("type", p.Type))
+		log.Debug("Skipping a component with an unsupported type",
+			log.String("name", c.Name), log.String("version", c.Version), log.String("type", p.Type))
 		return nil, ErrUnsupportedType
 	}
 	pkg.Name = m.pkgName(pkg, c)
@@ -292,7 +291,7 @@ func (m *Decoder) parseSrcVersion(pkg *ftypes.Package, ver string) {
 	case packageurl.TypeDebian:
 		v, err := debver.NewVersion(ver)
 		if err != nil {
-			log.Logger.Debugw("Failed to parse Debian version", zap.Error(err))
+			log.Debug("Failed to parse Debian version", log.Err(err))
 			return
 		}
 		pkg.SrcEpoch = v.Epoch()
@@ -356,7 +355,7 @@ func (m *Decoder) addOrphanPkgs(sbom *types.SBOM) error {
 	// Add OS packages only when OS is detected.
 	for _, pkgs := range osPkgMap {
 		if sbom.Metadata.OS == nil || !sbom.Metadata.OS.Detected() {
-			log.Logger.Warn("Ignore the OS package as no OS is detected.")
+			log.Warn("Ignore the OS package as no OS is detected.")
 			break
 		}
diff --git a/pkg/sbom/spdx/marshal.go b/pkg/sbom/spdx/marshal.go
index 6c1490fe1aec..3f72c6d69a20 100644
--- a/pkg/sbom/spdx/marshal.go
+++ b/pkg/sbom/spdx/marshal.go
@@ -531,7 +531,7 @@ func NormalizeLicense(licenses []string) string {
 	s, err := expression.Normalize(license, licensing.Normalize, expression.NormalizeForSPDX)
 	if err != nil {
 		// Not fail on the invalid license
-		log.Logger.Warnf("Unable to marshal SPDX licenses %q", license)
+		log.Warn("Unable to marshal SPDX licenses", log.String("license", license))
 		return ""
 	}
 	return s
diff --git a/pkg/scanner/langpkg/scan.go b/pkg/scanner/langpkg/scan.go
index 9480c8a13614..a0ea1de5e68e 100644
--- a/pkg/scanner/langpkg/scan.go
+++ b/pkg/scanner/langpkg/scan.go
@@ -52,7 +52,7 @@ func (s *scanner) Packages(target types.ScanTarget, _ types.ScanOptions) types.R
 func (s *scanner) Scan(target types.ScanTarget, _ types.ScanOptions) (types.Results, error) {
 	apps := target.Applications
-	log.Logger.Infof("Number of language-specific files: %d", len(apps))
+	log.Info("Number of language-specific files", log.Int("num", len(apps)))
 	if len(apps) == 0 {
 		return nil, nil
 	}
@@ -64,13 +64,15 @@
 			continue
 		}
+		logger := log.WithPrefix(string(app.Type))
+
 		// Prevent the same log messages from being displayed many times for the same type.
 		if _, ok := printedTypes[app.Type]; !ok {
-			log.Logger.Infof("Detecting %s vulnerabilities...", app.Type)
+			logger.Info("Detecting vulnerabilities...")
 			printedTypes[app.Type] = struct{}{}
 		}
-		log.Logger.Debugf("Detecting library vulnerabilities, type: %s, path: %s", app.Type, app.FilePath)
+		logger.Debug("Scanning packages from the file", log.String("file_path", app.FilePath))
 		vulns, err := library.Detect(app.Type, app.Libraries)
 		if err != nil {
 			return nil, xerrors.Errorf("failed vulnerability detection of libraries: %w", err)
diff --git a/pkg/scanner/local/scan.go b/pkg/scanner/local/scan.go
index b5437549c5cc..19f673f54158 100644
--- a/pkg/scanner/local/scan.go
+++ b/pkg/scanner/local/scan.go
@@ -63,7 +63,7 @@ func (s Scanner) Scan(ctx context.Context, targetName, artifactKey string, blobK
 	detail, err := s.applier.ApplyLayers(artifactKey, blobKeys)
 	switch {
 	case errors.Is(err, analyzer.ErrUnknownOS):
-		log.Logger.Debug("OS is not detected.")
+		log.Debug("OS is not detected.")
 		// Packages may contain OS-independent binary information even though OS is not detected.
 		if len(detail.Packages) != 0 {
@@ -72,16 +72,18 @@
 		// If OS is not detected and repositories are detected, we'll try to use repositories as OS.
 		if detail.Repository != nil {
-			log.Logger.Debugf("Package repository: %s %s", detail.Repository.Family, detail.Repository.Release)
-			log.Logger.Debugf("Assuming OS is %s %s.", detail.Repository.Family, detail.Repository.Release)
+			log.Debug("Package repository", log.String("family", string(detail.Repository.Family)),
+				log.String("version", detail.Repository.Release))
+			log.Debug("Assuming OS", log.String("family", string(detail.Repository.Family)),
+				log.String("version", detail.Repository.Release))
 			detail.OS = ftypes.OS{
 				Family: detail.Repository.Family,
 				Name:   detail.Repository.Release,
 			}
 		}
 	case errors.Is(err, analyzer.ErrNoPkgsDetected):
-		log.Logger.Warn("No OS package is detected. Make sure you haven't deleted any files that contain information about the installed packages.")
-		log.Logger.Warn(`e.g. files under "/lib/apk/db/", "/var/lib/dpkg/" and "/var/lib/rpm"`)
+		log.Warn("No OS package is detected. Make sure you haven't deleted any files that contain information about the installed packages.")
+		log.Warn(`e.g. files under "/lib/apk/db/", "/var/lib/dpkg/" and "/var/lib/rpm"`)
 	case err != nil:
 		return nil, ftypes.OS{}, xerrors.Errorf("failed to apply layers: %w", err)
 	}
@@ -222,10 +224,10 @@ func (s Scanner) misconfsToResults(misconfs []ftypes.Misconfiguration, options t
 // MisconfsToResults is exported for trivy-plugin-aqua purposes only
 func (s Scanner) MisconfsToResults(misconfs []ftypes.Misconfiguration) types.Results {
-	log.Logger.Infof("Detected config files: %d", len(misconfs))
+	log.Info("Detected config files", log.Int("num", len(misconfs)))
 	var results types.Results
 	for _, misconf := range misconfs {
-		log.Logger.Debugf("Scanned config file: %s", misconf.FilePath)
+		log.Debug("Scanned config file", log.String("path", misconf.FilePath))
 		var detected []types.DetectedMisconfiguration
@@ -264,7 +266,7 @@ func (s Scanner) secretsToResults(secrets []ftypes.Secret, options types.ScanOpt
 	var results types.Results
 	for _, secret := range secrets {
-		log.Logger.Debugf("Secret file: %s", secret.FilePath)
+		log.Debug("Secret file", log.String("path", secret.FilePath))
 		results = append(results, types.Result{
 			Target: secret.FilePath,
@@ -367,7 +369,7 @@ func toDetectedMisconfiguration(res ftypes.MisconfResult, defaultSeverity dbType
 	severity := defaultSeverity
 	sev, err := dbTypes.NewSeverity(res.Severity)
 	if err != nil {
-		log.Logger.Warnf("severity must be %s, but %s", dbTypes.SeverityNames, res.Severity)
+		log.Warn("Unsupported severity", log.String("severity", res.Severity))
 	} else {
 		severity = sev
 	}
@@ -429,7 +431,7 @@ func excludeDevDeps(apps []ftypes.Application, include bool) {
 	}
 	onceInfo := sync.OnceFunc(func() {
-		log.Logger.Info("Suppressing dependencies for development and testing. To display them, try the '--include-dev-deps' flag.")
+		log.Info("Suppressing dependencies for development and testing. To display them, try the '--include-dev-deps' flag.")
 	})
 	for i := range apps {
 		apps[i].Libraries = lo.Filter(apps[i].Libraries, func(lib ftypes.Package, index int) bool {
diff --git a/pkg/scanner/ospkg/scan.go b/pkg/scanner/ospkg/scan.go
index ebc94b1dab9c..8edfc1b1d786 100644
--- a/pkg/scanner/ospkg/scan.go
+++ b/pkg/scanner/ospkg/scan.go
@@ -41,10 +41,11 @@ func (s *scanner) Packages(target types.ScanTarget, _ types.ScanOptions) types.R
 func (s *scanner) Scan(ctx context.Context, target types.ScanTarget, _ types.ScanOptions) (types.Result, bool, error) {
 	if !target.OS.Detected() {
-		log.Logger.Debug("Detected OS: unknown")
+		log.Debug("Detected OS: unknown")
 		return types.Result{}, false, nil
 	}
-	log.Logger.Infof("Detected OS: %s", target.OS.Family)
+	log.Info("Detected OS", log.String("family",
+		string(target.OS.Family)), log.String("version", target.OS.Name))
 	if target.OS.Extended {
 		// TODO: move the logic to each detector
diff --git a/pkg/scanner/scan.go b/pkg/scanner/scan.go
index 4cf647b66d13..4964ef1f2ad5 100644
--- a/pkg/scanner/scan.go
+++ b/pkg/scanner/scan.go
@@ -149,7 +149,8 @@ func (s Scanner) ScanArtifact(ctx context.Context, options types.ScanOptions) (t
 	}
 	defer func() {
 		if err := s.artifact.Clean(artifactInfo); err != nil {
-			log.Logger.Warnf("Failed to clean the artifact %q: %v", artifactInfo.Name, err)
+			log.Warn("Failed to clean the artifact",
+				log.String("artifact", artifactInfo.Name), log.Err(err))
 		}
 	}()
@@ -160,8 +161,9 @@ func (s Scanner) ScanArtifact(ctx context.Context, options types.ScanOptions) (t
 	ptros := &osFound
 	if osFound.Detected() && osFound.Eosl {
-		log.Logger.Warnf("This OS version is no longer supported by the distribution: %s %s", osFound.Family, osFound.Name)
-		log.Logger.Warnf("The vulnerability detection may be insufficient because security updates are not provided")
+		log.Warn("This OS version is no longer supported by the distribution",
+			log.String("family", string(osFound.Family)), log.String("version", osFound.Name))
+		log.Warn("The vulnerability detection may be insufficient because security updates are not provided")
 	} else if !osFound.Detected() {
 		ptros = nil
 	}
diff --git a/pkg/utils/fsutils/fs.go b/pkg/utils/fsutils/fs.go
index 8e15a575a753..915581f08ad9 100644
--- a/pkg/utils/fsutils/fs.go
+++ b/pkg/utils/fsutils/fs.go
@@ -7,7 +7,6 @@ import (
 	"os"
 	"path/filepath"
-	"go.uber.org/zap"
 	"golang.org/x/exp/slices"
 	"golang.org/x/xerrors"
@@ -104,7 +103,7 @@ func WalkDir(fsys fs.FS, root string, required WalkDirRequiredFunc, fn WalkDirFu
 		defer f.Close()
 		if err = fn(path, d, f); err != nil {
-			log.Logger.Debugw("Walk error", zap.String("file_path", path), zap.Error(err))
+			log.Debug("Walk error", log.String("file_path", path), log.Err(err))
 		}
 		return nil
 	})
diff --git a/pkg/version/version.go b/pkg/version/version.go
index 421fff6f1e6e..54914c563c87 100644
--- a/pkg/version/version.go
+++ b/pkg/version/version.go
@@ -55,7 +55,7 @@ func NewVersionInfo(cacheDir string) VersionInfo {
 	mc := metadata.NewClient(cacheDir)
 	meta, err := mc.Get()
 	if err != nil {
-		log.Logger.Debugw("Failed to get DB metadata", "error", err)
+		log.Debug("Failed to get DB metadata", log.Err(err))
 	}
 	if !meta.UpdatedAt.IsZero() && !meta.NextUpdate.IsZero() && meta.Version != 0 {
 		dbMeta = &metadata.Metadata{
@@ -69,7 +69,7 @@
 	mcJava := javadb.NewMetadata(filepath.Join(cacheDir, "java-db"))
 	metaJava, err := mcJava.Get()
 	if err != nil {
-		log.Logger.Debugw("Failed to get Java DB metadata", "error", err)
+		log.Debug("Failed to get Java DB metadata", log.Err(err))
 	}
 	if !metaJava.UpdatedAt.IsZero() && !metaJava.NextUpdate.IsZero() && metaJava.Version != 0 {
 		javadbMeta = &metadata.Metadata{
@@ -83,13 +83,13 @@
 	var pbMeta *policy.Metadata
 	pc, err := policy.NewClient(cacheDir, false, "")
 	if err != nil {
-		log.Logger.Debugw("Failed to instantiate policy client", "error", err)
+		log.Debug("Failed to instantiate policy client", log.Err(err))
 	}
 	if pc != nil && err == nil {
 		pbMetaRaw, err := pc.GetMetadata()
 		if err != nil {
-			log.Logger.Debugw("Failed to get policy metadata", "error", err)
+			log.Debug("Failed to get policy metadata", log.Err(err))
 		} else {
 			pbMeta = &policy.Metadata{
 				Digest: pbMetaRaw.Digest,
diff --git a/pkg/vex/csaf.go b/pkg/vex/csaf.go
index d5d68f76adb9..3e43503b042b 100644
--- a/pkg/vex/csaf.go
+++ b/pkg/vex/csaf.go
@@ -4,7 +4,6 @@ import (
 	csaf "github.com/csaf-poc/csaf_distribution/v3/csaf"
 	"github.com/package-url/packageurl-go"
 	"github.com/samber/lo"
-	"go.uber.org/zap"
 	"github.com/aquasecurity/trivy/pkg/log"
 	"github.com/aquasecurity/trivy/pkg/purl"
@@ -14,13 +13,13 @@
 type CSAF struct {
 	advisory csaf.Advisory
-	logger   *zap.SugaredLogger
+	logger   *log.Logger
 }
 func newCSAF(advisory csaf.Advisory) VEX {
 	return &CSAF{
 		advisory: advisory,
-		logger:   log.Logger.With(zap.String("VEX format", "CSAF")),
+		logger:   log.WithPrefix("vex").With(log.String("format", "CSAF")),
 	}
 }
@@ -63,17 +62,17 @@ func (v *CSAF) match(vuln *csaf.Vulnerability, pkgURL *packageurl.PackageURL) ty
 	for status, productRange := range productStatusMap {
 		for _, product := range productRange {
 			if matchProduct(v.getProductPurls(lo.FromPtr(product)), pkgURL) {
-				v.logger.Infow("Filtered out the detected vulnerability",
-					zap.String("vulnerability-id", string(*vuln.CVE)),
-					zap.String("status", string(status)))
+				v.logger.Info("Filtered out the detected vulnerability",
+					log.String("vulnerability-id", string(*vuln.CVE)),
+					log.String("status", string(status)))
 				return status
 			}
 			for relationship, purls := range v.inspectProductRelationships(lo.FromPtr(product)) {
 				if matchProduct(purls, pkgURL) {
-					v.logger.Warnw("Filtered out the detected vulnerability",
-						zap.String("vulnerability-id", string(*vuln.CVE)),
-						zap.String("status", string(status)),
-						zap.String("relationship", string(relationship)))
+					v.logger.Warn("Filtered out the detected vulnerability",
+						log.String("vulnerability-id", string(*vuln.CVE)),
+						log.String("status", string(status)),
+						log.String("relationship", string(relationship)))
 					return status
 				}
 			}
@@ -130,7 +129,7 @@ func purlsFromProductIdentificationHelpers(helpers []*csaf.ProductIdentification
 		}
 		p, err := purl.FromString(string(*helper.PURL))
 		if err != nil {
-			log.Logger.Errorw("Invalid PURL", zap.String("purl", string(*helper.PURL)), zap.Error(err))
+			log.Error("Invalid PURL", log.String("purl", string(*helper.PURL)), log.Err(err))
 			return nil, false
 		}
 		return p, true
diff --git a/pkg/vex/cyclonedx.go b/pkg/vex/cyclonedx.go
index 685fefebf304..7bee16d32c81 100644
--- a/pkg/vex/cyclonedx.go
+++ b/pkg/vex/cyclonedx.go
@@ -3,7 +3,6 @@ package vex
 import (
 	cdx "github.com/CycloneDX/cyclonedx-go"
 	"github.com/samber/lo"
-	"go.uber.org/zap"
 	"github.com/aquasecurity/trivy/pkg/log"
 	"github.com/aquasecurity/trivy/pkg/sbom/core"
@@ -13,7 +12,7 @@
 type CycloneDX struct {
 	sbom       *core.BOM
 	statements []Statement
-	logger     *zap.SugaredLogger
+	logger     *log.Logger
 }
 type Statement struct {
@@ -41,7 +40,7 @@ func newCycloneDX(sbom *core.BOM, vex *cdx.BOM) *CycloneDX {
 	return &CycloneDX{
 		sbom:       sbom,
 		statements: stmts,
-		logger:     log.Logger.With(zap.String("VEX format", "CycloneDX")),
+		logger:     log.WithPrefix("vex").With(log.String("format", "CycloneDX")),
 	}
 }
@@ -67,13 +66,13 @@ func (v *CycloneDX) affected(vuln types.DetectedVulnerability, stmt Statement) b
 		// Affect must be BOM-Link at the moment
 		link, err := cdx.ParseBOMLink(affect)
 		if err != nil {
-			v.logger.Warnw("Unable to parse BOM-Link", zap.String("affect", affect))
+			v.logger.Warn("Unable to parse BOM-Link", log.String("affect", affect))
 			continue
 		}
 		if v.sbom.SerialNumber != link.SerialNumber() || v.sbom.Version != link.Version() {
-			v.logger.Warnw("URN doesn't match with SBOM",
-				zap.String("serial number", link.SerialNumber()),
-				zap.Int("version", link.Version()))
+			v.logger.Warn("URN doesn't match with SBOM",
+				log.String("serial number", link.SerialNumber()),
+				log.Int("version", link.Version()))
 			continue
 		}
 		if vuln.PkgIdentifier.Match(link.Reference()) && (stmt.Status == types.FindingStatusNotAffected || stmt.Status == types.FindingStatusFixed) {
diff --git a/pkg/vulnerability/vulnerability.go b/pkg/vulnerability/vulnerability.go
index 504d9293e873..56dfb7e1f1b3 100644
--- a/pkg/vulnerability/vulnerability.go
+++ b/pkg/vulnerability/vulnerability.go
@@ -72,7 +72,7 @@ func (c Client) FillInfo(vulns []types.DetectedVulnerability) {
 		vulnID := vulns[i].VulnerabilityID
 		vuln, err := c.dbc.GetVulnerability(vulnID)
 		if err != nil {
-			log.Logger.Warnf("Error while getting vulnerability details: %s", err)
+			log.Warn("Error while getting vulnerability details", log.Err(err))
 			continue
 		}