fix: reduce warn levels to debug for non-actionable errors (#3645)

Signed-off-by: Keith Zantow <kzantow@gmail.com>
Author: Keith Zantow, 2025-02-07 13:22:55 -05:00 (committed by GitHub)
parent 52d543f3c1
commit 2328b20082
42 changed files with 85 additions and 85 deletions
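
The change applies one pattern throughout: errors the end user cannot act on are reported at debug (or trace) instead of warn, and values that were previously interpolated into the message move into structured fields. A minimal sketch of the before/after shape is below; the helper name and import paths are assumptions based on the repository layout, not part of this commit.

	package example

	import (
		"github.com/anchore/syft/internal/log"
		"github.com/anchore/syft/syft/file"
	)

	// logNonActionable is a hypothetical helper illustrating the pattern:
	// a non-actionable error is logged at debug level, and the path that
	// used to be interpolated into the message becomes a structured field.
	func logNonActionable(loc file.Location, err error) {
		// before (for comparison):
		// log.WithFields("error", err).Warnf("unable to parse %q", loc.RealPath)
		log.WithFields("error", err, "path", loc.RealPath).Debug("unable to parse")
	}

The same move shows up in the Tracef-to-Trace changes below: the formatted value becomes a field rather than part of the message, which keeps messages stable and the values queryable.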

View File

@@ -61,7 +61,7 @@ type backgroundLineReader struct {
 func (m *Handler) handleAttestationStarted(e partybus.Event) []tea.Model {
 	reader, prog, taskInfo, err := syftEventParsers.ParseAttestationStartedEvent(e)
 	if err != nil {
-		log.WithFields("error", err).Warn("unable to parse event")
+		log.WithFields("error", err).Debug("unable to parse event")
 		return nil
 	}

View File

@@ -89,7 +89,7 @@ func (cts catalogerTaskModel) View() string {
 func (m *Handler) handleCatalogerTaskStarted(e partybus.Event) ([]tea.Model, tea.Cmd) {
 	mon, info, err := syftEventParsers.ParseCatalogerTaskStarted(e)
 	if err != nil {
-		log.WithFields("error", err).Warn("unable to parse event")
+		log.WithFields("error", err).Debug("unable to parse event")
 		return nil, nil
 	}

View File

@@ -12,7 +12,7 @@ import (
 func (m *Handler) handleFetchImage(e partybus.Event) []tea.Model {
 	imgName, prog, err := stereoEventParsers.ParseFetchImage(e)
 	if err != nil {
-		log.WithFields("error", err).Warn("unable to parse event")
+		log.WithFields("error", err).Debug("unable to parse event")
 		return nil
 	}

View File

@@ -12,7 +12,7 @@ import (
 func (m *Handler) handleFileIndexingStarted(e partybus.Event) []tea.Model {
 	path, prog, err := syftEventParsers.ParseFileIndexingStarted(e)
 	if err != nil {
-		log.WithFields("error", err).Warn("unable to parse event")
+		log.WithFields("error", err).Debug("unable to parse event")
 		return nil
 	}

View File

@@ -43,7 +43,7 @@ type containerdPullStatusFormatter struct {
 func (m *Handler) handlePullContainerdImage(e partybus.Event) []tea.Model {
 	_, pullStatus, err := stereoscopeParsers.ParsePullContainerdImage(e)
 	if err != nil {
-		log.WithFields("error", err).Warn("unable to parse event")
+		log.WithFields("error", err).Debug("unable to parse event")
 		return nil
 	}

View File

@@ -44,7 +44,7 @@ type dockerPullStatusFormatter struct {
 func (m *Handler) handlePullDockerImage(e partybus.Event) []tea.Model {
 	_, pullStatus, err := stereoscopeParsers.ParsePullDockerImage(e)
 	if err != nil {
-		log.WithFields("error", err).Warn("unable to parse event")
+		log.WithFields("error", err).Debug("unable to parse event")
 		return nil
 	}

View File

@@ -12,7 +12,7 @@ import (
 func (m *Handler) handleReadImage(e partybus.Event) []tea.Model {
 	imgMetadata, prog, err := stereoEventParsers.ParseReadImage(e)
 	if err != nil {
-		log.WithFields("error", err).Warn("unable to parse event")
+		log.WithFields("error", err).Debug("unable to parse event")
 		return nil
 	}

View File

@@ -150,7 +150,7 @@ func (m *UI) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
 		}
 	case partybus.Event:
-		log.WithFields("component", "ui").Tracef("event: %q", msg.Type)
+		log.WithFields("component", "ui", "event", msg.Type).Trace("event")
 		switch msg.Type {
 		case event.CLIReport, event.CLINotification, event.CLIAppUpdateAvailable:

View File

@@ -12,7 +12,7 @@ import (
 // CloseAndLogError closes the given io.Closer and reports any errors found as a warning in the log
 func CloseAndLogError(closer io.Closer, location string) {
 	if err := closer.Close(); err != nil {
-		log.Warnf("unable to close file for location=%q: %+v", location, err)
+		log.Debugf("unable to close file for location=%q: %+v", location, err)
 	}
 }

View File

@@ -101,7 +101,7 @@ func finalizePkgCatalogerResults(cfg CatalogingFactoryConfig, resolver file.Path
 		// create file-to-package relationships for files owned by the package
 		owningRelationships, err := packageFileOwnershipRelationships(p, resolver)
 		if err != nil {
-			log.Warnf("unable to create any package-file relationships for package name=%q type=%q: %v", p.Name, p.Type, err)
+			log.Debugf("unable to create any package-file relationships for package name=%q type=%q: %v", p.Name, p.Type, err)
 		} else {
 			relationships = append(relationships, owningRelationships...)
 		}
@@ -174,7 +174,7 @@ func applyComplianceRules(p *pkg.Package, cfg cataloging.ComplianceConfig) (bool
 		return true
 	case cataloging.ComplianceActionKeep:
-		log.WithFields("pkg", p.String(), "location", loc).Tracef("package with missing %s, taking no action", fieldName)
+		log.WithFields("pkg", p.String(), "location", loc, "field", fieldName).Trace("package with missing field, taking no action")
 	}
 	return false
 }

View File

@@ -23,7 +23,7 @@ func ProcessPathErrors(err error) error {
 		}
 	}
 	unknowns, remainingErrors := ExtractCoordinateErrors(err)
-	log.Warn(remainingErrors)
+	log.Debug(remainingErrors)
 	var out []error
 	for _, u := range unknowns {

View File

@@ -86,14 +86,14 @@ func (i *Cataloger) Catalog(resolver file.Resolver) (map[file.Coordinates]file.E
 func processExecutableLocation(loc file.Location, resolver file.Resolver) (*file.Executable, error) {
 	reader, err := resolver.FileContentsByLocation(loc)
 	if err != nil {
-		log.WithFields("error", err).Warnf("unable to get file contents for %q", loc.RealPath)
+		log.WithFields("error", err, "path", loc.RealPath).Debug("unable to get file contents")
 		return nil, fmt.Errorf("unable to get file contents: %w", err)
 	}
 	defer internal.CloseAndLogError(reader, loc.RealPath)
 	uReader, err := unionreader.GetUnionReader(reader)
 	if err != nil {
-		log.WithFields("error", err).Warnf("unable to get union reader for %q", loc.RealPath)
+		log.WithFields("error", err, "path", loc.RealPath).Debug("unable to get union reader")
 		return nil, fmt.Errorf("unable to get union reader: %w", err)
 	}
@@ -168,17 +168,17 @@ func processExecutable(loc file.Location, reader unionreader.UnionReader) (*file
 	switch format {
 	case file.ELF:
 		if err = findELFFeatures(&data, reader); err != nil {
-			log.WithFields("error", err).Tracef("unable to determine ELF features for %q", loc.RealPath)
+			log.WithFields("error", err, "path", loc.RealPath).Trace("unable to determine ELF features")
 			err = fmt.Errorf("unable to determine ELF features: %w", err)
 		}
 	case file.PE:
 		if err = findPEFeatures(&data, reader); err != nil {
-			log.WithFields("error", err).Tracef("unable to determine PE features for %q", loc.RealPath)
+			log.WithFields("error", err, "path", loc.RealPath).Trace("unable to determine PE features")
 			err = fmt.Errorf("unable to determine PE features: %w", err)
 		}
 	case file.MachO:
 		if err = findMachoFeatures(&data, reader); err != nil {
-			log.WithFields("error", err).Tracef("unable to determine Macho features for %q", loc.RealPath)
+			log.WithFields("error", err, "path", loc.RealPath).Trace("unable to determine Macho features")
 			err = fmt.Errorf("unable to determine Macho features: %w", err)
 		}
 	}

View File

@@ -14,14 +14,14 @@ func AllRegularFiles(ctx context.Context, resolver file.Resolver) (locations []f
 	for location := range resolver.AllLocations(ctx) {
 		resolvedLocations, err := resolver.FilesByPath(location.RealPath)
 		if err != nil {
-			log.Warnf("unable to resolve %+v: %+v", location, err)
+			log.Debugf("unable to resolve %+v: %+v", location, err)
 			continue
 		}
 		for _, resolvedLocation := range resolvedLocations {
 			metadata, err := resolver.FileMetadataByLocation(resolvedLocation)
 			if err != nil {
-				log.Warnf("unable to get metadata for %+v: %+v", location, err)
+				log.Debugf("unable to get metadata for %+v: %+v", location, err)
 				continue
 			}

View File

@@ -24,7 +24,7 @@ func (c Coordinates) ID() artifact.ID {
 	f, err := artifact.IDByHash(c)
 	if err != nil {
 		// TODO: what to do in this case?
-		log.Warnf("unable to get fingerprint of location coordinate=%+v: %+v", c, err)
+		log.Debugf("unable to get fingerprint of location coordinate=%+v: %+v", c, err)
 		return ""
 	}

View File

@@ -302,7 +302,7 @@ func toBomDescriptorComponent(srcMetadata source.Description) *cyclonedx.Compone
 		}
 		bomRef, err := artifact.IDByHash(metadata.ID)
 		if err != nil {
-			log.Warnf("unable to get fingerprint of source image metadata=%s: %+v", metadata.ID, err)
+			log.Debugf("unable to get fingerprint of source image metadata=%s: %+v", metadata.ID, err)
 		}
 		return &cyclonedx.Component{
 			BOMRef: string(bomRef),
@@ -316,7 +316,7 @@ func toBomDescriptorComponent(srcMetadata source.Description) *cyclonedx.Compone
 		}
 		bomRef, err := artifact.IDByHash(metadata.Path)
 		if err != nil {
-			log.Warnf("unable to get fingerprint of source directory metadata path=%s: %+v", metadata.Path, err)
+			log.Debugf("unable to get fingerprint of source directory metadata path=%s: %+v", metadata.Path, err)
 		}
 		return &cyclonedx.Component{
 			BOMRef: string(bomRef),
@@ -331,7 +331,7 @@ func toBomDescriptorComponent(srcMetadata source.Description) *cyclonedx.Compone
 		}
 		bomRef, err := artifact.IDByHash(metadata.Path)
 		if err != nil {
-			log.Warnf("unable to get fingerprint of source file metadata path=%s: %+v", metadata.Path, err)
+			log.Debugf("unable to get fingerprint of source file metadata path=%s: %+v", metadata.Path, err)
 		}
 		return &cyclonedx.Component{
 			BOMRef: string(bomRef),

View File

@@ -164,7 +164,7 @@ func toDependencies(s *sbom.SBOM, p pkg.Package) (out []string) {
 func dependencyName(p pkg.Package) string {
 	purl, err := packageurl.FromString(p.PURL)
 	if err != nil {
-		log.Warnf("Invalid PURL for package: '%s' PURL: '%s' (%w)", p.Name, p.PURL, err)
+		log.Debugf("Invalid PURL for package: '%s' PURL: '%s' (%w)", p.Name, p.PURL, err)
 		return ""
 	}
 	// don't use qualifiers for this

View File

@@ -136,7 +136,7 @@ func encode(out map[string]string, value reflect.Value, prefix string, fn FieldN
 			encode(out, value.MapIndex(key), fmt.Sprintf("%s:%v", prefix, key.Interface()), fn)
 		}
 	default:
-		log.Warnf("skipping encoding of unsupported property: %s", prefix)
+		log.Debugf("skipping encoding of unsupported property: %s", prefix)
 	}
 }
@@ -213,7 +213,7 @@ func decode(vals map[string]string, value reflect.Value, prefix string, fn Field
 		}
 		if decode(vals, v.Elem(), prefix, fn) && value.CanSet() {
 			o := v.Interface()
-			log.Infof("%v", o)
+			log.Tracef("%v", o)
 			value.Set(v)
 		} else {
 			return false
@@ -355,7 +355,7 @@ func decode(vals map[string]string, value reflect.Value, prefix string, fn Field
 		}
 		return values
 	default:
-		log.Warnf("unable to set field: %s", prefix)
+		log.Debugf("unable to set field: %s", prefix)
 		return false
 	}
 	return true

View File

@@ -160,7 +160,7 @@ func toFileMetadataEntry(coordinates file.Coordinates, metadata *file.Metadata)
 		mode, err = strconv.Atoi(fmt.Sprintf("%o", metadata.Mode()))
 		if err != nil {
-			log.Warnf("invalid mode found in file catalog @ location=%+v mode=%q: %+v", coordinates, metadata.Mode, err)
+			log.Debugf("invalid mode found in file catalog @ location=%+v mode=%q: %+v", coordinates, metadata.Mode, err)
 			mode = 0
 		}

View File

@@ -34,7 +34,7 @@ type pathSkipper struct {
 func skipPathsByMountTypeAndName(root string) PathIndexVisitor {
 	infos, err := mountinfo.GetMounts(nil)
 	if err != nil {
-		log.WithFields("error", err).Warnf("unable to get system mounts")
+		log.WithFields("error", err).Debug("unable to get system mounts")
 		return func(_ string, _ string, _ os.FileInfo, _ error) error {
 			return nil
 		}

View File

@@ -82,12 +82,12 @@ func parseApkDB(_ context.Context, resolver file.Resolver, env *generic.Environm
 		field := parseApkField(line)
 		if field == nil {
-			log.Warnf("unable to parse field data from line %q", line)
+			log.Debugf("unable to parse field data from line %q", line)
 			errs = unknown.Appendf(errs, reader, "unable to parse field data from line %q", line)
 			continue
 		}
 		if len(field.name) == 0 {
-			log.Warnf("failed to parse field name from line %q", line)
+			log.Debugf("failed to parse field name from line %q", line)
 			errs = unknown.Appendf(errs, reader, "failed to parse field name from line %q", line)
 			continue
 		}
@@ -234,7 +234,7 @@ func (f apkField) apply(p *parsedData, ctx *apkFileParsingContext) {
 	case "S":
 		i, err := strconv.Atoi(f.value)
 		if err != nil {
-			log.Warnf("unable to parse value %q for field %q: %w", f.value, f.name, err)
+			log.Debugf("unable to parse value %q for field %q: %w", f.value, f.name, err)
 			return
 		}
@@ -242,7 +242,7 @@ func (f apkField) apply(p *parsedData, ctx *apkFileParsingContext) {
 	case "I":
 		i, err := strconv.Atoi(f.value)
 		if err != nil {
-			log.Warnf("unable to parse value %q for field %q: %w", f.value, f.name, err)
+			log.Debugf("unable to parse value %q for field %q: %w", f.value, f.name, err)
 			return
 		}
@@ -272,7 +272,7 @@ func (f apkField) apply(p *parsedData, ctx *apkFileParsingContext) {
 		var ok bool
 		latest.OwnerUID, latest.OwnerGID, latest.Permissions, ok = processFileInfo(f.value)
 		if !ok {
-			log.Warnf("unexpected value for APK ACL field %q: %q", f.name, f.value)
+			log.Debugf("unexpected value for APK ACL field %q: %q", f.name, f.value)
 			return
 		}
@@ -298,7 +298,7 @@ func (f apkField) apply(p *parsedData, ctx *apkFileParsingContext) {
 		var ok bool
 		latest.OwnerUID, latest.OwnerGID, latest.Permissions, ok = processFileInfo(f.value)
 		if !ok {
-			log.Warnf("unexpected value for APK ACL field %q: %q", f.name, f.value)
+			log.Debugf("unexpected value for APK ACL field %q: %q", f.name, f.value)
 			return
 		}

View File

@@ -72,12 +72,12 @@ func parseRebarLock(_ context.Context, _ file.Resolver, _ *generic.Environment,
 		sourcePkg := pkgMap[name]
 		if sourcePkg == nil {
-			log.WithFields("package", name).Warn("unable find source package")
+			log.WithFields("package", name).Debug("unable find source package")
 			continue
 		}
 		metadata, ok := sourcePkg.Metadata.(pkg.ErlangRebarLockEntry)
 		if !ok {
-			log.WithFields("package", name).Warn("unable to extract rebar.lock metadata to add hash metadata")
+			log.WithFields("package", name).Debug("unable to extract rebar.lock metadata to add hash metadata")
 			continue
 		}

View File

@@ -63,7 +63,7 @@ func (c *Cataloger) WithParserByGlobs(parser Parser, globs ...string) *Cataloger
 			matches, err := resolver.FilesByGlob(g)
 			if err != nil {
-				log.Warnf("unable to process glob=%q: %+v", g, err)
+				log.Debugf("unable to process glob=%q: %+v", g, err)
 				continue
 			}
 			requests = append(requests, makeRequests(parser, matches)...)
@@ -81,7 +81,7 @@ func (c *Cataloger) WithParserByMimeTypes(parser Parser, types ...string) *Catal
 		log.WithFields("mimetypes", types).Trace("searching for paths matching mimetype")
 		matches, err := resolver.FilesByMIMEType(types...)
 		if err != nil {
-			log.Warnf("unable to process mimetypes=%+v: %+v", types, err)
+			log.Debugf("unable to process mimetypes=%+v: %+v", types, err)
 			return nil
 		}
 		requests = append(requests, makeRequests(parser, matches)...)
@@ -100,7 +100,7 @@ func (c *Cataloger) WithParserByPath(parser Parser, paths ...string) *Cataloger
 			matches, err := resolver.FilesByPath(p)
 			if err != nil {
-				log.Warnf("unable to process path=%q: %+v", p, err)
+				log.Debugf("unable to process path=%q: %+v", p, err)
 				continue
 			}
 			requests = append(requests, makeRequests(parser, matches)...)
@@ -192,7 +192,7 @@ func (c *Cataloger) process(ctx context.Context, resolver file.Resolver, pkgs []
 func invokeParser(ctx context.Context, resolver file.Resolver, location file.Location, logger logger.Logger, parser Parser, env *Environment) ([]pkg.Package, []artifact.Relationship, error) {
 	contentReader, err := resolver.FileContentsByLocation(location)
 	if err != nil {
-		logger.WithFields("location", location.RealPath, "error", err).Warn("unable to fetch contents")
+		logger.WithFields("location", location.RealPath, "error", err).Debug("unable to fetch contents")
 		return nil, nil, err
 	}
 	defer internal.CloseAndLogError(contentReader, location.AccessPath)

View File

@@ -34,7 +34,7 @@ func parsePortageContents(_ context.Context, resolver file.Resolver, _ *generic.
 	name, version := cpvMatch[1], cpvMatch[2]
 	if name == "" || version == "" {
-		log.WithFields("path", reader.Location.RealPath).Warnf("failed to parse portage name and version")
+		log.WithFields("path", reader.Location.RealPath).Debug("failed to parse portage name and version")
 		return nil, nil, fmt.Errorf("failed to parse portage name and version")
 	}
@@ -63,7 +63,7 @@ func parsePortageContents(_ context.Context, resolver file.Resolver, _ *generic.
 func addFiles(resolver file.Resolver, dbLocation file.Location, p *pkg.Package) {
 	contentsReader, err := resolver.FileContentsByLocation(dbLocation)
 	if err != nil {
-		log.WithFields("path", dbLocation.RealPath).Warnf("failed to fetch portage contents (package=%s): %+v", p.Name, err)
+		log.WithFields("path", dbLocation.RealPath, "package", p.Name, "error", err).Debug("failed to fetch portage contents")
 		return
 	}
 	defer internal.CloseAndLogError(contentsReader, dbLocation.RealPath)
@@ -105,7 +105,7 @@ func addLicenses(resolver file.Resolver, dbLocation file.Location, p *pkg.Packag
 	licenseReader, err := resolver.FileContentsByLocation(*location)
 	if err != nil {
-		log.WithFields("path", dbLocation.RealPath).Warnf("failed to fetch portage LICENSE: %+v", err)
+		log.WithFields("path", dbLocation.RealPath, "error", err).Debug("failed to fetch portage LICENSE")
 		return
 	}
 	defer internal.CloseAndLogError(licenseReader, location.RealPath)
@@ -141,7 +141,7 @@ func addSize(resolver file.Resolver, dbLocation file.Location, p *pkg.Package) {
 	sizeReader, err := resolver.FileContentsByLocation(*location)
 	if err != nil {
-		log.WithFields("name", p.Name).Warnf("failed to fetch portage SIZE: %+v", err)
+		log.WithFields("name", p.Name, "error", err).Debug("failed to fetch portage SIZE")
 		return
 	}
 	defer internal.CloseAndLogError(sizeReader, location.RealPath)

View File

@@ -80,7 +80,7 @@ func defaultGoModDir() string {
 	if goPath == "" {
 		homeDir, err := homedir.Dir()
 		if err != nil {
-			log.Warnf("unable to determine GOPATH or user home dir: %w", err)
+			log.Debugf("unable to determine GOPATH or user home dir: %w", err)
 			return ""
 		}
 		goPath = filepath.Join(homeDir, "go")

View File

@@ -60,7 +60,7 @@ func newGoLicenseResolver(catalogerName string, opts CatalogerConfig) goLicenseR
 	if vendorDir == "" {
 		wd, err := os.Getwd()
 		if err != nil {
-			log.Warn("unable to get CWD while resolving the local go vendor dir: %v", err)
+			log.Debug("unable to get CWD while resolving the local go vendor dir: %v", err)
 		} else {
 			vendorDir = filepath.Join(wd, "vendor")
 		}

View File

@@ -26,7 +26,7 @@ func scanFile(location file.Location, reader unionreader.UnionReader) ([]*extend
 	// with more than one binary
 	readers, errs := unionreader.GetReaders(reader)
 	if errs != nil {
-		log.WithFields("error", errs).Warnf("failed to open a golang binary")
+		log.WithFields("error", errs).Debug("failed to open a golang binary")
 		return nil, fmt.Errorf("failed to open a golang binary: %w", errs)
 	}

View File

@@ -50,7 +50,7 @@ func parseStackLock(_ context.Context, _ file.Resolver, _ *generic.Environment,
 	var lockFile stackLock
 	if err := yaml.Unmarshal(bytes, &lockFile); err != nil {
-		log.WithFields("error", err).Tracef("failed to parse stack.yaml.lock file %q", reader.RealPath)
+		log.WithFields("error", err, "path", reader.RealPath).Trace("failed to parse stack.yaml.lock")
 		return nil, nil, fmt.Errorf("failed to parse stack.yaml.lock file")
 	}

View File

@@ -31,7 +31,7 @@ func parseStackYaml(_ context.Context, _ file.Resolver, _ *generic.Environment,
 	var stackFile stackYaml
 	if err := yaml.Unmarshal(bytes, &stackFile); err != nil {
-		log.WithFields("error", err).Tracef("failed to parse stack.yaml file %q", reader.RealPath)
+		log.WithFields("error", err, "path", reader.RealPath).Trace("failed to parse stack.yaml")
 		return nil, nil, fmt.Errorf("failed to parse stack.yaml file")
 	}

View File

@@ -210,7 +210,7 @@ func finalizePackage(p *pkg.Package) {
 			p.Type = pkg.JenkinsPluginPkg
 		}
 	} else {
-		log.WithFields("package", p.String()).Warn("unable to extract java metadata to generate purl")
+		log.WithFields("package", p.String()).Debug("unable to extract java metadata to generate purl")
 	}
 	p.SetID()
@@ -237,7 +237,7 @@ func (j *archiveParser) discoverMainPackage(ctx context.Context) (*pkg.Package,
 	manifestContents := contents[manifestMatches[0]]
 	manifest, err := parseJavaManifest(j.archivePath, strings.NewReader(manifestContents))
 	if err != nil {
-		log.Warnf("failed to parse java manifest (%s): %+v", j.location, err)
+		log.Debugf("failed to parse java manifest (%s): %+v", j.location, err)
 		return nil, nil
 	}
@@ -485,7 +485,7 @@ func getDigestsFromArchive(archivePath string) ([]file.Digest, error) {
 	// grab and assign digest for the entire archive
 	digests, err := intFile.NewDigestsFromFile(archiveCloser, javaArchiveHashes)
 	if err != nil {
-		log.Warnf("failed to create digest for file=%q: %+v", archivePath, err)
+		log.Debugf("failed to create digest for file=%q: %+v", archivePath, err)
 	}
 	return digests, nil
@@ -549,7 +549,7 @@ func discoverPkgsFromOpeners(ctx context.Context, location file.Location, opener
 	for pathWithinArchive, archiveOpener := range openers {
 		nestedPkgs, nestedRelationships, err := discoverPkgsFromOpener(ctx, location, pathWithinArchive, archiveOpener, cfg, parentPkg)
 		if err != nil {
-			log.WithFields("location", location.Path()).Warnf("unable to discover java packages from opener: %+v", err)
+			log.WithFields("location", location.Path(), "error", err).Debug("unable to discover java packages from opener")
 			continue
 		}
@@ -578,7 +578,7 @@ func discoverPkgsFromOpener(ctx context.Context, location file.Location, pathWit
 	}
 	defer func() {
 		if closeErr := archiveReadCloser.Close(); closeErr != nil {
-			log.Warnf("unable to close archived file from tempdir: %+v", closeErr)
+			log.Debugf("unable to close archived file from tempdir: %+v", closeErr)
 		}
 	}()
@@ -607,7 +607,7 @@ func pomPropertiesByParentPath(archivePath string, location file.Location, extra
 	for filePath, fileContents := range contentsOfMavenPropertiesFiles {
 		pomProperties, err := parsePomProperties(filePath, strings.NewReader(fileContents))
 		if err != nil {
-			log.WithFields("contents-path", filePath, "location", location.Path()).Warnf("failed to parse pom.properties: %+v", err)
+			log.WithFields("contents-path", filePath, "location", location.Path(), "error", err).Debug("failed to parse pom.properties")
 			continue
 		}
@@ -637,7 +637,7 @@ func pomProjectByParentPath(archivePath string, location file.Location, extractP
 		// TODO: when we support locations of paths within archives we should start passing the specific pom.xml location object instead of the top jar
 		pom, err := maven.ParsePomXML(strings.NewReader(fileContents))
 		if err != nil {
-			log.WithFields("contents-path", filePath, "location", location.Path()).Warnf("failed to parse pom.xml: %+v", err)
+			log.WithFields("contents-path", filePath, "location", location.Path(), "error", err).Debug("failed to parse pom.xml")
 			continue
 		}
 		if pom == nil {

View File

@@ -20,7 +20,7 @@ import (
 func newPackageJSONPackage(u packageJSON, indexLocation file.Location) pkg.Package {
 	licenseCandidates, err := u.licensesFromJSON()
 	if err != nil {
-		log.Warnf("unable to extract licenses from javascript package.json: %+v", err)
+		log.Debugf("unable to extract licenses from javascript package.json: %+v", err)
 	}
 	license := pkg.NewLicensesFromLocation(indexLocation, licenseCandidates...)
@@ -73,7 +73,7 @@ func newPackageLockV1Package(cfg CatalogerConfig, resolver file.Resolver, locati
 			licenseSet = pkg.NewLicenseSet(licenses...)
 		}
 		if err != nil {
-			log.Warnf("unable to extract licenses from javascript yarn.lock for package %s:%s: %+v", name, version, err)
+			log.Debugf("unable to extract licenses from javascript yarn.lock for package %s:%s: %+v", name, version, err)
 		}
 	}
@@ -105,7 +105,7 @@ func newPackageLockV2Package(cfg CatalogerConfig, resolver file.Resolver, locati
 			licenseSet = pkg.NewLicenseSet(licenses...)
 		}
 		if err != nil {
-			log.Warnf("unable to extract licenses from javascript yarn.lock for package %s:%s: %+v", name, u.Version, err)
+			log.Debugf("unable to extract licenses from javascript yarn.lock for package %s:%s: %+v", name, u.Version, err)
 		}
 	}
@@ -150,7 +150,7 @@ func newYarnLockPackage(cfg CatalogerConfig, resolver file.Resolver, location fi
 			licenseSet = pkg.NewLicenseSet(licenses...)
 		}
 		if err != nil {
-			log.Warnf("unable to extract licenses from javascript yarn.lock for package %s:%s: %+v", name, version, err)
+			log.Debugf("unable to extract licenses from javascript yarn.lock for package %s:%s: %+v", name, version, err)
 		}
 	}
 	return finalizeLockPkg(

View File

@@ -73,7 +73,7 @@ func parseLinuxKernelMetadata(magicType []string) (p pkg.LinuxKernel) {
 			swapDevStr := strings.TrimPrefix(t, "swap_dev ")
 			swapDev, err := strconv.ParseInt(swapDevStr, 16, 32)
 			if err != nil {
-				log.Warnf("unable to parse swap device: %s", err)
+				log.Debugf("unable to parse swap device: %s", err)
 				continue
 			}
 			p.SwapDevice = int(swapDev)
@@ -81,7 +81,7 @@ func parseLinuxKernelMetadata(magicType []string) (p pkg.LinuxKernel) {
 			rootDevStr := strings.TrimPrefix(t, "root_dev ")
 			rootDev, err := strconv.ParseInt(rootDevStr, 16, 32)
 			if err != nil {
-				log.Warnf("unable to parse root device: %s", err)
+				log.Debugf("unable to parse root device: %s", err)
 				continue
 			}
 			p.SwapDevice = int(rootDev)

View File

@@ -68,13 +68,13 @@ func (c *storeCataloger) Catalog(ctx context.Context, resolver file.Resolver) ([
 		p := &pkgs[i]
 		locations := p.Locations.ToSlice()
 		if len(locations) == 0 {
-			log.WithFields("package", p.Name).Warn("nix package has no evidence locations associated")
+			log.WithFields("package", p.Name).Debug("nix package has no evidence locations associated")
 			continue
 		}
 		parentStorePath := locations[0].RealPath
 		files, ok := filesByPath[parentStorePath]
 		if !ok {
-			log.WithFields("path", parentStorePath, "nix-store-path", parentStorePath).Warn("found a nix store file for a non-existent package")
+			log.WithFields("path", parentStorePath, "nix-store-path", parentStorePath).Debug("found a nix store file for a non-existent package")
 			continue
 		}
 		appendFiles(p, files.ToSlice()...)
@@ -86,7 +86,7 @@ func (c *storeCataloger) Catalog(ctx context.Context, resolver file.Resolver) ([
 func appendFiles(p *pkg.Package, location ...file.Location) {
 	metadata, ok := p.Metadata.(pkg.NixStoreEntry)
 	if !ok {
-		log.WithFields("package", p.Name).Warn("nix package metadata missing")
+		log.WithFields("package", p.Name).Debug("nix package metadata missing")
 		return
 	}

View File

@@ -104,20 +104,20 @@ func findLicenses(ctx context.Context, scanner licenses.Scanner, resolver file.R
 		// If we have a license file then resolve and parse it
 		found, err := resolver.FilesByPath(m.LicenseLocation.Path())
 		if err != nil {
-			log.WithFields("error", err).Tracef("unable to resolve python license path %s", m.LicenseLocation.Path())
+			log.WithFields("error", err, "path", m.LicenseLocation.Path()).Trace("unable to resolve python license")
 		}
 		if len(found) > 0 {
 			metadataContents, err := resolver.FileContentsByLocation(found[0])
 			if err == nil {
 				parsed, err := scanner.PkgSearch(ctx, file.NewLocationReadCloser(m.LicenseLocation, metadataContents))
 				if err != nil {
-					log.WithFields("error", err).Tracef("unable to parse a license from the file in %s", m.LicenseLocation.Path())
+					log.WithFields("error", err, "path", m.LicenseLocation.Path()).Trace("unable to parse a license from the file")
 				}
 				if len(parsed) > 0 {
 					licenseSet = pkg.NewLicenseSet(parsed...)
 				}
 			} else {
-				log.WithFields("error", err).Tracef("unable to read file contents at %s", m.LicenseLocation.Path())
+				log.WithFields("error", err, "path", m.LicenseLocation.Path()).Trace("unable to read file contents")
 			}
 		}
 	}

View File

@@ -127,7 +127,7 @@ func (rp requirementsParser) parseRequirementsTxt(_ context.Context, _ file.Reso
 		req := newRequirement(line)
 		if req == nil {
-			log.WithFields("path", reader.RealPath).Warnf("unable to parse requirements.txt line: %q", line)
+			log.WithFields("path", reader.RealPath, "line", line).Debug("unable to parse requirements.txt line")
 			errs = unknown.Appendf(errs, reader, "unable to parse requirements.txt line: %q", line)
 			continue
 		}
@@ -136,7 +136,7 @@ func (rp requirementsParser) parseRequirementsTxt(_ context.Context, _ file.Reso
 		version := parseVersion(req.VersionConstraint, rp.guessUnpinnedRequirements)
 		if version == "" {
-			log.WithFields("path", reader.RealPath).Tracef("unable to determine package version in requirements.txt line: %q", line)
+			log.WithFields("path", reader.RealPath, "line", line).Trace("unable to determine package version in requirements.txt line")
 			errs = unknown.Appendf(errs, reader, "unable to determine package version in requirements.txt line: %q", line)
 			continue
 		}

View File

@@ -95,7 +95,7 @@ func extractRFC5322Fields(locationReader file.LocationReadCloser) (map[string]an
 			fields[key] = handleSingleOrMultiField(fields[key], val)
 		} else {
-			log.Warnf("cannot parse field from path: %q from line: %q", locationReader.Path(), line)
+			log.Debugf("cannot parse field from path: %q from line: %q", locationReader.Path(), line)
 		}
 	}
 }

View File

@@ -25,12 +25,12 @@ func parseWheelOrEggRecord(reader io.Reader) []pkg.PythonFileRecord {
 			break
 		}
 		if err != nil {
-			log.Warnf("unable to read python record file: %w", err)
+			log.Debugf("unable to read python record file: %w", err)
 			continue
 		}
 		if len(recordList) != 3 {
-			log.Warnf("python record an unexpected length=%d: %q", len(recordList), recordList)
+			log.Debugf("python record an unexpected length=%d: %q", len(recordList), recordList)
 			continue
 		}
@@ -46,7 +46,7 @@ func parseWheelOrEggRecord(reader io.Reader) []pkg.PythonFileRecord {
 		}
 		fields := strings.SplitN(item, "=", 2)
 		if len(fields) != 2 {
-			log.Warnf("unexpected python record digest: %q", item)
+			log.Debugf("unexpected python record digest: %q", item)
 			continue
 		}

View File

@@ -17,7 +17,7 @@ import (
 func NewDBCataloger() pkg.Cataloger {
 	// check if a sqlite driver is available
 	if !isSqliteDriverAvailable() {
-		log.Warnf("sqlite driver is not available, newer RPM databases might not be cataloged")
+		log.Debugf("sqlite driver is not available, newer RPM databases might not be cataloged")
 	}
 	return generic.NewCataloger("rpm-db-cataloger").

View File

@@ -127,7 +127,7 @@ func extractRpmFileRecords(resolver file.PathResolver, entry rpmdb.PackageInfo)
 	files, err := entry.InstalledFiles()
 	if err != nil {
-		log.Warnf("unable to parse listing of installed files for RPM DB entry: %s", err.Error())
+		log.Debugf("unable to parse listing of installed files for RPM DB entry: %s", err.Error())
 		return records, fmt.Errorf("unable to parse listing of installed files for RPM DB entry: %w", err)
 	}

View File

@@ -35,12 +35,12 @@ func parseRpmManifest(_ context.Context, _ file.Resolver, _ *generic.Environment
 		metadata, err := newMetadataFromManifestLine(strings.TrimSuffix(line, "\n"))
 		if err != nil {
-			log.Warnf("unable to parse RPM manifest entry: %+v", err)
+			log.Debugf("unable to parse RPM manifest entry: %+v", err)
 			continue
 		}
 		if metadata == nil {
-			log.Warn("unable to parse RPM manifest entry: no metadata found")
+			log.Debug("unable to parse RPM manifest entry: no metadata found")
 			continue
 		}

View File

@@ -42,7 +42,7 @@ func parseAuditBinaryEntry(reader unionreader.UnionReader, filename string) ([]r
 	// with more than one binary
 	readers, err := unionreader.GetReaders(reader)
 	if err != nil {
-		log.Warnf("rust cataloger: failed to open a binary: %v", err)
+		log.Debugf("rust cataloger: failed to open a binary: %v", err)
 		return nil, fmt.Errorf("rust cataloger: failed to open a binary: %w", err)
 	}

View File

@@ -54,7 +54,7 @@ func (c *Collection) Package(id artifact.ID) *Package {
 	}
 	var p Package
 	if err := copier.Copy(&p, &v); err != nil {
-		log.Warnf("unable to copy package id=%q name=%q: %+v", id, v.Name, err)
+		log.Debugf("unable to copy package id=%q name=%q: %+v", id, v.Name, err)
 		return nil
 	}
 	p.id = v.id
@@ -111,7 +111,7 @@ func (c *Collection) add(p Package) {
 	id := p.ID()
 	if id == "" {
-		log.Warnf("found package with empty ID while adding to the collection: %+v", p)
+		log.Debugf("found package with empty ID while adding to the collection: %+v", p)
 		p.SetID()
 		id = p.ID()
 	}
@@ -119,7 +119,7 @@ func (c *Collection) add(p Package) {
 	if existing, exists := c.byID[id]; exists {
 		// there is already a package with this fingerprint merge the existing record with the new one
 		if err := existing.merge(p); err != nil {
-			log.Warnf("failed to merge packages: %+v", err)
+			log.Debugf("failed to merge packages: %+v", err)
 		} else {
 			c.byID[id] = existing
 			c.addPathsToIndex(p)

View File

@@ -38,7 +38,7 @@ func (p *Package) SetID() {
 	id, err := artifact.IDByHash(p)
 	if err != nil {
 		// TODO: what to do in this case?
-		log.Warnf("unable to get fingerprint of package=%s@%s: %+v", p.Name, p.Version, err)
+		log.Debugf("unable to get fingerprint of package=%s@%s: %+v", p.Name, p.Version, err)
 		return
 	}
 	p.id = id
@@ -59,7 +59,7 @@ func (p *Package) merge(other Package) error {
 	}
 	if p.PURL != other.PURL {
-		log.Warnf("merging packages have with different pURLs: %q=%q vs %q=%q", p.id, p.PURL, other.id, other.PURL)
+		log.Debugf("merging packages have with different pURLs: %q=%q vs %q=%q", p.id, p.PURL, other.id, other.PURL)
 	}
 	p.Locations.Add(other.Locations.ToSlice()...)