chore(deps): update tools to latest versions (#3775)

---------
Signed-off-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Signed-off-by: Christopher Phillips <32073428+spiffcs@users.noreply.github.com>
Co-authored-by: spiffcs <32073428+spiffcs@users.noreply.github.com>
This commit is contained in:
anchore-actions-token-generator[bot] 2025-04-03 17:35:26 +00:00 committed by GitHub
parent 12f36420dd
commit f11377fe30
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
57 changed files with 147 additions and 137 deletions

View File

@ -26,7 +26,7 @@ tools:
# used for linting
- name: golangci-lint
version:
want: v1.64.8
want: v2.0.2
method: github-release
with:
repo: golangci/golangci-lint
@ -58,7 +58,7 @@ tools:
# used to release all artifacts
- name: goreleaser
version:
want: v2.8.1
want: v2.8.2
method: github-release
with:
repo: goreleaser/goreleaser

View File

@ -1,15 +1,8 @@
issues:
max-same-issues: 25
uniq-by-line: false
# TODO: enable this when we have coverage on docstring comments
# # The list of ids of default excludes to include or disable.
# include:
# - EXC0002 # disable excluding of issues about comments from golint
version: "2"
run:
tests: false
linters:
# inverted configuration with `enable-all` and `disable` is not scalable during updates of golangci-lint
disable-all: true
default: none
enable:
- asciicheck
- bodyclose
@ -22,11 +15,8 @@ linters:
- goconst
- gocritic
- gocyclo
- gofmt
- goimports
- goprintffuncname
- gosec
- gosimple
- govet
- ineffassign
- misspell
@ -34,37 +24,35 @@ linters:
- nolintlint
- revive
- staticcheck
- stylecheck
- typecheck
- unconvert
- unparam
- unused
- whitespace
linters-settings:
funlen:
# Checks the number of lines in a function.
# If lower than 0, disable the check.
# Default: 60
lines: 70
# Checks the number of statements in a function.
# If lower than 0, disable the check.
# Default: 40
statements: 50
gocritic:
enabled-checks:
- deferInLoop
- ruleguard
settings:
ruleguard:
rules: "test/rules/rules.go"
gosec:
excludes:
- G115
run:
timeout: 10m
tests: false
settings:
funlen:
lines: 70
statements: 50
gocritic:
enabled-checks:
- deferInLoop
- ruleguard
settings:
ruleguard:
rules: test/rules/rules.go
gosec:
excludes:
- G115
exclusions:
generated: lax
presets:
- comments
- common-false-positives
- legacy
- std-error-handling
paths:
- third_party$
- builtin$
- examples$
# do not enable...
# - deadcode # The owner seems to have abandoned the linter. Replaced by "unused".
@ -91,3 +79,23 @@ run:
# - testpackage
# - varcheck # The owner seems to have abandoned the linter. Replaced by "unused".
# - wsl # this doesn't have an auto-fixer yet and is pretty noisy (https://github.com/bombsimon/wsl/issues/90)
issues:
max-same-issues: 25
uniq-by-line: false
# TODO: enable this when we have coverage on docstring comments
# # The list of ids of default excludes to include or disable.
# include:
# - EXC0002 # disable excluding of issues about comments from golint
formatters:
enable:
- gofmt
- goimports
exclusions:
generated: lax
paths:
- third_party$
- builtin$
- examples$

View File

@ -52,7 +52,7 @@ func Attest(app clio.Application) *cobra.Command {
opts := defaultAttestOptions()
// template format explicitly not allowed
opts.Format.Template.Enabled = false
opts.Template.Enabled = false
return app.SetupCommand(&cobra.Command{
Use: "attest --output [FORMAT] <IMAGE>",
@ -136,7 +136,7 @@ func writeSBOMToFormattedFile(s *sbom.SBOM, sbomFile io.Writer, opts *attestOpti
return fmt.Errorf("no output file provided")
}
encs, err := opts.Format.Encoders()
encs, err := opts.Encoders()
if err != nil {
return fmt.Errorf("unable to create encoders: %w", err)
}

View File

@ -110,7 +110,7 @@ func (o *scanOptions) PostLoad() error {
}
func (o *scanOptions) validateLegacyOptionsNotUsed() error {
if len(fangs.Flatten(o.Config.ConfigFile)) == 0 {
if len(fangs.Flatten(o.ConfigFile)) == 0 {
return nil
}
@ -122,7 +122,7 @@ func (o *scanOptions) validateLegacyOptionsNotUsed() error {
File any `yaml:"file" json:"file" mapstructure:"file"`
}
for _, f := range fangs.Flatten(o.Config.ConfigFile) {
for _, f := range fangs.Flatten(o.ConfigFile) {
by, err := os.ReadFile(f)
if err != nil {
return fmt.Errorf("unable to read config file during validations %q: %w", f, err)

View File

@ -89,7 +89,7 @@ func (o Output) SBOMWriter() (sbom.Writer, error) {
usesTemplateOutput := names.Has(string(template.ID))
if usesTemplateOutput && o.Format.Template.Path == "" {
if usesTemplateOutput && o.Template.Path == "" {
return nil, fmt.Errorf(`must specify path to template file when using "template" output format`)
}

View File

@ -36,11 +36,7 @@ func capture(target **os.File, writer io.Writer, bufSize int) func() {
}()
buf := make([]byte, bufSize)
for {
if original == nil {
break
}
for original != nil {
n, err := r.Read(buf)
if n > 0 {
_, _ = writer.Write(buf[0:n])

View File

@ -6,6 +6,7 @@ import (
"io"
"path"
"github.com/anchore/syft/internal"
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/pkg"
@ -76,7 +77,6 @@ func newAlpineConfiguration(resolver file.Resolver) (*AlpineConfiguration, []fil
return &AlpineConfiguration{
APKKeys: keys,
}, locations, nil
}
func getVersion(resolver file.Resolver) (string, []file.Location, error) {
@ -92,6 +92,7 @@ func getVersion(resolver file.Resolver) (string, []file.Location, error) {
if err != nil {
return "", nil, fmt.Errorf("unable to read alpine version: %w", err)
}
defer internal.CloseAndLogError(reader, locations[0].RealPath)
version, err := io.ReadAll(reader)
if err != nil {
@ -111,7 +112,11 @@ func getAPKKeys(resolver file.Resolver) (map[string]string, []file.Location, err
}
for _, location := range locations {
basename := path.Base(location.RealPath)
//nolint:gocritic
reader, err := resolver.FileContentsByLocation(location)
if err != nil {
return nil, nil, fmt.Errorf("unable to resolve file contents by location at %s: %w", location.RealPath, err)
}
content, err := io.ReadAll(reader)
if err != nil {
return nil, nil, fmt.Errorf("unable to read apk key content at %s: %w", location.RealPath, err)

View File

@ -124,5 +124,4 @@ func showAlpineConfiguration(s sbom.SBOM) {
panic(err)
}
fmt.Println(string(meta))
}

View File

@ -30,7 +30,6 @@ func main() {
if err := enc.Encode(sbom.Descriptor.Configuration); err != nil {
panic(err)
}
}
func imageReference() string {

View File

@ -13,5 +13,5 @@ func DefaultLocationComparer(x, y file.Location) bool {
}
func LocationComparerWithoutLayer(x, y file.Location) bool {
return cmp.Equal(x.Coordinates.RealPath, y.Coordinates.RealPath) && cmp.Equal(x.AccessPath, y.AccessPath)
return cmp.Equal(x.RealPath, y.RealPath) && cmp.Equal(x.AccessPath, y.AccessPath)
}

View File

@ -23,7 +23,7 @@ func ExtractGlobsFromTarToUniqueTempFile(archivePath, dir string, globs ...strin
defer file.Close()
// ignore directories
if file.FileInfo.IsDir() {
if file.IsDir() {
return nil
}

View File

@ -28,7 +28,7 @@ func NewZipFileManifest(archivePath string) (ZipFileManifest, error) {
}
}()
for _, file := range zipReader.Reader.File {
for _, file := range zipReader.File {
manifest.Add(file.Name, file.FileInfo())
}
return manifest, nil

View File

@ -53,7 +53,7 @@ func TraverseFilesInZip(archivePath string, visitor func(*zip.File) error, paths
}
}()
for _, file := range zipReader.Reader.File {
for _, file := range zipReader.File {
// if no paths are given then assume that all files should be traversed
if len(paths) > 0 {
if _, ok := request[file.Name]; !ok {

View File

@ -22,7 +22,7 @@ func less(i, j artifact.Relationship) bool {
jFrom, ok3 := j.From.(pkg.Package)
jTo, ok4 := j.To.(pkg.Package)
if !(ok1 && ok2 && ok3 && ok4) {
if !ok1 && !ok2 && !ok3 && !ok4 {
return false
}

View File

@ -245,10 +245,10 @@ func packageFileOwnershipRelationships(p pkg.Package, resolver file.PathResolver
}
for _, ref := range pathRefs {
if oldRef, ok := locations[ref.Coordinates.ID()]; ok {
if oldRef, ok := locations[ref.ID()]; ok {
log.Debugf("found path duplicate of %s", oldRef.RealPath)
}
locations[ref.Coordinates.ID()] = ref
locations[ref.ID()] = ref
}
}

View File

@ -48,15 +48,15 @@ func (m *LocationMetadata) merge(other LocationMetadata) error {
}
func (l Location) WithAnnotation(key, value string) Location {
if l.LocationMetadata.Annotations == nil {
l.LocationMetadata.Annotations = map[string]string{}
if l.Annotations == nil {
l.Annotations = map[string]string{}
}
l.LocationMetadata.Annotations[key] = value
l.Annotations[key] = value
return l
}
func (l Location) WithoutAnnotations() Location {
l.LocationMetadata.Annotations = map[string]string{}
l.Annotations = map[string]string{}
return l
}

View File

@ -361,6 +361,7 @@ func collectDocRelationships(spdxIDMap map[string]any, doc *spdx.Document) (out
from, fromOk := a.(pkg.Package)
toPackage, toPackageOk := b.(pkg.Package)
toLocation, toLocationOk := b.(file.Location)
//nolint:staticcheck
if !fromOk || !(toPackageOk || toLocationOk) {
log.Debugf("unable to find valid relationship mapping from SPDX, ignoring: (from: %+v) (to: %+v)", a, b)
continue

View File

@ -213,13 +213,15 @@ func reduceOuter(expression string) string {
func isBalanced(expression string) bool {
count := 0
for _, c := range expression {
if c == '(' {
switch c {
case '(':
count++
} else if c == ')' {
case ')':
count--
if count < 0 {
return false
}
default:
}
}
return count == 0

View File

@ -228,7 +228,7 @@ func toSyftRelationships(doc *model.Document, catalog *pkg.Collection, relations
idMap[string(p.ID())] = p
locations := p.Locations.ToSlice()
for _, l := range locations {
idMap[string(l.Coordinates.ID())] = l.Coordinates
idMap[string(l.ID())] = l.Coordinates
}
}

View File

@ -57,7 +57,7 @@ func (r *ContainerImageAllLayers) fileByRef(ref stereoscopeFile.Reference, uniqu
return nil, fmt.Errorf("unable to fetch metadata (ref=%+v): %w", ref, err)
}
if entry.Metadata.Type == stereoscopeFile.TypeHardLink || entry.Metadata.Type == stereoscopeFile.TypeSymLink {
if entry.Type == stereoscopeFile.TypeHardLink || entry.Type == stereoscopeFile.TypeSymLink {
// a link may resolve in this layer or higher, assuming a squashed tree is used to search
// we should search all possible resolutions within the valid source
for _, subLayerIdx := range r.layers[layerIdx:] {
@ -102,7 +102,7 @@ func (r *ContainerImageAllLayers) FilesByPath(paths ...string) ([]file.Location,
if err != nil {
return nil, fmt.Errorf("unable to get file metadata for path=%q: %w", ref.RealPath, err)
}
if metadata.Metadata.IsDir() {
if metadata.IsDir() {
continue
}
}
@ -146,7 +146,7 @@ func (r *ContainerImageAllLayers) FilesByGlob(patterns ...string) ([]file.Locati
return nil, fmt.Errorf("unable to get file metadata for path=%q: %w", result.RequestPath, err)
}
// don't consider directories
if metadata.Metadata.IsDir() {
if metadata.IsDir() {
continue
}
}
@ -192,7 +192,7 @@ func (r *ContainerImageAllLayers) FileContentsByLocation(location file.Location)
return nil, fmt.Errorf("unable to get metadata for path=%q from file catalog: %w", location.RealPath, err)
}
switch entry.Metadata.Type {
switch entry.Type {
case stereoscopeFile.TypeSymLink, stereoscopeFile.TypeHardLink:
// the location we are searching may be a symlink, we should always work with the resolved file
newLocation := r.RelativeFileByPath(location, location.AccessPath)

View File

@ -58,7 +58,7 @@ func (r *ContainerImageSquash) FilesByPath(paths ...string) ([]file.Location, er
return nil, fmt.Errorf("unable to get file metadata for path=%q: %w", ref.RealPath, err)
}
// don't consider directories
if metadata.Metadata.IsDir() {
if metadata.IsDir() {
continue
}
}
@ -106,7 +106,7 @@ func (r *ContainerImageSquash) FilesByGlob(patterns ...string) ([]file.Location,
return nil, fmt.Errorf("unable to get file metadata for path=%q: %w", result.RequestPath, err)
}
// don't consider directories
if metadata.Metadata.IsDir() {
if metadata.IsDir() {
continue
}
}
@ -151,7 +151,7 @@ func (r *ContainerImageSquash) FileContentsByLocation(location file.Location) (i
return nil, fmt.Errorf("unable to get metadata for path=%q from file catalog: %w", location.RealPath, err)
}
switch entry.Metadata.Type {
switch entry.Type {
case stereoscopeFile.TypeSymLink, stereoscopeFile.TypeHardLink:
// the location we are searching may be a symlink, we should always work with the resolved file
locations, err := r.FilesByPath(location.RealPath)

View File

@ -59,7 +59,7 @@ func (r *Directory) buildIndex() error {
r.tree = tree
r.index = index
r.filetreeResolver.searchContext = filetree.NewSearchContext(tree, index)
r.searchContext = filetree.NewSearchContext(tree, index)
return nil
}

View File

@ -51,7 +51,7 @@ func (r *File) buildIndex() error {
r.tree = tree
r.index = index
r.filetreeResolver.searchContext = filetree.NewSearchContext(tree, index)
r.searchContext = filetree.NewSearchContext(tree, index)
return nil
}

View File

@ -68,7 +68,7 @@ func (r filetreeResolver) FilesByPath(userPaths ...string) ([]file.Location, err
}
// don't consider directories
if entry.Metadata.IsDir() {
if entry.IsDir() {
continue
}
@ -114,17 +114,17 @@ func (r filetreeResolver) FilesByGlob(patterns ...string) ([]file.Location, erro
}
entry, err := r.index.Get(*refVia.Reference)
if err != nil {
return nil, fmt.Errorf("unable to get file metadata for reference %s: %w", refVia.Reference.RealPath, err)
return nil, fmt.Errorf("unable to get file metadata for reference %s: %w", refVia.RealPath, err)
}
// don't consider directories
if entry.Metadata.IsDir() {
if entry.IsDir() {
continue
}
loc := file.NewVirtualLocationFromDirectory(
r.responsePath(string(refVia.Reference.RealPath)), // the actual path relative to the resolver root
r.responsePath(string(refVia.RequestPath)), // the path used to access this file, relative to the resolver root
r.responsePath(string(refVia.RealPath)), // the actual path relative to the resolver root
r.responsePath(string(refVia.RequestPath)), // the path used to access this file, relative to the resolver root
*refVia.Reference,
)
uniqueFileIDs.Add(*refVia.Reference)
@ -217,7 +217,7 @@ func (r *filetreeResolver) FilesByMIMEType(types ...string) ([]file.Location, er
continue
}
location := file.NewVirtualLocationFromDirectory(
r.responsePath(string(refVia.Reference.RealPath)),
r.responsePath(string(refVia.RealPath)),
r.responsePath(string(refVia.RequestPath)),
*refVia.Reference,
)

View File

@ -123,7 +123,7 @@ func parseApkDB(_ context.Context, resolver file.Resolver, env *generic.Environm
// This should get fixed with https://gitlab.alpinelinux.org/alpine/apk-tools/-/issues/10875
if r == nil {
// find the repositories file from the relative directory of the DB file
releases := findReleases(resolver, reader.Location.RealPath)
releases := findReleases(resolver, reader.RealPath)
if len(releases) > 0 {
r = &releases[0]
@ -173,7 +173,7 @@ func parseReleasesFromAPKRepository(reader file.LocationReadCloser) []linux.Rele
reposB, err := io.ReadAll(reader)
if err != nil {
log.Tracef("unable to read APK repositories file %q: %+v", reader.Location.RealPath, err)
log.Tracef("unable to read APK repositories file %q: %+v", reader.RealPath, err)
return nil
}

View File

@ -80,7 +80,7 @@ func parseAlpmDB(_ context.Context, resolver file.Resolver, env *generic.Environ
newPackage(
data,
env.LinuxRelease,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
locs...,
),
}, nil, errs

View File

@ -33,13 +33,13 @@ func parseSBOM(_ context.Context, resolver file.Resolver, _ *generic.Environment
}
if s == nil {
log.WithFields("path", reader.Location.RealPath).Trace("file is not an SBOM")
log.WithFields("path", reader.RealPath).Trace("file is not an SBOM")
return nil, nil, nil
}
// Bitnami exclusively uses SPDX JSON SBOMs
if sFormat != "spdx-json" {
log.WithFields("path", reader.Location.RealPath).Trace("file is not an SPDX JSON SBOM")
log.WithFields("path", reader.RealPath).Trace("file is not an SPDX JSON SBOM")
return nil, nil, nil
}
@ -59,7 +59,7 @@ func parseSBOM(_ context.Context, resolver file.Resolver, _ *generic.Environment
// where there is evidence of this file, and the catalogers have not run against any file other than,
// the SBOM, this is the only location that is relevant for this cataloger.
p.Locations = file.NewLocationSet(
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
)
// Parse the Bitnami-specific metadata
@ -70,7 +70,7 @@ func parseSBOM(_ context.Context, resolver file.Resolver, _ *generic.Environment
// Bitnami packages reported in a SPDX file are shipped under the same directory
// as the SPDX file itself.
metadata.Path = filepath.Dir(reader.Location.RealPath)
metadata.Path = filepath.Dir(reader.RealPath)
if p.ID() != mainPkgID {
metadata.Files = packageFiles(s.Relationships, p, metadata.Path)
secondaryPkgsFiles = append(secondaryPkgsFiles, metadata.Files...)
@ -86,12 +86,12 @@ func parseSBOM(_ context.Context, resolver file.Resolver, _ *generic.Environment
}
// Resolve all files owned by the main package in the SBOM and update the metadata
if mainPkgFiles, err := mainPkgFiles(resolver, reader.Location.RealPath, secondaryPkgsFiles); err == nil {
if mainPkgFiles, err := mainPkgFiles(resolver, reader.RealPath, secondaryPkgsFiles); err == nil {
for i, p := range pkgs {
if p.ID() == mainPkgID {
metadata, ok := p.Metadata.(*pkg.BitnamiSBOMEntry)
if !ok {
log.WithFields("spdx-filepath", reader.Location.RealPath).Trace("main package in SBOM does not have Bitnami metadata")
log.WithFields("spdx-filepath", reader.RealPath).Trace("main package in SBOM does not have Bitnami metadata")
continue
}
@ -100,7 +100,7 @@ func parseSBOM(_ context.Context, resolver file.Resolver, _ *generic.Environment
}
}
} else {
log.WithFields("spdx-filepath", reader.Location.RealPath, "error", err).Trace("unable to resolve owned files for main package in SBOM")
log.WithFields("spdx-filepath", reader.RealPath, "error", err).Trace("unable to resolve owned files for main package in SBOM")
}
return pkgs, filterRelationships(s.Relationships, pkgs), nil

View File

@ -48,7 +48,7 @@ func parseConanfile(_ context.Context, _ file.Resolver, _ *generic.Environment,
p := newConanfilePackage(
m,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
)
if p == nil {
continue

View File

@ -60,7 +60,7 @@ func parseFullRequiresLine(line string, reader file.LocationReadCloser, pkgs *[]
p := newConaninfoPackage(
meta,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
)
if p != nil {
*pkgs = append(*pkgs, *p)
@ -83,7 +83,7 @@ func parseConaninfo(_ context.Context, _ file.Resolver, _ *generic.Environment,
// First set the base package info by checking the relative path
fullFilePath := string(reader.Location.LocationData.Reference().RealPath)
if len(fullFilePath) == 0 {
fullFilePath = reader.Location.LocationData.RealPath
fullFilePath = reader.RealPath
}
mainMetadata, err := parseConanMetadataFromFilePath(fullFilePath)
@ -102,7 +102,7 @@ func parseConaninfo(_ context.Context, _ file.Resolver, _ *generic.Environment,
case errors.Is(err, io.EOF):
mainPackage := newConaninfoPackage(
mainMetadata,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
)
mainPackageRef := *mainPackage

View File

@ -91,7 +91,7 @@ func handleConanLockV1(cl conanLock, reader file.LocationReadCloser, parsedPkgRe
p := newConanlockPackage(
metadata,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
)
if p != nil {
@ -115,7 +115,7 @@ func handleConanLockV2(cl conanLock, reader file.LocationReadCloser, indexToPkgM
p := newConanReferencePackage(
reference,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
)
if p != nil {

View File

@ -102,7 +102,7 @@ func parsePubspecLock(_ context.Context, _ file.Resolver, _ *generic.Environment
newPubspecLockPackage(
name,
pubPkg,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
),
)
}

View File

@ -36,7 +36,7 @@ func parseDpkgDB(ctx context.Context, resolver file.Resolver, env *generic.Envir
return nil, nil, fmt.Errorf("unable to catalog dpkg DB=%q: %w", reader.RealPath, err)
}
dbLoc := reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)
dbLoc := reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)
var pkgs []pkg.Package
_ = sync.CollectSlice(&ctx, cataloging.ExecutorFile, sync.ToSeq(metadata), func(m pkg.DpkgDBEntry) (pkg.Package, error) {
return newDpkgPackage(m, dbLoc, resolver, env.LinuxRelease, findDpkgInfoFiles(m.Package, resolver, reader.Location)...), nil

View File

@ -69,7 +69,7 @@ func parseDotnetPackagesLock(_ context.Context, _ file.Resolver, _ *generic.Envi
name, _ := extractNameAndVersion(nameVersion)
dep := allDependencies[nameVersion]
dotnetPkg := newDotnetPackagesLockPackage(name, dep, reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation))
dotnetPkg := newDotnetPackagesLockPackage(name, dep, reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation))
if dotnetPkg != nil {
pkgs = append(pkgs, *dotnetPkg)
pkgMap[nameVersion] = *dotnetPkg

View File

@ -61,7 +61,7 @@ func parseMixLock(_ context.Context, _ file.Resolver, _ *generic.Environment, re
PkgHash: hash,
PkgHashExt: hashExt,
},
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
),
)
}

View File

@ -35,7 +35,7 @@ func parseOTPApp(_ context.Context, _ file.Resolver, _ *generic.Environment, rea
p := newPackageFromOTP(
name, version,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
)
packages = append(packages, p)

View File

@ -56,7 +56,7 @@ func parseRebarLock(_ context.Context, _ file.Resolver, _ *generic.Environment,
Name: name,
Version: version,
},
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
)
pkgMap[name] = &p

View File

@ -27,14 +27,14 @@ var (
// parses individual CONTENTS files from the portage flat-file store (e.g. /var/db/pkg/*/*/CONTENTS).
func parsePortageContents(_ context.Context, resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
cpvMatch := cpvRe.FindStringSubmatch(reader.Location.RealPath)
cpvMatch := cpvRe.FindStringSubmatch(reader.RealPath)
if cpvMatch == nil {
return nil, nil, fmt.Errorf("failed to match package and version in %s", reader.Location.RealPath)
return nil, nil, fmt.Errorf("failed to match package and version in %s", reader.RealPath)
}
name, version := cpvMatch[1], cpvMatch[2]
if name == "" || version == "" {
log.WithFields("path", reader.Location.RealPath).Debug("failed to parse portage name and version")
log.WithFields("path", reader.RealPath).Debug("failed to parse portage name and version")
return nil, nil, fmt.Errorf("failed to parse portage name and version")
}
@ -43,7 +43,7 @@ func parsePortageContents(_ context.Context, resolver file.Resolver, _ *generic.
Version: version,
PURL: packageURL(name, version),
Locations: file.NewLocationSet(
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
),
Type: pkg.PortagePkg,
Metadata: pkg.PortageEntry{

View File

@ -343,7 +343,7 @@ func getGOARCHFromBin(r io.ReaderAt) (string, error) {
if err != nil {
return "", fmt.Errorf("unrecognized file format: %w", err)
}
arch = fmt.Sprintf("%d", f.FileHeader.TargetMachine)
arch = fmt.Sprintf("%d", f.TargetMachine)
default:
return "", errUnrecognizedFormat
}

View File

@ -61,7 +61,7 @@ func (c *goModCataloger) parseGoModFile(ctx context.Context, resolver file.Resol
Name: m.Mod.Path,
Version: m.Mod.Version,
Licenses: pkg.NewLicenseSet(lics...),
Locations: file.NewLocationSet(reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)),
Locations: file.NewLocationSet(reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)),
PURL: packageURL(m.Mod.Path, m.Mod.Version),
Language: pkg.Go,
Type: pkg.GoModulePkg,
@ -83,7 +83,7 @@ func (c *goModCataloger) parseGoModFile(ctx context.Context, resolver file.Resol
Name: m.New.Path,
Version: m.New.Version,
Licenses: pkg.NewLicenseSet(lics...),
Locations: file.NewLocationSet(reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)),
Locations: file.NewLocationSet(reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)),
PURL: packageURL(m.New.Path, m.New.Version),
Language: pkg.Go,
Type: pkg.GoModulePkg,
@ -120,7 +120,7 @@ func parseGoSumFile(resolver file.Resolver, reader file.LocationReadCloser) (map
return out, fmt.Errorf("no resolver provided")
}
goSumPath := strings.TrimSuffix(reader.Location.RealPath, ".mod") + ".sum"
goSumPath := strings.TrimSuffix(reader.RealPath, ".mod") + ".sum"
goSumLocation := resolver.RelativeFileByPath(reader.Location, goSumPath)
if goSumLocation == nil {
return nil, fmt.Errorf("unable to resolve: %s", goSumPath)

View File

@ -275,7 +275,7 @@ func (ni nativeImageElf) fetchPkgs() (pkgs []pkg.Package, relationships []artifa
if dataSection == nil {
return nil, nil, fmt.Errorf("no .data section found in binary: %w", err)
}
dataSectionBase := dataSection.SectionHeader.Addr
dataSectionBase := dataSection.Addr
data, err := dataSection.Data()
if err != nil {
return nil, nil, fmt.Errorf("cannot read the .data section: %w", err)

View File

@ -63,7 +63,7 @@ func parseGradleLockfile(_ context.Context, _ file.Resolver, _ *generic.Environm
Name: dep.Name,
Version: dep.Version,
Locations: file.NewLocationSet(
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
),
Language: pkg.Java,
Type: pkg.JavaPkg,

View File

@ -67,7 +67,7 @@ func parsePackageJSON(_ context.Context, _ file.Resolver, _ *generic.Environment
// a compliance filter later will remove these packages based on compliance rules
pkgs = append(
pkgs,
newPackageJSONPackage(p, reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)),
newPackageJSONPackage(p, reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)),
)
}

View File

@ -28,7 +28,7 @@ func parseLinuxKernelModuleFile(_ context.Context, _ file.Resolver, _ *generic.E
return nil, nil, nil
}
metadata.Path = reader.Location.RealPath
metadata.Path = reader.RealPath
return []pkg.Package{
newLinuxKernelModulePackage(

View File

@ -74,7 +74,7 @@ func parseRockspec(_ context.Context, _ file.Resolver, _ *generic.Environment, r
Homepage: homepage,
Description: description,
},
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
)
return []pkg.Package{p}, nil, nil

View File

@ -39,7 +39,7 @@ func parseOpamPackage(_ context.Context, _ file.Resolver, _ *generic.Environment
// If name is inferred from file name/path
var name, version string
var licenses []string
loc := reader.Location.LocationData.AccessPath
loc := reader.AccessPath
dir, file := path.Split(loc)
if file == "opam" {
@ -95,7 +95,7 @@ func parseOpamPackage(_ context.Context, _ file.Resolver, _ *generic.Environment
pkgs,
newOpamPackage(
entry,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
),
)

View File

@ -99,7 +99,7 @@ func poetryLockPackages(reader file.LocationReadCloser) ([]pkg.Package, error) {
p.Name,
p.Version,
newPythonPoetryLockEntry(p),
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
),
)
}

View File

@ -153,7 +153,7 @@ func (rp requirementsParser) parseRequirementsTxt(_ context.Context, _ file.Reso
URL: parseURL(req.URL),
Markers: req.Markers,
},
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
),
)
}

View File

@ -59,7 +59,7 @@ func parseSetup(_ context.Context, _ file.Resolver, _ *generic.Environment, read
newPackageForIndex(
name,
version,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
),
)
}

View File

@ -19,7 +19,7 @@ import (
func parseRpmArchive(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
rpm, err := rpmutils.ReadRpm(reader)
if err != nil {
return nil, nil, fmt.Errorf("RPM file found but unable to read: %s (%w)", reader.Location.RealPath, err)
return nil, nil, fmt.Errorf("RPM file found but unable to read: %s (%w)", reader.RealPath, err)
}
nevra, err := rpm.Header.GetNEVRA()

View File

@ -47,7 +47,7 @@ func parseGemFileLockEntries(_ context.Context, _ file.Resolver, _ *generic.Envi
newGemfileLockPackage(
candidate[0],
strings.Trim(candidate[1], "()"),
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
),
)
}

View File

@ -43,7 +43,7 @@ func parseCargoLock(_ context.Context, _ file.Resolver, _ *generic.Environment,
}
newPkg := newPackageFromCargoMetadata(
p,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
)
pkgs = append(
pkgs,

View File

@ -40,7 +40,7 @@ func NewCataloger() pkg.Cataloger {
func parseSBOM(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
readSeeker, err := adaptToReadSeeker(reader)
if err != nil {
return nil, nil, fmt.Errorf("unable to read SBOM file %q: %w", reader.Location.RealPath, err)
return nil, nil, fmt.Errorf("unable to read SBOM file %q: %w", reader.RealPath, err)
}
s, _, _, err := format.Decode(readSeeker)
if err != nil {
@ -48,7 +48,7 @@ func parseSBOM(_ context.Context, _ file.Resolver, _ *generic.Environment, reade
}
if s == nil {
log.WithFields("path", reader.Location.RealPath).Trace("file is not an SBOM")
log.WithFields("path", reader.RealPath).Trace("file is not an SBOM")
return nil, nil, nil
}
@ -60,14 +60,14 @@ func parseSBOM(_ context.Context, _ file.Resolver, _ *generic.Environment, reade
// where there is evidence of this file, and the catalogers have not run against any file other than,
// the SBOM, this is the only location that is relevant for this cataloger.
p.Locations = file.NewLocationSet(
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
)
p.FoundBy = catalogerName
pkgs = append(pkgs, p)
relationships = append(relationships, artifact.Relationship{
From: p,
To: reader.Location.Coordinates,
To: reader.Coordinates,
Type: artifact.DescribedByRelationship,
})
}

View File

@ -94,7 +94,7 @@ func parsePackageResolved(_ context.Context, _ file.Resolver, _ *generic.Environ
pkgPin.Version,
pkgPin.Location,
pkgPin.Revision,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
),
)
}

View File

@ -67,7 +67,7 @@ func parsePodfileLock(_ context.Context, _ file.Resolver, _ *generic.Environment
podName,
podVersion,
pkgHash,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
),
)
}

View File

@ -62,7 +62,7 @@ func parsePackPackage(_ context.Context, _ file.Resolver, _ *generic.Environment
pkgs,
newSwiplPackPackage(
entry,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
),
)

View File

@ -36,7 +36,7 @@ func parseTerraformLock(_ context.Context, _ file.Resolver, _ *generic.Environme
p := pkg.Package{
Name: provider.URL,
Version: provider.Version,
Locations: file.NewLocationSet(reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)),
Locations: file.NewLocationSet(reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)),
Licenses: pkg.NewLicenseSet(), // TODO: license could be found in .terraform/providers/${name}/${version}/${arch}/LICENSE.txt
Language: pkg.Go,
Type: pkg.TerraformPkg,

View File

@ -44,7 +44,7 @@ func parseWordpressPluginFiles(_ context.Context, _ file.Resolver, _ *generic.En
_, err := reader.Read(buffer)
if err != nil {
return nil, nil, fmt.Errorf("failed to read %s file: %w", reader.Location.Path(), err)
return nil, nil, fmt.Errorf("failed to read %s file: %w", reader.Path(), err)
}
fields := extractFields(string(buffer))