chore(deps): update tools to latest versions (#3775)

---------
Signed-off-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Signed-off-by: Christopher Phillips <32073428+spiffcs@users.noreply.github.com>
Co-authored-by: spiffcs <32073428+spiffcs@users.noreply.github.com>
This commit is contained in:
anchore-actions-token-generator[bot] 2025-04-03 17:35:26 +00:00 committed by GitHub
parent 12f36420dd
commit f11377fe30
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
57 changed files with 147 additions and 137 deletions

View File

@ -26,7 +26,7 @@ tools:
# used for linting # used for linting
- name: golangci-lint - name: golangci-lint
version: version:
want: v1.64.8 want: v2.0.2
method: github-release method: github-release
with: with:
repo: golangci/golangci-lint repo: golangci/golangci-lint
@ -58,7 +58,7 @@ tools:
# used to release all artifacts # used to release all artifacts
- name: goreleaser - name: goreleaser
version: version:
want: v2.8.1 want: v2.8.2
method: github-release method: github-release
with: with:
repo: goreleaser/goreleaser repo: goreleaser/goreleaser

View File

@ -1,15 +1,8 @@
issues: version: "2"
max-same-issues: 25 run:
uniq-by-line: false tests: false
# TODO: enable this when we have coverage on docstring comments
# # The list of ids of default excludes to include or disable.
# include:
# - EXC0002 # disable excluding of issues about comments from golint
linters: linters:
# inverted configuration with `enable-all` and `disable` is not scalable during updates of golangci-lint default: none
disable-all: true
enable: enable:
- asciicheck - asciicheck
- bodyclose - bodyclose
@ -22,11 +15,8 @@ linters:
- goconst - goconst
- gocritic - gocritic
- gocyclo - gocyclo
- gofmt
- goimports
- goprintffuncname - goprintffuncname
- gosec - gosec
- gosimple
- govet - govet
- ineffassign - ineffassign
- misspell - misspell
@ -34,37 +24,35 @@ linters:
- nolintlint - nolintlint
- revive - revive
- staticcheck - staticcheck
- stylecheck
- typecheck
- unconvert - unconvert
- unparam - unparam
- unused - unused
- whitespace - whitespace
settings:
linters-settings: funlen:
funlen: lines: 70
# Checks the number of lines in a function. statements: 50
# If lower than 0, disable the check. gocritic:
# Default: 60 enabled-checks:
lines: 70 - deferInLoop
# Checks the number of statements in a function. - ruleguard
# If lower than 0, disable the check. settings:
# Default: 40 ruleguard:
statements: 50 rules: test/rules/rules.go
gocritic: gosec:
enabled-checks: excludes:
- deferInLoop - G115
- ruleguard exclusions:
settings: generated: lax
ruleguard: presets:
rules: "test/rules/rules.go" - comments
gosec: - common-false-positives
excludes: - legacy
- G115 - std-error-handling
paths:
run: - third_party$
timeout: 10m - builtin$
tests: false - examples$
# do not enable... # do not enable...
# - deadcode # The owner seems to have abandoned the linter. Replaced by "unused". # - deadcode # The owner seems to have abandoned the linter. Replaced by "unused".
@ -91,3 +79,23 @@ run:
# - testpackage # - testpackage
# - varcheck # The owner seems to have abandoned the linter. Replaced by "unused". # - varcheck # The owner seems to have abandoned the linter. Replaced by "unused".
# - wsl # this doesn't have an auto-fixer yet and is pretty noisy (https://github.com/bombsimon/wsl/issues/90) # - wsl # this doesn't have an auto-fixer yet and is pretty noisy (https://github.com/bombsimon/wsl/issues/90)
issues:
max-same-issues: 25
uniq-by-line: false
# TODO: enable this when we have coverage on docstring comments
# # The list of ids of default excludes to include or disable.
# include:
# - EXC0002 # disable excluding of issues about comments from golint
formatters:
enable:
- gofmt
- goimports
exclusions:
generated: lax
paths:
- third_party$
- builtin$
- examples$

View File

@ -52,7 +52,7 @@ func Attest(app clio.Application) *cobra.Command {
opts := defaultAttestOptions() opts := defaultAttestOptions()
// template format explicitly not allowed // template format explicitly not allowed
opts.Format.Template.Enabled = false opts.Template.Enabled = false
return app.SetupCommand(&cobra.Command{ return app.SetupCommand(&cobra.Command{
Use: "attest --output [FORMAT] <IMAGE>", Use: "attest --output [FORMAT] <IMAGE>",
@ -136,7 +136,7 @@ func writeSBOMToFormattedFile(s *sbom.SBOM, sbomFile io.Writer, opts *attestOpti
return fmt.Errorf("no output file provided") return fmt.Errorf("no output file provided")
} }
encs, err := opts.Format.Encoders() encs, err := opts.Encoders()
if err != nil { if err != nil {
return fmt.Errorf("unable to create encoders: %w", err) return fmt.Errorf("unable to create encoders: %w", err)
} }

View File

@ -110,7 +110,7 @@ func (o *scanOptions) PostLoad() error {
} }
func (o *scanOptions) validateLegacyOptionsNotUsed() error { func (o *scanOptions) validateLegacyOptionsNotUsed() error {
if len(fangs.Flatten(o.Config.ConfigFile)) == 0 { if len(fangs.Flatten(o.ConfigFile)) == 0 {
return nil return nil
} }
@ -122,7 +122,7 @@ func (o *scanOptions) validateLegacyOptionsNotUsed() error {
File any `yaml:"file" json:"file" mapstructure:"file"` File any `yaml:"file" json:"file" mapstructure:"file"`
} }
for _, f := range fangs.Flatten(o.Config.ConfigFile) { for _, f := range fangs.Flatten(o.ConfigFile) {
by, err := os.ReadFile(f) by, err := os.ReadFile(f)
if err != nil { if err != nil {
return fmt.Errorf("unable to read config file during validations %q: %w", f, err) return fmt.Errorf("unable to read config file during validations %q: %w", f, err)

View File

@ -89,7 +89,7 @@ func (o Output) SBOMWriter() (sbom.Writer, error) {
usesTemplateOutput := names.Has(string(template.ID)) usesTemplateOutput := names.Has(string(template.ID))
if usesTemplateOutput && o.Format.Template.Path == "" { if usesTemplateOutput && o.Template.Path == "" {
return nil, fmt.Errorf(`must specify path to template file when using "template" output format`) return nil, fmt.Errorf(`must specify path to template file when using "template" output format`)
} }

View File

@ -36,11 +36,7 @@ func capture(target **os.File, writer io.Writer, bufSize int) func() {
}() }()
buf := make([]byte, bufSize) buf := make([]byte, bufSize)
for { for original != nil {
if original == nil {
break
}
n, err := r.Read(buf) n, err := r.Read(buf)
if n > 0 { if n > 0 {
_, _ = writer.Write(buf[0:n]) _, _ = writer.Write(buf[0:n])

View File

@ -6,6 +6,7 @@ import (
"io" "io"
"path" "path"
"github.com/anchore/syft/internal"
"github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
@ -76,7 +77,6 @@ func newAlpineConfiguration(resolver file.Resolver) (*AlpineConfiguration, []fil
return &AlpineConfiguration{ return &AlpineConfiguration{
APKKeys: keys, APKKeys: keys,
}, locations, nil }, locations, nil
} }
func getVersion(resolver file.Resolver) (string, []file.Location, error) { func getVersion(resolver file.Resolver) (string, []file.Location, error) {
@ -92,6 +92,7 @@ func getVersion(resolver file.Resolver) (string, []file.Location, error) {
if err != nil { if err != nil {
return "", nil, fmt.Errorf("unable to read alpine version: %w", err) return "", nil, fmt.Errorf("unable to read alpine version: %w", err)
} }
defer internal.CloseAndLogError(reader, locations[0].RealPath)
version, err := io.ReadAll(reader) version, err := io.ReadAll(reader)
if err != nil { if err != nil {
@ -111,7 +112,11 @@ func getAPKKeys(resolver file.Resolver) (map[string]string, []file.Location, err
} }
for _, location := range locations { for _, location := range locations {
basename := path.Base(location.RealPath) basename := path.Base(location.RealPath)
//nolint:gocritic
reader, err := resolver.FileContentsByLocation(location) reader, err := resolver.FileContentsByLocation(location)
if err != nil {
return nil, nil, fmt.Errorf("unable to resolve file contents by location at %s: %w", location.RealPath, err)
}
content, err := io.ReadAll(reader) content, err := io.ReadAll(reader)
if err != nil { if err != nil {
return nil, nil, fmt.Errorf("unable to read apk key content at %s: %w", location.RealPath, err) return nil, nil, fmt.Errorf("unable to read apk key content at %s: %w", location.RealPath, err)

View File

@ -124,5 +124,4 @@ func showAlpineConfiguration(s sbom.SBOM) {
panic(err) panic(err)
} }
fmt.Println(string(meta)) fmt.Println(string(meta))
} }

View File

@ -30,7 +30,6 @@ func main() {
if err := enc.Encode(sbom.Descriptor.Configuration); err != nil { if err := enc.Encode(sbom.Descriptor.Configuration); err != nil {
panic(err) panic(err)
} }
} }
func imageReference() string { func imageReference() string {

View File

@ -13,5 +13,5 @@ func DefaultLocationComparer(x, y file.Location) bool {
} }
func LocationComparerWithoutLayer(x, y file.Location) bool { func LocationComparerWithoutLayer(x, y file.Location) bool {
return cmp.Equal(x.Coordinates.RealPath, y.Coordinates.RealPath) && cmp.Equal(x.AccessPath, y.AccessPath) return cmp.Equal(x.RealPath, y.RealPath) && cmp.Equal(x.AccessPath, y.AccessPath)
} }

View File

@ -23,7 +23,7 @@ func ExtractGlobsFromTarToUniqueTempFile(archivePath, dir string, globs ...strin
defer file.Close() defer file.Close()
// ignore directories // ignore directories
if file.FileInfo.IsDir() { if file.IsDir() {
return nil return nil
} }

View File

@ -28,7 +28,7 @@ func NewZipFileManifest(archivePath string) (ZipFileManifest, error) {
} }
}() }()
for _, file := range zipReader.Reader.File { for _, file := range zipReader.File {
manifest.Add(file.Name, file.FileInfo()) manifest.Add(file.Name, file.FileInfo())
} }
return manifest, nil return manifest, nil

View File

@ -53,7 +53,7 @@ func TraverseFilesInZip(archivePath string, visitor func(*zip.File) error, paths
} }
}() }()
for _, file := range zipReader.Reader.File { for _, file := range zipReader.File {
// if no paths are given then assume that all files should be traversed // if no paths are given then assume that all files should be traversed
if len(paths) > 0 { if len(paths) > 0 {
if _, ok := request[file.Name]; !ok { if _, ok := request[file.Name]; !ok {

View File

@ -22,7 +22,7 @@ func less(i, j artifact.Relationship) bool {
jFrom, ok3 := j.From.(pkg.Package) jFrom, ok3 := j.From.(pkg.Package)
jTo, ok4 := j.To.(pkg.Package) jTo, ok4 := j.To.(pkg.Package)
if !(ok1 && ok2 && ok3 && ok4) { if !ok1 && !ok2 && !ok3 && !ok4 {
return false return false
} }

View File

@ -245,10 +245,10 @@ func packageFileOwnershipRelationships(p pkg.Package, resolver file.PathResolver
} }
for _, ref := range pathRefs { for _, ref := range pathRefs {
if oldRef, ok := locations[ref.Coordinates.ID()]; ok { if oldRef, ok := locations[ref.ID()]; ok {
log.Debugf("found path duplicate of %s", oldRef.RealPath) log.Debugf("found path duplicate of %s", oldRef.RealPath)
} }
locations[ref.Coordinates.ID()] = ref locations[ref.ID()] = ref
} }
} }

View File

@ -48,15 +48,15 @@ func (m *LocationMetadata) merge(other LocationMetadata) error {
} }
func (l Location) WithAnnotation(key, value string) Location { func (l Location) WithAnnotation(key, value string) Location {
if l.LocationMetadata.Annotations == nil { if l.Annotations == nil {
l.LocationMetadata.Annotations = map[string]string{} l.Annotations = map[string]string{}
} }
l.LocationMetadata.Annotations[key] = value l.Annotations[key] = value
return l return l
} }
func (l Location) WithoutAnnotations() Location { func (l Location) WithoutAnnotations() Location {
l.LocationMetadata.Annotations = map[string]string{} l.Annotations = map[string]string{}
return l return l
} }

View File

@ -361,6 +361,7 @@ func collectDocRelationships(spdxIDMap map[string]any, doc *spdx.Document) (out
from, fromOk := a.(pkg.Package) from, fromOk := a.(pkg.Package)
toPackage, toPackageOk := b.(pkg.Package) toPackage, toPackageOk := b.(pkg.Package)
toLocation, toLocationOk := b.(file.Location) toLocation, toLocationOk := b.(file.Location)
//nolint:staticcheck
if !fromOk || !(toPackageOk || toLocationOk) { if !fromOk || !(toPackageOk || toLocationOk) {
log.Debugf("unable to find valid relationship mapping from SPDX, ignoring: (from: %+v) (to: %+v)", a, b) log.Debugf("unable to find valid relationship mapping from SPDX, ignoring: (from: %+v) (to: %+v)", a, b)
continue continue

View File

@ -213,13 +213,15 @@ func reduceOuter(expression string) string {
func isBalanced(expression string) bool { func isBalanced(expression string) bool {
count := 0 count := 0
for _, c := range expression { for _, c := range expression {
if c == '(' { switch c {
case '(':
count++ count++
} else if c == ')' { case ')':
count-- count--
if count < 0 { if count < 0 {
return false return false
} }
default:
} }
} }
return count == 0 return count == 0

View File

@ -228,7 +228,7 @@ func toSyftRelationships(doc *model.Document, catalog *pkg.Collection, relations
idMap[string(p.ID())] = p idMap[string(p.ID())] = p
locations := p.Locations.ToSlice() locations := p.Locations.ToSlice()
for _, l := range locations { for _, l := range locations {
idMap[string(l.Coordinates.ID())] = l.Coordinates idMap[string(l.ID())] = l.Coordinates
} }
} }

View File

@ -57,7 +57,7 @@ func (r *ContainerImageAllLayers) fileByRef(ref stereoscopeFile.Reference, uniqu
return nil, fmt.Errorf("unable to fetch metadata (ref=%+v): %w", ref, err) return nil, fmt.Errorf("unable to fetch metadata (ref=%+v): %w", ref, err)
} }
if entry.Metadata.Type == stereoscopeFile.TypeHardLink || entry.Metadata.Type == stereoscopeFile.TypeSymLink { if entry.Type == stereoscopeFile.TypeHardLink || entry.Type == stereoscopeFile.TypeSymLink {
// a link may resolve in this layer or higher, assuming a squashed tree is used to search // a link may resolve in this layer or higher, assuming a squashed tree is used to search
// we should search all possible resolutions within the valid source // we should search all possible resolutions within the valid source
for _, subLayerIdx := range r.layers[layerIdx:] { for _, subLayerIdx := range r.layers[layerIdx:] {
@ -102,7 +102,7 @@ func (r *ContainerImageAllLayers) FilesByPath(paths ...string) ([]file.Location,
if err != nil { if err != nil {
return nil, fmt.Errorf("unable to get file metadata for path=%q: %w", ref.RealPath, err) return nil, fmt.Errorf("unable to get file metadata for path=%q: %w", ref.RealPath, err)
} }
if metadata.Metadata.IsDir() { if metadata.IsDir() {
continue continue
} }
} }
@ -146,7 +146,7 @@ func (r *ContainerImageAllLayers) FilesByGlob(patterns ...string) ([]file.Locati
return nil, fmt.Errorf("unable to get file metadata for path=%q: %w", result.RequestPath, err) return nil, fmt.Errorf("unable to get file metadata for path=%q: %w", result.RequestPath, err)
} }
// don't consider directories // don't consider directories
if metadata.Metadata.IsDir() { if metadata.IsDir() {
continue continue
} }
} }
@ -192,7 +192,7 @@ func (r *ContainerImageAllLayers) FileContentsByLocation(location file.Location)
return nil, fmt.Errorf("unable to get metadata for path=%q from file catalog: %w", location.RealPath, err) return nil, fmt.Errorf("unable to get metadata for path=%q from file catalog: %w", location.RealPath, err)
} }
switch entry.Metadata.Type { switch entry.Type {
case stereoscopeFile.TypeSymLink, stereoscopeFile.TypeHardLink: case stereoscopeFile.TypeSymLink, stereoscopeFile.TypeHardLink:
// the location we are searching may be a symlink, we should always work with the resolved file // the location we are searching may be a symlink, we should always work with the resolved file
newLocation := r.RelativeFileByPath(location, location.AccessPath) newLocation := r.RelativeFileByPath(location, location.AccessPath)

View File

@ -58,7 +58,7 @@ func (r *ContainerImageSquash) FilesByPath(paths ...string) ([]file.Location, er
return nil, fmt.Errorf("unable to get file metadata for path=%q: %w", ref.RealPath, err) return nil, fmt.Errorf("unable to get file metadata for path=%q: %w", ref.RealPath, err)
} }
// don't consider directories // don't consider directories
if metadata.Metadata.IsDir() { if metadata.IsDir() {
continue continue
} }
} }
@ -106,7 +106,7 @@ func (r *ContainerImageSquash) FilesByGlob(patterns ...string) ([]file.Location,
return nil, fmt.Errorf("unable to get file metadata for path=%q: %w", result.RequestPath, err) return nil, fmt.Errorf("unable to get file metadata for path=%q: %w", result.RequestPath, err)
} }
// don't consider directories // don't consider directories
if metadata.Metadata.IsDir() { if metadata.IsDir() {
continue continue
} }
} }
@ -151,7 +151,7 @@ func (r *ContainerImageSquash) FileContentsByLocation(location file.Location) (i
return nil, fmt.Errorf("unable to get metadata for path=%q from file catalog: %w", location.RealPath, err) return nil, fmt.Errorf("unable to get metadata for path=%q from file catalog: %w", location.RealPath, err)
} }
switch entry.Metadata.Type { switch entry.Type {
case stereoscopeFile.TypeSymLink, stereoscopeFile.TypeHardLink: case stereoscopeFile.TypeSymLink, stereoscopeFile.TypeHardLink:
// the location we are searching may be a symlink, we should always work with the resolved file // the location we are searching may be a symlink, we should always work with the resolved file
locations, err := r.FilesByPath(location.RealPath) locations, err := r.FilesByPath(location.RealPath)

View File

@ -59,7 +59,7 @@ func (r *Directory) buildIndex() error {
r.tree = tree r.tree = tree
r.index = index r.index = index
r.filetreeResolver.searchContext = filetree.NewSearchContext(tree, index) r.searchContext = filetree.NewSearchContext(tree, index)
return nil return nil
} }

View File

@ -51,7 +51,7 @@ func (r *File) buildIndex() error {
r.tree = tree r.tree = tree
r.index = index r.index = index
r.filetreeResolver.searchContext = filetree.NewSearchContext(tree, index) r.searchContext = filetree.NewSearchContext(tree, index)
return nil return nil
} }

View File

@ -68,7 +68,7 @@ func (r filetreeResolver) FilesByPath(userPaths ...string) ([]file.Location, err
} }
// don't consider directories // don't consider directories
if entry.Metadata.IsDir() { if entry.IsDir() {
continue continue
} }
@ -114,17 +114,17 @@ func (r filetreeResolver) FilesByGlob(patterns ...string) ([]file.Location, erro
} }
entry, err := r.index.Get(*refVia.Reference) entry, err := r.index.Get(*refVia.Reference)
if err != nil { if err != nil {
return nil, fmt.Errorf("unable to get file metadata for reference %s: %w", refVia.Reference.RealPath, err) return nil, fmt.Errorf("unable to get file metadata for reference %s: %w", refVia.RealPath, err)
} }
// don't consider directories // don't consider directories
if entry.Metadata.IsDir() { if entry.IsDir() {
continue continue
} }
loc := file.NewVirtualLocationFromDirectory( loc := file.NewVirtualLocationFromDirectory(
r.responsePath(string(refVia.Reference.RealPath)), // the actual path relative to the resolver root r.responsePath(string(refVia.RealPath)), // the actual path relative to the resolver root
r.responsePath(string(refVia.RequestPath)), // the path used to access this file, relative to the resolver root r.responsePath(string(refVia.RequestPath)), // the path used to access this file, relative to the resolver root
*refVia.Reference, *refVia.Reference,
) )
uniqueFileIDs.Add(*refVia.Reference) uniqueFileIDs.Add(*refVia.Reference)
@ -217,7 +217,7 @@ func (r *filetreeResolver) FilesByMIMEType(types ...string) ([]file.Location, er
continue continue
} }
location := file.NewVirtualLocationFromDirectory( location := file.NewVirtualLocationFromDirectory(
r.responsePath(string(refVia.Reference.RealPath)), r.responsePath(string(refVia.RealPath)),
r.responsePath(string(refVia.RequestPath)), r.responsePath(string(refVia.RequestPath)),
*refVia.Reference, *refVia.Reference,
) )

View File

@ -123,7 +123,7 @@ func parseApkDB(_ context.Context, resolver file.Resolver, env *generic.Environm
// This should get fixed with https://gitlab.alpinelinux.org/alpine/apk-tools/-/issues/10875 // This should get fixed with https://gitlab.alpinelinux.org/alpine/apk-tools/-/issues/10875
if r == nil { if r == nil {
// find the repositories file from the relative directory of the DB file // find the repositories file from the relative directory of the DB file
releases := findReleases(resolver, reader.Location.RealPath) releases := findReleases(resolver, reader.RealPath)
if len(releases) > 0 { if len(releases) > 0 {
r = &releases[0] r = &releases[0]
@ -173,7 +173,7 @@ func parseReleasesFromAPKRepository(reader file.LocationReadCloser) []linux.Rele
reposB, err := io.ReadAll(reader) reposB, err := io.ReadAll(reader)
if err != nil { if err != nil {
log.Tracef("unable to read APK repositories file %q: %+v", reader.Location.RealPath, err) log.Tracef("unable to read APK repositories file %q: %+v", reader.RealPath, err)
return nil return nil
} }

View File

@ -80,7 +80,7 @@ func parseAlpmDB(_ context.Context, resolver file.Resolver, env *generic.Environ
newPackage( newPackage(
data, data,
env.LinuxRelease, env.LinuxRelease,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
locs..., locs...,
), ),
}, nil, errs }, nil, errs

View File

@ -33,13 +33,13 @@ func parseSBOM(_ context.Context, resolver file.Resolver, _ *generic.Environment
} }
if s == nil { if s == nil {
log.WithFields("path", reader.Location.RealPath).Trace("file is not an SBOM") log.WithFields("path", reader.RealPath).Trace("file is not an SBOM")
return nil, nil, nil return nil, nil, nil
} }
// Bitnami exclusively uses SPDX JSON SBOMs // Bitnami exclusively uses SPDX JSON SBOMs
if sFormat != "spdx-json" { if sFormat != "spdx-json" {
log.WithFields("path", reader.Location.RealPath).Trace("file is not an SPDX JSON SBOM") log.WithFields("path", reader.RealPath).Trace("file is not an SPDX JSON SBOM")
return nil, nil, nil return nil, nil, nil
} }
@ -59,7 +59,7 @@ func parseSBOM(_ context.Context, resolver file.Resolver, _ *generic.Environment
// where there is evidence of this file, and the catalogers have not run against any file other than, // where there is evidence of this file, and the catalogers have not run against any file other than,
// the SBOM, this is the only location that is relevant for this cataloger. // the SBOM, this is the only location that is relevant for this cataloger.
p.Locations = file.NewLocationSet( p.Locations = file.NewLocationSet(
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
) )
// Parse the Bitnami-specific metadata // Parse the Bitnami-specific metadata
@ -70,7 +70,7 @@ func parseSBOM(_ context.Context, resolver file.Resolver, _ *generic.Environment
// Bitnami packages reported in a SPDX file are shipped under the same directory // Bitnami packages reported in a SPDX file are shipped under the same directory
// as the SPDX file itself. // as the SPDX file itself.
metadata.Path = filepath.Dir(reader.Location.RealPath) metadata.Path = filepath.Dir(reader.RealPath)
if p.ID() != mainPkgID { if p.ID() != mainPkgID {
metadata.Files = packageFiles(s.Relationships, p, metadata.Path) metadata.Files = packageFiles(s.Relationships, p, metadata.Path)
secondaryPkgsFiles = append(secondaryPkgsFiles, metadata.Files...) secondaryPkgsFiles = append(secondaryPkgsFiles, metadata.Files...)
@ -86,12 +86,12 @@ func parseSBOM(_ context.Context, resolver file.Resolver, _ *generic.Environment
} }
// Resolve all files owned by the main package in the SBOM and update the metadata // Resolve all files owned by the main package in the SBOM and update the metadata
if mainPkgFiles, err := mainPkgFiles(resolver, reader.Location.RealPath, secondaryPkgsFiles); err == nil { if mainPkgFiles, err := mainPkgFiles(resolver, reader.RealPath, secondaryPkgsFiles); err == nil {
for i, p := range pkgs { for i, p := range pkgs {
if p.ID() == mainPkgID { if p.ID() == mainPkgID {
metadata, ok := p.Metadata.(*pkg.BitnamiSBOMEntry) metadata, ok := p.Metadata.(*pkg.BitnamiSBOMEntry)
if !ok { if !ok {
log.WithFields("spdx-filepath", reader.Location.RealPath).Trace("main package in SBOM does not have Bitnami metadata") log.WithFields("spdx-filepath", reader.RealPath).Trace("main package in SBOM does not have Bitnami metadata")
continue continue
} }
@ -100,7 +100,7 @@ func parseSBOM(_ context.Context, resolver file.Resolver, _ *generic.Environment
} }
} }
} else { } else {
log.WithFields("spdx-filepath", reader.Location.RealPath, "error", err).Trace("unable to resolve owned files for main package in SBOM") log.WithFields("spdx-filepath", reader.RealPath, "error", err).Trace("unable to resolve owned files for main package in SBOM")
} }
return pkgs, filterRelationships(s.Relationships, pkgs), nil return pkgs, filterRelationships(s.Relationships, pkgs), nil

View File

@ -48,7 +48,7 @@ func parseConanfile(_ context.Context, _ file.Resolver, _ *generic.Environment,
p := newConanfilePackage( p := newConanfilePackage(
m, m,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
) )
if p == nil { if p == nil {
continue continue

View File

@ -60,7 +60,7 @@ func parseFullRequiresLine(line string, reader file.LocationReadCloser, pkgs *[]
p := newConaninfoPackage( p := newConaninfoPackage(
meta, meta,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
) )
if p != nil { if p != nil {
*pkgs = append(*pkgs, *p) *pkgs = append(*pkgs, *p)
@ -83,7 +83,7 @@ func parseConaninfo(_ context.Context, _ file.Resolver, _ *generic.Environment,
// First set the base package info by checking the relative path // First set the base package info by checking the relative path
fullFilePath := string(reader.Location.LocationData.Reference().RealPath) fullFilePath := string(reader.Location.LocationData.Reference().RealPath)
if len(fullFilePath) == 0 { if len(fullFilePath) == 0 {
fullFilePath = reader.Location.LocationData.RealPath fullFilePath = reader.RealPath
} }
mainMetadata, err := parseConanMetadataFromFilePath(fullFilePath) mainMetadata, err := parseConanMetadataFromFilePath(fullFilePath)
@ -102,7 +102,7 @@ func parseConaninfo(_ context.Context, _ file.Resolver, _ *generic.Environment,
case errors.Is(err, io.EOF): case errors.Is(err, io.EOF):
mainPackage := newConaninfoPackage( mainPackage := newConaninfoPackage(
mainMetadata, mainMetadata,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
) )
mainPackageRef := *mainPackage mainPackageRef := *mainPackage

View File

@ -91,7 +91,7 @@ func handleConanLockV1(cl conanLock, reader file.LocationReadCloser, parsedPkgRe
p := newConanlockPackage( p := newConanlockPackage(
metadata, metadata,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
) )
if p != nil { if p != nil {
@ -115,7 +115,7 @@ func handleConanLockV2(cl conanLock, reader file.LocationReadCloser, indexToPkgM
p := newConanReferencePackage( p := newConanReferencePackage(
reference, reference,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
) )
if p != nil { if p != nil {

View File

@ -102,7 +102,7 @@ func parsePubspecLock(_ context.Context, _ file.Resolver, _ *generic.Environment
newPubspecLockPackage( newPubspecLockPackage(
name, name,
pubPkg, pubPkg,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
), ),
) )
} }

View File

@ -36,7 +36,7 @@ func parseDpkgDB(ctx context.Context, resolver file.Resolver, env *generic.Envir
return nil, nil, fmt.Errorf("unable to catalog dpkg DB=%q: %w", reader.RealPath, err) return nil, nil, fmt.Errorf("unable to catalog dpkg DB=%q: %w", reader.RealPath, err)
} }
dbLoc := reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation) dbLoc := reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)
var pkgs []pkg.Package var pkgs []pkg.Package
_ = sync.CollectSlice(&ctx, cataloging.ExecutorFile, sync.ToSeq(metadata), func(m pkg.DpkgDBEntry) (pkg.Package, error) { _ = sync.CollectSlice(&ctx, cataloging.ExecutorFile, sync.ToSeq(metadata), func(m pkg.DpkgDBEntry) (pkg.Package, error) {
return newDpkgPackage(m, dbLoc, resolver, env.LinuxRelease, findDpkgInfoFiles(m.Package, resolver, reader.Location)...), nil return newDpkgPackage(m, dbLoc, resolver, env.LinuxRelease, findDpkgInfoFiles(m.Package, resolver, reader.Location)...), nil

View File

@ -69,7 +69,7 @@ func parseDotnetPackagesLock(_ context.Context, _ file.Resolver, _ *generic.Envi
name, _ := extractNameAndVersion(nameVersion) name, _ := extractNameAndVersion(nameVersion)
dep := allDependencies[nameVersion] dep := allDependencies[nameVersion]
dotnetPkg := newDotnetPackagesLockPackage(name, dep, reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)) dotnetPkg := newDotnetPackagesLockPackage(name, dep, reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation))
if dotnetPkg != nil { if dotnetPkg != nil {
pkgs = append(pkgs, *dotnetPkg) pkgs = append(pkgs, *dotnetPkg)
pkgMap[nameVersion] = *dotnetPkg pkgMap[nameVersion] = *dotnetPkg

View File

@ -61,7 +61,7 @@ func parseMixLock(_ context.Context, _ file.Resolver, _ *generic.Environment, re
PkgHash: hash, PkgHash: hash,
PkgHashExt: hashExt, PkgHashExt: hashExt,
}, },
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
), ),
) )
} }

View File

@ -35,7 +35,7 @@ func parseOTPApp(_ context.Context, _ file.Resolver, _ *generic.Environment, rea
p := newPackageFromOTP( p := newPackageFromOTP(
name, version, name, version,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
) )
packages = append(packages, p) packages = append(packages, p)

View File

@ -56,7 +56,7 @@ func parseRebarLock(_ context.Context, _ file.Resolver, _ *generic.Environment,
Name: name, Name: name,
Version: version, Version: version,
}, },
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
) )
pkgMap[name] = &p pkgMap[name] = &p

View File

@ -27,14 +27,14 @@ var (
// parses individual CONTENTS files from the portage flat-file store (e.g. /var/db/pkg/*/*/CONTENTS). // parses individual CONTENTS files from the portage flat-file store (e.g. /var/db/pkg/*/*/CONTENTS).
func parsePortageContents(_ context.Context, resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { func parsePortageContents(_ context.Context, resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
cpvMatch := cpvRe.FindStringSubmatch(reader.Location.RealPath) cpvMatch := cpvRe.FindStringSubmatch(reader.RealPath)
if cpvMatch == nil { if cpvMatch == nil {
return nil, nil, fmt.Errorf("failed to match package and version in %s", reader.Location.RealPath) return nil, nil, fmt.Errorf("failed to match package and version in %s", reader.RealPath)
} }
name, version := cpvMatch[1], cpvMatch[2] name, version := cpvMatch[1], cpvMatch[2]
if name == "" || version == "" { if name == "" || version == "" {
log.WithFields("path", reader.Location.RealPath).Debug("failed to parse portage name and version") log.WithFields("path", reader.RealPath).Debug("failed to parse portage name and version")
return nil, nil, fmt.Errorf("failed to parse portage name and version") return nil, nil, fmt.Errorf("failed to parse portage name and version")
} }
@ -43,7 +43,7 @@ func parsePortageContents(_ context.Context, resolver file.Resolver, _ *generic.
Version: version, Version: version,
PURL: packageURL(name, version), PURL: packageURL(name, version),
Locations: file.NewLocationSet( Locations: file.NewLocationSet(
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
), ),
Type: pkg.PortagePkg, Type: pkg.PortagePkg,
Metadata: pkg.PortageEntry{ Metadata: pkg.PortageEntry{

View File

@ -343,7 +343,7 @@ func getGOARCHFromBin(r io.ReaderAt) (string, error) {
if err != nil { if err != nil {
return "", fmt.Errorf("unrecognized file format: %w", err) return "", fmt.Errorf("unrecognized file format: %w", err)
} }
arch = fmt.Sprintf("%d", f.FileHeader.TargetMachine) arch = fmt.Sprintf("%d", f.TargetMachine)
default: default:
return "", errUnrecognizedFormat return "", errUnrecognizedFormat
} }

View File

@ -61,7 +61,7 @@ func (c *goModCataloger) parseGoModFile(ctx context.Context, resolver file.Resol
Name: m.Mod.Path, Name: m.Mod.Path,
Version: m.Mod.Version, Version: m.Mod.Version,
Licenses: pkg.NewLicenseSet(lics...), Licenses: pkg.NewLicenseSet(lics...),
Locations: file.NewLocationSet(reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), Locations: file.NewLocationSet(reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)),
PURL: packageURL(m.Mod.Path, m.Mod.Version), PURL: packageURL(m.Mod.Path, m.Mod.Version),
Language: pkg.Go, Language: pkg.Go,
Type: pkg.GoModulePkg, Type: pkg.GoModulePkg,
@ -83,7 +83,7 @@ func (c *goModCataloger) parseGoModFile(ctx context.Context, resolver file.Resol
Name: m.New.Path, Name: m.New.Path,
Version: m.New.Version, Version: m.New.Version,
Licenses: pkg.NewLicenseSet(lics...), Licenses: pkg.NewLicenseSet(lics...),
Locations: file.NewLocationSet(reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), Locations: file.NewLocationSet(reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)),
PURL: packageURL(m.New.Path, m.New.Version), PURL: packageURL(m.New.Path, m.New.Version),
Language: pkg.Go, Language: pkg.Go,
Type: pkg.GoModulePkg, Type: pkg.GoModulePkg,
@ -120,7 +120,7 @@ func parseGoSumFile(resolver file.Resolver, reader file.LocationReadCloser) (map
return out, fmt.Errorf("no resolver provided") return out, fmt.Errorf("no resolver provided")
} }
goSumPath := strings.TrimSuffix(reader.Location.RealPath, ".mod") + ".sum" goSumPath := strings.TrimSuffix(reader.RealPath, ".mod") + ".sum"
goSumLocation := resolver.RelativeFileByPath(reader.Location, goSumPath) goSumLocation := resolver.RelativeFileByPath(reader.Location, goSumPath)
if goSumLocation == nil { if goSumLocation == nil {
return nil, fmt.Errorf("unable to resolve: %s", goSumPath) return nil, fmt.Errorf("unable to resolve: %s", goSumPath)

View File

@ -275,7 +275,7 @@ func (ni nativeImageElf) fetchPkgs() (pkgs []pkg.Package, relationships []artifa
if dataSection == nil { if dataSection == nil {
return nil, nil, fmt.Errorf("no .data section found in binary: %w", err) return nil, nil, fmt.Errorf("no .data section found in binary: %w", err)
} }
dataSectionBase := dataSection.SectionHeader.Addr dataSectionBase := dataSection.Addr
data, err := dataSection.Data() data, err := dataSection.Data()
if err != nil { if err != nil {
return nil, nil, fmt.Errorf("cannot read the .data section: %w", err) return nil, nil, fmt.Errorf("cannot read the .data section: %w", err)

View File

@ -63,7 +63,7 @@ func parseGradleLockfile(_ context.Context, _ file.Resolver, _ *generic.Environm
Name: dep.Name, Name: dep.Name,
Version: dep.Version, Version: dep.Version,
Locations: file.NewLocationSet( Locations: file.NewLocationSet(
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
), ),
Language: pkg.Java, Language: pkg.Java,
Type: pkg.JavaPkg, Type: pkg.JavaPkg,

View File

@ -67,7 +67,7 @@ func parsePackageJSON(_ context.Context, _ file.Resolver, _ *generic.Environment
// a compliance filter later will remove these packages based on compliance rules // a compliance filter later will remove these packages based on compliance rules
pkgs = append( pkgs = append(
pkgs, pkgs,
newPackageJSONPackage(p, reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), newPackageJSONPackage(p, reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)),
) )
} }

View File

@ -28,7 +28,7 @@ func parseLinuxKernelModuleFile(_ context.Context, _ file.Resolver, _ *generic.E
return nil, nil, nil return nil, nil, nil
} }
metadata.Path = reader.Location.RealPath metadata.Path = reader.RealPath
return []pkg.Package{ return []pkg.Package{
newLinuxKernelModulePackage( newLinuxKernelModulePackage(

View File

@ -74,7 +74,7 @@ func parseRockspec(_ context.Context, _ file.Resolver, _ *generic.Environment, r
Homepage: homepage, Homepage: homepage,
Description: description, Description: description,
}, },
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
) )
return []pkg.Package{p}, nil, nil return []pkg.Package{p}, nil, nil

View File

@ -39,7 +39,7 @@ func parseOpamPackage(_ context.Context, _ file.Resolver, _ *generic.Environment
// If name is inferred from file name/path // If name is inferred from file name/path
var name, version string var name, version string
var licenses []string var licenses []string
loc := reader.Location.LocationData.AccessPath loc := reader.AccessPath
dir, file := path.Split(loc) dir, file := path.Split(loc)
if file == "opam" { if file == "opam" {
@ -95,7 +95,7 @@ func parseOpamPackage(_ context.Context, _ file.Resolver, _ *generic.Environment
pkgs, pkgs,
newOpamPackage( newOpamPackage(
entry, entry,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
), ),
) )

View File

@ -99,7 +99,7 @@ func poetryLockPackages(reader file.LocationReadCloser) ([]pkg.Package, error) {
p.Name, p.Name,
p.Version, p.Version,
newPythonPoetryLockEntry(p), newPythonPoetryLockEntry(p),
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
), ),
) )
} }

View File

@ -153,7 +153,7 @@ func (rp requirementsParser) parseRequirementsTxt(_ context.Context, _ file.Reso
URL: parseURL(req.URL), URL: parseURL(req.URL),
Markers: req.Markers, Markers: req.Markers,
}, },
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
), ),
) )
} }

View File

@ -59,7 +59,7 @@ func parseSetup(_ context.Context, _ file.Resolver, _ *generic.Environment, read
newPackageForIndex( newPackageForIndex(
name, name,
version, version,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
), ),
) )
} }

View File

@ -19,7 +19,7 @@ import (
func parseRpmArchive(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { func parseRpmArchive(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
rpm, err := rpmutils.ReadRpm(reader) rpm, err := rpmutils.ReadRpm(reader)
if err != nil { if err != nil {
return nil, nil, fmt.Errorf("RPM file found but unable to read: %s (%w)", reader.Location.RealPath, err) return nil, nil, fmt.Errorf("RPM file found but unable to read: %s (%w)", reader.RealPath, err)
} }
nevra, err := rpm.Header.GetNEVRA() nevra, err := rpm.Header.GetNEVRA()

View File

@ -47,7 +47,7 @@ func parseGemFileLockEntries(_ context.Context, _ file.Resolver, _ *generic.Envi
newGemfileLockPackage( newGemfileLockPackage(
candidate[0], candidate[0],
strings.Trim(candidate[1], "()"), strings.Trim(candidate[1], "()"),
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
), ),
) )
} }

View File

@ -43,7 +43,7 @@ func parseCargoLock(_ context.Context, _ file.Resolver, _ *generic.Environment,
} }
newPkg := newPackageFromCargoMetadata( newPkg := newPackageFromCargoMetadata(
p, p,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
) )
pkgs = append( pkgs = append(
pkgs, pkgs,

View File

@ -40,7 +40,7 @@ func NewCataloger() pkg.Cataloger {
func parseSBOM(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { func parseSBOM(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
readSeeker, err := adaptToReadSeeker(reader) readSeeker, err := adaptToReadSeeker(reader)
if err != nil { if err != nil {
return nil, nil, fmt.Errorf("unable to read SBOM file %q: %w", reader.Location.RealPath, err) return nil, nil, fmt.Errorf("unable to read SBOM file %q: %w", reader.RealPath, err)
} }
s, _, _, err := format.Decode(readSeeker) s, _, _, err := format.Decode(readSeeker)
if err != nil { if err != nil {
@ -48,7 +48,7 @@ func parseSBOM(_ context.Context, _ file.Resolver, _ *generic.Environment, reade
} }
if s == nil { if s == nil {
log.WithFields("path", reader.Location.RealPath).Trace("file is not an SBOM") log.WithFields("path", reader.RealPath).Trace("file is not an SBOM")
return nil, nil, nil return nil, nil, nil
} }
@ -60,14 +60,14 @@ func parseSBOM(_ context.Context, _ file.Resolver, _ *generic.Environment, reade
// where there is evidence of this file, and the catalogers have not run against any file other than, // where there is evidence of this file, and the catalogers have not run against any file other than,
// the SBOM, this is the only location that is relevant for this cataloger. // the SBOM, this is the only location that is relevant for this cataloger.
p.Locations = file.NewLocationSet( p.Locations = file.NewLocationSet(
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
) )
p.FoundBy = catalogerName p.FoundBy = catalogerName
pkgs = append(pkgs, p) pkgs = append(pkgs, p)
relationships = append(relationships, artifact.Relationship{ relationships = append(relationships, artifact.Relationship{
From: p, From: p,
To: reader.Location.Coordinates, To: reader.Coordinates,
Type: artifact.DescribedByRelationship, Type: artifact.DescribedByRelationship,
}) })
} }

View File

@ -94,7 +94,7 @@ func parsePackageResolved(_ context.Context, _ file.Resolver, _ *generic.Environ
pkgPin.Version, pkgPin.Version,
pkgPin.Location, pkgPin.Location,
pkgPin.Revision, pkgPin.Revision,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
), ),
) )
} }

View File

@ -67,7 +67,7 @@ func parsePodfileLock(_ context.Context, _ file.Resolver, _ *generic.Environment
podName, podName,
podVersion, podVersion,
pkgHash, pkgHash,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
), ),
) )
} }

View File

@ -62,7 +62,7 @@ func parsePackPackage(_ context.Context, _ file.Resolver, _ *generic.Environment
pkgs, pkgs,
newSwiplPackPackage( newSwiplPackPackage(
entry, entry,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
), ),
) )

View File

@ -36,7 +36,7 @@ func parseTerraformLock(_ context.Context, _ file.Resolver, _ *generic.Environme
p := pkg.Package{ p := pkg.Package{
Name: provider.URL, Name: provider.URL,
Version: provider.Version, Version: provider.Version,
Locations: file.NewLocationSet(reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), Locations: file.NewLocationSet(reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)),
Licenses: pkg.NewLicenseSet(), // TODO: license could be found in .terraform/providers/${name}/${version}/${arch}/LICENSE.txt Licenses: pkg.NewLicenseSet(), // TODO: license could be found in .terraform/providers/${name}/${version}/${arch}/LICENSE.txt
Language: pkg.Go, Language: pkg.Go,
Type: pkg.TerraformPkg, Type: pkg.TerraformPkg,

View File

@ -44,7 +44,7 @@ func parseWordpressPluginFiles(_ context.Context, _ file.Resolver, _ *generic.En
_, err := reader.Read(buffer) _, err := reader.Read(buffer)
if err != nil { if err != nil {
return nil, nil, fmt.Errorf("failed to read %s file: %w", reader.Location.Path(), err) return nil, nil, fmt.Errorf("failed to read %s file: %w", reader.Path(), err)
} }
fields := extractFields(string(buffer)) fields := extractFields(string(buffer))