Add annotations for evidence on package locations (#1723)

* add location annotations + deb evidence annotations

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>

* rename LocationData struct and Annotation helper function

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>

* add failing integration test for evidence coverage

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>

* add evidence to alpm cataloger locations

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>

* change location annotation helper to return a location copy

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>

* add evidence to binary cataloger locations

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>

* updated remaining catalogers with location annotations

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>

* fix unit tests

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>

* fix linting

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>

* bump json schema

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>

* partial addressing of review comments

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>

* rename location.WithAnnotation

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>

---------

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>
This commit is contained in:
Alex Goodman 2023-04-13 17:02:29 -04:00 committed by GitHub
parent 05715489c4
commit 5d156b8241
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
67 changed files with 2511 additions and 849 deletions

View File

@ -6,5 +6,5 @@ const (
// JSONSchemaVersion is the current schema version output by the JSON encoder // JSONSchemaVersion is the current schema version output by the JSON encoder
// This is roughly following the "SchemaVer" guidelines for versioning the JSON schema. Please see schema/json/README.md for details on how to increment. // This is roughly following the "SchemaVer" guidelines for versioning the JSON schema. Please see schema/json/README.md for details on how to increment.
JSONSchemaVersion = "7.1.1" JSONSchemaVersion = "7.1.2"
) )

File diff suppressed because it is too large Load Diff

View File

@ -29,7 +29,7 @@ func Test_encodeComponentProperties(t *testing.T) {
input: pkg.Package{ input: pkg.Package{
FoundBy: "cataloger", FoundBy: "cataloger",
Locations: source.NewLocationSet( Locations: source.NewLocationSet(
source.Location{Coordinates: source.Coordinates{RealPath: "test"}}, source.NewLocationFromCoordinates(source.Coordinates{RealPath: "test"}),
), ),
Metadata: pkg.ApkMetadata{ Metadata: pkg.ApkMetadata{
Package: "libc-utils", Package: "libc-utils",

View File

@ -220,10 +220,8 @@ func toSyftCoordinates(f *spdx.File) source.Coordinates {
} }
func toSyftLocation(f *spdx.File) *source.Location { func toSyftLocation(f *spdx.File) *source.Location {
return &source.Location{ l := source.NewVirtualLocationFromCoordinates(toSyftCoordinates(f), f.FileName)
Coordinates: toSyftCoordinates(f), return &l
VirtualPath: f.FileName,
}
} }
func requireAndTrimPrefix(val interface{}, prefix string) string { func requireAndTrimPrefix(val interface{}, prefix string) string {

View File

@ -36,36 +36,30 @@ func Test_toGithubModel(t *testing.T) {
Name: "pkg-1", Name: "pkg-1",
Version: "1.0.1", Version: "1.0.1",
Locations: source.NewLocationSet( Locations: source.NewLocationSet(
source.Location{ source.NewLocationFromCoordinates(source.Coordinates{
Coordinates: source.Coordinates{ RealPath: "/usr/lib",
RealPath: "/usr/lib", FileSystemID: "fsid-1",
FileSystemID: "fsid-1", }),
},
},
), ),
}, },
{ {
Name: "pkg-2", Name: "pkg-2",
Version: "2.0.2", Version: "2.0.2",
Locations: source.NewLocationSet( Locations: source.NewLocationSet(
source.Location{ source.NewLocationFromCoordinates(source.Coordinates{
Coordinates: source.Coordinates{ RealPath: "/usr/lib",
RealPath: "/usr/lib", FileSystemID: "fsid-1",
FileSystemID: "fsid-1", }),
},
},
), ),
}, },
{ {
Name: "pkg-3", Name: "pkg-3",
Version: "3.0.3", Version: "3.0.3",
Locations: source.NewLocationSet( Locations: source.NewLocationSet(
source.Location{ source.NewLocationFromCoordinates(source.Coordinates{
Coordinates: source.Coordinates{ RealPath: "/etc",
RealPath: "/etc", FileSystemID: "fsid-1",
FileSystemID: "fsid-1", }),
},
},
), ),
}, },
} { } {

View File

@ -53,11 +53,9 @@ func TestEncodeFullJSONDocument(t *testing.T) {
Name: "package-1", Name: "package-1",
Version: "1.0.1", Version: "1.0.1",
Locations: source.NewLocationSet( Locations: source.NewLocationSet(
source.Location{ source.NewLocationFromCoordinates(source.Coordinates{
Coordinates: source.Coordinates{ RealPath: "/a/place/a",
RealPath: "/a/place/a", }),
},
},
), ),
Type: pkg.PythonPkg, Type: pkg.PythonPkg,
FoundBy: "the-cataloger-1", FoundBy: "the-cataloger-1",
@ -79,11 +77,9 @@ func TestEncodeFullJSONDocument(t *testing.T) {
Name: "package-2", Name: "package-2",
Version: "2.0.1", Version: "2.0.1",
Locations: source.NewLocationSet( Locations: source.NewLocationSet(
source.Location{ source.NewLocationFromCoordinates(source.Coordinates{
Coordinates: source.Coordinates{ RealPath: "/b/place/b",
RealPath: "/b/place/b", }),
},
},
), ),
Type: pkg.DebPkg, Type: pkg.DebPkg,
FoundBy: "the-cataloger-2", FoundBy: "the-cataloger-2",

View File

@ -21,16 +21,16 @@ type Package struct {
// PackageBasicData contains non-ambiguous values (type-wise) from pkg.Package. // PackageBasicData contains non-ambiguous values (type-wise) from pkg.Package.
type PackageBasicData struct { type PackageBasicData struct {
ID string `json:"id"` ID string `json:"id"`
Name string `json:"name"` Name string `json:"name"`
Version string `json:"version"` Version string `json:"version"`
Type pkg.Type `json:"type"` Type pkg.Type `json:"type"`
FoundBy string `json:"foundBy"` FoundBy string `json:"foundBy"`
Locations []source.Coordinates `json:"locations"` Locations []source.Location `json:"locations"`
Licenses []string `json:"licenses"` Licenses []string `json:"licenses"`
Language pkg.Language `json:"language"` Language pkg.Language `json:"language"`
CPEs []string `json:"cpes"` CPEs []string `json:"cpes"`
PURL string `json:"purl"` PURL string `json:"purl"`
} }
// PackageCustomData contains ambiguous values (type-wise) from pkg.Package. // PackageCustomData contains ambiguous values (type-wise) from pkg.Package.

View File

@ -89,7 +89,7 @@
} }
}, },
"schema": { "schema": {
"version": "7.1.1", "version": "7.1.2",
"url": "https://raw.githubusercontent.com/anchore/syft/main/schema/json/schema-7.1.1.json" "url": "https://raw.githubusercontent.com/anchore/syft/main/schema/json/schema-7.1.2.json"
} }
} }

View File

@ -185,7 +185,7 @@
} }
}, },
"schema": { "schema": {
"version": "7.1.1", "version": "7.1.2",
"url": "https://raw.githubusercontent.com/anchore/syft/main/schema/json/schema-7.1.1.json" "url": "https://raw.githubusercontent.com/anchore/syft/main/schema/json/schema-7.1.2.json"
} }
} }

View File

@ -112,7 +112,7 @@
} }
}, },
"schema": { "schema": {
"version": "7.1.1", "version": "7.1.2",
"url": "https://raw.githubusercontent.com/anchore/syft/main/schema/json/schema-7.1.1.json" "url": "https://raw.githubusercontent.com/anchore/syft/main/schema/json/schema-7.1.2.json"
} }
} }

View File

@ -195,12 +195,6 @@ func toPackageModel(p pkg.Package) model.Package {
licenses = p.Licenses licenses = p.Licenses
} }
locations := p.Locations.ToSlice()
var coordinates = make([]source.Coordinates, len(locations))
for i, l := range locations {
coordinates[i] = l.Coordinates
}
return model.Package{ return model.Package{
PackageBasicData: model.PackageBasicData{ PackageBasicData: model.PackageBasicData{
ID: string(p.ID()), ID: string(p.ID()),
@ -208,7 +202,7 @@ func toPackageModel(p pkg.Package) model.Package {
Version: p.Version, Version: p.Version,
Type: p.Type, Type: p.Type,
FoundBy: p.FoundBy, FoundBy: p.FoundBy,
Locations: coordinates, Locations: p.Locations.ToSlice(),
Licenses: licenses, Licenses: licenses,
Language: p.Language, Language: p.Language,
CPEs: cpes, CPEs: cpes,

View File

@ -202,16 +202,11 @@ func toSyftPackage(p model.Package, idAliases map[string]string) pkg.Package {
cpes = append(cpes, value) cpes = append(cpes, value)
} }
var locations = make([]source.Location, len(p.Locations))
for i, c := range p.Locations {
locations[i] = source.NewLocationFromCoordinates(c)
}
out := pkg.Package{ out := pkg.Package{
Name: p.Name, Name: p.Name,
Version: p.Version, Version: p.Version,
FoundBy: p.FoundBy, FoundBy: p.FoundBy,
Locations: source.NewLocationSet(locations...), Locations: source.NewLocationSet(p.Locations...),
Licenses: p.Licenses, Licenses: p.Licenses,
Language: p.Language, Language: p.Language,
Type: p.Type, Type: p.Type,

View File

@ -327,45 +327,45 @@ func TestCatalog_MergeRecords(t *testing.T) {
{ {
CPEs: []cpe.CPE{cpe.Must("cpe:2.3:a:package:1:1:*:*:*:*:*:*:*")}, CPEs: []cpe.CPE{cpe.Must("cpe:2.3:a:package:1:1:*:*:*:*:*:*:*")},
Locations: source.NewLocationSet( Locations: source.NewLocationSet(
source.Location{ source.NewVirtualLocationFromCoordinates(
Coordinates: source.Coordinates{ source.Coordinates{
RealPath: "/b/path", RealPath: "/b/path",
FileSystemID: "a", FileSystemID: "a",
}, },
VirtualPath: "/another/path", "/another/path",
}, ),
), ),
Type: RpmPkg, Type: RpmPkg,
}, },
{ {
CPEs: []cpe.CPE{cpe.Must("cpe:2.3:b:package:1:1:*:*:*:*:*:*:*")}, CPEs: []cpe.CPE{cpe.Must("cpe:2.3:b:package:1:1:*:*:*:*:*:*:*")},
Locations: source.NewLocationSet( Locations: source.NewLocationSet(
source.Location{ source.NewVirtualLocationFromCoordinates(
Coordinates: source.Coordinates{ source.Coordinates{
RealPath: "/b/path", RealPath: "/b/path",
FileSystemID: "b", FileSystemID: "b",
}, },
VirtualPath: "/another/path", "/another/path",
}, ),
), ),
Type: RpmPkg, Type: RpmPkg,
}, },
}, },
expectedLocations: []source.Location{ expectedLocations: []source.Location{
{ source.NewVirtualLocationFromCoordinates(
Coordinates: source.Coordinates{ source.Coordinates{
RealPath: "/b/path", RealPath: "/b/path",
FileSystemID: "a", FileSystemID: "a",
}, },
VirtualPath: "/another/path", "/another/path",
}, ),
{ source.NewVirtualLocationFromCoordinates(
Coordinates: source.Coordinates{ source.Coordinates{
RealPath: "/b/path", RealPath: "/b/path",
FileSystemID: "b", FileSystemID: "b",
}, },
VirtualPath: "/another/path", "/another/path",
}, ),
}, },
expectedCPECount: 2, expectedCPECount: 2,
}, },

View File

@ -70,7 +70,11 @@ func parseAlpmDB(resolver source.FileResolver, env *generic.Environment, reader
} }
return []pkg.Package{ return []pkg.Package{
newPackage(*metadata, env.LinuxRelease, reader.Location), newPackage(
*metadata,
env.LinuxRelease,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
),
}, nil, nil }, nil, nil
} }

View File

@ -123,7 +123,7 @@ func parseApkDB(resolver source.FileResolver, env *generic.Environment, reader s
pkgs := make([]pkg.Package, 0, len(apks)) pkgs := make([]pkg.Package, 0, len(apks))
for _, apk := range apks { for _, apk := range apks {
pkgs = append(pkgs, newPackage(apk, r, reader.Location)) pkgs = append(pkgs, newPackage(apk, r, reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)))
} }
return pkgs, discoverPackageDependencies(pkgs), nil return pkgs, discoverPackageDependencies(pkgs), nil

View File

@ -829,12 +829,12 @@ func match(classifier string, paths ...string) pkg.ClassifierMatch {
} }
return pkg.ClassifierMatch{ return pkg.ClassifierMatch{
Classifier: classifier, Classifier: classifier,
Location: source.Location{ Location: source.NewVirtualLocationFromCoordinates(
Coordinates: source.Coordinates{ source.Coordinates{
RealPath: realPath, RealPath: realPath,
}, },
VirtualPath: virtualPath, virtualPath,
}, ),
} }
} }

View File

@ -7,7 +7,6 @@ import (
"debug/pe" "debug/pe"
"fmt" "fmt"
"io" "io"
"reflect"
"regexp" "regexp"
"strings" "strings"
"text/template" "text/template"
@ -107,7 +106,13 @@ func fileNameTemplateVersionMatcher(fileNamePattern string, contentTemplate stri
} }
matchMetadata := internal.MatchNamedCaptureGroups(tmplPattern, string(contents)) matchMetadata := internal.MatchNamedCaptureGroups(tmplPattern, string(contents))
return singlePackage(classifier, location, matchMetadata), nil
p := newPackage(classifier, location, matchMetadata)
if p == nil {
return nil, nil
}
return []pkg.Package{*p}, nil
} }
} }
@ -120,7 +125,13 @@ func fileContentsVersionMatcher(pattern string) evidenceMatcher {
} }
matchMetadata := internal.MatchNamedCaptureGroups(pat, string(contents)) matchMetadata := internal.MatchNamedCaptureGroups(pat, string(contents))
return singlePackage(classifier, location, matchMetadata), nil
p := newPackage(classifier, location, matchMetadata)
if p == nil {
return nil, nil
}
return []pkg.Package{*p}, nil
} }
} }
@ -141,8 +152,8 @@ func sharedLibraryLookup(sharedLibraryPattern string, sharedLibraryMatcher evide
if err != nil { if err != nil {
return nil, err return nil, err
} }
for _, libraryLication := range locations { for _, libraryLocation := range locations {
pkgs, err := sharedLibraryMatcher(resolver, classifier, libraryLication) pkgs, err := sharedLibraryMatcher(resolver, classifier, libraryLocation)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -176,58 +187,6 @@ func mustPURL(purl string) packageurl.PackageURL {
return p return p
} }
func singlePackage(classifier classifier, location source.Location, matchMetadata map[string]string) []pkg.Package {
version, ok := matchMetadata["version"]
if !ok {
return nil
}
update := matchMetadata["update"]
var cpes []cpe.CPE
for _, c := range classifier.CPEs {
c.Version = version
c.Update = update
cpes = append(cpes, c)
}
p := pkg.Package{
Name: classifier.Package,
Version: version,
Locations: source.NewLocationSet(location),
Type: pkg.BinaryPkg,
CPEs: cpes,
FoundBy: catalogerName,
MetadataType: pkg.BinaryMetadataType,
Metadata: pkg.BinaryMetadata{
Matches: []pkg.ClassifierMatch{
{
Classifier: classifier.Class,
Location: location,
},
},
},
}
if classifier.Type != "" {
p.Type = classifier.Type
}
if !reflect.DeepEqual(classifier.PURL, emptyPURL) {
purl := classifier.PURL
purl.Version = version
p.PURL = purl.ToString()
}
if classifier.Language != "" {
p.Language = classifier.Language
}
p.SetID()
return []pkg.Package{p}
}
func getContents(resolver source.FileResolver, location source.Location) ([]byte, error) { func getContents(resolver source.FileResolver, location source.Location) ([]byte, error) {
reader, err := resolver.FileContentsByLocation(location) reader, err := resolver.FileContentsByLocation(location)
if err != nil { if err != nil {

View File

@ -0,0 +1,63 @@
package binary
import (
"reflect"
"github.com/anchore/syft/syft/cpe"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/source"
)
func newPackage(classifier classifier, location source.Location, matchMetadata map[string]string) *pkg.Package {
version, ok := matchMetadata["version"]
if !ok {
return nil
}
update := matchMetadata["update"]
var cpes []cpe.CPE
for _, c := range classifier.CPEs {
c.Version = version
c.Update = update
cpes = append(cpes, c)
}
p := pkg.Package{
Name: classifier.Package,
Version: version,
Locations: source.NewLocationSet(
location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
),
Type: pkg.BinaryPkg,
CPEs: cpes,
FoundBy: catalogerName,
MetadataType: pkg.BinaryMetadataType,
Metadata: pkg.BinaryMetadata{
Matches: []pkg.ClassifierMatch{
{
Classifier: classifier.Class,
Location: location,
},
},
},
}
if classifier.Type != "" {
p.Type = classifier.Type
}
if !reflect.DeepEqual(classifier.PURL, emptyPURL) {
purl := classifier.PURL
purl.Version = version
p.PURL = purl.ToString()
}
if classifier.Language != "" {
p.Language = classifier.Language
}
p.SetID()
return &p
}

View File

@ -48,7 +48,10 @@ func parseConanfile(_ source.FileResolver, _ *generic.Environment, reader source
continue continue
} }
p := newConanfilePackage(m, reader.Location) p := newConanfilePackage(
m,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
)
if p == nil { if p == nil {
continue continue
} }

View File

@ -44,7 +44,10 @@ func parseConanlock(_ source.FileResolver, _ *generic.Environment, reader source
Context: node.Context, Context: node.Context,
} }
p := newConanlockPackage(metadata, reader.Location) p := newConanlockPackage(
metadata,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
)
if p != nil { if p != nil {
pkgs = append(pkgs, *p) pkgs = append(pkgs, *p)

View File

@ -58,7 +58,13 @@ func parsePubspecLock(_ source.FileResolver, _ *generic.Environment, reader sour
for _, name := range names { for _, name := range names {
pubPkg := p.Packages[name] pubPkg := p.Packages[name]
pkgs = append(pkgs, newPubspecLockPackage(name, pubPkg, reader.Location)) pkgs = append(pkgs,
newPubspecLockPackage(
name,
pubPkg,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
),
)
} }
return pkgs, nil, nil return pkgs, nil, nil

View File

@ -1,5 +1,5 @@
/* /*
Package dpkg provides a concrete Cataloger implementation for Debian package DB status files. Package deb provides a concrete Cataloger implementation for Debian package DB status files.
*/ */
package deb package deb

View File

@ -25,7 +25,7 @@ func newDpkgPackage(d pkg.DpkgMetadata, dbLocation source.Location, resolver sou
p := pkg.Package{ p := pkg.Package{
Name: d.Package, Name: d.Package,
Version: d.Version, Version: d.Version,
Locations: source.NewLocationSet(dbLocation), Locations: source.NewLocationSet(dbLocation.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)),
PURL: packageURL(d, release), PURL: packageURL(d, release),
Type: pkg.DebPkg, Type: pkg.DebPkg,
MetadataType: pkg.DpkgMetadataType, MetadataType: pkg.DpkgMetadataType,
@ -162,6 +162,7 @@ func getAdditionalFileListing(resolver source.FileResolver, dbLocation source.Lo
return files, locations return files, locations
} }
//nolint:dupl
func fetchMd5Contents(resolver source.FileResolver, dbLocation source.Location, m pkg.DpkgMetadata) (io.ReadCloser, *source.Location) { func fetchMd5Contents(resolver source.FileResolver, dbLocation source.Location, m pkg.DpkgMetadata) (io.ReadCloser, *source.Location) {
var md5Reader io.ReadCloser var md5Reader io.ReadCloser
var err error var err error
@ -182,17 +183,22 @@ func fetchMd5Contents(resolver source.FileResolver, dbLocation source.Location,
location = resolver.RelativeFileByPath(dbLocation, path.Join(parentPath, "info", m.Package+md5sumsExt)) location = resolver.RelativeFileByPath(dbLocation, path.Join(parentPath, "info", m.Package+md5sumsExt))
} }
// this is unexpected, but not a show-stopper if location == nil {
if location != nil { return nil, nil
md5Reader, err = resolver.FileContentsByLocation(*location)
if err != nil {
log.Warnf("failed to fetch deb md5 contents (package=%s): %+v", m.Package, err)
}
} }
return md5Reader, location // this is unexpected, but not a show-stopper
md5Reader, err = resolver.FileContentsByLocation(*location)
if err != nil {
log.Warnf("failed to fetch deb md5 contents (package=%s): %+v", m.Package, err)
}
l := location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.SupportingEvidenceAnnotation)
return md5Reader, &l
} }
//nolint:dupl
func fetchConffileContents(resolver source.FileResolver, dbLocation source.Location, m pkg.DpkgMetadata) (io.ReadCloser, *source.Location) { func fetchConffileContents(resolver source.FileResolver, dbLocation source.Location, m pkg.DpkgMetadata) (io.ReadCloser, *source.Location) {
var reader io.ReadCloser var reader io.ReadCloser
var err error var err error
@ -213,15 +219,19 @@ func fetchConffileContents(resolver source.FileResolver, dbLocation source.Locat
location = resolver.RelativeFileByPath(dbLocation, path.Join(parentPath, "info", m.Package+conffilesExt)) location = resolver.RelativeFileByPath(dbLocation, path.Join(parentPath, "info", m.Package+conffilesExt))
} }
// this is unexpected, but not a show-stopper if location == nil {
if location != nil { return nil, nil
reader, err = resolver.FileContentsByLocation(*location)
if err != nil {
log.Warnf("failed to fetch deb conffiles contents (package=%s): %+v", m.Package, err)
}
} }
return reader, location // this is unexpected, but not a show-stopper
reader, err = resolver.FileContentsByLocation(*location)
if err != nil {
log.Warnf("failed to fetch deb conffiles contents (package=%s): %+v", m.Package, err)
}
l := location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.SupportingEvidenceAnnotation)
return reader, &l
} }
func fetchCopyrightContents(resolver source.FileResolver, dbLocation source.Location, m pkg.DpkgMetadata) (io.ReadCloser, *source.Location) { func fetchCopyrightContents(resolver source.FileResolver, dbLocation source.Location, m pkg.DpkgMetadata) (io.ReadCloser, *source.Location) {
@ -243,7 +253,9 @@ func fetchCopyrightContents(resolver source.FileResolver, dbLocation source.Loca
log.Warnf("failed to fetch deb copyright contents (package=%s): %w", m.Package, err) log.Warnf("failed to fetch deb copyright contents (package=%s): %w", m.Package, err)
} }
return reader, location l := location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.SupportingEvidenceAnnotation)
return reader, &l
} }
func md5Key(metadata pkg.DpkgMetadata) string { func md5Key(metadata pkg.DpkgMetadata) string {

View File

@ -45,7 +45,11 @@ func parseDotnetDeps(_ source.FileResolver, _ *generic.Environment, reader sourc
for _, nameVersion := range names { for _, nameVersion := range names {
lib := p.Libraries[nameVersion] lib := p.Libraries[nameVersion]
dotnetPkg := newDotnetDepsPackage(nameVersion, lib, reader.Location) dotnetPkg := newDotnetDepsPackage(
nameVersion,
lib,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
)
if dotnetPkg != nil { if dotnetPkg != nil {
pkgs = append(pkgs, *dotnetPkg) pkgs = append(pkgs, *dotnetPkg)

View File

@ -43,11 +43,16 @@ func parseMixLock(_ source.FileResolver, _ *generic.Environment, reader source.L
continue continue
} }
packages = append(packages, newPackage(pkg.MixLockMetadata{ packages = append(packages,
Name: name, newPackage(
Version: version, pkg.MixLockMetadata{
PkgHash: hash, Name: name,
PkgHashExt: hashExt, Version: version,
})) PkgHash: hash,
PkgHashExt: hashExt,
},
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
),
)
} }
} }

View File

@ -6,15 +6,18 @@ import (
"github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest"
"github.com/anchore/syft/syft/source"
) )
func TestParseMixLock(t *testing.T) { func TestParseMixLock(t *testing.T) {
locations := source.NewLocationSet(source.NewLocation("test-fixtures/mix.lock"))
expected := []pkg.Package{ expected := []pkg.Package{
{ {
Name: "castore", Name: "castore",
Version: "0.1.17", Version: "0.1.17",
Language: pkg.Elixir, Language: pkg.Elixir,
Type: pkg.HexPkg, Type: pkg.HexPkg,
Locations: locations,
PURL: "pkg:hex/castore@0.1.17", PURL: "pkg:hex/castore@0.1.17",
MetadataType: pkg.MixLockMetadataType, MetadataType: pkg.MixLockMetadataType,
Metadata: pkg.MixLockMetadata{ Metadata: pkg.MixLockMetadata{
@ -29,6 +32,7 @@ func TestParseMixLock(t *testing.T) {
Version: "1.1.0", Version: "1.1.0",
Language: pkg.Elixir, Language: pkg.Elixir,
Type: pkg.HexPkg, Type: pkg.HexPkg,
Locations: locations,
PURL: "pkg:hex/connection@1.1.0", PURL: "pkg:hex/connection@1.1.0",
MetadataType: pkg.MixLockMetadataType, MetadataType: pkg.MixLockMetadataType,
Metadata: pkg.MixLockMetadata{ Metadata: pkg.MixLockMetadata{
@ -43,6 +47,7 @@ func TestParseMixLock(t *testing.T) {
Version: "2.9.0", Version: "2.9.0",
Language: pkg.Elixir, Language: pkg.Elixir,
Type: pkg.HexPkg, Type: pkg.HexPkg,
Locations: locations,
PURL: "pkg:hex/cowboy@2.9.0", PURL: "pkg:hex/cowboy@2.9.0",
MetadataType: pkg.MixLockMetadataType, MetadataType: pkg.MixLockMetadataType,
Metadata: pkg.MixLockMetadata{ Metadata: pkg.MixLockMetadata{
@ -57,6 +62,7 @@ func TestParseMixLock(t *testing.T) {
Version: "0.4.0", Version: "0.4.0",
Language: pkg.Elixir, Language: pkg.Elixir,
Type: pkg.HexPkg, Type: pkg.HexPkg,
Locations: locations,
PURL: "pkg:hex/cowboy_telemetry@0.4.0", PURL: "pkg:hex/cowboy_telemetry@0.4.0",
MetadataType: pkg.MixLockMetadataType, MetadataType: pkg.MixLockMetadataType,
Metadata: pkg.MixLockMetadata{ Metadata: pkg.MixLockMetadata{
@ -71,6 +77,7 @@ func TestParseMixLock(t *testing.T) {
Version: "2.11.0", Version: "2.11.0",
Language: pkg.Elixir, Language: pkg.Elixir,
Type: pkg.HexPkg, Type: pkg.HexPkg,
Locations: locations,
PURL: "pkg:hex/cowlib@2.11.0", PURL: "pkg:hex/cowlib@2.11.0",
MetadataType: pkg.MixLockMetadataType, MetadataType: pkg.MixLockMetadataType,
Metadata: pkg.MixLockMetadata{ Metadata: pkg.MixLockMetadata{
@ -85,6 +92,7 @@ func TestParseMixLock(t *testing.T) {
Version: "2.4.2", Version: "2.4.2",
Language: pkg.Elixir, Language: pkg.Elixir,
Type: pkg.HexPkg, Type: pkg.HexPkg,
Locations: locations,
PURL: "pkg:hex/db_connection@2.4.2", PURL: "pkg:hex/db_connection@2.4.2",
MetadataType: pkg.MixLockMetadataType, MetadataType: pkg.MixLockMetadataType,
Metadata: pkg.MixLockMetadata{ Metadata: pkg.MixLockMetadata{
@ -99,6 +107,7 @@ func TestParseMixLock(t *testing.T) {
Version: "2.0.0", Version: "2.0.0",
Language: pkg.Elixir, Language: pkg.Elixir,
Type: pkg.HexPkg, Type: pkg.HexPkg,
Locations: locations,
PURL: "pkg:hex/decimal@2.0.0", PURL: "pkg:hex/decimal@2.0.0",
MetadataType: pkg.MixLockMetadataType, MetadataType: pkg.MixLockMetadataType,
Metadata: pkg.MixLockMetadata{ Metadata: pkg.MixLockMetadata{
@ -113,6 +122,7 @@ func TestParseMixLock(t *testing.T) {
Version: "1.4.25", Version: "1.4.25",
Language: pkg.Elixir, Language: pkg.Elixir,
Type: pkg.HexPkg, Type: pkg.HexPkg,
Locations: locations,
PURL: "pkg:hex/earmark_parser@1.4.25", PURL: "pkg:hex/earmark_parser@1.4.25",
MetadataType: pkg.MixLockMetadataType, MetadataType: pkg.MixLockMetadataType,
Metadata: pkg.MixLockMetadata{ Metadata: pkg.MixLockMetadata{
@ -127,6 +137,7 @@ func TestParseMixLock(t *testing.T) {
Version: "3.8.1", Version: "3.8.1",
Language: pkg.Elixir, Language: pkg.Elixir,
Type: pkg.HexPkg, Type: pkg.HexPkg,
Locations: locations,
PURL: "pkg:hex/ecto@3.8.1", PURL: "pkg:hex/ecto@3.8.1",
MetadataType: pkg.MixLockMetadataType, MetadataType: pkg.MixLockMetadataType,
Metadata: pkg.MixLockMetadata{ Metadata: pkg.MixLockMetadata{
@ -141,6 +152,7 @@ func TestParseMixLock(t *testing.T) {
Version: "3.8.1", Version: "3.8.1",
Language: pkg.Elixir, Language: pkg.Elixir,
Type: pkg.HexPkg, Type: pkg.HexPkg,
Locations: locations,
PURL: "pkg:hex/ecto_sql@3.8.1", PURL: "pkg:hex/ecto_sql@3.8.1",
MetadataType: pkg.MixLockMetadataType, MetadataType: pkg.MixLockMetadataType,
Metadata: pkg.MixLockMetadata{ Metadata: pkg.MixLockMetadata{
@ -155,6 +167,7 @@ func TestParseMixLock(t *testing.T) {
Version: "0.5.0", Version: "0.5.0",
Language: pkg.Elixir, Language: pkg.Elixir,
Type: pkg.HexPkg, Type: pkg.HexPkg,
Locations: locations,
PURL: "pkg:hex/esbuild@0.5.0", PURL: "pkg:hex/esbuild@0.5.0",
MetadataType: pkg.MixLockMetadataType, MetadataType: pkg.MixLockMetadataType,
Metadata: pkg.MixLockMetadata{ Metadata: pkg.MixLockMetadata{
@ -169,6 +182,7 @@ func TestParseMixLock(t *testing.T) {
Version: "0.28.4", Version: "0.28.4",
Language: pkg.Elixir, Language: pkg.Elixir,
Type: pkg.HexPkg, Type: pkg.HexPkg,
Locations: locations,
PURL: "pkg:hex/ex_doc@0.28.4", PURL: "pkg:hex/ex_doc@0.28.4",
MetadataType: pkg.MixLockMetadataType, MetadataType: pkg.MixLockMetadataType,
Metadata: pkg.MixLockMetadata{ Metadata: pkg.MixLockMetadata{
@ -183,6 +197,7 @@ func TestParseMixLock(t *testing.T) {
Version: "0.19.1", Version: "0.19.1",
Language: pkg.Elixir, Language: pkg.Elixir,
Type: pkg.HexPkg, Type: pkg.HexPkg,
Locations: locations,
PURL: "pkg:hex/gettext@0.19.1", PURL: "pkg:hex/gettext@0.19.1",
MetadataType: pkg.MixLockMetadataType, MetadataType: pkg.MixLockMetadataType,
Metadata: pkg.MixLockMetadata{ Metadata: pkg.MixLockMetadata{
@ -197,6 +212,7 @@ func TestParseMixLock(t *testing.T) {
Version: "0.1.1", Version: "0.1.1",
Language: pkg.Elixir, Language: pkg.Elixir,
Type: pkg.HexPkg, Type: pkg.HexPkg,
Locations: locations,
PURL: "pkg:hex/hpax@0.1.1", PURL: "pkg:hex/hpax@0.1.1",
MetadataType: pkg.MixLockMetadataType, MetadataType: pkg.MixLockMetadataType,
Metadata: pkg.MixLockMetadata{ Metadata: pkg.MixLockMetadata{
@ -211,6 +227,7 @@ func TestParseMixLock(t *testing.T) {
Version: "1.3.0", Version: "1.3.0",
Language: pkg.Elixir, Language: pkg.Elixir,
Type: pkg.HexPkg, Type: pkg.HexPkg,
Locations: locations,
PURL: "pkg:hex/jason@1.3.0", PURL: "pkg:hex/jason@1.3.0",
MetadataType: pkg.MixLockMetadataType, MetadataType: pkg.MixLockMetadataType,
Metadata: pkg.MixLockMetadata{ Metadata: pkg.MixLockMetadata{

View File

@ -48,10 +48,13 @@ func parseRebarLock(_ source.FileResolver, _ *generic.Environment, reader source
version = versionNode.Get(2).Get(1).String() version = versionNode.Get(2).Get(1).String()
} }
p := newPackage(pkg.RebarLockMetadata{ p := newPackage(
Name: name, pkg.RebarLockMetadata{
Version: version, Name: name,
}) Version: version,
},
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
)
pkgMap[name] = &p pkgMap[name] = &p
} }

View File

@ -6,6 +6,7 @@ import (
"github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest"
"github.com/anchore/syft/syft/source"
) )
func TestParseRebarLock(t *testing.T) { func TestParseRebarLock(t *testing.T) {
@ -261,6 +262,10 @@ func TestParseRebarLock(t *testing.T) {
// TODO: relationships are not under test // TODO: relationships are not under test
var expectedRelationships []artifact.Relationship var expectedRelationships []artifact.Relationship
for idx := range test.expected {
test.expected[idx].Locations = source.NewLocationSet(source.NewLocation(test.fixture))
}
pkgtest.TestFileParser(t, test.fixture, parseRebarLock, test.expected, expectedRelationships) pkgtest.TestFileParser(t, test.fixture, parseRebarLock, test.expected, expectedRelationships)
}) })
} }

View File

@ -62,7 +62,15 @@ func (c *goBinaryCataloger) parseGoBinary(resolver source.FileResolver, _ *gener
func (c *goBinaryCataloger) makeGoMainPackage(resolver source.FileResolver, mod *debug.BuildInfo, arch string, location source.Location) pkg.Package { func (c *goBinaryCataloger) makeGoMainPackage(resolver source.FileResolver, mod *debug.BuildInfo, arch string, location source.Location) pkg.Package {
gbs := getBuildSettings(mod.Settings) gbs := getBuildSettings(mod.Settings)
main := c.newGoBinaryPackage(resolver, &mod.Main, mod.Main.Path, mod.GoVersion, arch, gbs, location) main := c.newGoBinaryPackage(
resolver,
&mod.Main,
mod.Main.Path,
mod.GoVersion,
arch,
gbs,
location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
)
if main.Version == devel { if main.Version == devel {
if version, ok := gbs["vcs.revision"]; ok { if version, ok := gbs["vcs.revision"]; ok {
if timestamp, ok := gbs["vcs.time"]; ok { if timestamp, ok := gbs["vcs.time"]; ok {
@ -204,7 +212,15 @@ func (c *goBinaryCataloger) buildGoPkgInfo(resolver source.FileResolver, locatio
if dep == nil { if dep == nil {
continue continue
} }
p := c.newGoBinaryPackage(resolver, dep, mod.Main.Path, mod.GoVersion, arch, nil, location) p := c.newGoBinaryPackage(
resolver,
dep,
mod.Main.Path,
mod.GoVersion,
arch,
nil,
location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
)
if pkg.IsValid(&p) { if pkg.IsValid(&p) {
pkgs = append(pkgs, p) pkgs = append(pkgs, p)
} }

View File

@ -136,12 +136,12 @@ func TestBuildGoPkgInfo(t *testing.T) {
Version: "(devel)", Version: "(devel)",
PURL: "pkg:golang/github.com/anchore/syft@(devel)", PURL: "pkg:golang/github.com/anchore/syft@(devel)",
Locations: source.NewLocationSet( Locations: source.NewLocationSet(
source.Location{ source.NewLocationFromCoordinates(
Coordinates: source.Coordinates{ source.Coordinates{
RealPath: "/a-path", RealPath: "/a-path",
FileSystemID: "layer-id", FileSystemID: "layer-id",
}, },
}, ).WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
), ),
MetadataType: pkg.GolangBinMetadataType, MetadataType: pkg.GolangBinMetadataType,
Metadata: pkg.GolangBinMetadata{ Metadata: pkg.GolangBinMetadata{
@ -183,12 +183,12 @@ func TestBuildGoPkgInfo(t *testing.T) {
Language: pkg.Go, Language: pkg.Go,
Type: pkg.GoModulePkg, Type: pkg.GoModulePkg,
Locations: source.NewLocationSet( Locations: source.NewLocationSet(
source.Location{ source.NewLocationFromCoordinates(
Coordinates: source.Coordinates{ source.Coordinates{
RealPath: "/a-path", RealPath: "/a-path",
FileSystemID: "layer-id", FileSystemID: "layer-id",
}, },
}, ).WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
), ),
MetadataType: pkg.GolangBinMetadataType, MetadataType: pkg.GolangBinMetadataType,
Metadata: pkg.GolangBinMetadata{}, Metadata: pkg.GolangBinMetadata{},
@ -226,12 +226,12 @@ func TestBuildGoPkgInfo(t *testing.T) {
Language: pkg.Go, Language: pkg.Go,
Type: pkg.GoModulePkg, Type: pkg.GoModulePkg,
Locations: source.NewLocationSet( Locations: source.NewLocationSet(
source.Location{ source.NewLocationFromCoordinates(
Coordinates: source.Coordinates{ source.Coordinates{
RealPath: "/a-path", RealPath: "/a-path",
FileSystemID: "layer-id", FileSystemID: "layer-id",
}, },
}, ).WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
), ),
MetadataType: pkg.GolangBinMetadataType, MetadataType: pkg.GolangBinMetadataType,
Metadata: pkg.GolangBinMetadata{ Metadata: pkg.GolangBinMetadata{
@ -262,12 +262,12 @@ func TestBuildGoPkgInfo(t *testing.T) {
Language: pkg.Go, Language: pkg.Go,
Type: pkg.GoModulePkg, Type: pkg.GoModulePkg,
Locations: source.NewLocationSet( Locations: source.NewLocationSet(
source.Location{ source.NewLocationFromCoordinates(
Coordinates: source.Coordinates{ source.Coordinates{
RealPath: "/a-path", RealPath: "/a-path",
FileSystemID: "layer-id", FileSystemID: "layer-id",
}, },
}, ).WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
), ),
MetadataType: pkg.GolangBinMetadataType, MetadataType: pkg.GolangBinMetadataType,
Metadata: pkg.GolangBinMetadata{ Metadata: pkg.GolangBinMetadata{
@ -320,12 +320,12 @@ func TestBuildGoPkgInfo(t *testing.T) {
Version: "v0.0.0-20221014195457-41bc6bb41035", Version: "v0.0.0-20221014195457-41bc6bb41035",
PURL: "pkg:golang/github.com/anchore/syft@v0.0.0-20221014195457-41bc6bb41035", PURL: "pkg:golang/github.com/anchore/syft@v0.0.0-20221014195457-41bc6bb41035",
Locations: source.NewLocationSet( Locations: source.NewLocationSet(
source.Location{ source.NewLocationFromCoordinates(
Coordinates: source.Coordinates{ source.Coordinates{
RealPath: "/a-path", RealPath: "/a-path",
FileSystemID: "layer-id", FileSystemID: "layer-id",
}, },
}, ).WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
), ),
MetadataType: pkg.GolangBinMetadataType, MetadataType: pkg.GolangBinMetadataType,
Metadata: pkg.GolangBinMetadata{ Metadata: pkg.GolangBinMetadata{
@ -375,12 +375,12 @@ func TestBuildGoPkgInfo(t *testing.T) {
Language: pkg.Go, Language: pkg.Go,
Type: pkg.GoModulePkg, Type: pkg.GoModulePkg,
Locations: source.NewLocationSet( Locations: source.NewLocationSet(
source.Location{ source.NewLocationFromCoordinates(
Coordinates: source.Coordinates{ source.Coordinates{
RealPath: "/a-path", RealPath: "/a-path",
FileSystemID: "layer-id", FileSystemID: "layer-id",
}, },
}, ).WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
), ),
MetadataType: pkg.GolangBinMetadataType, MetadataType: pkg.GolangBinMetadataType,
Metadata: pkg.GolangBinMetadata{ Metadata: pkg.GolangBinMetadata{
@ -397,12 +397,12 @@ func TestBuildGoPkgInfo(t *testing.T) {
Language: pkg.Go, Language: pkg.Go,
Type: pkg.GoModulePkg, Type: pkg.GoModulePkg,
Locations: source.NewLocationSet( Locations: source.NewLocationSet(
source.Location{ source.NewLocationFromCoordinates(
Coordinates: source.Coordinates{ source.Coordinates{
RealPath: "/a-path", RealPath: "/a-path",
FileSystemID: "layer-id", FileSystemID: "layer-id",
}, },
}, ).WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
), ),
MetadataType: pkg.GolangBinMetadataType, MetadataType: pkg.GolangBinMetadataType,
Metadata: pkg.GolangBinMetadata{ Metadata: pkg.GolangBinMetadata{
@ -452,12 +452,12 @@ func TestBuildGoPkgInfo(t *testing.T) {
Language: pkg.Go, Language: pkg.Go,
Type: pkg.GoModulePkg, Type: pkg.GoModulePkg,
Locations: source.NewLocationSet( Locations: source.NewLocationSet(
source.Location{ source.NewLocationFromCoordinates(
Coordinates: source.Coordinates{ source.Coordinates{
RealPath: "/a-path", RealPath: "/a-path",
FileSystemID: "layer-id", FileSystemID: "layer-id",
}, },
}, ).WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
), ),
MetadataType: pkg.GolangBinMetadataType, MetadataType: pkg.GolangBinMetadataType,
Metadata: pkg.GolangBinMetadata{ Metadata: pkg.GolangBinMetadata{
@ -473,12 +473,12 @@ func TestBuildGoPkgInfo(t *testing.T) {
Language: pkg.Go, Language: pkg.Go,
Type: pkg.GoModulePkg, Type: pkg.GoModulePkg,
Locations: source.NewLocationSet( Locations: source.NewLocationSet(
source.Location{ source.NewLocationFromCoordinates(
Coordinates: source.Coordinates{ source.Coordinates{
RealPath: "/a-path", RealPath: "/a-path",
FileSystemID: "layer-id", FileSystemID: "layer-id",
}, },
}, ).WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
), ),
MetadataType: pkg.GolangBinMetadataType, MetadataType: pkg.GolangBinMetadataType,
Metadata: pkg.GolangBinMetadata{ Metadata: pkg.GolangBinMetadata{
@ -502,12 +502,12 @@ func TestBuildGoPkgInfo(t *testing.T) {
} }
p.SetID() p.SetID()
} }
location := source.Location{ location := source.NewLocationFromCoordinates(
Coordinates: source.Coordinates{ source.Coordinates{
RealPath: "/a-path", RealPath: "/a-path",
FileSystemID: "layer-id", FileSystemID: "layer-id",
}, },
} )
c := goBinaryCataloger{} c := goBinaryCataloger{}
pkgs := c.buildGoPkgInfo(source.NewMockResolverForPaths(), location, test.mod, test.arch) pkgs := c.buildGoPkgInfo(source.NewMockResolverForPaths(), location, test.mod, test.arch)

View File

@ -51,7 +51,7 @@ func (c *goModCataloger) parseGoModFile(resolver source.FileResolver, _ *generic
Name: m.Mod.Path, Name: m.Mod.Path,
Version: m.Mod.Version, Version: m.Mod.Version,
Licenses: licenses, Licenses: licenses,
Locations: source.NewLocationSet(reader.Location), Locations: source.NewLocationSet(reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)),
PURL: packageURL(m.Mod.Path, m.Mod.Version), PURL: packageURL(m.Mod.Path, m.Mod.Version),
Language: pkg.Go, Language: pkg.Go,
Type: pkg.GoModulePkg, Type: pkg.GoModulePkg,
@ -73,7 +73,7 @@ func (c *goModCataloger) parseGoModFile(resolver source.FileResolver, _ *generic
Name: m.New.Path, Name: m.New.Path,
Version: m.New.Version, Version: m.New.Version,
Licenses: licenses, Licenses: licenses,
Locations: source.NewLocationSet(reader.Location), Locations: source.NewLocationSet(reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)),
PURL: packageURL(m.New.Path, m.New.Version), PURL: packageURL(m.New.Path, m.New.Version),
Language: pkg.Go, Language: pkg.Go,
Type: pkg.GoModulePkg, Type: pkg.GoModulePkg,

View File

@ -46,6 +46,14 @@ func parseCabalFreeze(_ source.FileResolver, _ *generic.Environment, reader sour
fields := strings.Split(line, " ==") fields := strings.Split(line, " ==")
pkgName, pkgVersion := fields[0], fields[1] pkgName, pkgVersion := fields[0], fields[1]
pkgs = append(pkgs, newPackage(pkgName, pkgVersion, nil, reader.Location)) pkgs = append(
pkgs,
newPackage(
pkgName,
pkgVersion,
nil,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
),
)
} }
} }

View File

@ -62,10 +62,18 @@ func parseStackLock(_ source.FileResolver, _ *generic.Environment, reader source
for _, pack := range lockFile.Packages { for _, pack := range lockFile.Packages {
pkgName, pkgVersion, pkgHash := parseStackPackageEncoding(pack.Completed.Hackage) pkgName, pkgVersion, pkgHash := parseStackPackageEncoding(pack.Completed.Hackage)
pkgs = append(pkgs, newPackage(pkgName, pkgVersion, &pkg.HackageMetadata{ pkgs = append(
PkgHash: pkgHash, pkgs,
SnapshotURL: snapshotURL, newPackage(
}, reader.Location)) pkgName,
pkgVersion,
&pkg.HackageMetadata{
PkgHash: pkgHash,
SnapshotURL: snapshotURL,
},
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
),
)
} }
return pkgs, nil, nil return pkgs, nil, nil

View File

@ -34,9 +34,17 @@ func parseStackYaml(_ source.FileResolver, _ *generic.Environment, reader source
var pkgs []pkg.Package var pkgs []pkg.Package
for _, dep := range stackFile.ExtraDeps { for _, dep := range stackFile.ExtraDeps {
pkgName, pkgVersion, pkgHash := parseStackPackageEncoding(dep) pkgName, pkgVersion, pkgHash := parseStackPackageEncoding(dep)
pkgs = append(pkgs, newPackage(pkgName, pkgVersion, &pkg.HackageMetadata{ pkgs = append(
PkgHash: pkgHash, pkgs,
}, reader.Location)) newPackage(
pkgName,
pkgVersion,
&pkg.HackageMetadata{
PkgHash: pkgHash,
},
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
),
)
} }
return pkgs, nil, nil return pkgs, nil, nil

View File

@ -186,11 +186,13 @@ func (j *archiveParser) discoverMainPackage() (*pkg.Package, error) {
} }
return &pkg.Package{ return &pkg.Package{
Name: selectName(manifest, j.fileInfo), Name: selectName(manifest, j.fileInfo),
Version: selectVersion(manifest, j.fileInfo), Version: selectVersion(manifest, j.fileInfo),
Licenses: selectLicense(manifest), Licenses: selectLicense(manifest),
Language: pkg.Java, Language: pkg.Java,
Locations: source.NewLocationSet(j.location), Locations: source.NewLocationSet(
j.location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
),
Type: j.fileInfo.pkgType(), Type: j.fileInfo.pkgType(),
MetadataType: pkg.JavaMetadataType, MetadataType: pkg.JavaMetadataType,
Metadata: pkg.JavaMetadata{ Metadata: pkg.JavaMetadata{
@ -380,9 +382,11 @@ func newPackageFromMavenData(pomProperties pkg.PomProperties, pomProject *pkg.Po
// discovered props = new package // discovered props = new package
p := pkg.Package{ p := pkg.Package{
Name: pomProperties.ArtifactID, Name: pomProperties.ArtifactID,
Version: pomProperties.Version, Version: pomProperties.Version,
Locations: source.NewLocationSet(location), Locations: source.NewLocationSet(
location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
),
Language: pkg.Java, Language: pkg.Java,
Type: pomProperties.PkgTypeIndicated(), Type: pomProperties.PkgTypeIndicated(),
MetadataType: pkg.JavaMetadataType, MetadataType: pkg.JavaMetadataType,

View File

@ -49,9 +49,11 @@ func parseGradleLockfile(_ source.FileResolver, _ *generic.Environment, reader s
// map the dependencies // map the dependencies
for _, dep := range dependencies { for _, dep := range dependencies {
mappedPkg := pkg.Package{ mappedPkg := pkg.Package{
Name: dep.Name, Name: dep.Name,
Version: dep.Version, Version: dep.Version,
Locations: source.NewLocationSet(reader.Location), Locations: source.NewLocationSet(
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
),
Language: pkg.Java, Language: pkg.Java,
Type: pkg.JavaPkg, Type: pkg.JavaPkg,
MetadataType: pkg.JavaMetadataType, MetadataType: pkg.JavaMetadataType,

View File

@ -29,7 +29,11 @@ func parserPomXML(_ source.FileResolver, _ *generic.Environment, reader source.L
var pkgs []pkg.Package var pkgs []pkg.Package
for _, dep := range pom.Dependencies { for _, dep := range pom.Dependencies {
p := newPackageFromPom(pom, dep, reader.Location) p := newPackageFromPom(
pom,
dep,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
)
if p.Name == "" { if p.Name == "" {
continue continue
} }

View File

@ -66,7 +66,7 @@ func newPackageLockV1Package(resolver source.FileResolver, location source.Locat
pkg.Package{ pkg.Package{
Name: name, Name: name,
Version: version, Version: version,
Locations: source.NewLocationSet(location), Locations: source.NewLocationSet(location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)),
PURL: packageURL(name, version), PURL: packageURL(name, version),
Language: pkg.JavaScript, Language: pkg.JavaScript,
Type: pkg.NpmPkg, Type: pkg.NpmPkg,
@ -89,7 +89,7 @@ func newPackageLockV2Package(resolver source.FileResolver, location source.Locat
pkg.Package{ pkg.Package{
Name: name, Name: name,
Version: u.Version, Version: u.Version,
Locations: source.NewLocationSet(location), Locations: source.NewLocationSet(location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)),
PURL: packageURL(name, u.Version), PURL: packageURL(name, u.Version),
Language: pkg.JavaScript, Language: pkg.JavaScript,
Type: pkg.NpmPkg, Type: pkg.NpmPkg,
@ -107,7 +107,7 @@ func newPnpmPackage(resolver source.FileResolver, location source.Location, name
pkg.Package{ pkg.Package{
Name: name, Name: name,
Version: version, Version: version,
Locations: source.NewLocationSet(location), Locations: source.NewLocationSet(location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)),
PURL: packageURL(name, version), PURL: packageURL(name, version),
Language: pkg.JavaScript, Language: pkg.JavaScript,
Type: pkg.NpmPkg, Type: pkg.NpmPkg,
@ -122,7 +122,7 @@ func newYarnLockPackage(resolver source.FileResolver, location source.Location,
pkg.Package{ pkg.Package{
Name: name, Name: name,
Version: version, Version: version,
Locations: source.NewLocationSet(location), Locations: source.NewLocationSet(location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)),
PURL: packageURL(name, version), PURL: packageURL(name, version),
Language: pkg.JavaScript, Language: pkg.JavaScript,
Type: pkg.NpmPkg, Type: pkg.NpmPkg,

View File

@ -68,7 +68,10 @@ func parsePackageJSON(_ source.FileResolver, _ *generic.Environment, reader sour
return nil, nil, nil return nil, nil, nil
} }
pkgs = append(pkgs, newPackageJSONPackage(p, reader.Location)) pkgs = append(
pkgs,
newPackageJSONPackage(p, reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)),
)
} }
pkg.Sort(pkgs) pkg.Sort(pkgs)

View File

@ -113,7 +113,10 @@ func parsePackageLock(resolver source.FileResolver, _ *generic.Environment, read
name = pkgMeta.Name name = pkgMeta.Name
} }
pkgs = append(pkgs, newPackageLockV2Package(resolver, reader.Location, getNameFromPath(name), pkgMeta)) pkgs = append(
pkgs,
newPackageLockV2Package(resolver, reader.Location, getNameFromPath(name), pkgMeta),
)
} }
} }

View File

@ -56,7 +56,7 @@ func (c *StoreCataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, [
continue continue
} }
p := newNixStorePackage(*storePath, location) p := newNixStorePackage(*storePath, location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation))
pkgs = append(pkgs, p) pkgs = append(pkgs, p)
} }

View File

@ -32,7 +32,13 @@ func parseComposerLock(_ source.FileResolver, _ *generic.Environment, reader sou
return nil, nil, fmt.Errorf("failed to parse composer.lock file: %w", err) return nil, nil, fmt.Errorf("failed to parse composer.lock file: %w", err)
} }
for _, m := range lock.Packages { for _, m := range lock.Packages {
pkgs = append(pkgs, newComposerLockPackage(m, reader.Location)) pkgs = append(
pkgs,
newComposerLockPackage(
m,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
),
)
} }
} }

View File

@ -53,7 +53,12 @@ func parseInstalledJSON(_ source.FileResolver, _ *generic.Environment, reader so
return nil, nil, fmt.Errorf("failed to parse installed.json file: %w", err) return nil, nil, fmt.Errorf("failed to parse installed.json file: %w", err)
} }
for _, pkgMeta := range lock.Packages { for _, pkgMeta := range lock.Packages {
pkgs = append(pkgs, newComposerLockPackage(pkgMeta, reader.Location)) pkgs = append(
pkgs,
newComposerLockPackage(pkgMeta,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
),
)
} }
} }

View File

@ -37,10 +37,12 @@ func parsePortageContents(resolver source.FileResolver, _ *generic.Environment,
} }
p := pkg.Package{ p := pkg.Package{
Name: name, Name: name,
Version: version, Version: version,
PURL: packageURL(name, version), PURL: packageURL(name, version),
Locations: source.NewLocationSet(), Locations: source.NewLocationSet(
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
),
Type: pkg.PortagePkg, Type: pkg.PortagePkg,
MetadataType: pkg.PortageMetadataType, MetadataType: pkg.PortageMetadataType,
Metadata: pkg.PortageMetadata{ Metadata: pkg.PortageMetadata{
@ -117,7 +119,7 @@ func addLicenses(resolver source.FileResolver, dbLocation source.Location, p *pk
licenses := findings.ToSlice() licenses := findings.ToSlice()
sort.Strings(licenses) sort.Strings(licenses)
p.Licenses = licenses p.Licenses = licenses
p.Locations.Add(*location) p.Locations.Add(location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.SupportingEvidenceAnnotation))
} }
func addSize(resolver source.FileResolver, dbLocation source.Location, p *pkg.Package) { func addSize(resolver source.FileResolver, dbLocation source.Location, p *pkg.Package) {
@ -150,5 +152,5 @@ func addSize(resolver source.FileResolver, dbLocation source.Location, p *pkg.Pa
} }
p.Metadata = entry p.Metadata = entry
p.Locations.Add(*location) p.Locations.Add(location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.SupportingEvidenceAnnotation))
} }

View File

@ -39,7 +39,14 @@ func parsePoetryLock(_ source.FileResolver, _ *generic.Environment, reader sourc
var pkgs []pkg.Package var pkgs []pkg.Package
for _, p := range metadata.Packages { for _, p := range metadata.Packages {
pkgs = append(pkgs, newPackageForIndex(p.Name, p.Version, reader.Location)) pkgs = append(
pkgs,
newPackageForIndex(
p.Name,
p.Version,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
),
)
} }
return pkgs, nil, nil return pkgs, nil, nil

View File

@ -61,7 +61,14 @@ func parseRequirementsTxt(_ source.FileResolver, _ *generic.Environment, reader
log.WithFields("path", reader.RealPath).Debugf("found empty package in requirements.txt line: %q", line) log.WithFields("path", reader.RealPath).Debugf("found empty package in requirements.txt line: %q", line)
continue continue
} }
packages = append(packages, newPackageForIndex(name, version, reader.Location)) packages = append(
packages,
newPackageForIndex(
name,
version,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
),
)
} }
if err := scanner.Err(); err != nil { if err := scanner.Err(); err != nil {

View File

@ -53,7 +53,14 @@ func parseSetup(_ source.FileResolver, _ *generic.Environment, reader source.Loc
continue continue
} }
packages = append(packages, newPackageForIndex(name, version, reader.Location)) packages = append(
packages,
newPackageForIndex(
name,
version,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
),
)
} }
} }

View File

@ -47,7 +47,7 @@ func fetchInstalledFiles(resolver source.FileResolver, metadataLocation source.L
installedFilesRef := resolver.RelativeFileByPath(metadataLocation, installedFilesPath) installedFilesRef := resolver.RelativeFileByPath(metadataLocation, installedFilesPath)
if installedFilesRef != nil { if installedFilesRef != nil {
sources = append(sources, *installedFilesRef) sources = append(sources, installedFilesRef.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.SupportingEvidenceAnnotation))
installedFilesContents, err := resolver.FileContentsByLocation(*installedFilesRef) installedFilesContents, err := resolver.FileContentsByLocation(*installedFilesRef)
if err != nil { if err != nil {
@ -78,7 +78,7 @@ func fetchRecordFiles(resolver source.FileResolver, metadataLocation source.Loca
recordRef := resolver.RelativeFileByPath(metadataLocation, recordPath) recordRef := resolver.RelativeFileByPath(metadataLocation, recordPath)
if recordRef != nil { if recordRef != nil {
sources = append(sources, *recordRef) sources = append(sources, recordRef.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.SupportingEvidenceAnnotation))
recordContents, err := resolver.FileContentsByLocation(*recordRef) recordContents, err := resolver.FileContentsByLocation(*recordRef)
if err != nil { if err != nil {
@ -105,7 +105,7 @@ func fetchTopLevelPackages(resolver source.FileResolver, metadataLocation source
return nil, nil, nil return nil, nil, nil
} }
sources = append(sources, *topLevelLocation) sources = append(sources, topLevelLocation.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.SupportingEvidenceAnnotation))
topLevelContents, err := resolver.FileContentsByLocation(*topLevelLocation) topLevelContents, err := resolver.FileContentsByLocation(*topLevelLocation)
if err != nil { if err != nil {
@ -134,7 +134,7 @@ func fetchDirectURLData(resolver source.FileResolver, metadataLocation source.Lo
return nil, nil, nil return nil, nil, nil
} }
sources = append(sources, *directURLLocation) sources = append(sources, directURLLocation.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.SupportingEvidenceAnnotation))
directURLContents, err := resolver.FileContentsByLocation(*directURLLocation) directURLContents, err := resolver.FileContentsByLocation(*directURLLocation)
if err != nil { if err != nil {
@ -161,7 +161,9 @@ func fetchDirectURLData(resolver source.FileResolver, metadataLocation source.Lo
// assembleEggOrWheelMetadata discovers and accumulates python package metadata from multiple file sources and returns a single metadata object as well as a list of files where the metadata was derived from. // assembleEggOrWheelMetadata discovers and accumulates python package metadata from multiple file sources and returns a single metadata object as well as a list of files where the metadata was derived from.
func assembleEggOrWheelMetadata(resolver source.FileResolver, metadataLocation source.Location) (*pkg.PythonPackageMetadata, []source.Location, error) { func assembleEggOrWheelMetadata(resolver source.FileResolver, metadataLocation source.Location) (*pkg.PythonPackageMetadata, []source.Location, error) {
var sources = []source.Location{metadataLocation} var sources = []source.Location{
metadataLocation.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
}
metadataContents, err := resolver.FileContentsByLocation(metadataLocation) metadataContents, err := resolver.FileContentsByLocation(metadataLocation)
if err != nil { if err != nil {

View File

@ -13,12 +13,12 @@ import (
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
) )
func newPackage(dbLocation source.Location, metadata pkg.RpmMetadata, distro *linux.Release) pkg.Package { func newPackage(location source.Location, metadata pkg.RpmMetadata, distro *linux.Release) pkg.Package {
p := pkg.Package{ p := pkg.Package{
Name: metadata.Name, Name: metadata.Name,
Version: toELVersion(metadata), Version: toELVersion(metadata),
PURL: packageURL(metadata, distro), PURL: packageURL(metadata, distro),
Locations: source.NewLocationSet(dbLocation), Locations: source.NewLocationSet(location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)),
Type: pkg.RpmPkg, Type: pkg.RpmPkg,
MetadataType: pkg.RpmMetadataType, MetadataType: pkg.RpmMetadataType,
Metadata: metadata, Metadata: metadata,

View File

@ -44,7 +44,7 @@ func parseGemFileLockEntries(_ source.FileResolver, _ *generic.Environment, read
newGemfileLockPackage( newGemfileLockPackage(
candidate[0], candidate[0],
strings.Trim(candidate[1], "()"), strings.Trim(candidate[1], "()"),
reader.Location, reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
), ),
) )
} }

View File

@ -95,7 +95,10 @@ func parseGemSpecEntries(_ source.FileResolver, _ *generic.Environment, reader s
return nil, nil, fmt.Errorf("unable to decode gem metadata: %w", err) return nil, nil, fmt.Errorf("unable to decode gem metadata: %w", err)
} }
pkgs = append(pkgs, newGemspecPackage(metadata, reader.Location)) pkgs = append(
pkgs,
newGemspecPackage(metadata, reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)),
)
} }
return pkgs, nil, nil return pkgs, nil, nil

View File

@ -31,7 +31,7 @@ func newPackagesFromAudit(location source.Location, versionInfo rustaudit.Versio
for _, dep := range versionInfo.Packages { for _, dep := range versionInfo.Packages {
dep := dep dep := dep
p := newPackageFromAudit(&dep, location) p := newPackageFromAudit(&dep, location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation))
if pkg.IsValid(&p) && dep.Kind == rustaudit.Runtime { if pkg.IsValid(&p) && dep.Kind == rustaudit.Runtime {
pkgs = append(pkgs, p) pkgs = append(pkgs, p)
} }

View File

@ -36,7 +36,13 @@ func parseCargoLock(_ source.FileResolver, _ *generic.Environment, reader source
if p.Dependencies == nil { if p.Dependencies == nil {
p.Dependencies = make([]string, 0) p.Dependencies = make([]string, 0)
} }
pkgs = append(pkgs, newPackageFromCargoMetadata(p, reader.Location)) pkgs = append(
pkgs,
newPackageFromCargoMetadata(
p,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
),
)
} }
return pkgs, nil, nil return pkgs, nil, nil

View File

@ -47,7 +47,9 @@ func parseSBOM(_ source.FileResolver, _ *generic.Environment, reader source.Loca
// Why not keep the original list of locations? Since the "locations" field is meant to capture // Why not keep the original list of locations? Since the "locations" field is meant to capture
// where there is evidence of this file, and the catalogers have not run against any file other than, // where there is evidence of this file, and the catalogers have not run against any file other than,
// the SBOM, this is the only location that is relevant for this cataloger. // the SBOM, this is the only location that is relevant for this cataloger.
p.Locations = source.NewLocationSet(reader.Location) p.Locations = source.NewLocationSet(
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
)
p.FoundBy = catalogerName p.FoundBy = catalogerName
pkgs = append(pkgs, p) pkgs = append(pkgs, p)

View File

@ -59,7 +59,15 @@ func parsePodfileLock(_ source.FileResolver, _ *generic.Environment, reader sour
return nil, nil, fmt.Errorf("malformed podfile.lock: incomplete checksums") return nil, nil, fmt.Errorf("malformed podfile.lock: incomplete checksums")
} }
pkgs = append(pkgs, newPackage(podName, podVersion, pkgHash, reader.Location)) pkgs = append(
pkgs,
newPackage(
podName,
podVersion,
pkgHash,
reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
),
)
} }
return pkgs, nil, nil return pkgs, nil, nil

7
syft/pkg/evidence.go Normal file
View File

@ -0,0 +1,7 @@
package pkg
const (
EvidenceAnnotationKey = "evidence"
PrimaryEvidenceAnnotation = "primary"
SupportingEvidenceAnnotation = "supporting"
)

View File

@ -7,19 +7,19 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/anchore/stereoscope/pkg/file"
"github.com/anchore/syft/syft/cpe" "github.com/anchore/syft/syft/cpe"
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
) )
func TestIDUniqueness(t *testing.T) { func TestIDUniqueness(t *testing.T) {
originalLocation := source.Location{ originalLocation := source.NewVirtualLocationFromCoordinates(
Coordinates: source.Coordinates{ source.Coordinates{
RealPath: "39.0742° N, 21.8243° E", RealPath: "39.0742° N, 21.8243° E",
FileSystemID: "Earth", FileSystemID: "Earth",
}, },
VirtualPath: "/Ancient-Greece", "/Ancient-Greece",
} )
originalPkg := Package{ originalPkg := Package{
Name: "pi", Name: "pi",
Version: "3.14", Version: "3.14",
@ -238,13 +238,13 @@ func TestIDUniqueness(t *testing.T) {
} }
func TestPackage_Merge(t *testing.T) { func TestPackage_Merge(t *testing.T) {
originalLocation := source.Location{ originalLocation := source.NewVirtualLocationFromCoordinates(
Coordinates: source.Coordinates{ source.Coordinates{
RealPath: "39.0742° N, 21.8243° E", RealPath: "39.0742° N, 21.8243° E",
FileSystemID: "Earth", FileSystemID: "Earth",
}, },
VirtualPath: "/Ancient-Greece", "/Ancient-Greece",
} )
similarLocation := originalLocation similarLocation := originalLocation
similarLocation.FileSystemID = "Mars" similarLocation.FileSystemID = "Mars"
@ -423,13 +423,23 @@ func TestPackage_Merge(t *testing.T) {
cmp.AllowUnexported(Package{}), cmp.AllowUnexported(Package{}),
cmp.Comparer( cmp.Comparer(
func(x, y source.LocationSet) bool { func(x, y source.LocationSet) bool {
return cmp.Equal( xs := x.ToSlice()
x.ToSlice(), y.ToSlice(), ys := y.ToSlice()
cmp.AllowUnexported(source.Location{}),
cmp.AllowUnexported(file.Reference{}), if len(xs) != len(ys) {
) return false
}
for i, xe := range xs {
ye := ys[i]
if !locationComparer(xe, ye) {
return false
}
}
return true
}, },
), ),
cmp.Comparer(locationComparer),
); diff != "" { ); diff != "" {
t.Errorf("unexpected result from parsing (-expected +actual)\n%s", diff) t.Errorf("unexpected result from parsing (-expected +actual)\n%s", diff)
} }
@ -437,6 +447,10 @@ func TestPackage_Merge(t *testing.T) {
} }
} }
func locationComparer(x, y source.Location) bool {
return cmp.Equal(x.Coordinates, y.Coordinates) && cmp.Equal(x.VirtualPath, y.VirtualPath)
}
func TestIsValid(t *testing.T) { func TestIsValid(t *testing.T) {
cases := []struct { cases := []struct {
name string name string

View File

@ -765,30 +765,10 @@ func Test_directoryResolver_resolvesLinks(t *testing.T) {
return actualLocations return actualLocations
}, },
expected: []Location{ expected: []Location{
{ NewLocation("file-1.txt"), // note: missing virtual path "file-1.txt"
Coordinates: Coordinates{ NewLocation("file-3.txt"), // note: missing virtual path "file-3.txt"
RealPath: "file-1.txt", NewLocation("file-2.txt"), // note: missing virtual path "file-2.txt"
}, NewLocation("parent/file-4.txt"), // note: missing virtual path "file-4.txt"
//VirtualPath: "file-1.txt",
},
{
Coordinates: Coordinates{
RealPath: "file-3.txt",
},
//VirtualPath: "file-3.txt",
},
{
Coordinates: Coordinates{
RealPath: "file-2.txt",
},
//VirtualPath: "file-2.txt",
},
{
Coordinates: Coordinates{
RealPath: "parent/file-4.txt",
},
//VirtualPath: "parent/file-4.txt",
},
}, },
}, },
{ {
@ -801,31 +781,11 @@ func Test_directoryResolver_resolvesLinks(t *testing.T) {
return actualLocations return actualLocations
}, },
expected: []Location{ expected: []Location{
{ NewVirtualLocation("file-1.txt", "link-1"),
Coordinates: Coordinates{ NewVirtualLocation("file-2.txt", "link-2"),
RealPath: "file-1.txt",
},
VirtualPath: "link-1",
},
{
Coordinates: Coordinates{
RealPath: "file-2.txt",
},
VirtualPath: "link-2",
},
// we already have this real file path via another link, so only one is returned // we already have this real file path via another link, so only one is returned
//{ //NewVirtualLocation("file-2.txt", "link-indirect"),
// Coordinates: Coordinates{ NewVirtualLocation("file-3.txt", "link-within"),
// RealPath: "file-2.txt",
// },
// VirtualPath: "link-indirect",
//},
{
Coordinates: Coordinates{
RealPath: "file-3.txt",
},
VirtualPath: "link-within",
},
}, },
}, },
{ {
@ -838,12 +798,7 @@ func Test_directoryResolver_resolvesLinks(t *testing.T) {
}, },
expected: []Location{ expected: []Location{
// this has two copies in the base image, which overwrites the same location // this has two copies in the base image, which overwrites the same location
{ NewLocation("file-2.txt"), // note: missing virtual path "file-2.txt",
Coordinates: Coordinates{
RealPath: "file-2.txt",
},
//VirtualPath: "file-2.txt",
},
}, },
}, },
{ {
@ -855,30 +810,10 @@ func Test_directoryResolver_resolvesLinks(t *testing.T) {
return actualLocations return actualLocations
}, },
expected: []Location{ expected: []Location{
{ NewLocation("file-1.txt"), // note: missing virtual path "file-1.txt"
Coordinates: Coordinates{ NewLocation("file-2.txt"), // note: missing virtual path "file-2.txt"
RealPath: "file-1.txt", NewLocation("file-3.txt"), // note: missing virtual path "file-3.txt"
}, NewLocation("parent/file-4.txt"), // note: missing virtual path "parent/file-4.txt"
//VirtualPath: "file-1.txt",
},
{
Coordinates: Coordinates{
RealPath: "file-2.txt",
},
//VirtualPath: "file-2.txt",
},
{
Coordinates: Coordinates{
RealPath: "file-3.txt",
},
//VirtualPath: "file-3.txt",
},
{
Coordinates: Coordinates{
RealPath: "parent/file-4.txt",
},
//VirtualPath: "parent/file-4.txt",
},
}, },
}, },
{ {
@ -890,33 +825,41 @@ func Test_directoryResolver_resolvesLinks(t *testing.T) {
}, },
expected: []Location{ expected: []Location{
{ {
Coordinates: Coordinates{ LocationData: LocationData{
RealPath: "file-1.txt", Coordinates: Coordinates{
RealPath: "file-1.txt",
},
VirtualPath: "link-1",
ref: file.Reference{RealPath: "file-1.txt"},
}, },
VirtualPath: "link-1",
ref: file.Reference{RealPath: "file-1.txt"},
}, },
{ {
Coordinates: Coordinates{ LocationData: LocationData{
RealPath: "file-2.txt", Coordinates: Coordinates{
RealPath: "file-2.txt",
},
VirtualPath: "link-2",
ref: file.Reference{RealPath: "file-2.txt"},
}, },
VirtualPath: "link-2",
ref: file.Reference{RealPath: "file-2.txt"},
}, },
// we already have this real file path via another link, so only one is returned // we already have this real file path via another link, so only one is returned
//{ //{
// Coordinates: Coordinates{ // LocationData: LocationData{
// RealPath: "file-2.txt", // Coordinates: Coordinates{
// }, // RealPath: "file-2.txt",
// VirtualPath: "link-indirect", // },
// ref: file.Reference{RealPath: "file-2.txt"}, // VirtualPath: "link-indirect",
// ref: file.Reference{RealPath: "file-2.txt"},
// },
//}, //},
{ {
Coordinates: Coordinates{ LocationData: LocationData{
RealPath: "file-3.txt", Coordinates: Coordinates{
RealPath: "file-3.txt",
},
VirtualPath: "link-within",
ref: file.Reference{RealPath: "file-3.txt"},
}, },
VirtualPath: "link-within",
ref: file.Reference{RealPath: "file-3.txt"},
}, },
}, },
}, },
@ -929,30 +872,10 @@ func Test_directoryResolver_resolvesLinks(t *testing.T) {
return actualLocations return actualLocations
}, },
expected: []Location{ expected: []Location{
{ NewLocation("file-1.txt"), // note: missing virtual path "file-1.txt"
Coordinates: Coordinates{ NewLocation("file-2.txt"), // note: missing virtual path "file-2.txt"
RealPath: "file-1.txt", NewLocation("file-3.txt"), // note: missing virtual path "file-3.txt"
}, NewLocation("parent/file-4.txt"), // note: missing virtual path "parent/file-4.txt"
//VirtualPath: "file-1.txt",
},
{
Coordinates: Coordinates{
RealPath: "file-2.txt",
},
//VirtualPath: "file-2.txt",
},
{
Coordinates: Coordinates{
RealPath: "file-3.txt",
},
//VirtualPath: "file-3.txt",
},
{
Coordinates: Coordinates{
RealPath: "parent/file-4.txt",
},
//VirtualPath: "parent/file-4.txt",
},
}, },
}, },
{ {
@ -965,12 +888,7 @@ func Test_directoryResolver_resolvesLinks(t *testing.T) {
}, },
expected: []Location{ expected: []Location{
// we have multiple copies across layers // we have multiple copies across layers
{ NewVirtualLocation("file-2.txt", "link-2"),
Coordinates: Coordinates{
RealPath: "file-2.txt",
},
VirtualPath: "link-2",
},
}, },
}, },
{ {
@ -983,12 +901,7 @@ func Test_directoryResolver_resolvesLinks(t *testing.T) {
}, },
expected: []Location{ expected: []Location{
// we have multiple copies across layers // we have multiple copies across layers
{ NewVirtualLocation("file-2.txt", "link-indirect"),
Coordinates: Coordinates{
RealPath: "file-2.txt",
},
VirtualPath: "link-indirect",
},
}, },
}, },
} }

View File

@ -6,8 +6,6 @@ import (
"testing" "testing"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/anchore/stereoscope/pkg/file"
) )
func TestExcludingResolver(t *testing.T) { func TestExcludingResolver(t *testing.T) {
@ -79,25 +77,25 @@ func TestExcludingResolver(t *testing.T) {
for _, path := range diff { for _, path := range diff {
assert.False(t, er.HasPath(path)) assert.False(t, er.HasPath(path))
c, err := er.FileContentsByLocation(makeLocation(path)) c, err := er.FileContentsByLocation(NewLocation(path))
assert.Nil(t, c) assert.Nil(t, c)
assert.Error(t, err) assert.Error(t, err)
m, err := er.FileMetadataByLocation(makeLocation(path)) m, err := er.FileMetadataByLocation(NewLocation(path))
assert.Empty(t, m.LinkDestination) assert.Empty(t, m.LinkDestination)
assert.Error(t, err) assert.Error(t, err)
l := er.RelativeFileByPath(makeLocation(""), path) l := er.RelativeFileByPath(NewLocation(""), path)
assert.Nil(t, l) assert.Nil(t, l)
} }
for _, path := range test.expected { for _, path := range test.expected {
assert.True(t, er.HasPath(path)) assert.True(t, er.HasPath(path))
c, err := er.FileContentsByLocation(makeLocation(path)) c, err := er.FileContentsByLocation(NewLocation(path))
assert.NotNil(t, c) assert.NotNil(t, c)
assert.Nil(t, err) assert.Nil(t, err)
m, err := er.FileMetadataByLocation(makeLocation(path)) m, err := er.FileMetadataByLocation(NewLocation(path))
assert.NotEmpty(t, m.LinkDestination) assert.NotEmpty(t, m.LinkDestination)
assert.Nil(t, err) assert.Nil(t, err)
l := er.RelativeFileByPath(makeLocation(""), path) l := er.RelativeFileByPath(NewLocation(""), path)
assert.NotNil(t, l) assert.NotNil(t, l)
} }
}) })
@ -119,17 +117,6 @@ func difference(a, b []string) []string {
return diff return diff
} }
func makeLocation(path string) Location {
return Location{
Coordinates: Coordinates{
RealPath: path,
FileSystemID: "",
},
VirtualPath: "",
ref: file.Reference{},
}
}
func locationPaths(locations []Location) []string { func locationPaths(locations []Location) []string {
paths := []string{} paths := []string{}
for _, l := range locations { for _, l := range locations {
@ -145,7 +132,7 @@ type mockResolver struct {
func (r *mockResolver) getLocations() ([]Location, error) { func (r *mockResolver) getLocations() ([]Location, error) {
out := []Location{} out := []Location{}
for _, path := range r.locations { for _, path := range r.locations {
out = append(out, makeLocation(path)) out = append(out, NewLocation(path))
} }
return out, nil return out, nil
} }
@ -189,11 +176,8 @@ func (r *mockResolver) FilesByBasenameGlob(_ ...string) ([]Location, error) {
} }
func (r *mockResolver) RelativeFileByPath(_ Location, path string) *Location { func (r *mockResolver) RelativeFileByPath(_ Location, path string) *Location {
return &Location{ l := NewLocation(path)
Coordinates: Coordinates{ return &l
RealPath: path,
},
}
} }
func (r *mockResolver) AllLocations() <-chan Location { func (r *mockResolver) AllLocations() <-chan Location {

View File

@ -398,66 +398,17 @@ func Test_imageAllLayersResolver_resolvesLinks(t *testing.T) {
return actualLocations return actualLocations
}, },
expected: []Location{ expected: []Location{
{ NewVirtualLocation("/etc/group", "/etc/group"),
Coordinates: Coordinates{ NewVirtualLocation("/etc/passwd", "/etc/passwd"),
RealPath: "/etc/group", NewVirtualLocation("/etc/shadow", "/etc/shadow"),
}, NewVirtualLocation("/file-1.txt", "/file-1.txt"),
VirtualPath: "/etc/group", NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 1
},
{
Coordinates: Coordinates{
RealPath: "/etc/passwd",
},
VirtualPath: "/etc/passwd",
},
{
Coordinates: Coordinates{
RealPath: "/etc/shadow",
},
VirtualPath: "/etc/shadow",
},
{
Coordinates: Coordinates{
RealPath: "/file-1.txt",
},
VirtualPath: "/file-1.txt",
},
// copy 1
{
Coordinates: Coordinates{
RealPath: "/file-2.txt",
},
VirtualPath: "/file-2.txt",
},
// note: we're de-duping the redundant access to file-3.txt // note: we're de-duping the redundant access to file-3.txt
// ... (there would usually be two copies) // ... (there would usually be two copies)
{ NewVirtualLocation("/file-3.txt", "/file-3.txt"),
Coordinates: Coordinates{ NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 2
RealPath: "/file-3.txt", NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), // copy 1
}, NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), // copy 2
VirtualPath: "/file-3.txt",
},
// copy 2
{
Coordinates: Coordinates{
RealPath: "/file-2.txt",
},
VirtualPath: "/file-2.txt",
},
// copy 1
{
Coordinates: Coordinates{
RealPath: "/parent/file-4.txt",
},
VirtualPath: "/parent/file-4.txt",
},
// copy 2
{
Coordinates: Coordinates{
RealPath: "/parent/file-4.txt",
},
VirtualPath: "/parent/file-4.txt",
},
}, },
}, },
{ {
@ -469,32 +420,10 @@ func Test_imageAllLayersResolver_resolvesLinks(t *testing.T) {
return actualLocations return actualLocations
}, },
expected: []Location{ expected: []Location{
{ NewVirtualLocation("/file-1.txt", "/link-1"),
Coordinates: Coordinates{ NewVirtualLocation("/file-2.txt", "/link-2"), // copy 1
RealPath: "/file-1.txt", NewVirtualLocation("/file-2.txt", "/link-2"), // copy 2
}, NewVirtualLocation("/file-3.txt", "/link-within"),
VirtualPath: "/link-1",
},
// copy 1
{
Coordinates: Coordinates{
RealPath: "/file-2.txt",
},
VirtualPath: "/link-2",
},
// copy 2
{
Coordinates: Coordinates{
RealPath: "/file-2.txt",
},
VirtualPath: "/link-2",
},
{
Coordinates: Coordinates{
RealPath: "/file-3.txt",
},
VirtualPath: "/link-within",
},
}, },
}, },
{ {
@ -506,20 +435,8 @@ func Test_imageAllLayersResolver_resolvesLinks(t *testing.T) {
return actualLocations return actualLocations
}, },
expected: []Location{ expected: []Location{
// copy 1 NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 1
{ NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 2
Coordinates: Coordinates{
RealPath: "/file-2.txt",
},
VirtualPath: "/file-2.txt",
},
// copy 2
{
Coordinates: Coordinates{
RealPath: "/file-2.txt",
},
VirtualPath: "/file-2.txt",
},
}, },
}, },
{ {
@ -531,45 +448,12 @@ func Test_imageAllLayersResolver_resolvesLinks(t *testing.T) {
return actualLocations return actualLocations
}, },
expected: []Location{ expected: []Location{
{ NewVirtualLocation("/file-1.txt", "/file-1.txt"),
Coordinates: Coordinates{ NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 1
RealPath: "/file-1.txt", NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 2
}, NewVirtualLocation("/file-3.txt", "/file-3.txt"),
VirtualPath: "/file-1.txt", NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"),
}, NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), // when we copy into the link path, the same file-4.txt is copied
// copy 1
{
Coordinates: Coordinates{
RealPath: "/file-2.txt",
},
VirtualPath: "/file-2.txt",
},
// copy 2
{
Coordinates: Coordinates{
RealPath: "/file-2.txt",
},
VirtualPath: "/file-2.txt",
},
{
Coordinates: Coordinates{
RealPath: "/file-3.txt",
},
VirtualPath: "/file-3.txt",
},
{
Coordinates: Coordinates{
RealPath: "/parent/file-4.txt",
},
VirtualPath: "/parent/file-4.txt",
},
// when we copy into the link path, the same file-4.txt is copied
{
Coordinates: Coordinates{
RealPath: "/parent/file-4.txt",
},
VirtualPath: "/parent/file-4.txt",
},
}, },
}, },
{ {
@ -581,45 +465,12 @@ func Test_imageAllLayersResolver_resolvesLinks(t *testing.T) {
return actualLocations return actualLocations
}, },
expected: []Location{ expected: []Location{
{ NewVirtualLocation("/file-1.txt", "/file-1.txt"),
Coordinates: Coordinates{ NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 1
RealPath: "/file-1.txt", NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 2
}, NewVirtualLocation("/file-3.txt", "/file-3.txt"),
VirtualPath: "/file-1.txt", NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"),
}, NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), // when we copy into the link path, the same file-4.txt is copied
// copy 1
{
Coordinates: Coordinates{
RealPath: "/file-2.txt",
},
VirtualPath: "/file-2.txt",
},
// copy 2
{
Coordinates: Coordinates{
RealPath: "/file-2.txt",
},
VirtualPath: "/file-2.txt",
},
{
Coordinates: Coordinates{
RealPath: "/file-3.txt",
},
VirtualPath: "/file-3.txt",
},
{
Coordinates: Coordinates{
RealPath: "/parent/file-4.txt",
},
VirtualPath: "/parent/file-4.txt",
},
// when we copy into the link path, the same file-4.txt is copied
{
Coordinates: Coordinates{
RealPath: "/parent/file-4.txt",
},
VirtualPath: "/parent/file-4.txt",
},
}, },
}, },
{ {
@ -632,18 +483,8 @@ func Test_imageAllLayersResolver_resolvesLinks(t *testing.T) {
}, },
expected: []Location{ expected: []Location{
// we have multiple copies across layers // we have multiple copies across layers
{ NewVirtualLocation("/file-2.txt", "/link-2"),
Coordinates: Coordinates{ NewVirtualLocation("/file-2.txt", "/link-2"),
RealPath: "/file-2.txt",
},
VirtualPath: "/link-2",
},
{
Coordinates: Coordinates{
RealPath: "/file-2.txt",
},
VirtualPath: "/link-2",
},
}, },
}, },
{ {
@ -656,18 +497,8 @@ func Test_imageAllLayersResolver_resolvesLinks(t *testing.T) {
}, },
expected: []Location{ expected: []Location{
// we have multiple copies across layers // we have multiple copies across layers
{ NewVirtualLocation("/file-2.txt", "/link-indirect"),
Coordinates: Coordinates{ NewVirtualLocation("/file-2.txt", "/link-indirect"),
RealPath: "/file-2.txt",
},
VirtualPath: "/link-indirect",
},
{
Coordinates: Coordinates{
RealPath: "/file-2.txt",
},
VirtualPath: "/link-indirect",
},
}, },
}, },
} }

View File

@ -382,48 +382,13 @@ func Test_imageSquashResolver_resolvesLinks(t *testing.T) {
return actualLocations return actualLocations
}, },
expected: []Location{ expected: []Location{
{ NewVirtualLocation("/etc/group", "/etc/group"),
Coordinates: Coordinates{ NewVirtualLocation("/etc/passwd", "/etc/passwd"),
RealPath: "/etc/group", NewVirtualLocation("/etc/shadow", "/etc/shadow"),
}, NewVirtualLocation("/file-1.txt", "/file-1.txt"),
VirtualPath: "/etc/group", NewVirtualLocation("/file-3.txt", "/file-3.txt"),
}, NewVirtualLocation("/file-2.txt", "/file-2.txt"),
{ NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"),
Coordinates: Coordinates{
RealPath: "/etc/passwd",
},
VirtualPath: "/etc/passwd",
},
{
Coordinates: Coordinates{
RealPath: "/etc/shadow",
},
VirtualPath: "/etc/shadow",
},
{
Coordinates: Coordinates{
RealPath: "/file-1.txt",
},
VirtualPath: "/file-1.txt",
},
{
Coordinates: Coordinates{
RealPath: "/file-3.txt",
},
VirtualPath: "/file-3.txt",
},
{
Coordinates: Coordinates{
RealPath: "/file-2.txt",
},
VirtualPath: "/file-2.txt",
},
{
Coordinates: Coordinates{
RealPath: "/parent/file-4.txt",
},
VirtualPath: "/parent/file-4.txt",
},
}, },
}, },
{ {
@ -435,32 +400,14 @@ func Test_imageSquashResolver_resolvesLinks(t *testing.T) {
return actualLocations return actualLocations
}, },
expected: []Location{ expected: []Location{
{ NewVirtualLocation("/file-1.txt", "/link-1"),
Coordinates: Coordinates{ NewVirtualLocation("/file-2.txt", "/link-2"),
RealPath: "/file-1.txt",
},
VirtualPath: "/link-1",
},
{
Coordinates: Coordinates{
RealPath: "/file-2.txt",
},
VirtualPath: "/link-2",
},
// though this is a link, and it matches to the file, the resolver de-duplicates files // though this is a link, and it matches to the file, the resolver de-duplicates files
// by the real path, so it is not included in the results // by the real path, so it is not included in the results
//{ //NewVirtualLocation("/file-2.txt", "/link-indirect"),
// Coordinates: Coordinates{
// RealPath: "/file-2.txt", NewVirtualLocation("/file-3.txt", "/link-within"),
// },
// VirtualPath: "/link-indirect",
//},
{
Coordinates: Coordinates{
RealPath: "/file-3.txt",
},
VirtualPath: "/link-within",
},
}, },
}, },
{ {
@ -473,12 +420,7 @@ func Test_imageSquashResolver_resolvesLinks(t *testing.T) {
}, },
expected: []Location{ expected: []Location{
// this has two copies in the base image, which overwrites the same location // this has two copies in the base image, which overwrites the same location
{ NewVirtualLocation("/file-2.txt", "/file-2.txt"),
Coordinates: Coordinates{
RealPath: "/file-2.txt",
},
VirtualPath: "/file-2.txt",
},
}, },
}, },
{ {
@ -490,30 +432,10 @@ func Test_imageSquashResolver_resolvesLinks(t *testing.T) {
return actualLocations return actualLocations
}, },
expected: []Location{ expected: []Location{
{ NewVirtualLocation("/file-1.txt", "/file-1.txt"),
Coordinates: Coordinates{ NewVirtualLocation("/file-2.txt", "/file-2.txt"),
RealPath: "/file-1.txt", NewVirtualLocation("/file-3.txt", "/file-3.txt"),
}, NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"),
VirtualPath: "/file-1.txt",
},
{
Coordinates: Coordinates{
RealPath: "/file-2.txt",
},
VirtualPath: "/file-2.txt",
},
{
Coordinates: Coordinates{
RealPath: "/file-3.txt",
},
VirtualPath: "/file-3.txt",
},
{
Coordinates: Coordinates{
RealPath: "/parent/file-4.txt",
},
VirtualPath: "/parent/file-4.txt",
},
}, },
}, },
{ {
@ -524,34 +446,44 @@ func Test_imageSquashResolver_resolvesLinks(t *testing.T) {
return actualLocations return actualLocations
}, },
expected: []Location{ expected: []Location{
{ {
Coordinates: Coordinates{ LocationData: LocationData{
RealPath: "/file-1.txt", Coordinates: Coordinates{
RealPath: "/file-1.txt",
},
VirtualPath: "/link-1",
ref: file.Reference{RealPath: "/file-1.txt"},
}, },
VirtualPath: "/link-1",
ref: file.Reference{RealPath: "/file-1.txt"},
}, },
{ {
Coordinates: Coordinates{ LocationData: LocationData{
RealPath: "/file-2.txt",
Coordinates: Coordinates{
RealPath: "/file-2.txt",
},
VirtualPath: "/link-2",
ref: file.Reference{RealPath: "/file-2.txt"},
}, },
VirtualPath: "/link-2",
ref: file.Reference{RealPath: "/file-2.txt"},
}, },
// we already have this real file path via another link, so only one is returned // we already have this real file path via another link, so only one is returned
//{ //{
// Coordinates: Coordinates{ // LocationData: LocationData{
// RealPath: "/file-2.txt", // Coordinates: Coordinates{
// }, // RealPath: "/file-2.txt",
// VirtualPath: "/link-indirect", // },
// ref: file.Reference{RealPath: "/file-2.txt"}, // VirtualPath: "/link-indirect",
// ref: file.Reference{RealPath: "/file-2.txt"},
// },
//}, //},
{ {
Coordinates: Coordinates{ LocationData: LocationData{
RealPath: "/file-3.txt", Coordinates: Coordinates{
RealPath: "/file-3.txt",
},
VirtualPath: "/link-within",
ref: file.Reference{RealPath: "/file-3.txt"},
}, },
VirtualPath: "/link-within",
ref: file.Reference{RealPath: "/file-3.txt"},
}, },
}, },
}, },
@ -564,30 +496,10 @@ func Test_imageSquashResolver_resolvesLinks(t *testing.T) {
return actualLocations return actualLocations
}, },
expected: []Location{ expected: []Location{
{ NewVirtualLocation("/file-1.txt", "/file-1.txt"),
Coordinates: Coordinates{ NewVirtualLocation("/file-2.txt", "/file-2.txt"),
RealPath: "/file-1.txt", NewVirtualLocation("/file-3.txt", "/file-3.txt"),
}, NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"),
VirtualPath: "/file-1.txt",
},
{
Coordinates: Coordinates{
RealPath: "/file-2.txt",
},
VirtualPath: "/file-2.txt",
},
{
Coordinates: Coordinates{
RealPath: "/file-3.txt",
},
VirtualPath: "/file-3.txt",
},
{
Coordinates: Coordinates{
RealPath: "/parent/file-4.txt",
},
VirtualPath: "/parent/file-4.txt",
},
}, },
}, },
{ {
@ -600,12 +512,7 @@ func Test_imageSquashResolver_resolvesLinks(t *testing.T) {
}, },
expected: []Location{ expected: []Location{
// we have multiple copies across layers // we have multiple copies across layers
{ NewVirtualLocation("/file-2.txt", "/link-2"),
Coordinates: Coordinates{
RealPath: "/file-2.txt",
},
VirtualPath: "/link-2",
},
}, },
}, },
{ {
@ -618,12 +525,7 @@ func Test_imageSquashResolver_resolvesLinks(t *testing.T) {
}, },
expected: []Location{ expected: []Location{
// we have multiple copies across layers // we have multiple copies across layers
{ NewVirtualLocation("/file-2.txt", "/link-indirect"),
Coordinates: Coordinates{
RealPath: "/file-2.txt",
},
VirtualPath: "/link-indirect",
},
}, },
}, },
} }
@ -646,7 +548,8 @@ func Test_imageSquashResolver_resolvesLinks(t *testing.T) {
func compareLocations(t *testing.T, expected, actual []Location) { func compareLocations(t *testing.T, expected, actual []Location) {
t.Helper() t.Helper()
ignoreUnexported := cmpopts.IgnoreFields(Location{}, "ref") ignoreUnexported := cmpopts.IgnoreFields(LocationData{}, "ref")
ignoreMetadata := cmpopts.IgnoreFields(LocationMetadata{}, "Annotations")
ignoreFS := cmpopts.IgnoreFields(Coordinates{}, "FileSystemID") ignoreFS := cmpopts.IgnoreFields(Coordinates{}, "FileSystemID")
sort.Sort(Locations(expected)) sort.Sort(Locations(expected))
@ -655,6 +558,7 @@ func compareLocations(t *testing.T, expected, actual []Location) {
if d := cmp.Diff(expected, actual, if d := cmp.Diff(expected, actual,
ignoreUnexported, ignoreUnexported,
ignoreFS, ignoreFS,
ignoreMetadata,
); d != "" { ); d != "" {
t.Errorf("unexpected locations (-want +got):\n%s", d) t.Errorf("unexpected locations (-want +got):\n%s", d)

View File

@ -3,6 +3,8 @@ package source
import ( import (
"fmt" "fmt"
"github.com/hashicorp/go-multierror"
"github.com/anchore/stereoscope/pkg/file" "github.com/anchore/stereoscope/pkg/file"
"github.com/anchore/stereoscope/pkg/image" "github.com/anchore/stereoscope/pkg/image"
) )
@ -10,18 +12,55 @@ import (
// Location represents a path relative to a particular filesystem resolved to a specific file.Reference. This struct is used as a key // Location represents a path relative to a particular filesystem resolved to a specific file.Reference. This struct is used as a key
// in content fetching to uniquely identify a file relative to a request (the VirtualPath). // in content fetching to uniquely identify a file relative to a request (the VirtualPath).
type Location struct { type Location struct {
LocationData `cyclonedx:""`
LocationMetadata `cyclonedx:""`
}
type LocationData struct {
Coordinates `cyclonedx:""` // Empty string here means there is no intermediate property name, e.g. syft:locations:0:path without "coordinates" Coordinates `cyclonedx:""` // Empty string here means there is no intermediate property name, e.g. syft:locations:0:path without "coordinates"
// note: it is IMPORTANT to ignore anything but the coordinates for a Location when considering the ID (hash value) // note: it is IMPORTANT to ignore anything but the coordinates for a Location when considering the ID (hash value)
// since the coordinates are the minimally correct ID for a location (symlinks should not come into play) // since the coordinates are the minimally correct ID for a location (symlinks should not come into play)
VirtualPath string `hash:"ignore" json:"virtualPath,omitempty"` // The path to the file which may or may not have hardlinks / symlinks VirtualPath string `hash:"ignore" json:"-"` // The path to the file which may or may not have hardlinks / symlinks
ref file.Reference `hash:"ignore"` // The file reference relative to the stereoscope.FileCatalog that has more information about this location. ref file.Reference `hash:"ignore"` // The file reference relative to the stereoscope.FileCatalog that has more information about this location.
}
type LocationMetadata struct {
Annotations map[string]string `json:"annotations,omitempty"` // Arbitrary key-value pairs that can be used to annotate a location
}
func (m *LocationMetadata) merge(other LocationMetadata) error {
var errs error
for k, v := range other.Annotations {
if otherV, ok := m.Annotations[k]; ok {
if v != otherV {
err := fmt.Errorf("unable to merge location metadata: conflicting values for key=%q: %q != %q", k, v, otherV)
errs = multierror.Append(errs, err)
continue
}
}
m.Annotations[k] = v
}
return errs
}
func (l Location) WithAnnotation(key, value string) Location {
if l.LocationMetadata.Annotations == nil {
l.LocationMetadata.Annotations = map[string]string{}
}
l.LocationMetadata.Annotations[key] = value
return l
} }
// NewLocation creates a new Location representing a path without denoting a filesystem or FileCatalog reference. // NewLocation creates a new Location representing a path without denoting a filesystem or FileCatalog reference.
func NewLocation(realPath string) Location { func NewLocation(realPath string) Location {
return Location{ return Location{
Coordinates: Coordinates{ LocationData: LocationData{
RealPath: realPath, Coordinates: Coordinates{
RealPath: realPath,
},
},
LocationMetadata: LocationMetadata{
Annotations: map[string]string{},
}, },
} }
} }
@ -29,40 +68,70 @@ func NewLocation(realPath string) Location {
// NewVirtualLocation creates a new location for a path accessed by a virtual path (a path with a symlink or hardlink somewhere in the path) // NewVirtualLocation creates a new location for a path accessed by a virtual path (a path with a symlink or hardlink somewhere in the path)
func NewVirtualLocation(realPath, virtualPath string) Location { func NewVirtualLocation(realPath, virtualPath string) Location {
return Location{ return Location{
Coordinates: Coordinates{ LocationData: LocationData{
RealPath: realPath, Coordinates: Coordinates{
RealPath: realPath,
},
VirtualPath: virtualPath,
}, },
VirtualPath: virtualPath, LocationMetadata: LocationMetadata{
} Annotations: map[string]string{},
}}
} }
// NewLocationFromCoordinates creates a new location for the given Coordinates. // NewLocationFromCoordinates creates a new location for the given Coordinates.
func NewLocationFromCoordinates(coordinates Coordinates) Location { func NewLocationFromCoordinates(coordinates Coordinates) Location {
return Location{ return Location{
Coordinates: coordinates, LocationData: LocationData{
} Coordinates: coordinates,
},
LocationMetadata: LocationMetadata{
Annotations: map[string]string{},
}}
}
// NewVirtualLocationFromCoordinates creates a new location for the given Coordinates via a virtual path.
func NewVirtualLocationFromCoordinates(coordinates Coordinates, virtualPath string) Location {
return Location{
LocationData: LocationData{
Coordinates: coordinates,
VirtualPath: virtualPath,
},
LocationMetadata: LocationMetadata{
Annotations: map[string]string{},
}}
} }
// NewLocationFromImage creates a new Location representing the given path (extracted from the ref) relative to the given image. // NewLocationFromImage creates a new Location representing the given path (extracted from the ref) relative to the given image.
func NewLocationFromImage(virtualPath string, ref file.Reference, img *image.Image) Location { func NewLocationFromImage(virtualPath string, ref file.Reference, img *image.Image) Location {
layer := img.FileCatalog.Layer(ref) layer := img.FileCatalog.Layer(ref)
return Location{ return Location{
Coordinates: Coordinates{ LocationData: LocationData{
RealPath: string(ref.RealPath), Coordinates: Coordinates{
FileSystemID: layer.Metadata.Digest, RealPath: string(ref.RealPath),
FileSystemID: layer.Metadata.Digest,
},
VirtualPath: virtualPath,
ref: ref,
},
LocationMetadata: LocationMetadata{
Annotations: map[string]string{},
}, },
VirtualPath: virtualPath,
ref: ref,
} }
} }
// NewLocationFromDirectory creates a new Location representing the given path (extracted from the ref) relative to the given directory. // NewLocationFromDirectory creates a new Location representing the given path (extracted from the ref) relative to the given directory.
func NewLocationFromDirectory(responsePath string, ref file.Reference) Location { func NewLocationFromDirectory(responsePath string, ref file.Reference) Location {
return Location{ return Location{
Coordinates: Coordinates{ LocationData: LocationData{
RealPath: responsePath, Coordinates: Coordinates{
RealPath: responsePath,
},
ref: ref,
},
LocationMetadata: LocationMetadata{
Annotations: map[string]string{},
}, },
ref: ref,
} }
} }
@ -72,11 +141,16 @@ func NewVirtualLocationFromDirectory(responsePath, virtualResponsePath string, r
return NewLocationFromDirectory(responsePath, ref) return NewLocationFromDirectory(responsePath, ref)
} }
return Location{ return Location{
Coordinates: Coordinates{ LocationData: LocationData{
RealPath: responsePath, Coordinates: Coordinates{
RealPath: responsePath,
},
VirtualPath: virtualResponsePath,
ref: ref,
},
LocationMetadata: LocationMetadata{
Annotations: map[string]string{},
}, },
VirtualPath: virtualResponsePath,
ref: ref,
} }
} }

View File

@ -4,10 +4,12 @@ import (
"sort" "sort"
"github.com/mitchellh/hashstructure/v2" "github.com/mitchellh/hashstructure/v2"
"github.com/anchore/syft/internal/log"
) )
type LocationSet struct { type LocationSet struct {
set map[Location]struct{} set map[LocationData]LocationMetadata
} }
func NewLocationSet(locations ...Location) (s LocationSet) { func NewLocationSet(locations ...Location) (s LocationSet) {
@ -20,10 +22,18 @@ func NewLocationSet(locations ...Location) (s LocationSet) {
func (s *LocationSet) Add(locations ...Location) { func (s *LocationSet) Add(locations ...Location) {
if s.set == nil { if s.set == nil {
s.set = make(map[Location]struct{}) s.set = make(map[LocationData]LocationMetadata)
} }
for _, l := range locations { for _, l := range locations {
s.set[l] = struct{}{} if m, ok := s.set[l.LocationData]; ok {
err := m.merge(l.LocationMetadata)
if err != nil {
log.Debugf("partial merge of location metadata: %+v", err)
}
s.set[l.LocationData] = m
} else {
s.set[l.LocationData] = l.LocationMetadata
}
} }
} }
@ -32,7 +42,7 @@ func (s LocationSet) Remove(locations ...Location) {
return return
} }
for _, l := range locations { for _, l := range locations {
delete(s.set, l) delete(s.set, l.LocationData)
} }
} }
@ -40,7 +50,7 @@ func (s LocationSet) Contains(l Location) bool {
if s.set == nil { if s.set == nil {
return false return false
} }
_, ok := s.set[l] _, ok := s.set[l.LocationData]
return ok return ok
} }
@ -50,8 +60,11 @@ func (s LocationSet) ToSlice() []Location {
} }
locations := make([]Location, len(s.set)) locations := make([]Location, len(s.set))
idx := 0 idx := 0
for v := range s.set { for dir := range s.set {
locations[idx] = v locations[idx] = Location{
LocationData: dir,
LocationMetadata: s.set[dir],
}
idx++ idx++
} }
sort.Sort(Locations(locations)) sort.Sort(Locations(locations))

View File

@ -12,35 +12,43 @@ import (
func TestLocationSet(t *testing.T) { func TestLocationSet(t *testing.T) {
etcHostsLinkVar := Location{ etcHostsLinkVar := Location{
Coordinates: Coordinates{ LocationData: LocationData{
RealPath: "/etc/hosts", Coordinates: Coordinates{
FileSystemID: "a", RealPath: "/etc/hosts",
FileSystemID: "a",
},
VirtualPath: "/var/etc/hosts",
}, },
VirtualPath: "/var/etc/hosts",
} }
etcHostsLinkHome := Location{ etcHostsLinkHome := Location{
Coordinates: Coordinates{ LocationData: LocationData{
RealPath: "/etc/hosts", Coordinates: Coordinates{
FileSystemID: "a", RealPath: "/etc/hosts",
FileSystemID: "a",
},
VirtualPath: "/home/wagoodman/hosts",
}, },
VirtualPath: "/home/wagoodman/hosts",
} }
binA := Location{ binA := Location{
Coordinates: Coordinates{ LocationData: LocationData{
RealPath: "/bin", Coordinates: Coordinates{
FileSystemID: "a", RealPath: "/bin",
FileSystemID: "a",
},
VirtualPath: "/usr/bin",
}, },
VirtualPath: "/usr/bin",
} }
binB := Location{ binB := Location{
Coordinates: Coordinates{ LocationData: LocationData{
RealPath: "/bin", Coordinates: Coordinates{
FileSystemID: "b", RealPath: "/bin",
FileSystemID: "b",
},
VirtualPath: "/usr/bin",
}, },
VirtualPath: "/usr/bin",
} }
tests := []struct { tests := []struct {
@ -87,41 +95,51 @@ func TestLocationSet(t *testing.T) {
func TestLocationSet_Hash(t *testing.T) { func TestLocationSet_Hash(t *testing.T) {
etcAlink := Location{ etcAlink := Location{
Coordinates: Coordinates{ LocationData: LocationData{
RealPath: "/etc/hosts", Coordinates: Coordinates{
FileSystemID: "a", RealPath: "/etc/hosts",
FileSystemID: "a",
},
VirtualPath: "/var/etc/hosts",
}, },
VirtualPath: "/var/etc/hosts",
} }
etcA := Location{ etcA := Location{
Coordinates: Coordinates{ LocationData: LocationData{
RealPath: "/etc/hosts", Coordinates: Coordinates{
FileSystemID: "a", RealPath: "/etc/hosts",
FileSystemID: "a",
},
}, },
} }
etcB := Location{ etcB := Location{
Coordinates: Coordinates{ LocationData: LocationData{
RealPath: "/etc/hosts", Coordinates: Coordinates{
FileSystemID: "b", RealPath: "/etc/hosts",
FileSystemID: "b",
},
}, },
} }
binA := Location{ binA := Location{
Coordinates: Coordinates{ LocationData: LocationData{
RealPath: "/bin", Coordinates: Coordinates{
FileSystemID: "a", RealPath: "/bin",
FileSystemID: "a",
},
VirtualPath: "/usr/bin",
}, },
VirtualPath: "/usr/bin",
} }
binB := Location{ binB := Location{
Coordinates: Coordinates{ LocationData: LocationData{
RealPath: "/bin", Coordinates: Coordinates{
FileSystemID: "b", RealPath: "/bin",
FileSystemID: "b",
},
VirtualPath: "/usr/bin",
}, },
VirtualPath: "/usr/bin",
} }
tests := []struct { tests := []struct {

View File

@ -37,9 +37,11 @@ func TestLocation_ID(t *testing.T) {
for _, test := range tests { for _, test := range tests {
t.Run(test.name, func(t *testing.T) { t.Run(test.name, func(t *testing.T) {
l := Location{ l := Location{
Coordinates: test.coordinates, LocationData: LocationData{
VirtualPath: test.virtualPath, Coordinates: test.coordinates,
ref: test.ref, VirtualPath: test.virtualPath,
ref: test.ref,
},
} }
assert.Equal(t, l.ID(), test.coordinates.ID()) assert.Equal(t, l.ID(), test.coordinates.ID())
}) })

View File

@ -18,7 +18,7 @@ var _ FileResolver = (*MockResolver)(nil)
// paths, which are typically paths to test fixtures. // paths, which are typically paths to test fixtures.
type MockResolver struct { type MockResolver struct {
locations []Location locations []Location
metadata map[Location]FileMetadata metadata map[Coordinates]FileMetadata
mimeTypeIndex map[string][]Location mimeTypeIndex map[string][]Location
extension map[string][]Location extension map[string][]Location
basename map[string][]Location basename map[string][]Location
@ -41,18 +41,19 @@ func NewMockResolverForPaths(paths ...string) *MockResolver {
return &MockResolver{ return &MockResolver{
locations: locations, locations: locations,
metadata: make(map[Location]FileMetadata), metadata: make(map[Coordinates]FileMetadata),
extension: extension, extension: extension,
basename: basename, basename: basename,
} }
} }
func NewMockResolverForPathsWithMetadata(metadata map[Location]FileMetadata) *MockResolver { func NewMockResolverForPathsWithMetadata(metadata map[Coordinates]FileMetadata) *MockResolver {
var locations []Location var locations []Location
var mimeTypeIndex = make(map[string][]Location) var mimeTypeIndex = make(map[string][]Location)
extension := make(map[string][]Location) extension := make(map[string][]Location)
basename := make(map[string][]Location) basename := make(map[string][]Location)
for l, m := range metadata { for c, m := range metadata {
l := NewLocationFromCoordinates(c)
locations = append(locations, l) locations = append(locations, l)
mimeTypeIndex[m.MIMEType] = append(mimeTypeIndex[m.MIMEType], l) mimeTypeIndex[m.MIMEType] = append(mimeTypeIndex[m.MIMEType], l)
ext := path.Ext(l.RealPath) ext := path.Ext(l.RealPath)
@ -89,7 +90,7 @@ func (r MockResolver) String() string {
// path does not exist, an error is returned. // path does not exist, an error is returned.
func (r MockResolver) FileContentsByLocation(location Location) (io.ReadCloser, error) { func (r MockResolver) FileContentsByLocation(location Location) (io.ReadCloser, error) {
for _, l := range r.locations { for _, l := range r.locations {
if l == location { if l.Coordinates == location.Coordinates {
return os.Open(location.RealPath) return os.Open(location.RealPath)
} }
} }

View File

@ -5,6 +5,7 @@ import (
"testing" "testing"
"github.com/google/go-cmp/cmp" "github.com/google/go-cmp/cmp"
"github.com/scylladb/go-set/strset"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
@ -254,3 +255,63 @@ func TestPkgCoverageCatalogerConfiguration(t *testing.T) {
c.Catalogers = []string{"rust"} c.Catalogers = []string{"rust"}
assert.Len(t, cataloger.ImageCatalogers(c), 0) assert.Len(t, cataloger.ImageCatalogers(c), 0)
} }
func TestPkgCoverageImage_HasEvidence(t *testing.T) {
sbom, _ := catalogFixtureImage(t, "image-pkg-coverage", source.SquashedScope, nil)
var cases []testCase
cases = append(cases, commonTestCases...)
cases = append(cases, imageOnlyTestCases...)
pkgTypesMissingEvidence := strset.New()
for _, c := range cases {
t.Run(c.name, func(t *testing.T) {
for a := range sbom.Artifacts.PackageCatalog.Enumerate(c.pkgType) {
assert.NotEmpty(t, a.Locations.ToSlice(), "package %q has no locations (type=%q)", a.Name, a.Type)
for _, l := range a.Locations.ToSlice() {
if _, exists := l.Annotations[pkg.EvidenceAnnotationKey]; !exists {
pkgTypesMissingEvidence.Add(string(a.Type))
t.Errorf("missing evidence annotation (pkg=%s type=%s)", a.Name, a.Type)
}
}
}
})
}
if pkgTypesMissingEvidence.Size() > 0 {
t.Log("Package types missing evidence annotations (img resolver): ", pkgTypesMissingEvidence.List())
}
}
func TestPkgCoverageDirectory_HasEvidence(t *testing.T) {
sbom, _ := catalogDirectory(t, "test-fixtures/image-pkg-coverage")
var cases []testCase
cases = append(cases, commonTestCases...)
cases = append(cases, imageOnlyTestCases...)
pkgTypesMissingEvidence := strset.New()
for _, c := range cases {
t.Run(c.name, func(t *testing.T) {
for a := range sbom.Artifacts.PackageCatalog.Enumerate(c.pkgType) {
assert.NotEmpty(t, a.Locations.ToSlice(), "package %q has no locations (type=%q)", a.Name, a.Type)
for _, l := range a.Locations.ToSlice() {
if _, exists := l.Annotations[pkg.EvidenceAnnotationKey]; !exists {
pkgTypesMissingEvidence.Add(string(a.Type))
t.Errorf("missing evidence annotation (pkg=%s type=%s)", a.Name, a.Type)
}
}
}
})
}
if pkgTypesMissingEvidence.Size() > 0 {
t.Log("Package types missing evidence annotations (dir resolver): ", pkgTypesMissingEvidence.List())
}
}