Mirror of https://github.com/anchore/syft.git (synced 2025-11-17 16:33:21 +01:00)
return relationships from tasks
Signed-off-by: Alex Goodman <alex.goodman@anchore.com>
parent a906b9a03a
commit fef951c29b
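For orientation before reading the diff: the commit threads `[]artifact.Relationship` values from catalogers and power-user tasks up into the SBOM. A caller-side sketch of the API change to syft.CatalogPackages follows; the `buildSBOM` helper and the `example` package name are hypothetical, and the construction simply mirrors the packages command changes in the diff below.

```go
package example

import (
	"fmt"

	"github.com/anchore/syft/syft"
	"github.com/anchore/syft/syft/sbom"
	"github.com/anchore/syft/syft/source"
)

// buildSBOM is a hypothetical helper showing how a caller adapts to the new
// four-value return of syft.CatalogPackages introduced in this commit.
func buildSBOM(src *source.Source, scope source.Scope) (*sbom.SBOM, error) {
	// before this commit: catalog, d, err := syft.CatalogPackages(src, scope)
	catalog, relationships, d, err := syft.CatalogPackages(src, scope)
	if err != nil {
		return nil, fmt.Errorf("failed to catalog input: %w", err)
	}

	return &sbom.SBOM{
		Artifacts: sbom.Artifacts{
			PackageCatalog: catalog,
			Distro:         d,
		},
		Relationships: relationships, // newly propagated into the SBOM
		Source:        src.Metadata,
	}, nil
}
```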
@@ -255,7 +255,7 @@ func packagesExecWorker(userInput string) <-chan error {
}
defer cleanup()

-catalog, d, err := syft.CatalogPackages(src, appConfig.Package.Cataloger.ScopeOpt)
+catalog, relationships, d, err := syft.CatalogPackages(src, appConfig.Package.Cataloger.ScopeOpt)
if err != nil {
errs <- fmt.Errorf("failed to catalog input: %w", err)
return

@@ -273,6 +273,7 @@ func packagesExecWorker(userInput string) <-chan error {
PackageCatalog: catalog,
Distro: d,
},
+Relationships: relationships,
Source: src.Metadata,
}
@@ -4,6 +4,8 @@ import (
"fmt"
"sync"

+"github.com/anchore/syft/syft/artifact"
+
"github.com/anchore/syft/syft/sbom"

"github.com/anchore/stereoscope"

@@ -88,7 +90,6 @@ func powerUserExec(_ *cobra.Command, args []string) error {
ui.Select(isVerbose(), appConfig.Quiet, reporter)...,
)
}

func powerUserExecWorker(userInput string) <-chan error {
errs := make(chan error)
go func() {

@@ -109,28 +110,61 @@ func powerUserExecWorker(userInput string) <-chan error {
}
defer cleanup()

-analysisResults := sbom.SBOM{
+s := sbom.SBOM{
Source: src.Metadata,
}

-wg := &sync.WaitGroup{}
+var results []<-chan artifact.Relationship
for _, task := range tasks {
-wg.Add(1)
-go func(task powerUserTask) {
-defer wg.Done()
-if err = task(&analysisResults.Artifacts, src); err != nil {
-errs <- err
-return
-}
-}(task)
+c := make(chan artifact.Relationship)
+results = append(results, c)
+
+go runTask(task, &s.Artifacts, src, c, errs)
}

-wg.Wait()
+for relationship := range mergeResults(results...) {
+s.Relationships = append(s.Relationships, relationship)
+}

bus.Publish(partybus.Event{
Type: event.PresenterReady,
-Value: poweruser.NewJSONPresenter(analysisResults, *appConfig),
+Value: poweruser.NewJSONPresenter(s, *appConfig),
})
}()
return errs
}

+func runTask(t powerUserTask, a *sbom.Artifacts, src *source.Source, c chan<- artifact.Relationship, errs chan<- error) {
+defer close(c)
+
+relationships, err := t(a, src)
+if err != nil {
+errs <- err
+return
+}
+
+for _, relationship := range relationships {
+c <- relationship
+}
+}
+
+func mergeResults(cs ...<-chan artifact.Relationship) <-chan artifact.Relationship {
+var wg sync.WaitGroup
+var results = make(chan artifact.Relationship)
+
+wg.Add(len(cs))
+for _, c := range cs {
+go func(c <-chan artifact.Relationship) {
+for n := range c {
+results <- n
+}
+wg.Done()
+}(c)
+}
+
+go func() {
+wg.Wait()
+close(results)
+}()
+return results
+}
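A standalone illustration of the fan-in pattern that runTask/mergeResults introduce above, reduced to plain strings so it runs on its own. This is a hypothetical sketch, not part of the commit; the channel payloads and names are made up.

```go
package main

import (
	"fmt"
	"sync"
)

// merge fans multiple producer channels into one channel, mirroring
// mergeResults in the diff above.
func merge(cs ...<-chan string) <-chan string {
	var wg sync.WaitGroup
	out := make(chan string)

	wg.Add(len(cs))
	for _, c := range cs {
		go func(c <-chan string) {
			defer wg.Done()
			for v := range c {
				out <- v
			}
		}(c)
	}

	// close the merged channel only after every producer channel is drained
	go func() {
		wg.Wait()
		close(out)
	}()
	return out
}

func main() {
	a := make(chan string)
	b := make(chan string)
	go func() { a <- "relationship-from-task-A"; close(a) }()
	go func() { b <- "relationship-from-task-B"; close(b) }()

	for v := range merge(a, b) {
		fmt.Println(v)
	}
}
```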
@@ -4,6 +4,8 @@ import (
"crypto"
"fmt"

+"github.com/anchore/syft/syft/artifact"
+
"github.com/anchore/syft/syft/sbom"

"github.com/anchore/syft/syft"

@@ -11,7 +13,7 @@ import (
"github.com/anchore/syft/syft/source"
)

-type powerUserTask func(*sbom.Artifacts, *source.Source) error
+type powerUserTask func(*sbom.Artifacts, *source.Source) ([]artifact.Relationship, error)

func powerUserTasks() ([]powerUserTask, error) {
var tasks []powerUserTask

@@ -43,16 +45,16 @@ func catalogPackagesTask() (powerUserTask, error) {
return nil, nil
}

-task := func(results *sbom.Artifacts, src *source.Source) error {
-packageCatalog, theDistro, err := syft.CatalogPackages(src, appConfig.Package.Cataloger.ScopeOpt)
+task := func(results *sbom.Artifacts, src *source.Source) ([]artifact.Relationship, error) {
+packageCatalog, relationships, theDistro, err := syft.CatalogPackages(src, appConfig.Package.Cataloger.ScopeOpt)
if err != nil {
-return err
+return nil, err
}

results.PackageCatalog = packageCatalog
results.Distro = theDistro

-return nil
+return relationships, nil
}

return task, nil

@@ -65,18 +67,18 @@ func catalogFileMetadataTask() (powerUserTask, error) {

metadataCataloger := file.NewMetadataCataloger()

-task := func(results *sbom.Artifacts, src *source.Source) error {
+task := func(results *sbom.Artifacts, src *source.Source) ([]artifact.Relationship, error) {
resolver, err := src.FileResolver(appConfig.FileMetadata.Cataloger.ScopeOpt)
if err != nil {
-return err
+return nil, err
}

result, err := metadataCataloger.Catalog(resolver)
if err != nil {
-return err
+return nil, err
}
results.FileMetadata = result
-return nil
+return nil, nil
}

return task, nil

@@ -111,18 +113,18 @@ func catalogFileDigestsTask() (powerUserTask, error) {
return nil, err
}

-task := func(results *sbom.Artifacts, src *source.Source) error {
+task := func(results *sbom.Artifacts, src *source.Source) ([]artifact.Relationship, error) {
resolver, err := src.FileResolver(appConfig.FileMetadata.Cataloger.ScopeOpt)
if err != nil {
-return err
+return nil, err
}

result, err := digestsCataloger.Catalog(resolver)
if err != nil {
-return err
+return nil, err
}
results.FileDigests = result
-return nil
+return nil, nil
}

return task, nil

@@ -143,18 +145,18 @@ func catalogSecretsTask() (powerUserTask, error) {
return nil, err
}

-task := func(results *sbom.Artifacts, src *source.Source) error {
+task := func(results *sbom.Artifacts, src *source.Source) ([]artifact.Relationship, error) {
resolver, err := src.FileResolver(appConfig.Secrets.Cataloger.ScopeOpt)
if err != nil {
-return err
+return nil, err
}

result, err := secretsCataloger.Catalog(resolver)
if err != nil {
-return err
+return nil, err
}
results.Secrets = result
-return nil
+return nil, nil
}

return task, nil

@@ -171,18 +173,18 @@ func catalogFileClassificationsTask() (powerUserTask, error) {
return nil, err
}

-task := func(results *sbom.Artifacts, src *source.Source) error {
+task := func(results *sbom.Artifacts, src *source.Source) ([]artifact.Relationship, error) {
resolver, err := src.FileResolver(appConfig.FileClassification.Cataloger.ScopeOpt)
if err != nil {
-return err
+return nil, err
}

result, err := classifierCataloger.Catalog(resolver)
if err != nil {
-return err
+return nil, err
}
results.FileClassifications = result
-return nil
+return nil, nil
}

return task, nil

@@ -198,18 +200,18 @@ func catalogContentsTask() (powerUserTask, error) {
return nil, err
}

-task := func(results *sbom.Artifacts, src *source.Source) error {
+task := func(results *sbom.Artifacts, src *source.Source) ([]artifact.Relationship, error) {
resolver, err := src.FileResolver(appConfig.FileContents.Cataloger.ScopeOpt)
if err != nil {
-return err
+return nil, err
}

result, err := contentsCataloger.Catalog(resolver)
if err != nil {
-return err
+return nil, err
}
results.FileContents = result
-return nil
+return nil, nil
}

return task, nil
@@ -8,8 +8,8 @@ const (
type RelationshipType string

type Relationship struct {
-From ID
-To ID
-Type RelationshipType
-Data interface{}
+From ID `json:"from"`
+To ID `json:"to"`
+Type RelationshipType `json:"type"`
+Data interface{} `json:"data,omitempty"`
}
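A quick, runnable illustration of what the new struct tags produce when a Relationship is marshaled. This is not part of the commit: the ID type is reduced to a string and the from/to/type values are made up for the example.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// ID stands in for artifact.ID, simplified to a string for this sketch.
type ID string

type RelationshipType string

// Relationship mirrors the newly tagged struct in the diff above.
type Relationship struct {
	From ID               `json:"from"`
	To   ID               `json:"to"`
	Type RelationshipType `json:"type"`
	Data interface{}      `json:"data,omitempty"`
}

func main() {
	r := Relationship{From: "pkg-a", To: "pkg-b", Type: "example-relationship"}
	out, _ := json.Marshal(r)
	// Data is nil, so omitempty drops it:
	// {"from":"pkg-a","to":"pkg-b","type":"example-relationship"}
	fmt.Println(string(out))
}
```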
14 syft/lib.go

@@ -19,6 +19,8 @@ package syft
import (
"fmt"

+"github.com/anchore/syft/syft/artifact"
+
"github.com/anchore/syft/internal/bus"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/distro"

@@ -32,10 +34,10 @@ import (
// CatalogPackages takes an inventory of packages from the given image from a particular perspective
// (e.g. squashed source, all-layers source). Returns the discovered set of packages, the identified Linux
// distribution, and the source object used to wrap the data source.
-func CatalogPackages(src *source.Source, scope source.Scope) (*pkg.Catalog, *distro.Distro, error) {
+func CatalogPackages(src *source.Source, scope source.Scope) (*pkg.Catalog, []artifact.Relationship, *distro.Distro, error) {
resolver, err := src.FileResolver(scope)
if err != nil {
-return nil, nil, fmt.Errorf("unable to determine resolver while cataloging packages: %w", err)
+return nil, nil, nil, fmt.Errorf("unable to determine resolver while cataloging packages: %w", err)
}

// find the distro

@@ -59,15 +61,15 @@ func CatalogPackages(src *source.Source, scope source.Scope) (*pkg.Catalog, *dis
log.Info("cataloging directory")
catalogers = cataloger.DirectoryCatalogers()
default:
-return nil, nil, fmt.Errorf("unable to determine cataloger set from scheme=%+v", src.Metadata.Scheme)
+return nil, nil, nil, fmt.Errorf("unable to determine cataloger set from scheme=%+v", src.Metadata.Scheme)
}

-catalog, err := cataloger.Catalog(resolver, theDistro, catalogers...)
+catalog, relationships, err := cataloger.Catalog(resolver, theDistro, catalogers...)
if err != nil {
-return nil, nil, err
+return nil, nil, nil, err
}

-return catalog, theDistro, nil
+return catalog, relationships, theDistro, nil
}

// SetLogger sets the logger object used for all syft logging calls.
@@ -8,6 +8,8 @@ import (
"strconv"
"strings"

+"github.com/anchore/syft/syft/artifact"
+
"github.com/anchore/syft/syft/file"

"github.com/anchore/syft/internal/log"

@@ -21,7 +23,7 @@ var _ common.ParserFn = parseApkDB

// parseApkDb parses individual packages from a given Alpine DB file. For more information on specific fields
// see https://wiki.alpinelinux.org/wiki/Apk_spec .
-func parseApkDB(_ string, reader io.Reader) ([]pkg.Package, error) {
+func parseApkDB(_ string, reader io.Reader) ([]pkg.Package, []artifact.Relationship, error) {
// larger capacity for the scanner.
const maxScannerCapacity = 1024 * 1024
// a new larger buffer for the scanner

@@ -47,7 +49,7 @@ func parseApkDB(_ string, reader io.Reader) ([]pkg.Package, error) {
for scanner.Scan() {
metadata, err := parseApkDBEntry(strings.NewReader(scanner.Text()))
if err != nil {
-return nil, err
+return nil, nil, err
}
if metadata != nil {
packages = append(packages, pkg.Package{

@@ -62,10 +64,10 @@ func parseApkDB(_ string, reader io.Reader) ([]pkg.Package, error) {
}

if err := scanner.Err(); err != nil {
-return nil, fmt.Errorf("failed to parse APK DB file: %w", err)
+return nil, nil, fmt.Errorf("failed to parse APK DB file: %w", err)
}

-return packages, nil
+return packages, nil, nil
}

// nolint:funlen
@@ -3,6 +3,7 @@ package cataloger
import (
"github.com/anchore/syft/internal/bus"
"github.com/anchore/syft/internal/log"
+"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/distro"
"github.com/anchore/syft/syft/event"
"github.com/anchore/syft/syft/pkg"

@@ -38,8 +39,9 @@ func newMonitor() (*progress.Manual, *progress.Manual) {
// In order to efficiently retrieve contents from a underlying container image the content fetch requests are
// done in bulk. Specifically, all files of interest are collected from each catalogers and accumulated into a single
// request.
-func Catalog(resolver source.FileResolver, theDistro *distro.Distro, catalogers ...Cataloger) (*pkg.Catalog, error) {
+func Catalog(resolver source.FileResolver, theDistro *distro.Distro, catalogers ...Cataloger) (*pkg.Catalog, []artifact.Relationship, error) {
catalog := pkg.NewCatalog()
+var allRelationships []artifact.Relationship

filesProcessed, packagesDiscovered := newMonitor()

@@ -47,7 +49,7 @@ func Catalog(resolver source.FileResolver, theDistro *distro.Distro, catalogers
var errs error
for _, theCataloger := range catalogers {
// find packages from the underlying raw data
-packages, err := theCataloger.Catalog(resolver)
+packages, relationships, err := theCataloger.Catalog(resolver)
if err != nil {
errs = multierror.Append(errs, err)
continue

@@ -68,14 +70,16 @@ func Catalog(resolver source.FileResolver, theDistro *distro.Distro, catalogers
// add to catalog
catalog.Add(p)
}

+allRelationships = append(allRelationships, relationships...)
}

if errs != nil {
-return nil, errs
+return nil, nil, errs
}

filesProcessed.SetCompleted()
packagesDiscovered.SetCompleted()

-return catalog, nil
+return catalog, allRelationships, nil
}
@@ -6,6 +6,7 @@ catalogers defined in child packages as well as the interface definition to impl
package cataloger

import (
+"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/apkdb"
"github.com/anchore/syft/syft/pkg/cataloger/deb"

@@ -26,7 +27,7 @@ type Cataloger interface {
// Name returns a string that uniquely describes a cataloger
Name() string
// Catalog is given an object to resolve file references and content, this function returns any discovered Packages after analyzing the catalog source.
-Catalog(resolver source.FileResolver) ([]pkg.Package, error)
+Catalog(resolver source.FileResolver) ([]pkg.Package, []artifact.Relationship, error)
}

// ImageCatalogers returns a slice of locally implemented catalogers that are fit for detecting installations of packages.
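A minimal sketch of what implementing the updated Cataloger interface now looks like. This is a hypothetical example (the `noopCataloger` type and the `example` package do not exist in the repository); it assumes the syft module at this commit and simply reports no packages and no relationships.

```go
package example

import (
	"github.com/anchore/syft/syft/artifact"
	"github.com/anchore/syft/syft/pkg"
	"github.com/anchore/syft/syft/source"
)

// noopCataloger satisfies the updated Cataloger interface shown in the diff above.
type noopCataloger struct{}

// Name uniquely describes this cataloger.
func (n noopCataloger) Name() string { return "noop-cataloger" }

// Catalog now returns discovered relationships alongside packages; this
// implementation finds nothing and returns empty results.
func (n noopCataloger) Catalog(_ source.FileResolver) ([]pkg.Package, []artifact.Relationship, error) {
	return nil, nil, nil
}
```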
@@ -6,6 +6,8 @@ package common
import (
"fmt"

+"github.com/anchore/syft/syft/artifact"
+
"github.com/anchore/syft/internal"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/pkg"

@@ -35,18 +37,18 @@ func (c *GenericCataloger) Name() string {
}

// Catalog is given an object to resolve file references and content, this function returns any discovered Packages after analyzing the catalog source.
-func (c *GenericCataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, error) {
+func (c *GenericCataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []artifact.Relationship, error) {
var packages []pkg.Package
-parserByLocation := c.selectFiles(resolver)
+var relationships []artifact.Relationship

-for location, parser := range parserByLocation {
+for location, parser := range c.selectFiles(resolver) {
contentReader, err := resolver.FileContentsByLocation(location)
if err != nil {
// TODO: fail or log?
-return nil, fmt.Errorf("unable to fetch contents for location=%v : %w", location, err)
+return nil, nil, fmt.Errorf("unable to fetch contents for location=%v : %w", location, err)
}

-entries, err := parser(location.RealPath, contentReader)
+discoveredPackages, discoveredRelationships, err := parser(location.RealPath, contentReader)
internal.CloseAndLogError(contentReader, location.VirtualPath)
if err != nil {
// TODO: should we fail? or only log?

@@ -54,14 +56,16 @@ func (c *GenericCataloger) Catalog(resolver source.FileResolver) ([]pkg.Package,
continue
}

-for _, entry := range entries {
-entry.FoundBy = c.upstreamCataloger
-entry.Locations = []source.Location{location}
+for _, p := range discoveredPackages {
+p.FoundBy = c.upstreamCataloger
+p.Locations = append(p.Locations, location)

-packages = append(packages, entry)
+packages = append(packages, p)
}

+relationships = append(relationships, discoveredRelationships...)
}
-return packages, nil
+return packages, relationships, nil
}

// SelectFiles takes a set of file trees and resolves and file references of interest for future cataloging
@@ -8,11 +8,12 @@ import (

"github.com/stretchr/testify/assert"

+"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/source"
)

-func parser(_ string, reader io.Reader) ([]pkg.Package, error) {
+func parser(_ string, reader io.Reader) ([]pkg.Package, []artifact.Relationship, error) {
contents, err := ioutil.ReadAll(reader)
if err != nil {
panic(err)

@@ -21,7 +22,7 @@ func parser(_ string, reader io.Reader) ([]pkg.Package, error) {
{
Name: string(contents),
},
-}, nil
+}, nil, nil
}

func TestGenericCataloger(t *testing.T) {

@@ -47,7 +48,7 @@ func TestGenericCataloger(t *testing.T) {
}
}

-actualPkgs, err := cataloger.Catalog(resolver)
+actualPkgs, _, err := cataloger.Catalog(resolver)
assert.NoError(t, err)
assert.Len(t, actualPkgs, len(expectedPkgs))
@@ -3,8 +3,9 @@ package common
import (
"io"

+"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg"
)

// ParserFn standardizes a function signature for parser functions that accept the virtual file path (not usable for file reads) and contents and return any discovered packages from that file
-type ParserFn func(string, io.Reader) ([]pkg.Package, error)
+type ParserFn func(string, io.Reader) ([]pkg.Package, []artifact.Relationship, error)
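A minimal sketch of a parser matching the new ParserFn shape. It is hypothetical (the `parseLines` function and its one-package-per-line format are invented for illustration), assumes the syft module at this commit, and is written as if placed in the common package.

```go
package common

import (
	"bufio"
	"io"
	"strings"

	"github.com/anchore/syft/syft/artifact"
	"github.com/anchore/syft/syft/pkg"
)

// compile-time check that parseLines satisfies the updated ParserFn signature
var _ ParserFn = parseLines

// parseLines is a toy parser: one package per non-empty line, no relationships.
func parseLines(_ string, reader io.Reader) ([]pkg.Package, []artifact.Relationship, error) {
	var packages []pkg.Package
	scanner := bufio.NewScanner(reader)
	for scanner.Scan() {
		line := strings.TrimSpace(scanner.Text())
		if line == "" {
			continue
		}
		packages = append(packages, pkg.Package{Name: line})
	}
	if err := scanner.Err(); err != nil {
		return nil, nil, err
	}
	// this parser discovers no relationships, so the middle return is nil
	return packages, nil, nil
}
```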
@@ -13,6 +13,7 @@ import (
"github.com/anchore/syft/internal"

"github.com/anchore/syft/internal/log"
+"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/source"
)

@@ -36,24 +37,24 @@ func (c *Cataloger) Name() string {
}

// Catalog is given an object to resolve file references and content, this function returns any discovered Packages after analyzing dpkg support files.
-func (c *Cataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, error) {
+func (c *Cataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []artifact.Relationship, error) {
dbFileMatches, err := resolver.FilesByGlob(pkg.DpkgDBGlob)
if err != nil {
-return nil, fmt.Errorf("failed to find dpkg status files's by glob: %w", err)
+return nil, nil, fmt.Errorf("failed to find dpkg status files's by glob: %w", err)
}

-var results []pkg.Package
-var pkgs []pkg.Package
+var allPackages []pkg.Package
+var allRelationships []artifact.Relationship
for _, dbLocation := range dbFileMatches {
dbContents, err := resolver.FileContentsByLocation(dbLocation)
if err != nil {
-return nil, err
+return nil, nil, err
}

-pkgs, err = parseDpkgStatus(dbContents)
+pkgs, relationships, err := parseDpkgStatus(dbContents)
internal.CloseAndLogError(dbContents, dbLocation.VirtualPath)
if err != nil {
-return nil, fmt.Errorf("unable to catalog dpkg package=%+v: %w", dbLocation.RealPath, err)
+return nil, nil, fmt.Errorf("unable to catalog dpkg package=%+v: %w", dbLocation.RealPath, err)
}

for i := range pkgs {

@@ -70,9 +71,10 @@ func (c *Cataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, error)
addLicenses(resolver, dbLocation, p)
}

-results = append(results, pkgs...)
+allPackages = append(allPackages, pkgs...)
+allRelationships = append(allRelationships, relationships...)
}
-return results, nil
+return allPackages, allRelationships, nil
}

func addLicenses(resolver source.FileResolver, dbLocation source.Location, p *pkg.Package) {
@@ -11,6 +11,7 @@ import (

"github.com/anchore/syft/internal"

+"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg"
"github.com/mitchellh/mapstructure"
)

@@ -21,9 +22,9 @@ var (
)

// parseDpkgStatus is a parser function for Debian DB status contents, returning all Debian packages listed.
-func parseDpkgStatus(reader io.Reader) ([]pkg.Package, error) {
+func parseDpkgStatus(reader io.Reader) ([]pkg.Package, []artifact.Relationship, error) {
buffedReader := bufio.NewReader(reader)
-var packages = make([]pkg.Package, 0)
+var packages []pkg.Package

continueProcessing := true
for continueProcessing {

@@ -32,7 +33,7 @@ func parseDpkgStatus(reader io.Reader) ([]pkg.Package, error) {
if errors.Is(err, errEndOfPackages) {
continueProcessing = false
} else {
-return nil, err
+return nil, nil, err
}
}

@@ -47,7 +48,7 @@ func parseDpkgStatus(reader io.Reader) ([]pkg.Package, error) {
}
}

-return packages, nil
+return packages, nil, nil
}

// parseDpkgStatusEntry returns an individual Dpkg entry, or returns errEndOfPackages if there are no more packages to parse from the reader.
@@ -6,7 +6,10 @@ package golang
import (
"fmt"

+"github.com/anchore/syft/internal"
+
"github.com/anchore/syft/internal/log"
+"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/source"
)

@@ -35,27 +38,30 @@ func (c *Cataloger) Name() string {
}

// Catalog is given an object to resolve file references and content, this function returns any discovered Packages after analyzing rpm db installation.
-func (c *Cataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, error) {
-pkgs := make([]pkg.Package, 0)
+func (c *Cataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []artifact.Relationship, error) {
+var pkgs []pkg.Package
+var relationships []artifact.Relationship

fileMatches, err := resolver.FilesByMIMEType(mimeTypes...)
if err != nil {
-return pkgs, fmt.Errorf("failed to find bin by mime types: %w", err)
+return pkgs, nil, fmt.Errorf("failed to find bin by mime types: %w", err)
}

for _, location := range fileMatches {
r, err := resolver.FileContentsByLocation(location)
if err != nil {
-return pkgs, fmt.Errorf("failed to resolve file contents by location: %w", err)
+return pkgs, nil, fmt.Errorf("failed to resolve file contents by location: %w", err)
}

-goPkgs, err := parseGoBin(location, r)
+goPkgs, goRelationships, err := parseGoBin(location, r)
if err != nil {
log.Warnf("could not parse possible go binary: %+v", err)
}

-r.Close()
+internal.CloseAndLogError(r, location.RealPath)
pkgs = append(pkgs, goPkgs...)
+relationships = append(relationships, goRelationships...)
}

-return pkgs, nil
+return pkgs, relationships, nil
}
@@ -5,6 +5,7 @@ import (
"io"
"strings"

+"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/source"
)

@@ -14,18 +15,16 @@ const (
replaceIdentifier = "=>"
)

-func parseGoBin(location source.Location, reader io.ReadCloser) ([]pkg.Package, error) {
+func parseGoBin(location source.Location, reader io.ReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
// Identify if bin was compiled by go
x, err := openExe(reader)
if err != nil {
-return nil, err
+return nil, nil, err
}

goVersion, mod := findVers(x)

-pkgs := buildGoPkgInfo(location, mod, goVersion)
-
-return pkgs, nil
+return buildGoPkgInfo(location, mod, goVersion), nil, nil
}

func buildGoPkgInfo(location source.Location, mod, goVersion string) []pkg.Package {
@@ -6,22 +6,23 @@ import (
"io/ioutil"
"sort"

+"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg"
"golang.org/x/mod/modfile"
)

// parseGoMod takes a go.mod and lists all packages discovered.
-func parseGoMod(path string, reader io.Reader) ([]pkg.Package, error) {
+func parseGoMod(path string, reader io.Reader) ([]pkg.Package, []artifact.Relationship, error) {
packages := make(map[string]pkg.Package)

contents, err := ioutil.ReadAll(reader)
if err != nil {
-return nil, fmt.Errorf("failed to read go module: %w", err)
+return nil, nil, fmt.Errorf("failed to read go module: %w", err)
}

file, err := modfile.Parse(path, contents, nil)
if err != nil {
-return nil, fmt.Errorf("failed to parse go module: %w", err)
+return nil, nil, fmt.Errorf("failed to parse go module: %w", err)
}

for _, m := range file.Require {

@@ -59,5 +60,5 @@ func parseGoMod(path string, reader io.Reader) ([]pkg.Package, error) {
return pkgsSlice[i].Name < pkgsSlice[j].Name
})

-return pkgsSlice, nil
+return pkgsSlice, nil, nil
}
@@ -9,6 +9,7 @@ import (
"github.com/anchore/syft/internal/log"

"github.com/anchore/syft/internal/file"
+"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/common"
)

@@ -34,12 +35,12 @@ type archiveParser struct {
}

// parseJavaArchive is a parser function for java archive contents, returning all Java libraries and nested archives.
-func parseJavaArchive(virtualPath string, reader io.Reader) ([]pkg.Package, error) {
+func parseJavaArchive(virtualPath string, reader io.Reader) ([]pkg.Package, []artifact.Relationship, error) {
parser, cleanupFn, err := newJavaArchiveParser(virtualPath, reader, true)
// note: even on error, we should always run cleanup functions
defer cleanupFn()
if err != nil {
-return nil, err
+return nil, nil, err
}
return parser.parse()
}

@@ -80,29 +81,31 @@ func newJavaArchiveParser(virtualPath string, reader io.Reader, detectNested boo
}

// parse the loaded archive and return all packages found.
-func (j *archiveParser) parse() ([]pkg.Package, error) {
-var pkgs = make([]pkg.Package, 0)
+func (j *archiveParser) parse() ([]pkg.Package, []artifact.Relationship, error) {
+var pkgs []pkg.Package
+var relationships []artifact.Relationship

// find the parent package from the java manifest
parentPkg, err := j.discoverMainPackage()
if err != nil {
-return nil, fmt.Errorf("could not generate package from %s: %w", j.virtualPath, err)
+return nil, nil, fmt.Errorf("could not generate package from %s: %w", j.virtualPath, err)
}

// find aux packages from pom.properties/pom.xml and potentially modify the existing parentPkg
auxPkgs, err := j.discoverPkgsFromAllMavenFiles(parentPkg)
if err != nil {
-return nil, err
+return nil, nil, err
}
pkgs = append(pkgs, auxPkgs...)

if j.detectNested {
// find nested java archive packages
-nestedPkgs, err := j.discoverPkgsFromNestedArchives(parentPkg)
+nestedPkgs, nestedRelationships, err := j.discoverPkgsFromNestedArchives(parentPkg)
if err != nil {
-return nil, err
+return nil, nil, err
}
pkgs = append(pkgs, nestedPkgs...)
+relationships = append(relationships, nestedRelationships...)
}

// lastly, add the parent package to the list (assuming the parent exists)

@@ -110,7 +113,7 @@ func (j *archiveParser) parse() ([]pkg.Package, error) {
pkgs = append([]pkg.Package{*parentPkg}, pkgs...)
}

-return pkgs, nil
+return pkgs, relationships, nil
}

// discoverMainPackage parses the root Java manifest used as the parent package to all discovered nested packages.

@@ -189,31 +192,32 @@ func (j *archiveParser) discoverPkgsFromAllMavenFiles(parentPkg *pkg.Package) ([

// discoverPkgsFromNestedArchives finds Java archives within Java archives, returning all listed Java packages found and
// associating each discovered package to the given parent package.
-func (j *archiveParser) discoverPkgsFromNestedArchives(parentPkg *pkg.Package) ([]pkg.Package, error) {
-var pkgs = make([]pkg.Package, 0)
+func (j *archiveParser) discoverPkgsFromNestedArchives(parentPkg *pkg.Package) ([]pkg.Package, []artifact.Relationship, error) {
+var pkgs []pkg.Package
+var relationships []artifact.Relationship

// search and parse pom.properties files & fetch the contents
openers, err := file.ExtractFromZipToUniqueTempFile(j.archivePath, j.contentPath, j.fileManifest.GlobMatch(archiveFormatGlobs...)...)
if err != nil {
-return nil, fmt.Errorf("unable to extract files from zip: %w", err)
+return nil, nil, fmt.Errorf("unable to extract files from zip: %w", err)
}

// discover nested artifacts
for archivePath, archiveOpener := range openers {
archiveReadCloser, err := archiveOpener.Open()
if err != nil {
-return nil, fmt.Errorf("unable to open archived file from tempdir: %w", err)
+return nil, nil, fmt.Errorf("unable to open archived file from tempdir: %w", err)
}
nestedPath := fmt.Sprintf("%s:%s", j.virtualPath, archivePath)
-nestedPkgs, err := parseJavaArchive(nestedPath, archiveReadCloser)
+nestedPkgs, nestedRelationships, err := parseJavaArchive(nestedPath, archiveReadCloser)
if err != nil {
if closeErr := archiveReadCloser.Close(); closeErr != nil {
log.Warnf("unable to close archived file from tempdir: %+v", closeErr)
}
-return nil, fmt.Errorf("unable to process nested java archive (%s): %w", archivePath, err)
+return nil, nil, fmt.Errorf("unable to process nested java archive (%s): %w", archivePath, err)
}
if err = archiveReadCloser.Close(); err != nil {
-return nil, fmt.Errorf("unable to close archived file from tempdir: %w", err)
+return nil, nil, fmt.Errorf("unable to close archived file from tempdir: %w", err)
}

// attach the parent package to all discovered packages that are not already associated with a java archive

@@ -226,9 +230,11 @@ func (j *archiveParser) discoverPkgsFromNestedArchives(parentPkg *pkg.Package) (
}
pkgs = append(pkgs, p)
}

+relationships = append(relationships, nestedRelationships...)
}

-return pkgs, nil
+return pkgs, relationships, nil
}

func pomPropertiesByParentPath(archivePath string, extractPaths []string, virtualPath string) (map[string]pkg.PomProperties, error) {
@@ -13,6 +13,7 @@ import (

"github.com/mitchellh/mapstructure"

+"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/common"
)

@@ -162,8 +163,8 @@ func licensesFromJSON(p PackageJSON) ([]string, error) {
}

// parsePackageJSON parses a package.json and returns the discovered JavaScript packages.
-func parsePackageJSON(_ string, reader io.Reader) ([]pkg.Package, error) {
-packages := make([]pkg.Package, 0)
+func parsePackageJSON(_ string, reader io.Reader) ([]pkg.Package, []artifact.Relationship, error) {
+var packages []pkg.Package
dec := json.NewDecoder(reader)

for {

@@ -171,17 +172,17 @@ func parsePackageJSON(_ string, reader io.Reader) ([]pkg.Package, error) {
if err := dec.Decode(&p); err == io.EOF {
break
} else if err != nil {
-return nil, fmt.Errorf("failed to parse package.json file: %w", err)
+return nil, nil, fmt.Errorf("failed to parse package.json file: %w", err)
}

if !p.hasNameAndVersionValues() {
log.Debug("encountered package.json file without a name and/or version field, ignoring this file")
-return nil, nil
+return nil, nil, nil
}

licenses, err := licensesFromJSON(p)
if err != nil {
-return nil, fmt.Errorf("failed to parse package.json file: %w", err)
+return nil, nil, fmt.Errorf("failed to parse package.json file: %w", err)
}

packages = append(packages, pkg.Package{

@@ -200,7 +201,7 @@ func parsePackageJSON(_ string, reader io.Reader) ([]pkg.Package, error) {
})
}

-return packages, nil
+return packages, nil, nil
}

func (p PackageJSON) hasNameAndVersionValues() bool {
@@ -5,6 +5,7 @@ import (
"fmt"
"io"

+"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/common"
)

@@ -28,11 +29,11 @@ type Dependency struct {
}

// parsePackageLock parses a package-lock.json and returns the discovered JavaScript packages.
-func parsePackageLock(path string, reader io.Reader) ([]pkg.Package, error) {
+func parsePackageLock(path string, reader io.Reader) ([]pkg.Package, []artifact.Relationship, error) {
// in the case we find package-lock.json files in the node_modules directories, skip those
// as the whole purpose of the lock file is for the specific dependencies of the root project
if pathContainsNodeModulesDirectory(path) {
-return nil, nil
+return nil, nil, nil
}

var packages []pkg.Package

@@ -43,7 +44,7 @@ func parsePackageLock(path string, reader io.Reader) ([]pkg.Package, error) {
if err := dec.Decode(&lock); err == io.EOF {
break
} else if err != nil {
-return nil, fmt.Errorf("failed to parse package-lock.json file: %w", err)
+return nil, nil, fmt.Errorf("failed to parse package-lock.json file: %w", err)
}
for name, pkgMeta := range lock.Dependencies {
packages = append(packages, pkg.Package{

@@ -55,5 +56,5 @@ func parsePackageLock(path string, reader io.Reader) ([]pkg.Package, error) {
}
}

-return packages, nil
+return packages, nil, nil
}
|
||||
"regexp"
|
||||
|
||||
"github.com/anchore/syft/internal"
|
||||
"github.com/anchore/syft/syft/artifact"
|
||||
"github.com/anchore/syft/syft/pkg"
|
||||
"github.com/anchore/syft/syft/pkg/cataloger/common"
|
||||
)
|
||||
@ -34,11 +35,11 @@ const (
|
||||
noVersion = ""
|
||||
)
|
||||
|
||||
func parseYarnLock(path string, reader io.Reader) ([]pkg.Package, error) {
|
||||
func parseYarnLock(path string, reader io.Reader) ([]pkg.Package, []artifact.Relationship, error) {
|
||||
// in the case we find yarn.lock files in the node_modules directories, skip those
|
||||
// as the whole purpose of the lock file is for the specific dependencies of the project
|
||||
if pathContainsNodeModulesDirectory(path) {
|
||||
return nil, nil
|
||||
return nil, nil, nil
|
||||
}
|
||||
|
||||
var packages []pkg.Package
|
||||
@ -79,10 +80,10 @@ func parseYarnLock(path string, reader io.Reader) ([]pkg.Package, error) {
|
||||
}
|
||||
|
||||
if err := scanner.Err(); err != nil {
|
||||
return nil, fmt.Errorf("failed to parse yarn.lock file: %w", err)
|
||||
return nil, nil, fmt.Errorf("failed to parse yarn.lock file: %w", err)
|
||||
}
|
||||
|
||||
return packages, nil
|
||||
return packages, nil, nil
|
||||
}
|
||||
|
||||
func findPackageName(line string) string {
|
||||
|
||||
@@ -7,6 +7,7 @@ import (

"github.com/anchore/syft/internal"

+"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg"

"github.com/anchore/syft/syft/source"

@@ -31,13 +32,13 @@ func (c *PackageCataloger) Name() string {
}

// Catalog is given an object to resolve file references and content, this function returns any discovered Packages after analyzing python egg and wheel installations.
-func (c *PackageCataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, error) {
+func (c *PackageCataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []artifact.Relationship, error) {
var fileMatches []source.Location

for _, glob := range []string{eggMetadataGlob, wheelMetadataGlob, eggFileMetadataGlob} {
matches, err := resolver.FilesByGlob(glob)
if err != nil {
-return nil, fmt.Errorf("failed to find files by glob: %s", glob)
+return nil, nil, fmt.Errorf("failed to find files by glob: %s", glob)
}
fileMatches = append(fileMatches, matches...)
}

@@ -46,13 +47,13 @@ func (c *PackageCataloger) Catalog(resolver source.FileResolver) ([]pkg.Package,
for _, location := range fileMatches {
p, err := c.catalogEggOrWheel(resolver, location)
if err != nil {
-return nil, fmt.Errorf("unable to catalog python package=%+v: %w", location.RealPath, err)
+return nil, nil, fmt.Errorf("unable to catalog python package=%+v: %w", location.RealPath, err)
}
if p != nil {
pkgs = append(pkgs, *p)
}
}
-return pkgs, nil
+return pkgs, nil, nil
}

// catalogEggOrWheel takes the primary metadata file reference and returns the python package it represents.
@@ -6,6 +6,7 @@ import (
"io"
"strings"

+"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/common"
)

@@ -37,7 +38,7 @@ type Dependency struct {
var _ common.ParserFn = parsePipfileLock

// parsePipfileLock is a parser function for Pipfile.lock contents, returning "Default" python packages discovered.
-func parsePipfileLock(_ string, reader io.Reader) ([]pkg.Package, error) {
+func parsePipfileLock(_ string, reader io.Reader) ([]pkg.Package, []artifact.Relationship, error) {
packages := make([]pkg.Package, 0)
dec := json.NewDecoder(reader)

@@ -46,7 +47,7 @@ func parsePipfileLock(_ string, reader io.Reader) ([]pkg.Package, error) {
if err := dec.Decode(&lock); err == io.EOF {
break
} else if err != nil {
-return nil, fmt.Errorf("failed to parse Pipfile.lock file: %w", err)
+return nil, nil, fmt.Errorf("failed to parse Pipfile.lock file: %w", err)
}
for name, pkgMeta := range lock.Default {
version := strings.TrimPrefix(pkgMeta.Version, "==")

@@ -59,5 +60,5 @@ func parsePipfileLock(_ string, reader io.Reader) ([]pkg.Package, error) {
}
}

-return packages, nil
+return packages, nil, nil
}
@@ -4,6 +4,7 @@ import (
"fmt"
"io"

+"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/common"
"github.com/pelletier/go-toml"

@@ -13,17 +14,17 @@ import (
var _ common.ParserFn = parsePoetryLock

// parsePoetryLock is a parser function for poetry.lock contents, returning all python packages discovered.
-func parsePoetryLock(_ string, reader io.Reader) ([]pkg.Package, error) {
+func parsePoetryLock(_ string, reader io.Reader) ([]pkg.Package, []artifact.Relationship, error) {
tree, err := toml.LoadReader(reader)
if err != nil {
-return nil, fmt.Errorf("unable to load poetry.lock for parsing: %v", err)
+return nil, nil, fmt.Errorf("unable to load poetry.lock for parsing: %v", err)
}

metadata := PoetryMetadata{}
err = tree.Unmarshal(&metadata)
if err != nil {
-return nil, fmt.Errorf("unable to parse poetry.lock: %v", err)
+return nil, nil, fmt.Errorf("unable to parse poetry.lock: %v", err)
}

-return metadata.Pkgs(), nil
+return metadata.Pkgs(), nil, nil
}
@@ -6,6 +6,7 @@ import (
"io"
"strings"

+"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/common"
)

@@ -15,7 +16,7 @@ var _ common.ParserFn = parseRequirementsTxt

// parseRequirementsTxt takes a Python requirements.txt file, returning all Python packages that are locked to a
// specific version.
-func parseRequirementsTxt(_ string, reader io.Reader) ([]pkg.Package, error) {
+func parseRequirementsTxt(_ string, reader io.Reader) ([]pkg.Package, []artifact.Relationship, error) {
packages := make([]pkg.Package, 0)

scanner := bufio.NewScanner(reader)

@@ -55,10 +56,10 @@ func parseRequirementsTxt(_ string, reader io.Reader) ([]pkg.Package, error) {
}

if err := scanner.Err(); err != nil {
-return nil, fmt.Errorf("failed to parse python requirements file: %w", err)
+return nil, nil, fmt.Errorf("failed to parse python requirements file: %w", err)
}

-return packages, nil
+return packages, nil, nil
}

// removeTrailingComment takes a requirements.txt line and strips off comment strings.
@@ -6,6 +6,7 @@ import (
"regexp"
"strings"

+"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/common"
)

@@ -19,7 +20,7 @@ var _ common.ParserFn = parseSetup
// " mypy2 == v0.770", ' mypy3== v0.770', --> match(name=mypy2 version=v0.770), match(name=mypy3, version=v0.770)
var pinnedDependency = regexp.MustCompile(`['"]\W?(\w+\W?==\W?[\w\.]*)`)

-func parseSetup(_ string, reader io.Reader) ([]pkg.Package, error) {
+func parseSetup(_ string, reader io.Reader) ([]pkg.Package, []artifact.Relationship, error) {
packages := make([]pkg.Package, 0)

scanner := bufio.NewScanner(reader)

@@ -46,5 +47,5 @@ func parseSetup(_ string, reader io.Reader) ([]pkg.Package, error) {
}
}

-return packages, nil
+return packages, nil, nil
}
@@ -8,6 +8,7 @@ import (

"github.com/anchore/syft/internal"

+"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/source"
)

@@ -27,24 +28,26 @@ func (c *Cataloger) Name() string {
}

// Catalog is given an object to resolve file references and content, this function returns any discovered Packages after analyzing rpm db installation.
-func (c *Cataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, error) {
+func (c *Cataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []artifact.Relationship, error) {
fileMatches, err := resolver.FilesByGlob(pkg.RpmDBGlob)
if err != nil {
-return nil, fmt.Errorf("failed to find rpmdb's by glob: %w", err)
+return nil, nil, fmt.Errorf("failed to find rpmdb's by glob: %w", err)
}

var pkgs []pkg.Package
for _, location := range fileMatches {
dbContentReader, err := resolver.FileContentsByLocation(location)
if err != nil {
-return nil, err
+return nil, nil, err
}

-pkgs, err = parseRpmDB(resolver, location, dbContentReader)
+discoveredPkgs, err := parseRpmDB(resolver, location, dbContentReader)
internal.CloseAndLogError(dbContentReader, location.VirtualPath)
if err != nil {
-return nil, fmt.Errorf("unable to catalog rpmdb package=%+v: %w", location.RealPath, err)
+return nil, nil, fmt.Errorf("unable to catalog rpmdb package=%+v: %w", location.RealPath, err)
}

+pkgs = append(pkgs, discoveredPkgs...)
}
-return pkgs, nil
+return pkgs, nil, nil
}
@@ -6,6 +6,7 @@ import (
"strings"

"github.com/anchore/syft/internal"
+"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/common"
)

@@ -16,7 +17,7 @@ var _ common.ParserFn = parseGemFileLockEntries
var sectionsOfInterest = internal.NewStringSetFromSlice([]string{"GEM"})

// parseGemFileLockEntries is a parser function for Gemfile.lock contents, returning all Gems discovered.
-func parseGemFileLockEntries(_ string, reader io.Reader) ([]pkg.Package, error) {
+func parseGemFileLockEntries(_ string, reader io.Reader) ([]pkg.Package, []artifact.Relationship, error) {
pkgs := make([]pkg.Package, 0)
scanner := bufio.NewScanner(reader)

@@ -49,9 +50,9 @@ func parseGemFileLockEntries(_ string, reader io.Reader) ([]pkg.Package, error)
}
}
if err := scanner.Err(); err != nil {
-return nil, err
+return nil, nil, err
}
-return pkgs, nil
+return pkgs, nil, nil
}

func isDependencyLine(line string) bool {
@@ -12,6 +12,7 @@ import (

"github.com/mitchellh/mapstructure"

+"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/common"
)

@@ -60,7 +61,7 @@ func processList(s string) []string {
return results
}

-func parseGemSpecEntries(_ string, reader io.Reader) ([]pkg.Package, error) {
+func parseGemSpecEntries(_ string, reader io.Reader) ([]pkg.Package, []artifact.Relationship, error) {
var pkgs []pkg.Package
var fields = make(map[string]interface{})
scanner := bufio.NewScanner(reader)

@@ -93,7 +94,7 @@ func parseGemSpecEntries(_ string, reader io.Reader) ([]pkg.Package, error) {
if fields["name"] != "" && fields["version"] != "" {
var metadata pkg.GemMetadata
if err := mapstructure.Decode(fields, &metadata); err != nil {
-return nil, fmt.Errorf("unable to decode gem metadata: %w", err)
+return nil, nil, fmt.Errorf("unable to decode gem metadata: %w", err)
}

pkgs = append(pkgs, pkg.Package{

@@ -107,7 +108,7 @@ func parseGemSpecEntries(_ string, reader io.Reader) ([]pkg.Package, error) {
})
}

-return pkgs, nil
+return pkgs, nil, nil
}

// renderUtf8 takes any string escaped string sub-sections from the ruby string and replaces those sections with the UTF8 runes.
@@ -4,6 +4,7 @@ import (
"fmt"
"io"

+"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/common"
"github.com/pelletier/go-toml"

@@ -13,17 +14,17 @@ import (
var _ common.ParserFn = parseCargoLock

// parseCargoLock is a parser function for Cargo.lock contents, returning all rust cargo crates discovered.
-func parseCargoLock(_ string, reader io.Reader) ([]pkg.Package, error) {
+func parseCargoLock(_ string, reader io.Reader) ([]pkg.Package, []artifact.Relationship, error) {
tree, err := toml.LoadReader(reader)
if err != nil {
-return nil, fmt.Errorf("unable to load Cargo.lock for parsing: %v", err)
+return nil, nil, fmt.Errorf("unable to load Cargo.lock for parsing: %v", err)
}

metadata := CargoMetadata{}
err = tree.Unmarshal(&metadata)
if err != nil {
-return nil, fmt.Errorf("unable to parse Cargo.lock: %v", err)
+return nil, nil, fmt.Errorf("unable to parse Cargo.lock: %v", err)
}

-return metadata.Pkgs(), nil
+return metadata.Pkgs(), nil, nil
}
@@ -15,7 +15,7 @@ import (
// Package represents an application or library that has been bundled into a distributable format.
// TODO: if we ignore FoundBy for ID generation should we merge the field to show it was found in two places?
type Package struct {
-ID artifact.ID `hash:"ignore"` // uniquely identifies a package, set by the cataloger
+ID artifact.ID `hash:"ignore"` // uniquely identifies a package
Name string // the package name
Version string // the version of the package
FoundBy string // the specific cataloger that discovered this package
@@ -1,6 +1,7 @@
package sbom

import (
+"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/distro"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/pkg"

@@ -9,6 +10,7 @@ import (

type SBOM struct {
Artifacts Artifacts
+Relationships []artifact.Relationship
Source source.Metadata
}