[wip] api refactor

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>
This commit is contained in:
Alex Goodman 2022-03-11 21:11:59 -05:00
parent 003d28ad48
commit 3323ce2b6b
No known key found for this signature in database
GPG Key ID: 5CB45AE22BAB7EA7
84 changed files with 1047 additions and 769 deletions

View File

@ -188,26 +188,31 @@ func attestExec(ctx context.Context, _ *cobra.Command, args []string) error {
) )
} }
func attestationExecWorker(sourceInput source.Input, format sbom.Format, predicateType string, sv *sign.SignerVerifier) <-chan error { func attestationExecWorker(si source.Input, format sbom.Format, predicateType string, sv *sign.SignerVerifier) <-chan error {
errs := make(chan error) errs := make(chan error)
go func() { go func() {
defer close(errs) defer close(errs)
src, cleanup, err := source.NewFromRegistry(sourceInput, appConfig.Registry.ToOptions(), appConfig.Exclusions) src, cleanup, err := source.NewFromRegistry(si, appConfig.Registry.ToOptions(), appConfig.Exclusions)
if cleanup != nil { if cleanup != nil {
defer cleanup() defer cleanup()
} }
if err != nil { if err != nil {
errs <- fmt.Errorf("failed to construct source from user input %q: %w", sourceInput.UserInput, err) errs <- fmt.Errorf("failed to construct source from user input %q: %w", si.UserInput, err)
return return
} }
s, err := generateSBOM(src, errs) s, err := generateSBOM(src)
if err != nil { if err != nil {
errs <- err errs <- err
return return
} }
if s == nil {
errs <- fmt.Errorf("no SBOM produced for %q", si.UserInput)
return
}
sbomBytes, err := syft.Encode(*s, format) sbomBytes, err := syft.Encode(*s, format)
if err != nil { if err != nil {
errs <- err errs <- err

View File

@ -13,11 +13,8 @@ import (
"github.com/anchore/syft/internal/formats/table" "github.com/anchore/syft/internal/formats/table"
"github.com/anchore/syft/internal/log" "github.com/anchore/syft/internal/log"
"github.com/anchore/syft/internal/ui" "github.com/anchore/syft/internal/ui"
"github.com/anchore/syft/internal/version"
"github.com/anchore/syft/syft" "github.com/anchore/syft/syft"
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/event" "github.com/anchore/syft/syft/event"
"github.com/anchore/syft/syft/pkg/cataloger"
"github.com/anchore/syft/syft/sbom" "github.com/anchore/syft/syft/sbom"
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
"github.com/pkg/profile" "github.com/pkg/profile"
@ -97,7 +94,7 @@ func init() {
func setPackageFlags(flags *pflag.FlagSet) { func setPackageFlags(flags *pflag.FlagSet) {
// Formatting & Input options ////////////////////////////////////////////// // Formatting & Input options //////////////////////////////////////////////
flags.StringP( flags.StringP(
"scope", "s", cataloger.DefaultSearchConfig().Scope.String(), "scope", "s", syft.DefaultCatalogingConfig().Scope.String(),
fmt.Sprintf("selection of layers to catalog, options=%v", source.AllScopes)) fmt.Sprintf("selection of layers to catalog, options=%v", source.AllScopes))
flags.StringArrayP( flags.StringArrayP(
@ -260,35 +257,16 @@ func isVerbose() (result bool) {
return appConfig.CliOptions.Verbosity > 0 || isPipedInput return appConfig.CliOptions.Verbosity > 0 || isPipedInput
} }
func generateSBOM(src *source.Source, errs chan error) (*sbom.SBOM, error) { func generateSBOM(src *source.Source) (*sbom.SBOM, error) {
tasks, err := tasks() catalogingConfig, err := appConfig.ToCatalogingConfig()
if err != nil { if err != nil {
return nil, err return nil, err
} }
s := sbom.SBOM{ return syft.Catalog(src,
Source: src.Metadata, syft.WithConfig(*catalogingConfig),
Descriptor: sbom.Descriptor{ syft.WithDefaultPackages(appConfig.Package.ToConfig()),
Name: internal.ApplicationName, )
Version: version.FromBuild().Version,
Configuration: appConfig,
},
}
buildRelationships(&s, src, tasks, errs)
return &s, nil
}
func buildRelationships(s *sbom.SBOM, src *source.Source, tasks []task, errs chan error) {
var relationships []<-chan artifact.Relationship
for _, task := range tasks {
c := make(chan artifact.Relationship)
relationships = append(relationships, c)
go runTask(task, &s.Artifacts, src, c, errs)
}
s.Relationships = append(s.Relationships, mergeRelationships(relationships...)...)
} }
func packagesExecWorker(si source.Input, writer sbom.Writer) <-chan error { func packagesExecWorker(si source.Input, writer sbom.Writer) <-chan error {
@ -305,7 +283,7 @@ func packagesExecWorker(si source.Input, writer sbom.Writer) <-chan error {
return return
} }
s, err := generateSBOM(src, errs) s, err := generateSBOM(src)
if err != nil { if err != nil {
errs <- err errs <- err
return return
@ -313,6 +291,7 @@ func packagesExecWorker(si source.Input, writer sbom.Writer) <-chan error {
if s == nil { if s == nil {
errs <- fmt.Errorf("no SBOM produced for %q", si.UserInput) errs <- fmt.Errorf("no SBOM produced for %q", si.UserInput)
return
} }
if appConfig.Anchore.Host != "" { if appConfig.Anchore.Host != "" {
@ -330,16 +309,6 @@ func packagesExecWorker(si source.Input, writer sbom.Writer) <-chan error {
return errs return errs
} }
func mergeRelationships(cs ...<-chan artifact.Relationship) (relationships []artifact.Relationship) {
for _, c := range cs {
for n := range c {
relationships = append(relationships, n)
}
}
return relationships
}
func runPackageSbomUpload(src *source.Source, s sbom.SBOM) error { func runPackageSbomUpload(src *source.Source, s sbom.SBOM) error {
log.Infof("uploading results to %s", appConfig.Anchore.Host) log.Infof("uploading results to %s", appConfig.Anchore.Host)

View File

@ -10,8 +10,6 @@ import (
"github.com/anchore/syft/internal/formats/syftjson" "github.com/anchore/syft/internal/formats/syftjson"
"github.com/anchore/syft/internal/log" "github.com/anchore/syft/internal/log"
"github.com/anchore/syft/internal/ui" "github.com/anchore/syft/internal/ui"
"github.com/anchore/syft/internal/version"
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/event" "github.com/anchore/syft/syft/event"
"github.com/anchore/syft/syft/sbom" "github.com/anchore/syft/syft/sbom"
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
@ -110,11 +108,6 @@ func powerUserExecWorker(userInput string, writer sbom.Writer) <-chan error {
appConfig.FileMetadata.Cataloger.Enabled = true appConfig.FileMetadata.Cataloger.Enabled = true
appConfig.FileContents.Cataloger.Enabled = true appConfig.FileContents.Cataloger.Enabled = true
appConfig.FileClassification.Cataloger.Enabled = true appConfig.FileClassification.Cataloger.Enabled = true
tasks, err := tasks()
if err != nil {
errs <- err
return
}
si, err := source.ParseInput(userInput, appConfig.Platform, true) si, err := source.ParseInput(userInput, appConfig.Platform, true)
if err != nil { if err != nil {
@ -131,28 +124,20 @@ func powerUserExecWorker(userInput string, writer sbom.Writer) <-chan error {
defer cleanup() defer cleanup()
} }
s := sbom.SBOM{ s, err := generateSBOM(src)
Source: src.Metadata, if err != nil {
Descriptor: sbom.Descriptor{ errs <- err
Name: internal.ApplicationName, return
Version: version.FromBuild().Version,
Configuration: appConfig,
},
} }
var relationships []<-chan artifact.Relationship if s == nil {
for _, task := range tasks { errs <- fmt.Errorf("no SBOM produced for %q", si.UserInput)
c := make(chan artifact.Relationship) return
relationships = append(relationships, c)
go runTask(task, &s.Artifacts, src, c, errs)
} }
s.Relationships = append(s.Relationships, mergeRelationships(relationships...)...)
bus.Publish(partybus.Event{ bus.Publish(partybus.Event{
Type: event.Exit, Type: event.Exit,
Value: func() error { return writer.Write(s) }, Value: func() error { return writer.Write(*s) },
}) })
}() }()

View File

@ -1,231 +0,0 @@
package cmd
import (
"crypto"
"fmt"
"github.com/anchore/syft/syft"
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/sbom"
"github.com/anchore/syft/syft/source"
)
// task is a unit of cataloging work: it populates some portion of the SBOM
// artifacts in place and returns any relationships it discovered.
type task func(*sbom.Artifacts, *source.Source) ([]artifact.Relationship, error)
// tasks assembles the list of enabled cataloging tasks by invoking each task
// generator; generators that return a nil task (cataloger disabled in the
// application config) are skipped.
func tasks() ([]task, error) {
	var result []task

	for _, generate := range []func() (task, error){
		generateCatalogPackagesTask,
		generateCatalogFileMetadataTask,
		generateCatalogFileDigestsTask,
		generateCatalogSecretsTask,
		generateCatalogFileClassificationsTask,
		generateCatalogContentsTask,
	} {
		t, err := generate()
		if err != nil {
			return nil, err
		}
		if t == nil {
			continue
		}
		result = append(result, t)
	}

	return result, nil
}
// generateCatalogPackagesTask returns a task that catalogs packages (and the
// Linux distribution) from the source, or nil when the package cataloger is
// disabled in the application config.
func generateCatalogPackagesTask() (task, error) {
	if !appConfig.Package.Cataloger.Enabled {
		return nil, nil
	}

	task := func(results *sbom.Artifacts, src *source.Source) ([]artifact.Relationship, error) {
		packageCatalog, relationships, theDistro, err := syft.CatalogPackages(src, appConfig.Package.ToConfig())
		if err != nil {
			return nil, err
		}

		// record the catalog and distro on the shared artifacts; relationships
		// are returned so the caller can merge them across all tasks
		results.PackageCatalog = packageCatalog
		results.LinuxDistribution = theDistro

		return relationships, nil
	}

	return task, nil
}
// generateCatalogFileMetadataTask returns a task that catalogs per-file
// metadata, or nil when the file-metadata cataloger is disabled in the
// application config.
func generateCatalogFileMetadataTask() (task, error) {
	if !appConfig.FileMetadata.Cataloger.Enabled {
		return nil, nil
	}

	metadataCataloger := file.NewMetadataCataloger()

	task := func(results *sbom.Artifacts, src *source.Source) ([]artifact.Relationship, error) {
		resolver, err := src.FileResolver(appConfig.FileMetadata.Cataloger.ScopeOpt)
		if err != nil {
			return nil, err
		}

		result, err := metadataCataloger.Catalog(resolver)
		if err != nil {
			return nil, err
		}
		// this task contributes only the metadata map, no relationships
		results.FileMetadata = result
		return nil, nil
	}

	return task, nil
}
// generateCatalogFileDigestsTask returns a task that computes file digests for
// the configured hash algorithms, or nil when the file-metadata cataloger is
// disabled in the application config. An error is returned up front for any
// configured algorithm that is not supported.
func generateCatalogFileDigestsTask() (task, error) {
	if !appConfig.FileMetadata.Cataloger.Enabled {
		return nil, nil
	}

	// index the supported algorithms by their normalized name for lookup
	supported := map[string]crypto.Hash{}
	for _, h := range []crypto.Hash{crypto.MD5, crypto.SHA1, crypto.SHA256} {
		supported[file.DigestAlgorithmName(h)] = h
	}

	var hashes []crypto.Hash
	for _, hashStr := range appConfig.FileMetadata.Digests {
		h, ok := supported[file.CleanDigestAlgorithmName(hashStr)]
		if !ok {
			return nil, fmt.Errorf("unsupported hash algorithm: %s", hashStr)
		}
		hashes = append(hashes, h)
	}

	digestsCataloger, err := file.NewDigestsCataloger(hashes)
	if err != nil {
		return nil, err
	}

	return func(results *sbom.Artifacts, src *source.Source) ([]artifact.Relationship, error) {
		resolver, err := src.FileResolver(appConfig.FileMetadata.Cataloger.ScopeOpt)
		if err != nil {
			return nil, err
		}

		result, err := digestsCataloger.Catalog(resolver)
		if err != nil {
			return nil, err
		}
		results.FileDigests = result
		return nil, nil
	}, nil
}
// generateCatalogSecretsTask returns a task that searches file contents for
// secrets using the configured patterns, or nil when the secrets cataloger is
// disabled in the application config.
func generateCatalogSecretsTask() (task, error) {
	if !appConfig.Secrets.Cataloger.Enabled {
		return nil, nil
	}

	// compile the default patterns plus configured additions/exclusions up
	// front so invalid patterns fail fast (before any cataloging starts)
	patterns, err := file.GenerateSearchPatterns(file.DefaultSecretsPatterns, appConfig.Secrets.AdditionalPatterns, appConfig.Secrets.ExcludePatternNames)
	if err != nil {
		return nil, err
	}

	secretsCataloger, err := file.NewSecretsCataloger(patterns, appConfig.Secrets.RevealValues, appConfig.Secrets.SkipFilesAboveSize)
	if err != nil {
		return nil, err
	}

	task := func(results *sbom.Artifacts, src *source.Source) ([]artifact.Relationship, error) {
		resolver, err := src.FileResolver(appConfig.Secrets.Cataloger.ScopeOpt)
		if err != nil {
			return nil, err
		}

		result, err := secretsCataloger.Catalog(resolver)
		if err != nil {
			return nil, err
		}
		results.Secrets = result
		return nil, nil
	}

	return task, nil
}
// generateCatalogFileClassificationsTask returns a task that classifies files
// with the default classifiers, or nil when the file-classification cataloger
// is disabled in the application config.
func generateCatalogFileClassificationsTask() (task, error) {
	if !appConfig.FileClassification.Cataloger.Enabled {
		return nil, nil
	}

	// TODO: in the future we could expose out the classifiers via configuration
	classifierCataloger, err := file.NewClassificationCataloger(file.DefaultClassifiers)
	if err != nil {
		return nil, err
	}

	task := func(results *sbom.Artifacts, src *source.Source) ([]artifact.Relationship, error) {
		resolver, err := src.FileResolver(appConfig.FileClassification.Cataloger.ScopeOpt)
		if err != nil {
			return nil, err
		}

		result, err := classifierCataloger.Catalog(resolver)
		if err != nil {
			return nil, err
		}
		results.FileClassifications = result
		return nil, nil
	}

	return task, nil
}
// generateCatalogContentsTask returns a task that captures the contents of
// files matching the configured globs (subject to the size limit), or nil when
// the file-contents cataloger is disabled in the application config.
func generateCatalogContentsTask() (task, error) {
	if !appConfig.FileContents.Cataloger.Enabled {
		return nil, nil
	}

	contentsCataloger, err := file.NewContentsCataloger(appConfig.FileContents.Globs, appConfig.FileContents.SkipFilesAboveSize)
	if err != nil {
		return nil, err
	}

	task := func(results *sbom.Artifacts, src *source.Source) ([]artifact.Relationship, error) {
		resolver, err := src.FileResolver(appConfig.FileContents.Cataloger.ScopeOpt)
		if err != nil {
			return nil, err
		}

		result, err := contentsCataloger.Catalog(resolver)
		if err != nil {
			return nil, err
		}
		results.FileContents = result
		return nil, nil
	}

	return task, nil
}
// runTask executes a single cataloging task, sending any discovered
// relationships on c and any error on errs. c is always closed (via defer) so
// downstream consumers can safely range over it.
func runTask(t task, a *sbom.Artifacts, src *source.Source, c chan<- artifact.Relationship, errs chan<- error) {
	defer close(c)

	relationships, err := t(a, src)
	if err != nil {
		errs <- err
		return
	}

	for _, relationship := range relationships {
		c <- relationship
	}
}

View File

@ -3,6 +3,9 @@ package config
import ( import (
"errors" "errors"
"fmt" "fmt"
"github.com/anchore/syft/internal/version"
"github.com/anchore/syft/syft"
"github.com/anchore/syft/syft/file"
"path" "path"
"reflect" "reflect"
"strings" "strings"
@ -49,6 +52,35 @@ type Application struct {
Platform string `yaml:"platform" json:"platform" mapstructure:"platform"` Platform string `yaml:"platform" json:"platform" mapstructure:"platform"`
} }
// ToCatalogingConfig translates the application configuration into the syft
// cataloging configuration consumed by syft.Catalog(). Returns an error when
// configured digest algorithm names or secrets patterns cannot be parsed.
func (cfg Application) ToCatalogingConfig() (*syft.CatalogingConfig, error) {
	digests, err := file.DigestHashesByName(cfg.FileMetadata.Digests...)
	if err != nil {
		return nil, fmt.Errorf("unable to parse config item 'file-metadata.digests': %w", err)
	}

	secretsConfig, err := cfg.Secrets.ToConfig()
	if err != nil {
		return nil, err
	}

	return &syft.CatalogingConfig{
		// note: package catalogers cannot be determined until runtime
		ToolName:             internal.ApplicationName,
		ToolVersion:          version.FromBuild().Version,
		ToolConfiguration:    cfg,
		Scope:                cfg.Package.Cataloger.ScopeOpt,
		ProcessTasksInSerial: false,
		CaptureFileMetadata:  cfg.FileMetadata.Cataloger.Enabled,
		DigestHashes:         digests,
		CaptureSecrets:       cfg.Secrets.Cataloger.Enabled,
		SecretsConfig:        *secretsConfig,
		SecretsScope:         cfg.Secrets.Cataloger.ScopeOpt,
		ClassifyFiles:        cfg.FileClassification.Cataloger.Enabled,
		FileClassifiers:      file.DefaultClassifiers(),
		ContentsConfig:       cfg.FileContents.ToConfig(),
	}, nil
}
// PowerUserCatalogerEnabledDefault switches all catalogers to be enabled when running power-user command // PowerUserCatalogerEnabledDefault switches all catalogers to be enabled when running power-user command
func PowerUserCatalogerEnabledDefault() { func PowerUserCatalogerEnabledDefault() {
catalogerEnabledDefault = true catalogerEnabledDefault = true

View File

@ -1,7 +1,8 @@
package config package config
import ( import (
"github.com/anchore/syft/internal/file" internalFile "github.com/anchore/syft/internal/file"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
"github.com/spf13/viper" "github.com/spf13/viper"
) )
@ -15,10 +16,17 @@ type fileContents struct {
func (cfg fileContents) loadDefaultValues(v *viper.Viper) { func (cfg fileContents) loadDefaultValues(v *viper.Viper) {
v.SetDefault("file-contents.cataloger.enabled", catalogerEnabledDefault) v.SetDefault("file-contents.cataloger.enabled", catalogerEnabledDefault)
v.SetDefault("file-contents.cataloger.scope", source.SquashedScope) v.SetDefault("file-contents.cataloger.scope", source.SquashedScope)
v.SetDefault("file-contents.skip-files-above-size", 1*file.MB) v.SetDefault("file-contents.skip-files-above-size", 1*internalFile.MB)
v.SetDefault("file-contents.globs", []string{}) v.SetDefault("file-contents.globs", []string{})
} }
func (cfg *fileContents) parseConfigValues() error { func (cfg *fileContents) parseConfigValues() error {
return cfg.Cataloger.parseConfigValues() return cfg.Cataloger.parseConfigValues()
} }
// ToConfig translates the file-contents application config into the syft
// file.ContentsCatalogerConfig.
func (cfg fileContents) ToConfig() file.ContentsCatalogerConfig {
	return file.ContentsCatalogerConfig{
		Globs:                     cfg.Globs,
		SkipFilesAboveSizeInBytes: cfg.SkipFilesAboveSize,
	}
}

View File

@ -1,7 +1,7 @@
package config package config
import ( import (
"github.com/anchore/syft/syft/pkg/cataloger" "github.com/anchore/syft/syft/pkg/cataloger/packages"
"github.com/spf13/viper" "github.com/spf13/viper"
) )
@ -13,7 +13,7 @@ type pkg struct {
func (cfg pkg) loadDefaultValues(v *viper.Viper) { func (cfg pkg) loadDefaultValues(v *viper.Viper) {
cfg.Cataloger.loadDefaultValues(v) cfg.Cataloger.loadDefaultValues(v)
c := cataloger.DefaultSearchConfig() c := packages.DefaultSearchConfig()
v.SetDefault("package.search-unindexed-archives", c.IncludeUnindexedArchives) v.SetDefault("package.search-unindexed-archives", c.IncludeUnindexedArchives)
v.SetDefault("package.search-indexed-archives", c.IncludeIndexedArchives) v.SetDefault("package.search-indexed-archives", c.IncludeIndexedArchives)
} }
@ -22,12 +22,9 @@ func (cfg *pkg) parseConfigValues() error {
return cfg.Cataloger.parseConfigValues() return cfg.Cataloger.parseConfigValues()
} }
func (cfg pkg) ToConfig() cataloger.Config { func (cfg pkg) ToConfig() packages.SearchConfig {
return cataloger.Config{ return packages.SearchConfig{
Search: cataloger.SearchConfig{
IncludeIndexedArchives: cfg.SearchIndexedArchives, IncludeIndexedArchives: cfg.SearchIndexedArchives,
IncludeUnindexedArchives: cfg.SearchUnindexedArchives, IncludeUnindexedArchives: cfg.SearchUnindexedArchives,
Scope: cfg.Cataloger.ScopeOpt,
},
} }
} }

View File

@ -1,7 +1,9 @@
package config package config
import ( import (
"github.com/anchore/syft/internal/file" "fmt"
internalFile "github.com/anchore/syft/internal/file"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
"github.com/spf13/viper" "github.com/spf13/viper"
) )
@ -18,7 +20,7 @@ func (cfg secrets) loadDefaultValues(v *viper.Viper) {
v.SetDefault("secrets.cataloger.enabled", catalogerEnabledDefault) v.SetDefault("secrets.cataloger.enabled", catalogerEnabledDefault)
v.SetDefault("secrets.cataloger.scope", source.AllLayersScope) v.SetDefault("secrets.cataloger.scope", source.AllLayersScope)
v.SetDefault("secrets.reveal-values", false) v.SetDefault("secrets.reveal-values", false)
v.SetDefault("secrets.skip-files-above-size", 1*file.MB) v.SetDefault("secrets.skip-files-above-size", 1*internalFile.MB)
v.SetDefault("secrets.additional-patterns", map[string]string{}) v.SetDefault("secrets.additional-patterns", map[string]string{})
v.SetDefault("secrets.exclude-pattern-names", []string{}) v.SetDefault("secrets.exclude-pattern-names", []string{})
} }
@ -26,3 +28,15 @@ func (cfg secrets) loadDefaultValues(v *viper.Viper) {
func (cfg *secrets) parseConfigValues() error { func (cfg *secrets) parseConfigValues() error {
return cfg.Cataloger.parseConfigValues() return cfg.Cataloger.parseConfigValues()
} }
// ToConfig translates the secrets application config into the syft
// file.SecretsCatalogerConfig, compiling the effective search patterns from
// the defaults plus configured additions and exclusions.
func (cfg secrets) ToConfig() (*file.SecretsCatalogerConfig, error) {
	patterns, err := file.GenerateSearchPatterns(file.DefaultSecretsPatterns, cfg.AdditionalPatterns, cfg.ExcludePatternNames)
	if err != nil {
		return nil, fmt.Errorf("unable to process secrets config patterns: %w", err)
	}

	return &file.SecretsCatalogerConfig{
		Patterns:     patterns,
		RevealValues: cfg.RevealValues,
		MaxFileSize:  cfg.SkipFilesAboveSize,
	}, nil
}

46
internal/version/guess.go Normal file
View File

@ -0,0 +1,46 @@
package version
import (
"github.com/anchore/syft/internal/log"
"runtime/debug"
"strings"
)
// Guess returns the best-effort syft version: the build-time value when it
// looks like a release (has a "v" prefix), otherwise the module version
// recorded in the binary's buildinfo section — checking the exact module path
// first, then probable forks (paths ending in "/syft"). Falls back to the
// build-time value (with a warning) when nothing can be found.
func Guess() string {
	v := FromBuild().Version
	if strings.HasPrefix(v, "v") {
		return v
	}

	buildInfo, ok := debug.ReadBuildInfo()
	if !ok {
		log.Warn("syft version could not be determined: unable to find the buildinfo section of the binary")
		return v
	}

	// scan the dependency list for the first module whose path is accepted
	find := func(accept func(path string) bool) (string, bool) {
		for _, d := range buildInfo.Deps {
			if accept(d.Path) {
				return d.Version, true
			}
		}
		return "", false
	}

	if dv, found := find(func(p string) bool { return p == "github.com/anchore/syft" }); found {
		return dv
	}

	// look for probable forks
	if dv, found := find(func(p string) bool { return strings.HasSuffix(p, "/syft") }); found {
		return dv
	}

	log.Warn("syft version could not be determined: unable to find syft within the buildinfo section of the binary")
	return v
}

103
syft/catalog.go Normal file
View File

@ -0,0 +1,103 @@
package syft
import (
"fmt"
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/sbom"
"github.com/anchore/syft/syft/source"
"github.com/hashicorp/go-multierror"
)
// Catalog produces an SBOM for the given source by applying the provided
// cataloging options on top of the default configuration and then running all
// enabled cataloging tasks.
func Catalog(src *source.Source, options ...CatalogingOption) (*sbom.SBOM, error) {
	var config = DefaultCatalogingConfig()
	for _, optFn := range options {
		if err := optFn(src, &config); err != nil {
			return nil, fmt.Errorf("unable to apply cataloging option: %w", err)
		}
	}

	// each generator decides (based on config) whether its cataloger should
	// run; a nil task means the corresponding cataloger is disabled
	var tasks []task
	generators := []taskGenerator{
		generateCatalogPackagesTask,
		generateCatalogFileMetadataTask,
		generateCatalogFileDigestsTask,
		generateCatalogSecretsTask,
		generateCatalogFileClassificationsTask,
		generateCatalogContentsTask,
	}

	for _, generator := range generators {
		t, err := generator(config)
		if err != nil {
			return nil, fmt.Errorf("unable to create cataloging task: %w", err)
		}

		if t != nil {
			tasks = append(tasks, t)
		}
	}

	s := sbom.SBOM{
		Source: src.Metadata,
		Descriptor: sbom.Descriptor{
			Name:          config.ToolName,
			Version:       config.ToolVersion,
			Configuration: config.ToolConfiguration,
		},
	}

	// note: the (partially populated) SBOM is returned even when tasks fail;
	// the error carries the combined task failures
	return &s, runTasks(&s, src, tasks, config.ProcessTasksInSerial)
}
// runTasks executes all cataloging tasks against the source, accumulating
// artifacts and relationships onto the given SBOM. When serial is true each
// task completes before the next begins; otherwise all tasks run concurrently.
// Returns the combined error of all failed tasks (nil when all succeed).
func runTasks(s *sbom.SBOM, src *source.Source, tasks []task, serial bool) error {
	// fix: buffer the error channel with one slot per task. runTask sends at
	// most one error, so no send can ever block. Previously errs was
	// unbuffered and only drained after mergeRelationships, so a failing task
	// blocked on "errs <- err" before closing its relationship channel, and
	// mergeRelationships (waiting for that close) deadlocked. Serial mode also
	// deadlocked by calling runTask synchronously with no reader on r.
	errs := make(chan error, len(tasks))

	var relationships []<-chan artifact.Relationship
	for _, t := range tasks {
		r := make(chan artifact.Relationship)
		go runTask(t, &s.Artifacts, src, r, errs)
		if serial {
			// drain this task's relationships (waiting for the task to
			// finish) before starting the next one
			s.Relationships = append(s.Relationships, mergeRelationships(r)...)
		} else {
			relationships = append(relationships, r)
		}
	}
	s.Relationships = append(s.Relationships, mergeRelationships(relationships...)...)

	// every task has completed (all relationship channels are closed), so no
	// further sends on errs are possible and it is safe to close and drain it
	close(errs)
	return mergeErrors(errs)
}
// mergeRelationships drains every provided channel in order (blocking until
// each is closed) and returns all received relationships as a single slice.
func mergeRelationships(cs ...<-chan artifact.Relationship) []artifact.Relationship {
	var merged []artifact.Relationship
	for _, c := range cs {
		for rel := range c {
			merged = append(merged, rel)
		}
	}
	return merged
}
// mergeErrors drains the (closed) error channel and combines all non-nil
// errors into a single multierror; returns nil when no errors were received.
func mergeErrors(errs <-chan error) error {
	var combined error
	for err := range errs {
		if err == nil {
			continue
		}
		combined = multierror.Append(combined, err)
	}
	return combined
}
// runTask executes a single cataloging task, forwarding any discovered
// relationships on r and at most one error on errs. r is always closed (via
// defer) so downstream consumers can safely range over it.
func runTask(t task, a *sbom.Artifacts, src *source.Source, r chan<- artifact.Relationship, errs chan<- error) {
	defer close(r)

	relationships, err := t(a, src)
	if err != nil {
		errs <- err
		return
	}

	for i := range relationships {
		r <- relationships[i]
	}
}

154
syft/cataloging_option.go Normal file
View File

@ -0,0 +1,154 @@
package syft
import (
"crypto"
"github.com/anchore/syft/internal"
"github.com/anchore/syft/internal/version"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/packages"
"github.com/anchore/syft/syft/source"
)
// CatalogingConfig holds all options controlling a Catalog() run.
type CatalogingConfig struct {
	// tool-specific information (recorded in the SBOM descriptor)
	ToolName          string
	ToolVersion       string
	ToolConfiguration interface{}
	// applies to all catalogers
	Scope                source.Scope
	ProcessTasksInSerial bool // run cataloging tasks one-at-a-time instead of concurrently
	// package
	PackageCatalogers []pkg.Cataloger
	// file metadata
	CaptureFileMetadata bool
	DigestHashes        []crypto.Hash
	// secrets
	CaptureSecrets bool
	SecretsConfig  file.SecretsCatalogerConfig
	SecretsScope   source.Scope // secrets use their own scope (defaults to all layers)
	// file classification
	ClassifyFiles   bool
	FileClassifiers []file.Classifier
	// file contents
	ContentsConfig file.ContentsCatalogerConfig
}
// DefaultCatalogingConfig returns the baseline configuration used by
// Catalog(): squashed scope, tool identity guessed from build info, all-layers
// scope for secrets, and default secrets/classifier/contents settings. Package
// catalogers are left unset (see WithDefaultPackages / WithPackageCatalogers).
func DefaultCatalogingConfig() CatalogingConfig {
	return CatalogingConfig{
		Scope:           source.SquashedScope,
		ToolName:        internal.ApplicationName,
		ToolVersion:     version.Guess(),
		SecretsScope:    source.AllLayersScope,
		SecretsConfig:   file.DefaultSecretsCatalogerConfig(),
		FileClassifiers: file.DefaultClassifiers(),
		ContentsConfig:  file.DefaultContentsCatalogerConfig(),
	}
}
// CatalogingOption mutates the cataloging configuration (optionally informed
// by the source being cataloged) before a Catalog() run; a returned error
// aborts the run.
type CatalogingOption func(*source.Source, *CatalogingConfig) error

// WithConfig replaces the entire configuration with the given override.
func WithConfig(override CatalogingConfig) CatalogingOption {
	return func(_ *source.Source, config *CatalogingConfig) error {
		*config = override
		return nil
	}
}

// WithoutConcurrency forces cataloging tasks to run serially.
func WithoutConcurrency() CatalogingOption {
	return func(_ *source.Source, config *CatalogingConfig) error {
		config.ProcessTasksInSerial = true
		return nil
	}
}

// WithScope sets the layer-selection scope used by all catalogers.
func WithScope(scope source.Scope) CatalogingOption {
	return func(_ *source.Source, config *CatalogingConfig) error {
		config.Scope = scope
		return nil
	}
}

// WithToolIdentification sets the tool name and version recorded in the SBOM
// descriptor.
func WithToolIdentification(name, version string) CatalogingOption {
	return func(_ *source.Source, config *CatalogingConfig) error {
		config.ToolName = name
		config.ToolVersion = version
		return nil
	}
}

// WithToolConfiguration attaches the tool's own configuration to the SBOM
// descriptor.
func WithToolConfiguration(c interface{}) CatalogingOption {
	return func(_ *source.Source, config *CatalogingConfig) error {
		config.ToolConfiguration = c
		return nil
	}
}
// WithPackageCatalogers sets the exact package catalogers to run.
func WithPackageCatalogers(catalogers ...pkg.Cataloger) CatalogingOption {
	return func(_ *source.Source, config *CatalogingConfig) error {
		config.PackageCatalogers = catalogers
		return nil
	}
}

// WithDefaultPackages selects the default package catalogers for the source's
// scheme (e.g. image vs directory) using the given search config.
func WithDefaultPackages(cfg packages.SearchConfig) CatalogingOption {
	return func(src *source.Source, config *CatalogingConfig) error {
		config.PackageCatalogers = packages.CatalogersBySourceScheme(src.Metadata.Scheme, cfg)
		return nil
	}
}

// WithFileMetadata enables capturing per-file metadata.
func WithFileMetadata() CatalogingOption {
	return func(_ *source.Source, config *CatalogingConfig) error {
		config.CaptureFileMetadata = true
		return nil
	}
}

// WithFileDigests sets the hash algorithms used to compute file digests.
func WithFileDigests(hashes ...crypto.Hash) CatalogingOption {
	return func(_ *source.Source, config *CatalogingConfig) error {
		config.DigestHashes = hashes
		return nil
	}
}

// WithSecrets enables secrets cataloging; a nil secretConfig keeps the
// already-configured (default) secrets cataloger configuration.
func WithSecrets(secretConfig *file.SecretsCatalogerConfig) CatalogingOption {
	return func(_ *source.Source, config *CatalogingConfig) error {
		config.CaptureSecrets = true
		if secretConfig != nil {
			config.SecretsConfig = *secretConfig
		}
		return nil
	}
}

// WithFileClassification enables file classification with the
// already-configured classifiers.
func WithFileClassification() CatalogingOption {
	return func(_ *source.Source, config *CatalogingConfig) error {
		config.ClassifyFiles = true
		return nil
	}
}
// WithFileClassifiers sets the classifiers to use and enables file
// classification; passing no classifiers disables classification (there is
// nothing to apply).
func WithFileClassifiers(classifiers ...file.Classifier) CatalogingOption {
	return func(_ *source.Source, config *CatalogingConfig) error {
		// fix: classification should be enabled exactly when classifiers are
		// provided — the previous expression (!(len(classifiers) > 0)) was
		// inverted, disabling classification whenever classifiers were given,
		// contradicting the sibling WithFileClassification option
		config.ClassifyFiles = len(classifiers) > 0
		config.FileClassifiers = classifiers
		return nil
	}
}
// WithFileContents sets the globs selecting which files' contents to capture.
func WithFileContents(globs ...string) CatalogingOption {
	return func(_ *source.Source, config *CatalogingConfig) error {
		config.ContentsConfig.Globs = globs
		return nil
	}
}

// WithFileSizeLimit caps the size of files processed by both the file-contents
// and secrets catalogers.
func WithFileSizeLimit(byteLimit int64) CatalogingOption {
	return func(_ *source.Source, config *CatalogingConfig) error {
		config.ContentsConfig.SkipFilesAboveSizeInBytes = byteLimit
		config.SecretsConfig.MaxFileSize = byteLimit
		return nil
	}
}

View File

@ -0,0 +1,28 @@
package monitor
import (
"github.com/anchore/syft/internal/bus"
"github.com/anchore/syft/syft/event"
"github.com/wagoodman/go-partybus"
"github.com/wagoodman/go-progress"
)
// FileDigesterMonitor publishes a FileDigestsCatalogerStarted event carrying
// stage/progress primitives sized for the given number of locations, and
// returns them so the cataloger can advance them as work proceeds.
func FileDigesterMonitor(locations int64) (*progress.Stage, *progress.Manual) {
	stage := &progress.Stage{}
	prog := &progress.Manual{Total: locations}

	value := struct {
		progress.Stager
		progress.Progressable
	}{
		Stager:       stage,
		Progressable: prog,
	}

	bus.Publish(partybus.Event{
		Type:  event.FileDigestsCatalogerStarted,
		Value: value,
	})

	return stage, prog
}

View File

@ -0,0 +1,29 @@
package monitor
import (
"github.com/anchore/syft/internal/bus"
"github.com/anchore/syft/syft/event"
"github.com/wagoodman/go-partybus"
"github.com/wagoodman/go-progress"
)
// PackageCatalogerMonitor provides progress-related data for observing the progress of a Catalog() call (published on the event bus).
type PackageCatalogerMonitor struct {
	FilesProcessed     progress.Monitorable // the number of files selected and contents analyzed from all registered catalogers
	PackagesDiscovered progress.Monitorable // the number of packages discovered from all registered catalogers
}

// NewPackageCatalogerMonitor creates progress counters for files processed and
// packages discovered, publishes them on the bus as a PackageCatalogerStarted
// event, and returns both so the catalogers can update them.
func NewPackageCatalogerMonitor() (*progress.Manual, *progress.Manual) {
	var filesProcessed, packagesDiscovered progress.Manual

	bus.Publish(partybus.Event{
		Type: event.PackageCatalogerStarted,
		Value: PackageCatalogerMonitor{
			FilesProcessed:     &filesProcessed,
			PackagesDiscovered: &packagesDiscovered,
		},
	})

	return &filesProcessed, &packagesDiscovered
}

View File

@ -0,0 +1,34 @@
package monitor
import (
"github.com/anchore/syft/internal/bus"
"github.com/anchore/syft/syft/event"
"github.com/wagoodman/go-partybus"
"github.com/wagoodman/go-progress"
)
// SecretsCatalogerMonitor provides progress-related data for observing a
// secrets cataloging run (published on the event bus).
type SecretsCatalogerMonitor struct {
	progress.Stager
	SecretsDiscovered progress.Monitorable
	progress.Progressable
}

// NewSecretsCatalogerMonitor creates stage, overall-progress, and
// secrets-discovered primitives sized for the given number of locations,
// publishes them as a SecretsCatalogerStarted event, and returns them so the
// cataloger can update them.
func NewSecretsCatalogerMonitor(locations int64) (*progress.Stage, *progress.Manual, *progress.Manual) {
	stage := &progress.Stage{}
	secretsDiscovered := &progress.Manual{}
	prog := &progress.Manual{Total: locations}

	bus.Publish(partybus.Event{
		Type:   event.SecretsCatalogerStarted,
		Source: secretsDiscovered,
		Value: SecretsCatalogerMonitor{
			Stager:            stage,
			SecretsDiscovered: secretsDiscovered,
			Progressable:      prog,
		},
	})

	return stage, prog, secretsDiscovered
}

View File

@ -5,10 +5,9 @@ package parsers
import ( import (
"fmt" "fmt"
"github.com/anchore/syft/syft/event/monitor"
"github.com/anchore/syft/syft/event" "github.com/anchore/syft/syft/event"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/pkg/cataloger"
"github.com/wagoodman/go-partybus" "github.com/wagoodman/go-partybus"
"github.com/wagoodman/go-progress" "github.com/wagoodman/go-progress"
) )
@ -38,12 +37,12 @@ func checkEventType(actual, expected partybus.EventType) error {
return nil return nil
} }
func ParsePackageCatalogerStarted(e partybus.Event) (*cataloger.Monitor, error) { func ParsePackageCatalogerStarted(e partybus.Event) (*monitor.PackageCatalogerMonitor, error) {
if err := checkEventType(e.Type, event.PackageCatalogerStarted); err != nil { if err := checkEventType(e.Type, event.PackageCatalogerStarted); err != nil {
return nil, err return nil, err
} }
monitor, ok := e.Value.(cataloger.Monitor) monitor, ok := e.Value.(monitor.PackageCatalogerMonitor)
if !ok { if !ok {
return nil, newPayloadErr(e.Type, "Value", e.Value) return nil, newPayloadErr(e.Type, "Value", e.Value)
} }
@ -51,12 +50,12 @@ func ParsePackageCatalogerStarted(e partybus.Event) (*cataloger.Monitor, error)
return &monitor, nil return &monitor, nil
} }
func ParseSecretsCatalogingStarted(e partybus.Event) (*file.SecretsMonitor, error) { func ParseSecretsCatalogingStarted(e partybus.Event) (*monitor.SecretsCatalogerMonitor, error) {
if err := checkEventType(e.Type, event.SecretsCatalogerStarted); err != nil { if err := checkEventType(e.Type, event.SecretsCatalogerStarted); err != nil {
return nil, err return nil, err
} }
monitor, ok := e.Value.(file.SecretsMonitor) monitor, ok := e.Value.(monitor.SecretsCatalogerMonitor)
if !ok { if !ok {
return nil, newPayloadErr(e.Type, "Value", e.Value) return nil, newPayloadErr(e.Type, "Value", e.Value)
} }

View File

@ -11,7 +11,8 @@ import (
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
) )
var DefaultClassifiers = []Classifier{ func DefaultClassifiers() []Classifier {
return []Classifier{
{ {
Class: "python-binary", Class: "python-binary",
FilepathPatterns: []*regexp.Regexp{ FilepathPatterns: []*regexp.Regexp{
@ -59,6 +60,7 @@ var DefaultClassifiers = []Classifier{
}, },
}, },
} }
}
type Classifier struct { type Classifier struct {
Class string Class string

View File

@ -4,6 +4,7 @@ import (
"bytes" "bytes"
"encoding/base64" "encoding/base64"
"fmt" "fmt"
"github.com/anchore/syft/internal/file"
"io" "io"
"github.com/anchore/syft/internal" "github.com/anchore/syft/internal"
@ -12,15 +13,25 @@ import (
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
) )
type ContentsCataloger struct { type ContentsCatalogerConfig struct {
globs []string Globs []string
skipFilesAboveSizeInBytes int64 SkipFilesAboveSizeInBytes int64
} }
func NewContentsCataloger(globs []string, skipFilesAboveSize int64) (*ContentsCataloger, error) { type ContentsCataloger struct {
config ContentsCatalogerConfig
}
func DefaultContentsCatalogerConfig() ContentsCatalogerConfig {
return ContentsCatalogerConfig{
Globs: nil,
SkipFilesAboveSizeInBytes: 1 * file.MB,
}
}
func NewContentsCataloger(config ContentsCatalogerConfig) (*ContentsCataloger, error) {
return &ContentsCataloger{ return &ContentsCataloger{
globs: globs, config: config,
skipFilesAboveSizeInBytes: skipFilesAboveSize,
}, nil }, nil
} }
@ -28,7 +39,7 @@ func (i *ContentsCataloger) Catalog(resolver source.FileResolver) (map[source.Co
results := make(map[source.Coordinates]string) results := make(map[source.Coordinates]string)
var locations []source.Location var locations []source.Location
locations, err := resolver.FilesByGlob(i.globs...) locations, err := resolver.FilesByGlob(i.config.Globs...)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -38,7 +49,7 @@ func (i *ContentsCataloger) Catalog(resolver source.FileResolver) (map[source.Co
return nil, err return nil, err
} }
if i.skipFilesAboveSizeInBytes > 0 && metadata.Size > i.skipFilesAboveSizeInBytes { if i.config.SkipFilesAboveSizeInBytes > 0 && metadata.Size > i.config.SkipFilesAboveSizeInBytes {
continue continue
} }

View File

@ -1,6 +1,50 @@
package file package file
import (
"crypto"
"fmt"
"strings"
)
type Digest struct { type Digest struct {
Algorithm string `json:"algorithm"` Algorithm string `json:"algorithm"`
Value string `json:"value"` Value string `json:"value"`
} }
func DigestAlgorithmName(hash crypto.Hash) string {
return CleanDigestAlgorithmName(hash.String())
}
func CleanDigestAlgorithmName(name string) string {
lower := strings.ToLower(name)
return strings.ReplaceAll(lower, "-", "")
}
func DigestHashesByName(digestAlgorithms ...string) ([]crypto.Hash, error) {
supportedHashAlgorithms := make(map[string]crypto.Hash)
for _, h := range []crypto.Hash{
crypto.MD5,
crypto.SHA1,
crypto.SHA256,
crypto.SHA512,
crypto.BLAKE2b_256,
crypto.BLAKE2s_256,
crypto.BLAKE2b_512,
crypto.RIPEMD160,
crypto.SHA3_256,
crypto.SHA3_512,
} {
supportedHashAlgorithms[DigestAlgorithmName(h)] = h
}
var hashes []crypto.Hash
for _, hashStr := range digestAlgorithms {
name := CleanDigestAlgorithmName(hashStr)
hashObj, ok := supportedHashAlgorithms[name]
if !ok {
return nil, fmt.Errorf("unsupported hash algorithm: %s", hashStr)
}
hashes = append(hashes, hashObj)
}
return hashes, nil
}

View File

@ -4,19 +4,13 @@ import (
"crypto" "crypto"
"errors" "errors"
"fmt" "fmt"
"github.com/anchore/syft/internal"
"github.com/anchore/syft/syft/event/monitor"
"hash" "hash"
"io" "io"
"strings"
"github.com/anchore/syft/internal"
"github.com/anchore/syft/internal/log" "github.com/anchore/syft/internal/log"
"github.com/anchore/syft/internal/bus"
"github.com/anchore/syft/syft/event"
"github.com/wagoodman/go-partybus"
"github.com/wagoodman/go-progress"
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
) )
@ -35,7 +29,7 @@ func NewDigestsCataloger(hashes []crypto.Hash) (*DigestsCataloger, error) {
func (i *DigestsCataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates][]Digest, error) { func (i *DigestsCataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates][]Digest, error) {
results := make(map[source.Coordinates][]Digest) results := make(map[source.Coordinates][]Digest)
locations := allRegularFiles(resolver) locations := allRegularFiles(resolver)
stage, prog := digestsCatalogingProgress(int64(len(locations))) stage, prog := monitor.FileDigesterMonitor(int64(len(locations)))
for _, location := range locations { for _, location := range locations {
stage.Current = location.RealPath stage.Current = location.RealPath
result, err := i.catalogLocation(resolver, location) result, err := i.catalogLocation(resolver, location)
@ -107,32 +101,3 @@ func (i *DigestsCataloger) catalogLocation(resolver source.FileResolver, locatio
return result, nil return result, nil
} }
func DigestAlgorithmName(hash crypto.Hash) string {
return CleanDigestAlgorithmName(hash.String())
}
func CleanDigestAlgorithmName(name string) string {
lower := strings.ToLower(name)
return strings.ReplaceAll(lower, "-", "")
}
func digestsCatalogingProgress(locations int64) (*progress.Stage, *progress.Manual) {
stage := &progress.Stage{}
prog := &progress.Manual{
Total: locations,
}
bus.Publish(partybus.Event{
Type: event.FileDigestsCatalogerStarted,
Value: struct {
progress.Stager
progress.Progressable
}{
Stager: progress.Stager(stage),
Progressable: prog,
},
})
return stage, prog
}

View File

@ -31,9 +31,11 @@ func GenerateSearchPatterns(basePatterns map[string]string, additionalPatterns m
} }
// add all additional cases // add all additional cases
if additionalPatterns != nil {
for name, pattern := range additionalPatterns { for name, pattern := range additionalPatterns {
addFn(name, pattern) addFn(name, pattern)
} }
}
if errs != nil { if errs != nil {
return nil, errs return nil, errs

View File

@ -3,6 +3,8 @@ package file
import ( import (
"bytes" "bytes"
"fmt" "fmt"
"github.com/anchore/syft/internal/file"
"github.com/anchore/syft/syft/event/monitor"
"io" "io"
"io/ioutil" "io/ioutil"
"regexp" "regexp"
@ -10,12 +12,8 @@ import (
"github.com/anchore/syft/internal" "github.com/anchore/syft/internal"
"github.com/anchore/syft/internal/bus"
"github.com/anchore/syft/internal/log" "github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/event"
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
"github.com/wagoodman/go-partybus"
"github.com/wagoodman/go-progress"
) )
var DefaultSecretsPatterns = map[string]string{ var DefaultSecretsPatterns = map[string]string{
@ -26,24 +24,39 @@ var DefaultSecretsPatterns = map[string]string{
"generic-api-key": `(?i)api(-|_)?key["'=:\s]*?(?P<value>[A-Z0-9]{20,60})["']?(\s|$)`, "generic-api-key": `(?i)api(-|_)?key["'=:\s]*?(?P<value>[A-Z0-9]{20,60})["']?(\s|$)`,
} }
type SecretsCataloger struct { type SecretsCatalogerConfig struct {
patterns map[string]*regexp.Regexp Patterns map[string]*regexp.Regexp
revealValues bool RevealValues bool
skipFilesAboveSize int64 MaxFileSize int64
} }
func NewSecretsCataloger(patterns map[string]*regexp.Regexp, revealValues bool, maxFileSize int64) (*SecretsCataloger, error) { type SecretsCataloger struct {
config SecretsCatalogerConfig
}
func DefaultSecretsCatalogerConfig() SecretsCatalogerConfig {
patterns, err := GenerateSearchPatterns(DefaultSecretsPatterns, nil, nil)
if err != nil {
patterns = make(map[string]*regexp.Regexp)
log.Errorf("unable to create default secrets config: %w", err)
}
return SecretsCatalogerConfig{
Patterns: patterns,
RevealValues: false,
MaxFileSize: 1 * file.MB,
}
}
func NewSecretsCataloger(config SecretsCatalogerConfig) (*SecretsCataloger, error) {
return &SecretsCataloger{ return &SecretsCataloger{
patterns: patterns, config: config,
revealValues: revealValues,
skipFilesAboveSize: maxFileSize,
}, nil }, nil
} }
func (i *SecretsCataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates][]SearchResult, error) { func (i *SecretsCataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates][]SearchResult, error) {
results := make(map[source.Coordinates][]SearchResult) results := make(map[source.Coordinates][]SearchResult)
locations := allRegularFiles(resolver) locations := allRegularFiles(resolver)
stage, prog, secretsDiscovered := secretsCatalogingProgress(int64(len(locations))) stage, prog, secretsDiscovered := monitor.NewSecretsCatalogerMonitor(int64(len(locations)))
for _, location := range locations { for _, location := range locations {
stage.Current = location.RealPath stage.Current = location.RealPath
result, err := i.catalogLocation(resolver, location) result, err := i.catalogLocation(resolver, location)
@ -76,17 +89,17 @@ func (i *SecretsCataloger) catalogLocation(resolver source.FileResolver, locatio
return nil, nil return nil, nil
} }
if i.skipFilesAboveSize > 0 && metadata.Size > i.skipFilesAboveSize { if i.config.MaxFileSize > 0 && metadata.Size > i.config.MaxFileSize {
return nil, nil return nil, nil
} }
// TODO: in the future we can swap out search strategies here // TODO: in the future we can swap out search strategies here
secrets, err := catalogLocationByLine(resolver, location, i.patterns) secrets, err := catalogLocationByLine(resolver, location, i.config.Patterns)
if err != nil { if err != nil {
return nil, internal.ErrPath{Context: "secrets-cataloger", Path: location.RealPath, Err: err} return nil, internal.ErrPath{Context: "secrets-cataloger", Path: location.RealPath, Err: err}
} }
if i.revealValues { if i.config.RevealValues {
for idx, secret := range secrets { for idx, secret := range secrets {
value, err := extractValue(resolver, location, secret.SeekPosition, secret.Length) value, err := extractValue(resolver, location, secret.SeekPosition, secret.Length)
if err != nil { if err != nil {
@ -130,29 +143,3 @@ func extractValue(resolver source.FileResolver, location source.Location, start,
return buf.String(), nil return buf.String(), nil
} }
type SecretsMonitor struct {
progress.Stager
SecretsDiscovered progress.Monitorable
progress.Progressable
}
func secretsCatalogingProgress(locations int64) (*progress.Stage, *progress.Manual, *progress.Manual) {
stage := &progress.Stage{}
secretsDiscovered := &progress.Manual{}
prog := &progress.Manual{
Total: locations,
}
bus.Publish(partybus.Event{
Type: event.SecretsCatalogerStarted,
Source: secretsDiscovered,
Value: SecretsMonitor{
Stager: progress.Stager(stage),
SecretsDiscovered: secretsDiscovered,
Progressable: prog,
},
})
return stage, prog, secretsDiscovered
}

View File

@ -17,61 +17,12 @@ Similar to the cataloging process, Linux distribution identification is also per
package syft package syft
import ( import (
"fmt"
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/internal/bus" "github.com/anchore/syft/internal/bus"
"github.com/anchore/syft/internal/log" "github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/linux"
"github.com/anchore/syft/syft/logger" "github.com/anchore/syft/syft/logger"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger"
"github.com/anchore/syft/syft/source"
"github.com/wagoodman/go-partybus" "github.com/wagoodman/go-partybus"
) )
// CatalogPackages takes an inventory of packages from the given image from a particular perspective
// (e.g. squashed source, all-layers source). Returns the discovered set of packages, the identified Linux
// distribution, and the source object used to wrap the data source.
func CatalogPackages(src *source.Source, cfg cataloger.Config) (*pkg.Catalog, []artifact.Relationship, *linux.Release, error) {
resolver, err := src.FileResolver(cfg.Search.Scope)
if err != nil {
return nil, nil, nil, fmt.Errorf("unable to determine resolver while cataloging packages: %w", err)
}
// find the distro
release := linux.IdentifyRelease(resolver)
if release != nil {
log.Infof("identified distro: %s", release.String())
} else {
log.Info("could not identify distro")
}
// conditionally use the correct set of loggers based on the input type (container image or directory)
var catalogers []cataloger.Cataloger
switch src.Metadata.Scheme {
case source.ImageScheme:
log.Info("cataloging image")
catalogers = cataloger.ImageCatalogers(cfg)
case source.FileScheme:
log.Info("cataloging file")
catalogers = cataloger.AllCatalogers(cfg)
case source.DirectoryScheme:
log.Info("cataloging directory")
catalogers = cataloger.DirectoryCatalogers(cfg)
default:
return nil, nil, nil, fmt.Errorf("unable to determine cataloger set from scheme=%+v", src.Metadata.Scheme)
}
catalog, relationships, err := cataloger.Catalog(resolver, release, catalogers...)
if err != nil {
return nil, nil, nil, err
}
return catalog, relationships, release, nil
}
// SetLogger sets the logger object used for all syft logging calls. // SetLogger sets the logger object used for all syft logging calls.
func SetLogger(logger logger.Logger) { func SetLogger(logger logger.Logger) {
log.Log = logger log.Log = logger

16
syft/pkg/cataloger.go Normal file
View File

@ -0,0 +1,16 @@
package pkg
import (
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/source"
)
// Cataloger describes behavior for an object to participate in parsing container image or file system
// contents for the purpose of discovering Packages. Each concrete implementation should focus on discovering Packages
// for a specific Package Type or ecosystem.
type Cataloger interface {
// Name returns a string that uniquely describes a cataloger
Name() string
// Catalog is given an object to resolve file references and content, this function returns any discovered Packages after analyzing the catalog source.
Catalog(resolver source.FileResolver) ([]Package, []artifact.Relationship, error)
}

View File

@ -5,14 +5,14 @@ package apkdb
import ( import (
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/common" "github.com/anchore/syft/syft/pkg/cataloger/generic"
) )
// NewApkdbCataloger returns a new Alpine DB cataloger object. // NewApkdbCataloger returns a new Alpine DB cataloger object.
func NewApkdbCataloger() *common.GenericCataloger { func NewApkdbCataloger() *generic.Cataloger {
globParsers := map[string]common.ParserFn{ globParsers := map[string]generic.Parser{
pkg.ApkDBGlob: parseApkDB, pkg.ApkDBGlob: parseApkDB,
} }
return common.NewGenericCataloger(nil, globParsers, "apkdb-cataloger") return generic.NewCataloger(nil, globParsers, "apkdb-cataloger")
} }

View File

@ -3,6 +3,7 @@ package apkdb
import ( import (
"bufio" "bufio"
"fmt" "fmt"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
"io" "io"
"path" "path"
"strconv" "strconv"
@ -14,12 +15,11 @@ import (
"github.com/anchore/syft/internal/log" "github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/common"
"github.com/mitchellh/mapstructure" "github.com/mitchellh/mapstructure"
) )
// integrity check // integrity check
var _ common.ParserFn = parseApkDB var _ generic.Parser = parseApkDB
func newApkDBPackage(d *pkg.ApkMetadata) *pkg.Package { func newApkDBPackage(d *pkg.ApkMetadata) *pkg.Package {
return &pkg.Package{ return &pkg.Package{

View File

@ -1,84 +0,0 @@
/*
Package cataloger provides the ability to process files from a container image or file system and discover packages
(gems, wheels, jars, rpms, debs, etc). Specifically, this package contains both a catalog function to utilize all
catalogers defined in child packages as well as the interface definition to implement a cataloger.
*/
package cataloger
import (
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/apkdb"
"github.com/anchore/syft/syft/pkg/cataloger/deb"
"github.com/anchore/syft/syft/pkg/cataloger/golang"
"github.com/anchore/syft/syft/pkg/cataloger/java"
"github.com/anchore/syft/syft/pkg/cataloger/javascript"
"github.com/anchore/syft/syft/pkg/cataloger/php"
"github.com/anchore/syft/syft/pkg/cataloger/python"
"github.com/anchore/syft/syft/pkg/cataloger/rpmdb"
"github.com/anchore/syft/syft/pkg/cataloger/ruby"
"github.com/anchore/syft/syft/pkg/cataloger/rust"
"github.com/anchore/syft/syft/source"
)
// Cataloger describes behavior for an object to participate in parsing container image or file system
// contents for the purpose of discovering Packages. Each concrete implementation should focus on discovering Packages
// for a specific Package Type or ecosystem.
type Cataloger interface {
// Name returns a string that uniquely describes a cataloger
Name() string
// Catalog is given an object to resolve file references and content, this function returns any discovered Packages after analyzing the catalog source.
Catalog(resolver source.FileResolver) ([]pkg.Package, []artifact.Relationship, error)
}
// ImageCatalogers returns a slice of locally implemented catalogers that are fit for detecting installations of packages.
func ImageCatalogers(cfg Config) []Cataloger {
return []Cataloger{
ruby.NewGemSpecCataloger(),
python.NewPythonPackageCataloger(),
php.NewPHPComposerInstalledCataloger(),
javascript.NewJavascriptPackageCataloger(),
deb.NewDpkgdbCataloger(),
rpmdb.NewRpmdbCataloger(),
java.NewJavaCataloger(cfg.Java()),
apkdb.NewApkdbCataloger(),
golang.NewGoModuleBinaryCataloger(),
}
}
// DirectoryCatalogers returns a slice of locally implemented catalogers that are fit for detecting packages from index files (and select installations)
func DirectoryCatalogers(cfg Config) []Cataloger {
return []Cataloger{
ruby.NewGemFileLockCataloger(),
python.NewPythonIndexCataloger(),
python.NewPythonPackageCataloger(),
php.NewPHPComposerLockCataloger(),
javascript.NewJavascriptLockCataloger(),
deb.NewDpkgdbCataloger(),
rpmdb.NewRpmdbCataloger(),
java.NewJavaCataloger(cfg.Java()),
apkdb.NewApkdbCataloger(),
golang.NewGoModuleBinaryCataloger(),
golang.NewGoModFileCataloger(),
rust.NewCargoLockCataloger(),
}
}
// AllCatalogers returns all implemented catalogers
func AllCatalogers(cfg Config) []Cataloger {
return []Cataloger{
ruby.NewGemFileLockCataloger(),
ruby.NewGemSpecCataloger(),
python.NewPythonIndexCataloger(),
python.NewPythonPackageCataloger(),
javascript.NewJavascriptLockCataloger(),
javascript.NewJavascriptPackageCataloger(),
deb.NewDpkgdbCataloger(),
rpmdb.NewRpmdbCataloger(),
java.NewJavaCataloger(cfg.Java()),
apkdb.NewApkdbCataloger(),
golang.NewGoModuleBinaryCataloger(),
golang.NewGoModFileCataloger(),
rust.NewCargoLockCataloger(),
}
}

View File

@ -1,11 +0,0 @@
package common
import (
"io"
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg"
)
// ParserFn standardizes a function signature for parser functions that accept the virtual file path (not usable for file reads) and contents and return any discovered packages from that file
type ParserFn func(string, io.Reader) ([]*pkg.Package, []artifact.Relationship, error)

View File

@ -1,22 +0,0 @@
package cataloger
import (
"github.com/anchore/syft/syft/pkg/cataloger/java"
)
type Config struct {
Search SearchConfig
}
func DefaultConfig() Config {
return Config{
Search: DefaultSearchConfig(),
}
}
func (c Config) Java() java.Config {
return java.Config{
SearchUnindexedArchives: c.Search.IncludeUnindexedArchives,
SearchIndexedArchives: c.Search.IncludeIndexedArchives,
}
}

View File

@ -1,7 +1,7 @@
/* /*
Package common provides generic utilities used by multiple catalogers. Package generic provides utilities used by multiple package catalogers.
*/ */
package common package generic
import ( import (
"fmt" "fmt"
@ -14,17 +14,17 @@ import (
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
) )
// GenericCataloger implements the Catalog interface and is responsible for dispatching the proper parser function for // Cataloger implements the Catalog interface and is responsible for dispatching the proper parser function for
// a given path or glob pattern. This is intended to be reusable across many package cataloger types. // a given path or glob pattern. This is intended to be reusable across many package cataloger types.
type GenericCataloger struct { type Cataloger struct {
globParsers map[string]ParserFn globParsers map[string]Parser
pathParsers map[string]ParserFn pathParsers map[string]Parser
upstreamCataloger string upstreamCataloger string
} }
// NewGenericCataloger if provided path-to-parser-function and glob-to-parser-function lookups creates a GenericCataloger // NewCataloger if provided path-to-parser-function and glob-to-parser-function lookups creates a Cataloger
func NewGenericCataloger(pathParsers map[string]ParserFn, globParsers map[string]ParserFn, upstreamCataloger string) *GenericCataloger { func NewCataloger(pathParsers map[string]Parser, globParsers map[string]Parser, upstreamCataloger string) *Cataloger {
return &GenericCataloger{ return &Cataloger{
globParsers: globParsers, globParsers: globParsers,
pathParsers: pathParsers, pathParsers: pathParsers,
upstreamCataloger: upstreamCataloger, upstreamCataloger: upstreamCataloger,
@ -32,12 +32,12 @@ func NewGenericCataloger(pathParsers map[string]ParserFn, globParsers map[string
} }
// Name returns a string that uniquely describes the upstream cataloger that this Generic Cataloger represents. // Name returns a string that uniquely describes the upstream cataloger that this Generic Cataloger represents.
func (c *GenericCataloger) Name() string { func (c *Cataloger) Name() string {
return c.upstreamCataloger return c.upstreamCataloger
} }
// Catalog is given an object to resolve file references and content, this function returns any discovered Packages after analyzing the catalog source. // Catalog is given an object to resolve file references and content, this function returns any discovered Packages after analyzing the catalog source.
func (c *GenericCataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []artifact.Relationship, error) { func (c *Cataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []artifact.Relationship, error) {
var packages []pkg.Package var packages []pkg.Package
var relationships []artifact.Relationship var relationships []artifact.Relationship
@ -70,8 +70,8 @@ func (c *GenericCataloger) Catalog(resolver source.FileResolver) ([]pkg.Package,
} }
// SelectFiles takes a set of file trees and resolves and file references of interest for future cataloging // SelectFiles takes a set of file trees and resolves and file references of interest for future cataloging
func (c *GenericCataloger) selectFiles(resolver source.FilePathResolver) map[source.Location]ParserFn { func (c *Cataloger) selectFiles(resolver source.FilePathResolver) map[source.Location]Parser {
var parserByLocation = make(map[source.Location]ParserFn) var parserByLocation = make(map[source.Location]Parser)
// select by exact path // select by exact path
for path, parser := range c.pathParsers { for path, parser := range c.pathParsers {

View File

@ -1,4 +1,4 @@
package common package generic
import ( import (
"fmt" "fmt"
@ -27,10 +27,10 @@ func parser(_ string, reader io.Reader) ([]*pkg.Package, []artifact.Relationship
func TestGenericCataloger(t *testing.T) { func TestGenericCataloger(t *testing.T) {
globParsers := map[string]ParserFn{ globParsers := map[string]Parser{
"**/a-path.txt": parser, "**/a-path.txt": parser,
} }
pathParsers := map[string]ParserFn{ pathParsers := map[string]Parser{
"test-fixtures/another-path.txt": parser, "test-fixtures/another-path.txt": parser,
"test-fixtures/last/path.txt": parser, "test-fixtures/last/path.txt": parser,
} }
@ -38,7 +38,7 @@ func TestGenericCataloger(t *testing.T) {
expectedSelection := []string{"test-fixtures/last/path.txt", "test-fixtures/another-path.txt", "test-fixtures/a-path.txt"} expectedSelection := []string{"test-fixtures/last/path.txt", "test-fixtures/another-path.txt", "test-fixtures/a-path.txt"}
resolver := source.NewMockResolverForPaths(expectedSelection...) resolver := source.NewMockResolverForPaths(expectedSelection...)
cataloger := NewGenericCataloger(pathParsers, globParsers, upstream) cataloger := NewCataloger(pathParsers, globParsers, upstream)
expectedPkgs := make(map[string]pkg.Package) expectedPkgs := make(map[string]pkg.Package)
for _, path := range expectedSelection { for _, path := range expectedSelection {

View File

@ -0,0 +1,11 @@
package generic
import (
"github.com/anchore/syft/syft/pkg"
"io"
"github.com/anchore/syft/syft/artifact"
)
// Parser standardizes a function signature for parser functions that accept the virtual file path (not usable for file reads) and contents and return any discovered packages from that file
type Parser func(string, io.Reader) ([]*pkg.Package, []artifact.Relationship, error)

View File

@ -4,14 +4,14 @@ Package golang provides a concrete Cataloger implementation for go.mod files.
package golang package golang
import ( import (
"github.com/anchore/syft/syft/pkg/cataloger/common" "github.com/anchore/syft/syft/pkg/cataloger/generic"
) )
// NewGoModFileCataloger returns a new Go module cataloger object. // NewGoModFileCataloger returns a new Go module cataloger object.
func NewGoModFileCataloger() *common.GenericCataloger { func NewGoModFileCataloger() *generic.Cataloger {
globParsers := map[string]common.ParserFn{ globParsers := map[string]generic.Parser{
"**/go.mod": parseGoMod, "**/go.mod": parseGoMod,
} }
return common.NewGenericCataloger(nil, globParsers, "go-mod-file-cataloger") return generic.NewCataloger(nil, globParsers, "go-mod-file-cataloger")
} }

View File

@ -2,6 +2,7 @@ package java
import ( import (
"fmt" "fmt"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
"io" "io"
"path" "path"
"strings" "strings"
@ -10,11 +11,10 @@ import (
"github.com/anchore/syft/internal/log" "github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/common"
) )
// integrity check // integrity check
var _ common.ParserFn = parseJavaArchive var _ generic.Parser = parseJavaArchive
var archiveFormatGlobs = []string{ var archiveFormatGlobs = []string{
"**/*.jar", "**/*.jar",

View File

@ -4,12 +4,12 @@ Package java provides a concrete Cataloger implementation for Java archives (jar
package java package java
import ( import (
"github.com/anchore/syft/syft/pkg/cataloger/common" "github.com/anchore/syft/syft/pkg/cataloger/generic"
) )
// NewJavaCataloger returns a new Java archive cataloger object. // NewJavaCataloger returns a new Java archive cataloger object.
func NewJavaCataloger(cfg Config) *common.GenericCataloger { func NewJavaCataloger(cfg CatalogerConfig) *generic.Cataloger {
globParsers := make(map[string]common.ParserFn) globParsers := make(map[string]generic.Parser)
// java archive formats // java archive formats
for _, pattern := range archiveFormatGlobs { for _, pattern := range archiveFormatGlobs {
@ -30,5 +30,5 @@ func NewJavaCataloger(cfg Config) *common.GenericCataloger {
} }
} }
return common.NewGenericCataloger(nil, globParsers, "java-cataloger") return generic.NewCataloger(nil, globParsers, "java-cataloger")
} }

View File

@ -1,6 +1,6 @@
package java package java
type Config struct { type CatalogerConfig struct {
SearchUnindexedArchives bool SearchUnindexedArchives bool
SearchIndexedArchives bool SearchIndexedArchives bool
} }

View File

@ -2,8 +2,8 @@ package java
import ( import (
"github.com/anchore/packageurl-go" "github.com/anchore/packageurl-go"
"github.com/anchore/syft/syft/cpe"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/common/cpe"
) )
// PackageURL returns the PURL for the specific java package (see https://github.com/package-url/purl-spec) // PackageURL returns the PURL for the specific java package (see https://github.com/package-url/purl-spec)

View File

@ -2,17 +2,17 @@ package java
import ( import (
"fmt" "fmt"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
"io" "io"
"github.com/anchore/syft/internal/file" "github.com/anchore/syft/internal/file"
"github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/common"
) )
// integrity check // integrity check
var _ common.ParserFn = parseTarWrappedJavaArchive var _ generic.Parser = parseTarWrappedJavaArchive
var genericTarGlobs = []string{ var genericTarGlobs = []string{
"**/*.tar", "**/*.tar",

View File

@ -2,17 +2,17 @@ package java
import ( import (
"fmt" "fmt"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
"io" "io"
"github.com/anchore/syft/internal/file" "github.com/anchore/syft/internal/file"
"github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/common"
) )
// integrity check // integrity check
var _ common.ParserFn = parseZipWrappedJavaArchive var _ generic.Parser = parseZipWrappedJavaArchive
var genericZipGlobs = []string{ var genericZipGlobs = []string{
"**/*.zip", "**/*.zip",

View File

@ -4,24 +4,24 @@ Package javascript provides a concrete Cataloger implementation for JavaScript e
package javascript package javascript
import ( import (
"github.com/anchore/syft/syft/pkg/cataloger/common" "github.com/anchore/syft/syft/pkg/cataloger/generic"
) )
// NewJavascriptPackageCataloger returns a new JavaScript cataloger object based on detection of npm based packages. // NewJavascriptPackageCataloger returns a new JavaScript cataloger object based on detection of npm based packages.
func NewJavascriptPackageCataloger() *common.GenericCataloger { func NewJavascriptPackageCataloger() *generic.Cataloger {
globParsers := map[string]common.ParserFn{ globParsers := map[string]generic.Parser{
"**/package.json": parsePackageJSON, "**/package.json": parsePackageJSON,
} }
return common.NewGenericCataloger(nil, globParsers, "javascript-package-cataloger") return generic.NewCataloger(nil, globParsers, "javascript-package-cataloger")
} }
// NewJavascriptLockCataloger returns a new Javascript cataloger object base on package lock files. // NewJavascriptLockCataloger returns a new Javascript cataloger object base on package lock files.
func NewJavascriptLockCataloger() *common.GenericCataloger { func NewJavascriptLockCataloger() *generic.Cataloger {
globParsers := map[string]common.ParserFn{ globParsers := map[string]generic.Parser{
"**/package-lock.json": parsePackageLock, "**/package-lock.json": parsePackageLock,
"**/yarn.lock": parseYarnLock, "**/yarn.lock": parseYarnLock,
} }
return common.NewGenericCataloger(nil, globParsers, "javascript-lock-cataloger") return generic.NewCataloger(nil, globParsers, "javascript-lock-cataloger")
} }

View File

@ -4,6 +4,7 @@ import (
"encoding/json" "encoding/json"
"errors" "errors"
"fmt" "fmt"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
"io" "io"
"regexp" "regexp"
@ -15,11 +16,10 @@ import (
"github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/common"
) )
// integrity check // integrity check
var _ common.ParserFn = parsePackageJSON var _ generic.Parser = parsePackageJSON
// packageJSON represents a JavaScript package.json file // packageJSON represents a JavaScript package.json file
type packageJSON struct { type packageJSON struct {

View File

@ -3,15 +3,15 @@ package javascript
import ( import (
"encoding/json" "encoding/json"
"fmt" "fmt"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
"io" "io"
"github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/common"
) )
// integrity check // integrity check
var _ common.ParserFn = parsePackageLock var _ generic.Parser = parsePackageLock
// PackageLock represents a JavaScript package.lock json file // PackageLock represents a JavaScript package.lock json file
type PackageLock struct { type PackageLock struct {

View File

@ -3,17 +3,17 @@ package javascript
import ( import (
"bufio" "bufio"
"fmt" "fmt"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
"io" "io"
"regexp" "regexp"
"github.com/anchore/syft/internal" "github.com/anchore/syft/internal"
"github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/common"
) )
// integrity check // integrity check
var _ common.ParserFn = parseYarnLock var _ generic.Parser = parseYarnLock
var ( var (
// packageNameExp matches the name of the dependency in yarn.lock // packageNameExp matches the name of the dependency in yarn.lock

View File

@ -1,51 +1,26 @@
package cataloger package packages
import ( import (
"fmt" "fmt"
"github.com/anchore/syft/internal/bus"
"github.com/anchore/syft/internal/log" "github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/event" "github.com/anchore/syft/syft/cpe"
"github.com/anchore/syft/syft/event/monitor"
"github.com/anchore/syft/syft/linux" "github.com/anchore/syft/syft/linux"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/common/cpe"
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
"github.com/hashicorp/go-multierror" "github.com/hashicorp/go-multierror"
"github.com/wagoodman/go-partybus"
"github.com/wagoodman/go-progress"
) )
// Monitor provides progress-related data for observing the progress of a Catalog() call (published on the event bus).
type Monitor struct {
FilesProcessed progress.Monitorable // the number of files selected and contents analyzed from all registered catalogers
PackagesDiscovered progress.Monitorable // the number of packages discovered from all registered catalogers
}
// newMonitor creates a new Monitor object and publishes the object on the bus as a PackageCatalogerStarted event.
func newMonitor() (*progress.Manual, *progress.Manual) {
filesProcessed := progress.Manual{}
packagesDiscovered := progress.Manual{}
bus.Publish(partybus.Event{
Type: event.PackageCatalogerStarted,
Value: Monitor{
FilesProcessed: progress.Monitorable(&filesProcessed),
PackagesDiscovered: progress.Monitorable(&packagesDiscovered),
},
})
return &filesProcessed, &packagesDiscovered
}
// Catalog a given source (container image or filesystem) with the given catalogers, returning all discovered packages. // Catalog a given source (container image or filesystem) with the given catalogers, returning all discovered packages.
// In order to efficiently retrieve contents from a underlying container image the content fetch requests are // In order to efficiently retrieve contents from an underlying container image the content fetch requests are
// done in bulk. Specifically, all files of interest are collected from each catalogers and accumulated into a single // done in bulk. Specifically, all files of interest are collected from each cataloger and accumulated into a single
// request. // request.
func Catalog(resolver source.FileResolver, release *linux.Release, catalogers ...Cataloger) (*pkg.Catalog, []artifact.Relationship, error) { func Catalog(resolver source.FileResolver, release *linux.Release, catalogers ...pkg.Cataloger) (*pkg.Catalog, []artifact.Relationship, error) {
catalog := pkg.NewCatalog() catalog := pkg.NewCatalog()
var allRelationships []artifact.Relationship var allRelationships []artifact.Relationship
filesProcessed, packagesDiscovered := newMonitor() filesProcessed, packagesDiscovered := monitor.NewPackageCatalogerMonitor()
// perform analysis, accumulating errors for each failed analysis // perform analysis, accumulating errors for each failed analysis
var errs error var errs error

View File

@ -0,0 +1,82 @@
package packages
import (
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/apkdb"
"github.com/anchore/syft/syft/pkg/cataloger/deb"
"github.com/anchore/syft/syft/pkg/cataloger/golang"
"github.com/anchore/syft/syft/pkg/cataloger/java"
"github.com/anchore/syft/syft/pkg/cataloger/javascript"
"github.com/anchore/syft/syft/pkg/cataloger/php"
"github.com/anchore/syft/syft/pkg/cataloger/python"
"github.com/anchore/syft/syft/pkg/cataloger/rpmdb"
"github.com/anchore/syft/syft/pkg/cataloger/ruby"
"github.com/anchore/syft/syft/pkg/cataloger/rust"
"github.com/anchore/syft/syft/source"
)
// TODO: add tag-based API to select appropriate package catalogers for different scenarios
// AllCatalogers returns all implemented package catalogers
func AllCatalogers(cfg SearchConfig) []pkg.Cataloger {
return []pkg.Cataloger{
ruby.NewGemFileLockCataloger(),
ruby.NewGemSpecCataloger(),
python.NewPythonIndexCataloger(),
python.NewPythonPackageCataloger(),
javascript.NewJavascriptLockCataloger(),
javascript.NewJavascriptPackageCataloger(),
deb.NewDpkgdbCataloger(),
rpmdb.NewRpmdbCataloger(),
java.NewJavaCataloger(cfg.Java()),
apkdb.NewApkdbCataloger(),
golang.NewGoModuleBinaryCataloger(),
golang.NewGoModFileCataloger(),
rust.NewCargoLockCataloger(),
}
}
// InstalledCatalogers returns a slice of locally implemented package catalogers that are fit for detecting installations of packages.
func InstalledCatalogers(cfg SearchConfig) []pkg.Cataloger {
return []pkg.Cataloger{
ruby.NewGemSpecCataloger(),
python.NewPythonPackageCataloger(),
php.NewPHPComposerInstalledCataloger(),
javascript.NewJavascriptPackageCataloger(),
deb.NewDpkgdbCataloger(),
rpmdb.NewRpmdbCataloger(),
java.NewJavaCataloger(cfg.Java()),
apkdb.NewApkdbCataloger(),
golang.NewGoModuleBinaryCataloger(),
}
}
// IndexCatalogers returns a slice of locally implemented package catalogers that are fit for detecting packages from index files (and select installations)
func IndexCatalogers(cfg SearchConfig) []pkg.Cataloger {
return []pkg.Cataloger{
ruby.NewGemFileLockCataloger(),
python.NewPythonIndexCataloger(),
python.NewPythonPackageCataloger(), // for install
php.NewPHPComposerLockCataloger(),
javascript.NewJavascriptLockCataloger(),
deb.NewDpkgdbCataloger(), // for install
rpmdb.NewRpmdbCataloger(), // for install
java.NewJavaCataloger(cfg.Java()), // for install
apkdb.NewApkdbCataloger(), // for install
golang.NewGoModuleBinaryCataloger(), // for install
golang.NewGoModFileCataloger(),
rust.NewCargoLockCataloger(),
}
}
func CatalogersBySourceScheme(scheme source.Scheme, cfg SearchConfig) []pkg.Cataloger {
switch scheme {
case source.ImageScheme:
return InstalledCatalogers(cfg)
case source.FileScheme:
return AllCatalogers(cfg)
case source.DirectoryScheme:
return IndexCatalogers(cfg)
}
return nil
}

View File

@ -0,0 +1,24 @@
package packages
import (
"github.com/anchore/syft/syft/pkg/cataloger/java"
)
type SearchConfig struct {
IncludeIndexedArchives bool
IncludeUnindexedArchives bool
}
func DefaultSearchConfig() SearchConfig {
return SearchConfig{
IncludeIndexedArchives: true,
IncludeUnindexedArchives: false,
}
}
func (c SearchConfig) Java() java.CatalogerConfig {
return java.CatalogerConfig{
SearchUnindexedArchives: c.IncludeUnindexedArchives,
SearchIndexedArchives: c.IncludeIndexedArchives,
}
}

View File

@ -4,23 +4,23 @@ Package php provides a concrete Cataloger implementation for PHP ecosystem files
package php package php
import ( import (
"github.com/anchore/syft/syft/pkg/cataloger/common" "github.com/anchore/syft/syft/pkg/cataloger/generic"
) )
// NewPHPComposerInstalledCataloger returns a new cataloger for PHP installed.json files. // NewPHPComposerInstalledCataloger returns a new cataloger for PHP installed.json files.
func NewPHPComposerInstalledCataloger() *common.GenericCataloger { func NewPHPComposerInstalledCataloger() *generic.Cataloger {
globParsers := map[string]common.ParserFn{ globParsers := map[string]generic.Parser{
"**/installed.json": parseInstalledJSON, "**/installed.json": parseInstalledJSON,
} }
return common.NewGenericCataloger(nil, globParsers, "php-composer-installed-cataloger") return generic.NewCataloger(nil, globParsers, "php-composer-installed-cataloger")
} }
// NewPHPComposerLockCataloger returns a new cataloger for PHP composer.lock files. // NewPHPComposerLockCataloger returns a new cataloger for PHP composer.lock files.
func NewPHPComposerLockCataloger() *common.GenericCataloger { func NewPHPComposerLockCataloger() *generic.Cataloger {
globParsers := map[string]common.ParserFn{ globParsers := map[string]generic.Parser{
"**/composer.lock": parseComposerLock, "**/composer.lock": parseComposerLock,
} }
return common.NewGenericCataloger(nil, globParsers, "php-composer-lock-cataloger") return generic.NewCataloger(nil, globParsers, "php-composer-lock-cataloger")
} }

View File

@ -3,11 +3,11 @@ package php
import ( import (
"encoding/json" "encoding/json"
"fmt" "fmt"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
"io" "io"
"github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/common"
) )
// Note: composer version 2 introduced a new structure for the installed.json file, so we support both // Note: composer version 2 introduced a new structure for the installed.json file, so we support both
@ -37,7 +37,7 @@ func (w *installedJSONComposerV2) UnmarshalJSON(data []byte) error {
} }
// integrity check // integrity check
var _ common.ParserFn = parseComposerLock var _ generic.Parser = parseComposerLock
// parseComposerLock is a parser function for Composer.lock contents, returning "Default" php packages discovered. // parseComposerLock is a parser function for Composer.lock contents, returning "Default" php packages discovered.
func parseInstalledJSON(_ string, reader io.Reader) ([]*pkg.Package, []artifact.Relationship, error) { func parseInstalledJSON(_ string, reader io.Reader) ([]*pkg.Package, []artifact.Relationship, error) {

View File

@ -4,17 +4,17 @@ Package python provides a concrete Cataloger implementation for Python ecosystem
package python package python
import ( import (
"github.com/anchore/syft/syft/pkg/cataloger/common" "github.com/anchore/syft/syft/pkg/cataloger/generic"
) )
// NewPythonIndexCataloger returns a new cataloger for python packages referenced from poetry lock files, requirements.txt files, and setup.py files. // NewPythonIndexCataloger returns a new cataloger for python packages referenced from poetry lock files, requirements.txt files, and setup.py files.
func NewPythonIndexCataloger() *common.GenericCataloger { func NewPythonIndexCataloger() *generic.Cataloger {
globParsers := map[string]common.ParserFn{ globParsers := map[string]generic.Parser{
"**/*requirements*.txt": parseRequirementsTxt, "**/*requirements*.txt": parseRequirementsTxt,
"**/poetry.lock": parsePoetryLock, "**/poetry.lock": parsePoetryLock,
"**/Pipfile.lock": parsePipfileLock, "**/Pipfile.lock": parsePipfileLock,
"**/setup.py": parseSetup, "**/setup.py": parseSetup,
} }
return common.NewGenericCataloger(nil, globParsers, "python-index-cataloger") return generic.NewCataloger(nil, globParsers, "python-index-cataloger")
} }

View File

@ -3,13 +3,13 @@ package python
import ( import (
"encoding/json" "encoding/json"
"fmt" "fmt"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
"io" "io"
"sort" "sort"
"strings" "strings"
"github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/common"
) )
type PipfileLock struct { type PipfileLock struct {
@ -36,7 +36,7 @@ type Dependency struct {
} }
// integrity check // integrity check
var _ common.ParserFn = parsePipfileLock var _ generic.Parser = parsePipfileLock
// parsePipfileLock is a parser function for Pipfile.lock contents, returning "Default" python packages discovered. // parsePipfileLock is a parser function for Pipfile.lock contents, returning "Default" python packages discovered.
func parsePipfileLock(_ string, reader io.Reader) ([]*pkg.Package, []artifact.Relationship, error) { func parsePipfileLock(_ string, reader io.Reader) ([]*pkg.Package, []artifact.Relationship, error) {

View File

@ -2,16 +2,16 @@ package python
import ( import (
"fmt" "fmt"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
"io" "io"
"github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/common"
"github.com/pelletier/go-toml" "github.com/pelletier/go-toml"
) )
// integrity check // integrity check
var _ common.ParserFn = parsePoetryLock var _ generic.Parser = parsePoetryLock
// parsePoetryLock is a parser function for poetry.lock contents, returning all python packages discovered. // parsePoetryLock is a parser function for poetry.lock contents, returning all python packages discovered.
func parsePoetryLock(_ string, reader io.Reader) ([]*pkg.Package, []artifact.Relationship, error) { func parsePoetryLock(_ string, reader io.Reader) ([]*pkg.Package, []artifact.Relationship, error) {

View File

@ -3,16 +3,16 @@ package python
import ( import (
"bufio" "bufio"
"fmt" "fmt"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
"io" "io"
"strings" "strings"
"github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/common"
) )
// integrity check // integrity check
var _ common.ParserFn = parseRequirementsTxt var _ generic.Parser = parseRequirementsTxt
// parseRequirementsTxt takes a Python requirements.txt file, returning all Python packages that are locked to a // parseRequirementsTxt takes a Python requirements.txt file, returning all Python packages that are locked to a
// specific version. // specific version.

View File

@ -2,17 +2,17 @@ package python
import ( import (
"bufio" "bufio"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
"io" "io"
"regexp" "regexp"
"strings" "strings"
"github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/common"
) )
// integrity check // integrity check
var _ common.ParserFn = parseSetup var _ generic.Parser = parseSetup
// match examples: // match examples:
// 'pathlib3==2.2.0;python_version<"3.6"' --> match(name=pathlib3 version=2.2.0) // 'pathlib3==2.2.0;python_version<"3.6"' --> match(name=pathlib3 version=2.2.0)

View File

@ -4,23 +4,23 @@ Package ruby bundler provides a concrete Cataloger implementation for Ruby Gemfi
package ruby package ruby
import ( import (
"github.com/anchore/syft/syft/pkg/cataloger/common" "github.com/anchore/syft/syft/pkg/cataloger/generic"
) )
// NewGemFileLockCataloger returns a new Bundler cataloger object tailored for parsing index-oriented files (e.g. Gemfile.lock). // NewGemFileLockCataloger returns a new Bundler cataloger object tailored for parsing index-oriented files (e.g. Gemfile.lock).
func NewGemFileLockCataloger() *common.GenericCataloger { func NewGemFileLockCataloger() *generic.Cataloger {
globParsers := map[string]common.ParserFn{ globParsers := map[string]generic.Parser{
"**/Gemfile.lock": parseGemFileLockEntries, "**/Gemfile.lock": parseGemFileLockEntries,
} }
return common.NewGenericCataloger(nil, globParsers, "ruby-gemfile-cataloger") return generic.NewCataloger(nil, globParsers, "ruby-gemfile-cataloger")
} }
// NewGemSpecCataloger returns a new Bundler cataloger object tailored for detecting installations of gems (e.g. Gemspec). // NewGemSpecCataloger returns a new Bundler cataloger object tailored for detecting installations of gems (e.g. Gemspec).
func NewGemSpecCataloger() *common.GenericCataloger { func NewGemSpecCataloger() *generic.Cataloger {
globParsers := map[string]common.ParserFn{ globParsers := map[string]generic.Parser{
"**/specifications/**/*.gemspec": parseGemSpecEntries, "**/specifications/**/*.gemspec": parseGemSpecEntries,
} }
return common.NewGenericCataloger(nil, globParsers, "ruby-gemspec-cataloger") return generic.NewCataloger(nil, globParsers, "ruby-gemspec-cataloger")
} }

View File

@ -2,17 +2,17 @@ package ruby
import ( import (
"bufio" "bufio"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
"io" "io"
"strings" "strings"
"github.com/anchore/syft/internal" "github.com/anchore/syft/internal"
"github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/common"
) )
// integrity check // integrity check
var _ common.ParserFn = parseGemFileLockEntries var _ generic.Parser = parseGemFileLockEntries
var sectionsOfInterest = internal.NewStringSet("GEM") var sectionsOfInterest = internal.NewStringSet("GEM")

View File

@ -4,6 +4,7 @@ import (
"bufio" "bufio"
"encoding/json" "encoding/json"
"fmt" "fmt"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
"io" "io"
"regexp" "regexp"
"strings" "strings"
@ -14,11 +15,10 @@ import (
"github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/common"
) )
// integrity check // integrity check
var _ common.ParserFn = parseGemFileLockEntries var _ generic.Parser = parseGemFileLockEntries
type postProcessor func(string) []string type postProcessor func(string) []string

View File

@ -4,14 +4,14 @@ Package rust provides a concrete Cataloger implementation for Cargo.lock files.
package rust package rust
import ( import (
"github.com/anchore/syft/syft/pkg/cataloger/common" "github.com/anchore/syft/syft/pkg/cataloger/generic"
) )
// NewCargoLockCataloger returns a new Rust Cargo lock file cataloger object. // NewCargoLockCataloger returns a new Rust Cargo lock file cataloger object.
func NewCargoLockCataloger() *common.GenericCataloger { func NewCargoLockCataloger() *generic.Cataloger {
globParsers := map[string]common.ParserFn{ globParsers := map[string]generic.Parser{
"**/Cargo.lock": parseCargoLock, "**/Cargo.lock": parseCargoLock,
} }
return common.NewGenericCataloger(nil, globParsers, "rust-cataloger") return generic.NewCataloger(nil, globParsers, "rust-cataloger")
} }

View File

@ -2,16 +2,16 @@ package rust
import ( import (
"fmt" "fmt"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
"io" "io"
"github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/common"
"github.com/pelletier/go-toml" "github.com/pelletier/go-toml"
) )
// integrity check // integrity check
var _ common.ParserFn = parseCargoLock var _ generic.Parser = parseCargoLock
// parseCargoLock is a parser function for Cargo.lock contents, returning all rust cargo crates discovered. // parseCargoLock is a parser function for Cargo.lock contents, returning all rust cargo crates discovered.
func parseCargoLock(_ string, reader io.Reader) ([]*pkg.Package, []artifact.Relationship, error) { func parseCargoLock(_ string, reader io.Reader) ([]*pkg.Package, []artifact.Relationship, error) {

View File

@ -1,17 +0,0 @@
package cataloger
import "github.com/anchore/syft/syft/source"
type SearchConfig struct {
IncludeIndexedArchives bool
IncludeUnindexedArchives bool
Scope source.Scope
}
func DefaultSearchConfig() SearchConfig {
return SearchConfig{
IncludeIndexedArchives: true,
IncludeUnindexedArchives: false,
Scope: source.SquashedScope,
}
}

169
syft/tasks.go Normal file
View File

@ -0,0 +1,169 @@
package syft
import (
"fmt"
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/linux"
"github.com/anchore/syft/syft/pkg/cataloger/packages"
"github.com/anchore/syft/syft/sbom"
"github.com/anchore/syft/syft/source"
)
type task func(*sbom.Artifacts, *source.Source) ([]artifact.Relationship, error)
type taskGenerator func(CatalogingConfig) (task, error)
func generateCatalogPackagesTask(config CatalogingConfig) (task, error) {
if len(config.PackageCatalogers) == 0 {
return nil, nil
}
return func(artifacts *sbom.Artifacts, src *source.Source) ([]artifact.Relationship, error) {
resolver, err := src.FileResolver(config.Scope)
if err != nil {
return nil, fmt.Errorf("unable to determine resolver while cataloging packages: %w", err)
}
// find the distro
artifacts.LinuxDistribution = linux.IdentifyRelease(resolver)
// catalog packages
catalog, relationships, err := packages.Catalog(resolver, artifacts.LinuxDistribution, config.PackageCatalogers...)
if err != nil {
return nil, err
}
artifacts.PackageCatalog = catalog
return relationships, nil
}, nil
}
func generateCatalogFileMetadataTask(config CatalogingConfig) (task, error) {
if !config.CaptureFileMetadata {
return nil, nil
}
metadataCataloger := file.NewMetadataCataloger()
return func(results *sbom.Artifacts, src *source.Source) ([]artifact.Relationship, error) {
resolver, err := src.FileResolver(config.Scope)
if err != nil {
return nil, err
}
result, err := metadataCataloger.Catalog(resolver)
if err != nil {
return nil, err
}
results.FileMetadata = result
return nil, nil
}, nil
}
func generateCatalogFileDigestsTask(config CatalogingConfig) (task, error) {
if len(config.DigestHashes) == 0 {
return nil, nil
}
digestsCataloger, err := file.NewDigestsCataloger(config.DigestHashes)
if err != nil {
return nil, err
}
return func(results *sbom.Artifacts, src *source.Source) ([]artifact.Relationship, error) {
resolver, err := src.FileResolver(config.Scope)
if err != nil {
return nil, err
}
result, err := digestsCataloger.Catalog(resolver)
if err != nil {
return nil, err
}
results.FileDigests = result
return nil, nil
}, nil
}
func generateCatalogContentsTask(config CatalogingConfig) (task, error) {
if len(config.ContentsConfig.Globs) > 0 {
return nil, nil
}
contentsCataloger, err := file.NewContentsCataloger(config.ContentsConfig)
if err != nil {
return nil, err
}
return func(results *sbom.Artifacts, src *source.Source) ([]artifact.Relationship, error) {
resolver, err := src.FileResolver(config.Scope)
if err != nil {
return nil, err
}
result, err := contentsCataloger.Catalog(resolver)
if err != nil {
return nil, err
}
results.FileContents = result
return nil, nil
}, nil
}
func generateCatalogSecretsTask(config CatalogingConfig) (task, error) {
if !config.CaptureSecrets {
return nil, nil
}
//patterns, err := file.GenerateSearchPatterns(file.DefaultSecretsPatterns, appConfig.Secrets.AdditionalPatterns, appConfig.Secrets.ExcludePatternNames)
//if err != nil {
// return nil, err
//}
secretsCataloger, err := file.NewSecretsCataloger(config.SecretsConfig)
if err != nil {
return nil, err
}
return func(results *sbom.Artifacts, src *source.Source) ([]artifact.Relationship, error) {
resolver, err := src.FileResolver(config.SecretsScope)
if err != nil {
return nil, err
}
result, err := secretsCataloger.Catalog(resolver)
if err != nil {
return nil, err
}
results.Secrets = result
return nil, nil
}, nil
}
func generateCatalogFileClassificationsTask(config CatalogingConfig) (task, error) {
if !config.ClassifyFiles {
return nil, nil
}
classifierCataloger, err := file.NewClassificationCataloger(config.FileClassifiers)
if err != nil {
return nil, err
}
return func(results *sbom.Artifacts, src *source.Source) ([]artifact.Relationship, error) {
resolver, err := src.FileResolver(config.Scope)
if err != nil {
return nil, err
}
result, err := classifierCataloger.Catalog(resolver)
if err != nil {
return nil, err
}
results.FileClassifications = result
return nil, nil
}, nil
}

View File

@ -1,11 +1,11 @@
package integration package integration
import ( import (
"github.com/anchore/syft/syft/pkg/cataloger/packages"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"testing" "testing"
"github.com/anchore/syft/syft/linux" "github.com/anchore/syft/syft/linux"
"github.com/anchore/syft/syft/pkg/cataloger"
"github.com/google/go-cmp/cmp" "github.com/google/go-cmp/cmp"
"github.com/anchore/stereoscope/pkg/imagetest" "github.com/anchore/stereoscope/pkg/imagetest"
@ -21,7 +21,7 @@ func BenchmarkImagePackageCatalogers(b *testing.B) {
tarPath := imagetest.GetFixtureImageTarPath(b, fixtureImageName) tarPath := imagetest.GetFixtureImageTarPath(b, fixtureImageName)
var pc *pkg.Catalog var pc *pkg.Catalog
for _, c := range cataloger.ImageCatalogers(cataloger.DefaultConfig()) { for _, c := range packages.InstalledCatalogers(packages.DefaultSearchConfig()) {
// in case of future alteration where state is persisted, assume no dependency is safe to reuse // in case of future alteration where state is persisted, assume no dependency is safe to reuse
userInput := "docker-archive:" + tarPath userInput := "docker-archive:" + tarPath
sourceInput, err := source.ParseInput(userInput, "", false) sourceInput, err := source.ParseInput(userInput, "", false)
@ -41,7 +41,7 @@ func BenchmarkImagePackageCatalogers(b *testing.B) {
b.Run(c.Name(), func(b *testing.B) { b.Run(c.Name(), func(b *testing.B) {
for i := 0; i < b.N; i++ { for i := 0; i < b.N; i++ {
pc, _, err = cataloger.Catalog(resolver, theDistro, c) pc, _, err = packages.Catalog(resolver, theDistro, c)
if err != nil { if err != nil {
b.Fatalf("failure during benchmark: %+v", err) b.Fatalf("failure during benchmark: %+v", err)
} }

View File

@ -1,15 +1,14 @@
package integration package integration
import ( import (
"github.com/anchore/syft/syft/linux"
"github.com/anchore/syft/syft/pkg/cataloger/packages"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"testing" "testing"
"github.com/anchore/syft/syft/pkg/cataloger"
"github.com/anchore/syft/syft/sbom" "github.com/anchore/syft/syft/sbom"
"github.com/anchore/stereoscope/pkg/imagetest" "github.com/anchore/stereoscope/pkg/imagetest"
"github.com/anchore/syft/syft"
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
) )
@ -23,10 +22,11 @@ func catalogFixtureImage(t *testing.T, fixtureImageName string) (sbom.SBOM, *sou
t.Cleanup(cleanupSource) t.Cleanup(cleanupSource)
require.NoError(t, err) require.NoError(t, err)
// TODO: this would be better with functional options (after/during API refactor) // TODO: this would be better with functional options (after/during API refactor)... this should be replaced
c := cataloger.DefaultConfig() resolver, err := theSource.FileResolver(source.SquashedScope)
c.Search.Scope = source.SquashedScope require.NoError(t, err)
pkgCatalog, relationships, actualDistro, err := syft.CatalogPackages(theSource, c) release := linux.IdentifyRelease(resolver)
pkgCatalog, relationships, err := packages.Catalog(resolver, release, packages.CatalogersBySourceScheme(theSource.Metadata.Scheme, packages.DefaultSearchConfig())...)
if err != nil { if err != nil {
t.Fatalf("failed to catalog image: %+v", err) t.Fatalf("failed to catalog image: %+v", err)
} }
@ -34,7 +34,7 @@ func catalogFixtureImage(t *testing.T, fixtureImageName string) (sbom.SBOM, *sou
return sbom.SBOM{ return sbom.SBOM{
Artifacts: sbom.Artifacts{ Artifacts: sbom.Artifacts{
PackageCatalog: pkgCatalog, PackageCatalog: pkgCatalog,
LinuxDistribution: actualDistro, LinuxDistribution: release,
}, },
Relationships: relationships, Relationships: relationships,
Source: theSource.Metadata, Source: theSource.Metadata,
@ -59,9 +59,10 @@ func catalogDirectory(t *testing.T, dir string) (sbom.SBOM, *source.Source) {
require.NoError(t, err) require.NoError(t, err)
// TODO: this would be better with functional options (after/during API refactor) // TODO: this would be better with functional options (after/during API refactor)
c := cataloger.DefaultConfig() resolver, err := theSource.FileResolver(source.AllLayersScope)
c.Search.Scope = source.AllLayersScope require.NoError(t, err)
pkgCatalog, relationships, actualDistro, err := syft.CatalogPackages(theSource, c) release := linux.IdentifyRelease(resolver)
pkgCatalog, relationships, err := packages.Catalog(resolver, release, packages.CatalogersBySourceScheme(theSource.Metadata.Scheme, packages.DefaultSearchConfig())...)
if err != nil { if err != nil {
t.Fatalf("failed to catalog image: %+v", err) t.Fatalf("failed to catalog image: %+v", err)
} }
@ -69,7 +70,7 @@ func catalogDirectory(t *testing.T, dir string) (sbom.SBOM, *source.Source) {
return sbom.SBOM{ return sbom.SBOM{
Artifacts: sbom.Artifacts{ Artifacts: sbom.Artifacts{
PackageCatalog: pkgCatalog, PackageCatalog: pkgCatalog,
LinuxDistribution: actualDistro, LinuxDistribution: release,
}, },
Relationships: relationships, Relationships: relationships,
Source: theSource.Metadata, Source: theSource.Metadata,