keep file catalogers separate from file-related definitions

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>
This commit is contained in:
Alex Goodman 2022-03-22 17:18:36 -04:00
parent 3308079158
commit b3ca75646c
No known key found for this signature in database
GPG Key ID: 5CB45AE22BAB7EA7
86 changed files with 305 additions and 280 deletions

View File

@@ -1,12 +1,14 @@
package file package archive
import ( import (
"errors" "errors"
"fmt" "fmt"
"io" "io"
"github.com/anchore/syft/syft/file"
) )
const perFileReadLimit = 2 * GB const perFileReadLimit = 2 * file.GB
// safeCopy limits the copy from the reader. This is useful when extracting files from archives to // safeCopy limits the copy from the reader. This is useful when extracting files from archives to
// protect against decompression bomb attacks. // protect against decompression bomb attacks.

View File

@@ -1,38 +1,39 @@
package file package archive
import ( import (
"fmt" "fmt"
"io/ioutil" "io/ioutil"
"path/filepath" "path/filepath"
"github.com/anchore/syft/syft/file"
"github.com/bmatcuk/doublestar/v4" "github.com/bmatcuk/doublestar/v4"
"github.com/mholt/archiver/v3" "github.com/mholt/archiver/v3"
) )
// ExtractGlobsFromTarToUniqueTempFile extracts paths matching the given globs within the given archive to a temporary directory, returning file openers for each file extracted. // ExtractGlobsFromTarToUniqueTempFile extracts paths matching the given globs within the given archive to a temporary directory, returning file openers for each file extracted.
func ExtractGlobsFromTarToUniqueTempFile(archivePath, dir string, globs ...string) (map[string]Opener, error) { func ExtractGlobsFromTarToUniqueTempFile(archivePath, dir string, globs ...string) (map[string]file.Opener, error) {
results := make(map[string]Opener) results := make(map[string]file.Opener)
// don't allow for full traversal, only select traversal from given paths // don't allow for full traversal, only select traversal from given paths
if len(globs) == 0 { if len(globs) == 0 {
return results, nil return results, nil
} }
visitor := func(file archiver.File) error { visitor := func(f archiver.File) error {
defer file.Close() defer f.Close()
// ignore directories // ignore directories
if file.FileInfo.IsDir() { if f.FileInfo.IsDir() {
return nil return nil
} }
// ignore any filename that doesn't match the given globs... // ignore any filename that doesn't match the given globs...
if !matchesAnyGlob(file.Name(), globs...) { if !matchesAnyGlob(f.Name(), globs...) {
return nil return nil
} }
// we have a file we want to extract.... // we have a file we want to extract....
tempfilePrefix := filepath.Base(filepath.Clean(file.Name())) + "-" tempfilePrefix := filepath.Base(filepath.Clean(f.Name())) + "-"
tempFile, err := ioutil.TempFile(dir, tempfilePrefix) tempFile, err := ioutil.TempFile(dir, tempfilePrefix)
if err != nil { if err != nil {
return fmt.Errorf("unable to create temp file: %w", err) return fmt.Errorf("unable to create temp file: %w", err)
@@ -42,11 +43,11 @@ func ExtractGlobsFromTarToUniqueTempFile(archivePath, dir string, globs ...strin
// provides a ReadCloser. It is up to the caller to handle closing the file explicitly. // provides a ReadCloser. It is up to the caller to handle closing the file explicitly.
defer tempFile.Close() defer tempFile.Close()
if err := safeCopy(tempFile, file.ReadCloser); err != nil { if err := safeCopy(tempFile, f.ReadCloser); err != nil {
return fmt.Errorf("unable to copy source=%q for tar=%q: %w", file.Name(), archivePath, err) return fmt.Errorf("unable to copy source=%q for tar=%q: %w", f.Name(), archivePath, err)
} }
results[file.Name()] = Opener{path: tempFile.Name()} results[f.Name()] = file.NewOpener(tempFile.Name())
return nil return nil
} }

View File

@@ -1,4 +1,4 @@
package file package archive
import ( import (
"io/ioutil" "io/ioutil"

View File

@@ -1,7 +1,8 @@
package file package archive
import ( import (
"fmt" "fmt"
"github.com/anchore/syft/syft/file"
"os" "os"
"sort" "sort"
"strings" "strings"
@@ -48,7 +49,7 @@ func (z ZipFileManifest) GlobMatch(patterns ...string) []string {
// so that glob logic is consistent inside and outside of ZIP archives // so that glob logic is consistent inside and outside of ZIP archives
normalizedEntry := normalizeZipEntryName(entry) normalizedEntry := normalizeZipEntryName(entry)
if GlobMatch(pattern, normalizedEntry) { if file.GlobMatch(pattern, normalizedEntry) {
uniqueMatches.Add(entry) uniqueMatches.Add(entry)
} }
} }

View File

@@ -1,7 +1,7 @@
//go:build !windows //go:build !windows
// +build !windows // +build !windows
package file package archive
import ( import (
"encoding/json" "encoding/json"

View File

@@ -1,9 +1,10 @@
package file package archive
import ( import (
"archive/zip" "archive/zip"
"bytes" "bytes"
"fmt" "fmt"
"github.com/anchore/syft/syft/file"
"io/ioutil" "io/ioutil"
"os" "os"
"path/filepath" "path/filepath"
@@ -12,14 +13,6 @@ import (
"github.com/anchore/syft/internal/log" "github.com/anchore/syft/internal/log"
) )
const (
// represents the order of bytes
_ = iota
KB = 1 << (10 * iota)
MB
GB
)
type errZipSlipDetected struct { type errZipSlipDetected struct {
Prefix string Prefix string
JoinArgs []string JoinArgs []string
@@ -71,16 +64,16 @@ func TraverseFilesInZip(archivePath string, visitor func(*zip.File) error, paths
} }
// ExtractFromZipToUniqueTempFile extracts select paths for the given archive to a temporary directory, returning file openers for each file extracted. // ExtractFromZipToUniqueTempFile extracts select paths for the given archive to a temporary directory, returning file openers for each file extracted.
func ExtractFromZipToUniqueTempFile(archivePath, dir string, paths ...string) (map[string]Opener, error) { func ExtractFromZipToUniqueTempFile(archivePath, dir string, paths ...string) (map[string]file.Opener, error) {
results := make(map[string]Opener) results := make(map[string]file.Opener)
// don't allow for full traversal, only select traversal from given paths // don't allow for full traversal, only select traversal from given paths
if len(paths) == 0 { if len(paths) == 0 {
return results, nil return results, nil
} }
visitor := func(file *zip.File) error { visitor := func(f *zip.File) error {
tempfilePrefix := filepath.Base(filepath.Clean(file.Name)) + "-" tempfilePrefix := filepath.Base(filepath.Clean(f.Name)) + "-"
tempFile, err := ioutil.TempFile(dir, tempfilePrefix) tempFile, err := ioutil.TempFile(dir, tempfilePrefix)
if err != nil { if err != nil {
@@ -91,26 +84,26 @@ func ExtractFromZipToUniqueTempFile(archivePath, dir string, paths ...string) (m
// provides a ReadCloser. It is up to the caller to handle closing the file explicitly. // provides a ReadCloser. It is up to the caller to handle closing the file explicitly.
defer tempFile.Close() defer tempFile.Close()
zippedFile, err := file.Open() zippedFile, err := f.Open()
if err != nil { if err != nil {
return fmt.Errorf("unable to read file=%q from zip=%q: %w", file.Name, archivePath, err) return fmt.Errorf("unable to read file=%q from zip=%q: %w", f.Name, archivePath, err)
} }
defer func() { defer func() {
err := zippedFile.Close() err := zippedFile.Close()
if err != nil { if err != nil {
log.Errorf("unable to close source file=%q from zip=%q: %+v", file.Name, archivePath, err) log.Errorf("unable to close source file=%q from zip=%q: %+v", f.Name, archivePath, err)
} }
}() }()
if file.FileInfo().IsDir() { if f.FileInfo().IsDir() {
return fmt.Errorf("unable to extract directories, only files: %s", file.Name) return fmt.Errorf("unable to extract directories, only files: %s", f.Name)
} }
if err := safeCopy(tempFile, zippedFile); err != nil { if err := safeCopy(tempFile, zippedFile); err != nil {
return fmt.Errorf("unable to copy source=%q for zip=%q: %w", file.Name, archivePath, err) return fmt.Errorf("unable to copy source=%q for zip=%q: %w", f.Name, archivePath, err)
} }
results[file.Name] = Opener{path: tempFile.Name()} results[f.Name] = file.NewOpener(tempFile.Name())
return nil return nil
} }

View File

@@ -1,7 +1,7 @@
//go:build !windows //go:build !windows
// +build !windows // +build !windows
package file package archive
import ( import (
"crypto/sha256" "crypto/sha256"

View File

@@ -1,4 +1,4 @@
package file package archive
import ( import (
"archive/zip" "archive/zip"

View File

@@ -1,7 +1,7 @@
//go:build !windows //go:build !windows
// +build !windows // +build !windows
package file package archive
import ( import (
"os" "os"

View File

@@ -45,7 +45,7 @@ type Application struct {
FileMetadata FileMetadata `yaml:"file-metadata" json:"file-metadata" mapstructure:"file-metadata"` FileMetadata FileMetadata `yaml:"file-metadata" json:"file-metadata" mapstructure:"file-metadata"`
FileClassification fileClassification `yaml:"file-classification" json:"file-classification" mapstructure:"file-classification"` FileClassification fileClassification `yaml:"file-classification" json:"file-classification" mapstructure:"file-classification"`
FileContents fileContents `yaml:"file-contents" json:"file-contents" mapstructure:"file-contents"` FileContents fileContents `yaml:"file-contents" json:"file-contents" mapstructure:"file-contents"`
Secrets secrets `yaml:"secrets" json:"secrets" mapstructure:"secrets"` Secrets secretsCfg `yaml:"secrets" json:"secrets" mapstructure:"secrets"`
Registry registry `yaml:"registry" json:"registry" mapstructure:"registry"` Registry registry `yaml:"registry" json:"registry" mapstructure:"registry"`
Exclusions []string `yaml:"exclude" json:"exclude" mapstructure:"exclude"` Exclusions []string `yaml:"exclude" json:"exclude" mapstructure:"exclude"`
Attest attest `yaml:"attest" json:"attest" mapstructure:"attest"` Attest attest `yaml:"attest" json:"attest" mapstructure:"attest"`

View File

@@ -1,8 +1,8 @@
package config package config
import ( import (
internalFile "github.com/anchore/syft/internal/file"
"github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/file/cataloger/filecontents"
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
"github.com/spf13/viper" "github.com/spf13/viper"
) )
@@ -16,7 +16,7 @@ type fileContents struct {
func (cfg fileContents) loadDefaultValues(v *viper.Viper) { func (cfg fileContents) loadDefaultValues(v *viper.Viper) {
v.SetDefault("file-contents.cataloger.enabled", catalogerEnabledDefault) v.SetDefault("file-contents.cataloger.enabled", catalogerEnabledDefault)
v.SetDefault("file-contents.cataloger.scope", source.SquashedScope) v.SetDefault("file-contents.cataloger.scope", source.SquashedScope)
v.SetDefault("file-contents.skip-files-above-size", 1*internalFile.MB) v.SetDefault("file-contents.skip-files-above-size", 1*file.MB)
v.SetDefault("file-contents.globs", []string{}) v.SetDefault("file-contents.globs", []string{})
} }
@@ -24,8 +24,8 @@ func (cfg *fileContents) parseConfigValues() error {
return cfg.Cataloger.parseConfigValues() return cfg.Cataloger.parseConfigValues()
} }
func (cfg fileContents) ToConfig() file.ContentsCatalogerConfig { func (cfg fileContents) ToConfig() filecontents.CatalogerConfig {
return file.ContentsCatalogerConfig{ return filecontents.CatalogerConfig{
Globs: cfg.Globs, Globs: cfg.Globs,
SkipFilesAboveSizeInBytes: cfg.SkipFilesAboveSize, SkipFilesAboveSizeInBytes: cfg.SkipFilesAboveSize,
} }

View File

@@ -2,13 +2,13 @@ package config
import ( import (
"fmt" "fmt"
internalFile "github.com/anchore/syft/internal/file"
"github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/file/cataloger/secrets"
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
"github.com/spf13/viper" "github.com/spf13/viper"
) )
type secrets struct { type secretsCfg struct {
Cataloger catalogerOptions `yaml:"cataloger" json:"cataloger" mapstructure:"cataloger"` Cataloger catalogerOptions `yaml:"cataloger" json:"cataloger" mapstructure:"cataloger"`
AdditionalPatterns map[string]string `yaml:"additional-patterns" json:"additional-patterns" mapstructure:"additional-patterns"` AdditionalPatterns map[string]string `yaml:"additional-patterns" json:"additional-patterns" mapstructure:"additional-patterns"`
ExcludePatternNames []string `yaml:"exclude-pattern-names" json:"exclude-pattern-names" mapstructure:"exclude-pattern-names"` ExcludePatternNames []string `yaml:"exclude-pattern-names" json:"exclude-pattern-names" mapstructure:"exclude-pattern-names"`
@@ -16,25 +16,25 @@ type secrets struct {
SkipFilesAboveSize int64 `yaml:"skip-files-above-size" json:"skip-files-above-size" mapstructure:"skip-files-above-size"` SkipFilesAboveSize int64 `yaml:"skip-files-above-size" json:"skip-files-above-size" mapstructure:"skip-files-above-size"`
} }
func (cfg secrets) loadDefaultValues(v *viper.Viper) { func (cfg secretsCfg) loadDefaultValues(v *viper.Viper) {
v.SetDefault("secrets.cataloger.enabled", catalogerEnabledDefault) v.SetDefault("secrets.cataloger.enabled", catalogerEnabledDefault)
v.SetDefault("secrets.cataloger.scope", source.AllLayersScope) v.SetDefault("secrets.cataloger.scope", source.AllLayersScope)
v.SetDefault("secrets.reveal-values", false) v.SetDefault("secrets.reveal-values", false)
v.SetDefault("secrets.skip-files-above-size", 1*internalFile.MB) v.SetDefault("secrets.skip-files-above-size", 1*file.MB)
v.SetDefault("secrets.additional-patterns", map[string]string{}) v.SetDefault("secrets.additional-patterns", map[string]string{})
v.SetDefault("secrets.exclude-pattern-names", []string{}) v.SetDefault("secrets.exclude-pattern-names", []string{})
} }
func (cfg *secrets) parseConfigValues() error { func (cfg *secretsCfg) parseConfigValues() error {
return cfg.Cataloger.parseConfigValues() return cfg.Cataloger.parseConfigValues()
} }
func (cfg secrets) ToConfig() (*file.SecretsCatalogerConfig, error) { func (cfg secretsCfg) ToConfig() (*secrets.CatalogerConfig, error) {
patterns, err := file.GenerateSearchPatterns(file.DefaultSecretsPatterns, cfg.AdditionalPatterns, cfg.ExcludePatternNames) patterns, err := file.GenerateSearchPatterns(secrets.DefaultSecretsPatterns, cfg.AdditionalPatterns, cfg.ExcludePatternNames)
if err != nil { if err != nil {
return nil, fmt.Errorf("unable to process secrets config patterns: %w", err) return nil, fmt.Errorf("unable to process secrets config patterns: %w", err)
} }
return &file.SecretsCatalogerConfig{ return &secrets.CatalogerConfig{
Patterns: patterns, Patterns: patterns,
RevealValues: cfg.RevealValues, RevealValues: cfg.RevealValues,
MaxFileSize: cfg.SkipFilesAboveSize, MaxFileSize: cfg.SkipFilesAboveSize,

View File

@@ -19,12 +19,12 @@ func Catalog(src *source.Source, options ...CatalogingOption) (*sbom.SBOM, error
var tasks []task var tasks []task
generators := []taskGenerator{ generators := []taskGenerator{
generateCatalogPackagesTask, generatePackagesCatalogingTask,
generateCatalogFileMetadataTask, generateFileMetadataCatalogingTask,
generateCatalogFileDigestsTask, generateFileDigestsCatalogingTask,
generateCatalogSecretsTask, generateSecretsCatalogingTask,
generateCatalogFileClassificationsTask, generateFileClassifierTask,
generateCatalogContentsTask, generateContentsCatalogingTask,
} }
for _, generator := range generators { for _, generator := range generators {

View File

@@ -2,6 +2,8 @@ package syft
import ( import (
"crypto" "crypto"
"github.com/anchore/syft/syft/file/cataloger/filecontents"
"github.com/anchore/syft/syft/file/cataloger/secrets"
"github.com/anchore/syft/internal" "github.com/anchore/syft/internal"
"github.com/anchore/syft/internal/version" "github.com/anchore/syft/internal/version"
@@ -25,13 +27,13 @@ type CatalogingConfig struct {
DigestHashes []crypto.Hash DigestHashes []crypto.Hash
// secrets // secrets
CaptureSecrets bool CaptureSecrets bool
SecretsConfig file.SecretsCatalogerConfig SecretsConfig secrets.CatalogerConfig
SecretsScope source.Scope SecretsScope source.Scope
// file classification // file classification
ClassifyFiles bool ClassifyFiles bool
FileClassifiers []file.Classifier FileClassifiers []file.Classifier
// file contents // file contents
ContentsConfig file.ContentsCatalogerConfig ContentsConfig filecontents.CatalogerConfig
} }
func DefaultCatalogingConfig() CatalogingConfig { func DefaultCatalogingConfig() CatalogingConfig {
@@ -40,8 +42,8 @@ func DefaultCatalogingConfig() CatalogingConfig {
ToolName: internal.ApplicationName, ToolName: internal.ApplicationName,
ToolVersion: version.Guess(), ToolVersion: version.Guess(),
SecretsScope: source.AllLayersScope, SecretsScope: source.AllLayersScope,
SecretsConfig: file.DefaultSecretsCatalogerConfig(), SecretsConfig: secrets.DefaultCatalogerConfig(),
FileClassifiers: file.DefaultClassifiers(), FileClassifiers: file.DefaultClassifiers(),
ContentsConfig: file.DefaultContentsCatalogerConfig(), ContentsConfig: filecontents.DefaultCatalogerConfig(),
} }
} }

View File

@@ -3,6 +3,7 @@ package syft
import ( import (
"crypto" "crypto"
"github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/file/cataloger/secrets"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/packages" "github.com/anchore/syft/syft/pkg/cataloger/packages"
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
@@ -82,7 +83,7 @@ func WithFileDigests(hashes ...crypto.Hash) CatalogingOption {
} }
} }
func WithSecrets(secretConfig *file.SecretsCatalogerConfig) CatalogingOption { func WithSecrets(secretConfig *secrets.CatalogerConfig) CatalogingOption {
return func(_ *source.Source, config *CatalogingConfig) error { return func(_ *source.Source, config *CatalogingConfig) error {
config.CaptureSecrets = true config.CaptureSecrets = true
if secretConfig != nil { if secretConfig != nil {

9
syft/file/byte_sizes.go Normal file
View File

@@ -0,0 +1,9 @@
package file
const (
// represents the order of bytes
_ = iota
KB = 1 << (10 * iota)
MB
GB
)

View File

@@ -1,25 +1,26 @@
package file package fileclassifier
import ( import (
"github.com/anchore/syft/internal/log" "github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
) )
type ClassificationCataloger struct { type Cataloger struct {
classifiers []Classifier classifiers []file.Classifier
} }
func NewClassificationCataloger(classifiers []Classifier) (*ClassificationCataloger, error) { func NewCataloger(classifiers []file.Classifier) (*Cataloger, error) {
return &ClassificationCataloger{ return &Cataloger{
classifiers: classifiers, classifiers: classifiers,
}, nil }, nil
} }
func (i *ClassificationCataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates][]Classification, error) { func (i *Cataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates][]file.Classification, error) {
results := make(map[source.Coordinates][]Classification) results := make(map[source.Coordinates][]file.Classification)
numResults := 0 numResults := 0
for _, location := range allRegularFiles(resolver) { for _, location := range source.AllRegularFiles(resolver) {
for _, classifier := range i.classifiers { for _, classifier := range i.classifiers {
result, err := classifier.Classify(resolver, location) result, err := classifier.Classify(resolver, location)
if err != nil { if err != nil {

View File

@@ -1,9 +1,10 @@
package file package fileclassifier
import ( import (
"github.com/anchore/stereoscope/pkg/imagetest"
"testing" "testing"
"github.com/anchore/stereoscope/pkg/imagetest"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
) )
@@ -13,14 +14,14 @@ func TestClassifierCataloger_DefaultClassifiers_PositiveCases(t *testing.T) {
name string name string
fixtureDir string fixtureDir string
location string location string
expected []Classification expected []file.Classification
expectedErr func(assert.TestingT, error, ...interface{}) bool expectedErr func(assert.TestingT, error, ...interface{}) bool
}{ }{
{ {
name: "positive-libpython3.7.so", name: "positive-libpython3.7.so",
fixtureDir: "test-fixtures/classifiers/positive", fixtureDir: "test-fixtures/classifiers/positive",
location: "libpython3.7.so", location: "libpython3.7.so",
expected: []Classification{ expected: []file.Classification{
{ {
Class: "python-binary", Class: "python-binary",
Metadata: map[string]string{ Metadata: map[string]string{
@@ -34,7 +35,7 @@ func TestClassifierCataloger_DefaultClassifiers_PositiveCases(t *testing.T) {
name: "positive-python3.6", name: "positive-python3.6",
fixtureDir: "test-fixtures/classifiers/positive", fixtureDir: "test-fixtures/classifiers/positive",
location: "python3.6", location: "python3.6",
expected: []Classification{ expected: []file.Classification{
{ {
Class: "python-binary", Class: "python-binary",
Metadata: map[string]string{ Metadata: map[string]string{
@@ -48,7 +49,7 @@ func TestClassifierCataloger_DefaultClassifiers_PositiveCases(t *testing.T) {
name: "positive-patchlevel.h", name: "positive-patchlevel.h",
fixtureDir: "test-fixtures/classifiers/positive", fixtureDir: "test-fixtures/classifiers/positive",
location: "patchlevel.h", location: "patchlevel.h",
expected: []Classification{ expected: []file.Classification{
{ {
Class: "cpython-source", Class: "cpython-source",
Metadata: map[string]string{ Metadata: map[string]string{
@@ -62,7 +63,7 @@ func TestClassifierCataloger_DefaultClassifiers_PositiveCases(t *testing.T) {
name: "positive-go", name: "positive-go",
fixtureDir: "test-fixtures/classifiers/positive", fixtureDir: "test-fixtures/classifiers/positive",
location: "go", location: "go",
expected: []Classification{ expected: []file.Classification{
{ {
Class: "go-binary", Class: "go-binary",
Metadata: map[string]string{ Metadata: map[string]string{
@@ -76,7 +77,7 @@ func TestClassifierCataloger_DefaultClassifiers_PositiveCases(t *testing.T) {
name: "positive-go-hint", name: "positive-go-hint",
fixtureDir: "test-fixtures/classifiers/positive", fixtureDir: "test-fixtures/classifiers/positive",
location: "VERSION", location: "VERSION",
expected: []Classification{ expected: []file.Classification{
{ {
Class: "go-binary-hint", Class: "go-binary-hint",
Metadata: map[string]string{ Metadata: map[string]string{
@@ -90,7 +91,7 @@ func TestClassifierCataloger_DefaultClassifiers_PositiveCases(t *testing.T) {
name: "positive-busybox", name: "positive-busybox",
fixtureDir: "test-fixtures/classifiers/positive", fixtureDir: "test-fixtures/classifiers/positive",
location: "[", // note: busybox is a link to [ location: "[", // note: busybox is a link to [
expected: []Classification{ expected: []file.Classification{
{ {
Class: "busybox-binary", Class: "busybox-binary",
Metadata: map[string]string{ Metadata: map[string]string{
@@ -105,7 +106,7 @@ func TestClassifierCataloger_DefaultClassifiers_PositiveCases(t *testing.T) {
for _, test := range tests { for _, test := range tests {
t.Run(test.name, func(t *testing.T) { t.Run(test.name, func(t *testing.T) {
c, err := NewClassificationCataloger(DefaultClassifiers) c, err := NewCataloger(file.DefaultClassifiers())
test.expectedErr(t, err) test.expectedErr(t, err)
src, err := source.NewFromDirectory(test.fixtureDir) src, err := source.NewFromDirectory(test.fixtureDir)
@@ -138,14 +139,14 @@ func TestClassifierCataloger_DefaultClassifiers_PositiveCases_Image(t *testing.T
name string name string
fixtureImage string fixtureImage string
location string location string
expected []Classification expected []file.Classification
expectedErr func(assert.TestingT, error, ...interface{}) bool expectedErr func(assert.TestingT, error, ...interface{}) bool
}{ }{
{ {
name: "busybox-regression", name: "busybox-regression",
fixtureImage: "image-busybox", fixtureImage: "image-busybox",
location: "/bin/[", location: "/bin/[",
expected: []Classification{ expected: []file.Classification{
{ {
Class: "busybox-binary", Class: "busybox-binary",
Metadata: map[string]string{ Metadata: map[string]string{
@@ -160,7 +161,7 @@ func TestClassifierCataloger_DefaultClassifiers_PositiveCases_Image(t *testing.T
for _, test := range tests { for _, test := range tests {
t.Run(test.name, func(t *testing.T) { t.Run(test.name, func(t *testing.T) {
c, err := NewClassificationCataloger(DefaultClassifiers) c, err := NewCataloger(file.DefaultClassifiers())
test.expectedErr(t, err) test.expectedErr(t, err)
img := imagetest.GetFixtureImage(t, "docker-archive", test.fixtureImage) img := imagetest.GetFixtureImage(t, "docker-archive", test.fixtureImage)
@@ -191,7 +192,7 @@ func TestClassifierCataloger_DefaultClassifiers_PositiveCases_Image(t *testing.T
func TestClassifierCataloger_DefaultClassifiers_NegativeCases(t *testing.T) { func TestClassifierCataloger_DefaultClassifiers_NegativeCases(t *testing.T) {
c, err := NewClassificationCataloger(DefaultClassifiers) c, err := NewCataloger(file.DefaultClassifiers())
assert.NoError(t, err) assert.NoError(t, err)
src, err := source.NewFromDirectory("test-fixtures/classifiers/negative") src, err := source.NewFromDirectory("test-fixtures/classifiers/negative")

View File

@@ -1,41 +1,40 @@
package file package filecontents
import ( import (
"bytes" "bytes"
"encoding/base64" "encoding/base64"
"fmt" "fmt"
"github.com/anchore/syft/internal/file"
"io" "io"
"github.com/anchore/syft/internal" "github.com/anchore/syft/internal"
"github.com/anchore/syft/internal/log" "github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
) )
type ContentsCatalogerConfig struct { type CatalogerConfig struct {
Globs []string Globs []string
SkipFilesAboveSizeInBytes int64 SkipFilesAboveSizeInBytes int64
} }
type ContentsCataloger struct { type Cataloger struct {
config ContentsCatalogerConfig config CatalogerConfig
} }
func DefaultContentsCatalogerConfig() ContentsCatalogerConfig { func DefaultCatalogerConfig() CatalogerConfig {
return ContentsCatalogerConfig{ return CatalogerConfig{
Globs: nil, Globs: nil,
SkipFilesAboveSizeInBytes: 1 * file.MB, SkipFilesAboveSizeInBytes: 1 * file.MB,
} }
} }
func NewContentsCataloger(config ContentsCatalogerConfig) (*ContentsCataloger, error) { func NewCataloger(config CatalogerConfig) (*Cataloger, error) {
return &ContentsCataloger{ return &Cataloger{
config: config, config: config,
}, nil }, nil
} }
func (i *ContentsCataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates]string, error) { func (i *Cataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates]string, error) {
results := make(map[source.Coordinates]string) results := make(map[source.Coordinates]string)
var locations []source.Location var locations []source.Location
@@ -68,7 +67,7 @@ func (i *ContentsCataloger) Catalog(resolver source.FileResolver) (map[source.Co
return results, nil return results, nil
} }
func (i *ContentsCataloger) catalogLocation(resolver source.FileResolver, location source.Location) (string, error) { func (i *Cataloger) catalogLocation(resolver source.FileResolver, location source.Location) (string, error) {
contentReader, err := resolver.FileContentsByLocation(location) contentReader, err := resolver.FileContentsByLocation(location)
if err != nil { if err != nil {
return "", err return "", err

View File

@@ -1,4 +1,4 @@
package file package filecontents
import ( import (
"testing" "testing"
@@ -66,7 +66,10 @@ func TestContentsCataloger(t *testing.T) {
for _, test := range tests { for _, test := range tests {
t.Run(test.name, func(t *testing.T) { t.Run(test.name, func(t *testing.T) {
c, err := NewContentsCataloger(test.globs, test.maxSize) c, err := NewCataloger(CatalogerConfig{
Globs: test.globs,
SkipFilesAboveSizeInBytes: test.maxSize,
})
assert.NoError(t, err) assert.NoError(t, err)
resolver := source.NewMockResolverForPaths(test.files...) resolver := source.NewMockResolverForPaths(test.files...)

View File

@@ -1,37 +1,37 @@
package file package filedigests
import ( import (
"crypto" "crypto"
"errors" "errors"
"fmt" "fmt"
"github.com/anchore/syft/internal"
"github.com/anchore/syft/internal/bus"
"github.com/anchore/syft/syft/event"
"github.com/wagoodman/go-partybus"
"github.com/wagoodman/go-progress"
"hash" "hash"
"io" "io"
"github.com/anchore/syft/internal"
"github.com/anchore/syft/internal/bus"
"github.com/anchore/syft/internal/log" "github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/event"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
"github.com/wagoodman/go-partybus"
"github.com/wagoodman/go-progress"
) )
var errUndigestableFile = errors.New("undigestable file") var errUndigestableFile = errors.New("undigestable file")
type DigestsCataloger struct { type Cataloger struct {
hashes []crypto.Hash hashes []crypto.Hash
} }
func NewDigestsCataloger(hashes []crypto.Hash) (*DigestsCataloger, error) { func NewCataloger(hashes []crypto.Hash) (*Cataloger, error) {
return &DigestsCataloger{ return &Cataloger{
hashes: hashes, hashes: hashes,
}, nil }, nil
} }
func (i *DigestsCataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates][]Digest, error) { func (i *Cataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates][]file.Digest, error) {
results := make(map[source.Coordinates][]Digest) results := make(map[source.Coordinates][]file.Digest)
locations := allRegularFiles(resolver) locations := source.AllRegularFiles(resolver)
stage, prog := digestsCatalogingProgress(int64(len(locations))) stage, prog := digestsCatalogingProgress(int64(len(locations)))
for _, location := range locations { for _, location := range locations {
stage.Current = location.RealPath stage.Current = location.RealPath
@@ -57,7 +57,7 @@ func (i *DigestsCataloger) Catalog(resolver source.FileResolver) (map[source.Coo
return results, nil return results, nil
} }
func (i *DigestsCataloger) catalogLocation(resolver source.FileResolver, location source.Location) ([]Digest, error) { func (i *Cataloger) catalogLocation(resolver source.FileResolver, location source.Location) ([]file.Digest, error) {
meta, err := resolver.FileMetadataByLocation(location) meta, err := resolver.FileMetadataByLocation(location)
if err != nil { if err != nil {
return nil, err return nil, err
@@ -88,16 +88,16 @@ func (i *DigestsCataloger) catalogLocation(resolver source.FileResolver, locatio
} }
if size == 0 { if size == 0 {
return make([]Digest, 0), nil return make([]file.Digest, 0), nil
} }
result := make([]Digest, len(i.hashes)) result := make([]file.Digest, len(i.hashes))
// only capture digests when there is content. It is important to do this based on SIZE and not // only capture digests when there is content. It is important to do this based on SIZE and not
// FILE TYPE. The reasoning is that it is possible for a tar to be crafted with a header-only // FILE TYPE. The reasoning is that it is possible for a tar to be crafted with a header-only
// file type but a body is still allowed. // file type but a body is still allowed.
for idx, hasher := range hashers { for idx, hasher := range hashers {
result[idx] = Digest{ result[idx] = file.Digest{
Algorithm: DigestAlgorithmName(i.hashes[idx]), Algorithm: file.DigestAlgorithmName(i.hashes[idx]),
Value: fmt.Sprintf("%+x", hasher.Sum(nil)), Value: fmt.Sprintf("%+x", hasher.Sum(nil)),
} }
} }

View File

@@ -1,25 +1,23 @@
package file package filedigests
import ( import (
"crypto" "crypto"
"fmt" "fmt"
"github.com/stretchr/testify/require"
"io/ioutil" "io/ioutil"
"os" "os"
"path/filepath" "path/filepath"
"testing" "testing"
"github.com/anchore/stereoscope/pkg/file" stereoscopeFile "github.com/anchore/stereoscope/pkg/file"
"github.com/anchore/stereoscope/pkg/imagetest" "github.com/anchore/stereoscope/pkg/imagetest"
"github.com/anchore/syft/syft/file"
"github.com/stretchr/testify/assert"
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
) )
func testDigests(t testing.TB, root string, files []string, hashes ...crypto.Hash) map[source.Coordinates][]Digest { func testDigests(t testing.TB, root string, files []string, hashes ...crypto.Hash) map[source.Coordinates][]file.Digest {
digests := make(map[source.Coordinates][]Digest) digests := make(map[source.Coordinates][]file.Digest)
for _, f := range files { for _, f := range files {
fh, err := os.Open(filepath.Join(root, f)) fh, err := os.Open(filepath.Join(root, f))
@ -33,15 +31,15 @@ func testDigests(t testing.TB, root string, files []string, hashes ...crypto.Has
if len(b) == 0 { if len(b) == 0 {
// we don't keep digests for empty files // we don't keep digests for empty files
digests[source.NewLocation(f).Coordinates] = []Digest{} digests[source.NewLocation(f).Coordinates] = []file.Digest{}
continue continue
} }
for _, hash := range hashes { for _, hash := range hashes {
h := hash.New() h := hash.New()
h.Write(b) h.Write(b)
digests[source.NewLocation(f).Coordinates] = append(digests[source.NewLocation(f).Coordinates], Digest{ digests[source.NewLocation(f).Coordinates] = append(digests[source.NewLocation(f).Coordinates], file.Digest{
Algorithm: CleanDigestAlgorithmName(hash.String()), Algorithm: file.CleanDigestAlgorithmName(hash.String()),
Value: fmt.Sprintf("%x", h.Sum(nil)), Value: fmt.Sprintf("%x", h.Sum(nil)),
}) })
} }
@ -56,7 +54,7 @@ func TestDigestsCataloger(t *testing.T) {
name string name string
digests []crypto.Hash digests []crypto.Hash
files []string files []string
expected map[source.Coordinates][]Digest expected map[source.Coordinates][]file.Digest
}{ }{
{ {
name: "md5", name: "md5",
@ -74,7 +72,7 @@ func TestDigestsCataloger(t *testing.T) {
for _, test := range tests { for _, test := range tests {
t.Run(test.name, func(t *testing.T) { t.Run(test.name, func(t *testing.T) {
c, err := NewDigestsCataloger(test.digests) c, err := NewCataloger(test.digests)
require.NoError(t, err) require.NoError(t, err)
src, err := source.NewFromDirectory("test-fixtures/last/") src, err := source.NewFromDirectory("test-fixtures/last/")
@ -92,14 +90,7 @@ func TestDigestsCataloger(t *testing.T) {
} }
func TestDigestsCataloger_MixFileTypes(t *testing.T) { func TestDigestsCataloger_MixFileTypes(t *testing.T) {
testImage := "image-file-type-mix" img := imagetest.GetFixtureImage(t, "docker-archive", "image-file-type-mix")
if *updateImageGoldenFiles {
imagetest.UpdateGoldenFixtureImage(t, testImage)
}
img := imagetest.GetGoldenFixtureImage(t, testImage)
src, err := source.NewFromImage(img, "---") src, err := source.NewFromImage(img, "---")
if err != nil { if err != nil {
t.Fatalf("could not create source: %+v", err) t.Fatalf("could not create source: %+v", err)
@ -140,7 +131,7 @@ func TestDigestsCataloger_MixFileTypes(t *testing.T) {
for _, test := range tests { for _, test := range tests {
t.Run(test.path, func(t *testing.T) { t.Run(test.path, func(t *testing.T) {
c, err := NewDigestsCataloger([]crypto.Hash{crypto.MD5}) c, err := NewCataloger([]crypto.Hash{crypto.MD5})
if err != nil { if err != nil {
t.Fatalf("unable to get cataloger: %+v", err) t.Fatalf("unable to get cataloger: %+v", err)
} }
@ -150,7 +141,7 @@ func TestDigestsCataloger_MixFileTypes(t *testing.T) {
t.Fatalf("could not catalog: %+v", err) t.Fatalf("could not catalog: %+v", err)
} }
_, ref, err := img.SquashedTree().File(file.Path(test.path)) _, ref, err := img.SquashedTree().File(stereoscopeFile.Path(test.path))
if err != nil { if err != nil {
t.Fatalf("unable to get file=%q : %+v", test.path, err) t.Fatalf("unable to get file=%q : %+v", test.path, err)
} }

View File

@ -4,7 +4,6 @@ ADD file-1.txt .
RUN chmod 644 file-1.txt RUN chmod 644 file-1.txt
RUN chown 1:2 file-1.txt RUN chown 1:2 file-1.txt
RUN ln -s file-1.txt symlink-1 RUN ln -s file-1.txt symlink-1
# note: hard links may behave inconsistently, this should be a golden image
RUN ln file-1.txt hardlink-1 RUN ln file-1.txt hardlink-1
RUN mknod char-device-1 c 89 1 RUN mknod char-device-1 c 89 1
RUN mknod block-device-1 b 0 1 RUN mknod block-device-1 b 0 1

View File

@ -0,0 +1 @@
test-fixtures/last/path.txt file contents!

View File

@ -1,4 +1,4 @@
package file package filemetadata
import ( import (
"github.com/anchore/syft/internal/bus" "github.com/anchore/syft/internal/bus"
@ -9,14 +9,14 @@ import (
"github.com/wagoodman/go-progress" "github.com/wagoodman/go-progress"
) )
type MetadataCataloger struct { type Cataloger struct {
} }
func NewMetadataCataloger() *MetadataCataloger { func NewCataloger() *Cataloger {
return &MetadataCataloger{} return &Cataloger{}
} }
func (i *MetadataCataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates]source.FileMetadata, error) { func (i *Cataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates]source.FileMetadata, error) {
results := make(map[source.Coordinates]source.FileMetadata) results := make(map[source.Coordinates]source.FileMetadata)
var locations []source.Location var locations []source.Location
for location := range resolver.AllLocations() { for location := range resolver.AllLocations() {

View File

@ -1,7 +1,6 @@
package file package filemetadata
import ( import (
"flag"
"os" "os"
"testing" "testing"
@ -11,18 +10,10 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
) )
var updateImageGoldenFiles = flag.Bool("update-image", false, "update the golden fixture images used for testing")
func TestFileMetadataCataloger(t *testing.T) { func TestFileMetadataCataloger(t *testing.T) {
testImage := "image-file-type-mix" img := imagetest.GetFixtureImage(t, "docker-archive", "image-file-type-mix")
if *updateImageGoldenFiles { c := NewCataloger()
imagetest.UpdateGoldenFixtureImage(t, testImage)
}
img := imagetest.GetGoldenFixtureImage(t, testImage)
c := NewMetadataCataloger()
src, err := source.NewFromImage(img, "---") src, err := source.NewFromImage(img, "---")
if err != nil { if err != nil {

View File

@ -0,0 +1,12 @@
FROM busybox:latest
ADD file-1.txt .
RUN chmod 644 file-1.txt
RUN chown 1:2 file-1.txt
RUN ln -s file-1.txt symlink-1
RUN ln file-1.txt hardlink-1
RUN mknod char-device-1 c 89 1
RUN mknod block-device-1 b 0 1
RUN mknod fifo-1 p
RUN mkdir /dir
RUN rm -rf home etc/group etc/localtime etc/mtab etc/network etc/passwd etc/shadow var usr bin/*

View File

@ -1,23 +1,22 @@
package file package secrets
import ( import (
"bytes" "bytes"
"fmt" "fmt"
"github.com/anchore/syft/internal/bus"
"github.com/anchore/syft/internal/file"
"github.com/anchore/syft/syft/event"
"github.com/anchore/syft/syft/event/monitor"
"github.com/wagoodman/go-partybus"
"github.com/wagoodman/go-progress"
"io" "io"
"io/ioutil" "io/ioutil"
"regexp" "regexp"
"sort" "sort"
"github.com/anchore/syft/internal" "github.com/anchore/syft/internal"
"github.com/anchore/syft/internal/bus"
"github.com/anchore/syft/internal/log" "github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/event"
"github.com/anchore/syft/syft/event/monitor"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
"github.com/wagoodman/go-partybus"
"github.com/wagoodman/go-progress"
) )
var DefaultSecretsPatterns = map[string]string{ var DefaultSecretsPatterns = map[string]string{
@ -28,38 +27,38 @@ var DefaultSecretsPatterns = map[string]string{
"generic-api-key": `(?i)api(-|_)?key["'=:\s]*?(?P<value>[A-Z0-9]{20,60})["']?(\s|$)`, "generic-api-key": `(?i)api(-|_)?key["'=:\s]*?(?P<value>[A-Z0-9]{20,60})["']?(\s|$)`,
} }
type SecretsCatalogerConfig struct { type CatalogerConfig struct {
Patterns map[string]*regexp.Regexp Patterns map[string]*regexp.Regexp
RevealValues bool RevealValues bool
MaxFileSize int64 MaxFileSize int64
} }
type SecretsCataloger struct { type Cataloger struct {
config SecretsCatalogerConfig config CatalogerConfig
} }
func DefaultSecretsCatalogerConfig() SecretsCatalogerConfig { func DefaultCatalogerConfig() CatalogerConfig {
patterns, err := GenerateSearchPatterns(DefaultSecretsPatterns, nil, nil) patterns, err := file.GenerateSearchPatterns(DefaultSecretsPatterns, nil, nil)
if err != nil { if err != nil {
patterns = make(map[string]*regexp.Regexp) patterns = make(map[string]*regexp.Regexp)
log.Errorf("unable to create default secrets config: %w", err) log.Errorf("unable to create default secrets config: %w", err)
} }
return SecretsCatalogerConfig{ return CatalogerConfig{
Patterns: patterns, Patterns: patterns,
RevealValues: false, RevealValues: false,
MaxFileSize: 1 * file.MB, MaxFileSize: 1 * file.MB,
} }
} }
func NewSecretsCataloger(config SecretsCatalogerConfig) (*SecretsCataloger, error) { func NewCataloger(config CatalogerConfig) (*Cataloger, error) {
return &SecretsCataloger{ return &Cataloger{
config: config, config: config,
}, nil }, nil
} }
func (i *SecretsCataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates][]SearchResult, error) { func (i *Cataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates][]file.SearchResult, error) {
results := make(map[source.Coordinates][]SearchResult) results := make(map[source.Coordinates][]file.SearchResult)
locations := allRegularFiles(resolver) locations := source.AllRegularFiles(resolver)
stage, prog, secretsDiscovered := newSecretsCatalogerMonitor(int64(len(locations))) stage, prog, secretsDiscovered := newSecretsCatalogerMonitor(int64(len(locations)))
for _, location := range locations { for _, location := range locations {
stage.Current = location.RealPath stage.Current = location.RealPath
@ -83,7 +82,7 @@ func (i *SecretsCataloger) Catalog(resolver source.FileResolver) (map[source.Coo
return results, nil return results, nil
} }
func (i *SecretsCataloger) catalogLocation(resolver source.FileResolver, location source.Location) ([]SearchResult, error) { func (i *Cataloger) catalogLocation(resolver source.FileResolver, location source.Location) ([]file.SearchResult, error) {
metadata, err := resolver.FileMetadataByLocation(location) metadata, err := resolver.FileMetadataByLocation(location)
if err != nil { if err != nil {
return nil, err return nil, err

View File

@ -1,13 +1,11 @@
package file package secrets
import ( import (
"regexp" "regexp"
"testing" "testing"
"github.com/anchore/syft/internal/file" "github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
) )
@ -18,7 +16,7 @@ func TestSecretsCataloger(t *testing.T) {
reveal bool reveal bool
maxSize int64 maxSize int64
patterns map[string]string patterns map[string]string
expected []SearchResult expected []file.SearchResult
constructorErr bool constructorErr bool
catalogErr bool catalogErr bool
}{ }{
@ -29,7 +27,7 @@ func TestSecretsCataloger(t *testing.T) {
patterns: map[string]string{ patterns: map[string]string{
"simple-secret-key": `^secret_key=.*`, "simple-secret-key": `^secret_key=.*`,
}, },
expected: []SearchResult{ expected: []file.SearchResult{
{ {
Classification: "simple-secret-key", Classification: "simple-secret-key",
LineNumber: 2, LineNumber: 2,
@ -47,7 +45,7 @@ func TestSecretsCataloger(t *testing.T) {
patterns: map[string]string{ patterns: map[string]string{
"simple-secret-key": `^secret_key=.*`, "simple-secret-key": `^secret_key=.*`,
}, },
expected: []SearchResult{ expected: []file.SearchResult{
{ {
Classification: "simple-secret-key", Classification: "simple-secret-key",
LineNumber: 2, LineNumber: 2,
@ -65,7 +63,7 @@ func TestSecretsCataloger(t *testing.T) {
patterns: map[string]string{ patterns: map[string]string{
"simple-secret-key": `^secret_key=(?P<value>.*)`, "simple-secret-key": `^secret_key=(?P<value>.*)`,
}, },
expected: []SearchResult{ expected: []file.SearchResult{
{ {
Classification: "simple-secret-key", Classification: "simple-secret-key",
LineNumber: 2, LineNumber: 2,
@ -83,7 +81,7 @@ func TestSecretsCataloger(t *testing.T) {
patterns: map[string]string{ patterns: map[string]string{
"simple-secret-key": `secret_key=.*`, "simple-secret-key": `secret_key=.*`,
}, },
expected: []SearchResult{ expected: []file.SearchResult{
{ {
Classification: "simple-secret-key", Classification: "simple-secret-key",
LineNumber: 1, LineNumber: 1,
@ -126,7 +124,7 @@ func TestSecretsCataloger(t *testing.T) {
patterns: map[string]string{ patterns: map[string]string{
"simple-secret-key": `secret_key=(?P<value>.*)`, "simple-secret-key": `secret_key=(?P<value>.*)`,
}, },
expected: []SearchResult{ expected: []file.SearchResult{
{ {
Classification: "simple-secret-key", Classification: "simple-secret-key",
LineNumber: 1, LineNumber: 1,
@ -177,7 +175,11 @@ func TestSecretsCataloger(t *testing.T) {
regexObjs[name] = obj regexObjs[name] = obj
} }
c, err := NewSecretsCataloger(regexObjs, test.reveal, test.maxSize) c, err := NewCataloger(CatalogerConfig{
Patterns: regexObjs,
RevealValues: test.reveal,
MaxFileSize: test.maxSize,
})
if err != nil && !test.constructorErr { if err != nil && !test.constructorErr {
t.Fatalf("could not create cataloger (but should have been able to): %+v", err) t.Fatalf("could not create cataloger (but should have been able to): %+v", err)
} else if err == nil && test.constructorErr { } else if err == nil && test.constructorErr {
@ -208,18 +210,18 @@ func TestSecretsCataloger(t *testing.T) {
} }
func TestSecretsCataloger_DefaultSecrets(t *testing.T) { func TestSecretsCataloger_DefaultSecrets(t *testing.T) {
regexObjs, err := GenerateSearchPatterns(DefaultSecretsPatterns, nil, nil) regexObjs, err := file.GenerateSearchPatterns(DefaultSecretsPatterns, nil, nil)
if err != nil { if err != nil {
t.Fatalf("unable to get patterns: %+v", err) t.Fatalf("unable to get patterns: %+v", err)
} }
tests := []struct { tests := []struct {
fixture string fixture string
expected []SearchResult expected []file.SearchResult
}{ }{
{ {
fixture: "test-fixtures/secrets/default/aws.env", fixture: "test-fixtures/secrets/default/aws.env",
expected: []SearchResult{ expected: []file.SearchResult{
{ {
Classification: "aws-access-key", Classification: "aws-access-key",
LineNumber: 2, LineNumber: 2,
@ -240,7 +242,7 @@ func TestSecretsCataloger_DefaultSecrets(t *testing.T) {
}, },
{ {
fixture: "test-fixtures/secrets/default/aws.ini", fixture: "test-fixtures/secrets/default/aws.ini",
expected: []SearchResult{ expected: []file.SearchResult{
{ {
Classification: "aws-access-key", Classification: "aws-access-key",
LineNumber: 3, LineNumber: 3,
@ -261,7 +263,7 @@ func TestSecretsCataloger_DefaultSecrets(t *testing.T) {
}, },
{ {
fixture: "test-fixtures/secrets/default/private-key.pem", fixture: "test-fixtures/secrets/default/private-key.pem",
expected: []SearchResult{ expected: []file.SearchResult{
{ {
Classification: "pem-private-key", Classification: "pem-private-key",
LineNumber: 2, LineNumber: 2,
@ -281,7 +283,7 @@ z3P668YfhUbKdRF6S42Cg6zn
}, },
{ {
fixture: "test-fixtures/secrets/default/private-key-openssl.pem", fixture: "test-fixtures/secrets/default/private-key-openssl.pem",
expected: []SearchResult{ expected: []file.SearchResult{
{ {
Classification: "pem-private-key", Classification: "pem-private-key",
LineNumber: 2, LineNumber: 2,
@ -303,7 +305,7 @@ z3P668YfhUbKdRF6S42Cg6zn
// note: this test proves that the PEM regex matches the smallest possible match // note: this test proves that the PEM regex matches the smallest possible match
// since the test catches two adjacent secrets // since the test catches two adjacent secrets
fixture: "test-fixtures/secrets/default/private-keys.pem", fixture: "test-fixtures/secrets/default/private-keys.pem",
expected: []SearchResult{ expected: []file.SearchResult{
{ {
Classification: "pem-private-key", Classification: "pem-private-key",
LineNumber: 1, LineNumber: 1,
@ -346,7 +348,7 @@ j4f668YfhUbKdRF6S6734856
// 2. a named capture group with the correct line number and line offset case // 2. a named capture group with the correct line number and line offset case
// 3. the named capture group is in a different line than the match start, and both the match start and the capture group have different line offsets // 3. the named capture group is in a different line than the match start, and both the match start and the capture group have different line offsets
fixture: "test-fixtures/secrets/default/docker-config.json", fixture: "test-fixtures/secrets/default/docker-config.json",
expected: []SearchResult{ expected: []file.SearchResult{
{ {
Classification: "docker-config-auth", Classification: "docker-config-auth",
LineNumber: 5, LineNumber: 5,
@ -363,7 +365,7 @@ j4f668YfhUbKdRF6S6734856
}, },
{ {
fixture: "test-fixtures/secrets/default/api-key.txt", fixture: "test-fixtures/secrets/default/api-key.txt",
expected: []SearchResult{ expected: []file.SearchResult{
{ {
Classification: "generic-api-key", Classification: "generic-api-key",
LineNumber: 2, LineNumber: 2,
@ -419,7 +421,11 @@ j4f668YfhUbKdRF6S6734856
for _, test := range tests { for _, test := range tests {
t.Run(test.fixture, func(t *testing.T) { t.Run(test.fixture, func(t *testing.T) {
c, err := NewSecretsCataloger(regexObjs, true, 10*file.MB) c, err := NewCataloger(CatalogerConfig{
Patterns: regexObjs,
RevealValues: true,
MaxFileSize: 10 * file.MB,
})
if err != nil { if err != nil {
t.Fatalf("could not create cataloger: %+v", err) t.Fatalf("could not create cataloger: %+v", err)
} }

View File

@ -1,4 +1,4 @@
package file package secrets
import "io" import "io"

View File

@ -1,4 +1,4 @@
package file package secrets
import ( import (
"bufio" "bufio"

View File

@ -1,9 +1,10 @@
package file package secrets
import ( import (
"bufio" "bufio"
"errors" "errors"
"fmt" "fmt"
"github.com/anchore/syft/syft/file"
"io" "io"
"io/ioutil" "io/ioutil"
"regexp" "regexp"
@ -13,7 +14,7 @@ import (
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
) )
func catalogLocationByLine(resolver source.FileResolver, location source.Location, patterns map[string]*regexp.Regexp) ([]SearchResult, error) { func catalogLocationByLine(resolver source.FileResolver, location source.Location, patterns map[string]*regexp.Regexp) ([]file.SearchResult, error) {
readCloser, err := resolver.FileContentsByLocation(location) readCloser, err := resolver.FileContentsByLocation(location)
if err != nil { if err != nil {
return nil, fmt.Errorf("unable to fetch reader for location=%q : %w", location, err) return nil, fmt.Errorf("unable to fetch reader for location=%q : %w", location, err)
@ -22,7 +23,7 @@ func catalogLocationByLine(resolver source.FileResolver, location source.Locatio
var scanner = bufio.NewReader(readCloser) var scanner = bufio.NewReader(readCloser)
var position int64 var position int64
var allSecrets []SearchResult var allSecrets []file.SearchResult
var lineNo int64 var lineNo int64
var readErr error var readErr error
for !errors.Is(readErr, io.EOF) { for !errors.Is(readErr, io.EOF) {
@ -45,8 +46,8 @@ func catalogLocationByLine(resolver source.FileResolver, location source.Locatio
return allSecrets, nil return allSecrets, nil
} }
func searchForSecretsWithinLine(resolver source.FileResolver, location source.Location, patterns map[string]*regexp.Regexp, line []byte, lineNo int64, position int64) ([]SearchResult, error) { func searchForSecretsWithinLine(resolver source.FileResolver, location source.Location, patterns map[string]*regexp.Regexp, line []byte, lineNo int64, position int64) ([]file.SearchResult, error) {
var secrets []SearchResult var secrets []file.SearchResult
for name, pattern := range patterns { for name, pattern := range patterns {
matches := pattern.FindAllIndex(line, -1) matches := pattern.FindAllIndex(line, -1)
for i, match := range matches { for i, match := range matches {
@ -91,7 +92,7 @@ func readerAtPosition(resolver source.FileResolver, location source.Location, se
return readCloser, nil return readCloser, nil
} }
func extractSecretFromPosition(readCloser io.ReadCloser, name string, pattern *regexp.Regexp, lineNo, lineOffset, seekPosition int64) *SearchResult { func extractSecretFromPosition(readCloser io.ReadCloser, name string, pattern *regexp.Regexp, lineNo, lineOffset, seekPosition int64) *file.SearchResult {
reader := &newlineCounter{RuneReader: bufio.NewReader(readCloser)} reader := &newlineCounter{RuneReader: bufio.NewReader(readCloser)}
positions := pattern.FindReaderSubmatchIndex(reader) positions := pattern.FindReaderSubmatchIndex(reader)
if len(positions) == 0 { if len(positions) == 0 {
@ -127,7 +128,7 @@ func extractSecretFromPosition(readCloser io.ReadCloser, name string, pattern *r
lineOffsetOfSecret += lineOffset lineOffsetOfSecret += lineOffset
} }
return &SearchResult{ return &file.SearchResult{
Classification: name, Classification: name,
SeekPosition: start + seekPosition, SeekPosition: start + seekPosition,
Length: stop - start, Length: stop - start,

View File

@ -0,0 +1,6 @@
package file
type Classification struct {
Class string `json:"class"`
Metadata map[string]string `json:"metadata"`
}

View File

@ -11,6 +11,12 @@ import (
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
) )
type Classifier struct {
Class string
FilepathPatterns []*regexp.Regexp
EvidencePatternTemplates []string
}
func DefaultClassifiers() []Classifier { func DefaultClassifiers() []Classifier {
return []Classifier{ return []Classifier{
{ {
@ -62,17 +68,6 @@ func DefaultClassifiers() []Classifier {
} }
} }
type Classifier struct {
Class string
FilepathPatterns []*regexp.Regexp
EvidencePatternTemplates []string
}
type Classification struct {
Class string `json:"class"`
Metadata map[string]string `json:"metadata"`
}
func (c Classifier) Classify(resolver source.FileResolver, location source.Location) (*Classification, error) { func (c Classifier) Classify(resolver source.FileResolver, location source.Location) (*Classification, error) {
doesFilepathMatch, filepathNamedGroupValues := filepathMatches(c.FilepathPatterns, location) doesFilepathMatch, filepathNamedGroupValues := filepathMatches(c.FilepathPatterns, location)
if !doesFilepathMatch { if !doesFilepathMatch {

View File

@ -10,6 +10,12 @@ type Opener struct {
path string path string
} }
func NewOpener(path string) Opener {
return Opener{
path: path,
}
}
// Open the stored path as a io.ReadCloser. // Open the stored path as a io.ReadCloser.
func (o Opener) Open() (io.ReadCloser, error) { func (o Opener) Open() (io.ReadCloser, error) {
return os.Open(o.path) return os.Open(o.path)

View File

@ -1 +0,0 @@
../real-root

View File

@ -2,15 +2,16 @@ package java
import ( import (
"fmt" "fmt"
"github.com/anchore/syft/syft/pkg/cataloger/generic" "github.com/anchore/syft/internal/archive"
"io" "io"
"path" "path"
"strings" "strings"
"github.com/anchore/syft/internal/file"
"github.com/anchore/syft/internal/log" "github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
) )
// integrity check // integrity check
@ -35,7 +36,7 @@ var archiveFormatGlobs = []string{
} }
type archiveParser struct { type archiveParser struct {
fileManifest file.ZipFileManifest fileManifest archive.ZipFileManifest
virtualPath string virtualPath string
archivePath string archivePath string
contentPath string contentPath string
@ -74,7 +75,7 @@ func newJavaArchiveParser(virtualPath string, reader io.Reader, detectNested boo
return nil, cleanupFn, fmt.Errorf("unable to process java archive: %w", err) return nil, cleanupFn, fmt.Errorf("unable to process java archive: %w", err)
} }
fileManifest, err := file.NewZipFileManifest(archivePath) fileManifest, err := archive.NewZipFileManifest(archivePath)
if err != nil { if err != nil {
return nil, cleanupFn, fmt.Errorf("unable to read files from java archive: %w", err) return nil, cleanupFn, fmt.Errorf("unable to read files from java archive: %w", err)
} }
@ -144,7 +145,7 @@ func (j *archiveParser) discoverMainPackage() (*pkg.Package, error) {
} }
// fetch the manifest file // fetch the manifest file
contents, err := file.ContentsFromZip(j.archivePath, manifestMatches...) contents, err := archive.ContentsFromZip(j.archivePath, manifestMatches...)
if err != nil { if err != nil {
return nil, fmt.Errorf("unable to extract java manifests (%s): %w", j.virtualPath, err) return nil, fmt.Errorf("unable to extract java manifests (%s): %w", j.virtualPath, err)
} }
@ -213,9 +214,9 @@ func (j *archiveParser) discoverPkgsFromNestedArchives(parentPkg *pkg.Package) (
// discoverPkgsFromZip finds Java archives within Java archives, returning all listed Java packages found and // discoverPkgsFromZip finds Java archives within Java archives, returning all listed Java packages found and
// associating each discovered package to the given parent package. // associating each discovered package to the given parent package.
func discoverPkgsFromZip(virtualPath, archivePath, contentPath string, fileManifest file.ZipFileManifest, parentPkg *pkg.Package) ([]*pkg.Package, []artifact.Relationship, error) { func discoverPkgsFromZip(virtualPath, archivePath, contentPath string, fileManifest archive.ZipFileManifest, parentPkg *pkg.Package) ([]*pkg.Package, []artifact.Relationship, error) {
// search and parse pom.properties files & fetch the contents // search and parse pom.properties files & fetch the contents
openers, err := file.ExtractFromZipToUniqueTempFile(archivePath, contentPath, fileManifest.GlobMatch(archiveFormatGlobs...)...) openers, err := archive.ExtractFromZipToUniqueTempFile(archivePath, contentPath, fileManifest.GlobMatch(archiveFormatGlobs...)...)
if err != nil { if err != nil {
return nil, nil, fmt.Errorf("unable to extract files from zip: %w", err) return nil, nil, fmt.Errorf("unable to extract files from zip: %w", err)
} }
@ -274,7 +275,7 @@ func discoverPkgsFromOpener(virtualPath, pathWithinArchive string, archiveOpener
} }
func pomPropertiesByParentPath(archivePath string, extractPaths []string, virtualPath string) (map[string]pkg.PomProperties, error) { func pomPropertiesByParentPath(archivePath string, extractPaths []string, virtualPath string) (map[string]pkg.PomProperties, error) {
contentsOfMavenPropertiesFiles, err := file.ContentsFromZip(archivePath, extractPaths...) contentsOfMavenPropertiesFiles, err := archive.ContentsFromZip(archivePath, extractPaths...)
if err != nil { if err != nil {
return nil, fmt.Errorf("unable to extract maven files: %w", err) return nil, fmt.Errorf("unable to extract maven files: %w", err)
} }
@ -302,7 +303,7 @@ func pomPropertiesByParentPath(archivePath string, extractPaths []string, virtua
} }
func pomProjectByParentPath(archivePath string, extractPaths []string, virtualPath string) (map[string]pkg.PomProject, error) { func pomProjectByParentPath(archivePath string, extractPaths []string, virtualPath string) (map[string]pkg.PomProject, error) {
contentsOfMavenProjectFiles, err := file.ContentsFromZip(archivePath, extractPaths...) contentsOfMavenProjectFiles, err := archive.ContentsFromZip(archivePath, extractPaths...)
if err != nil { if err != nil {
return nil, fmt.Errorf("unable to extract maven files: %w", err) return nil, fmt.Errorf("unable to extract maven files: %w", err)
} }

View File

@ -2,11 +2,10 @@ package java
import ( import (
"fmt" "fmt"
"github.com/anchore/syft/internal/archive"
"github.com/anchore/syft/syft/pkg/cataloger/generic" "github.com/anchore/syft/syft/pkg/cataloger/generic"
"io" "io"
"github.com/anchore/syft/internal/file"
"github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
) )
@ -59,7 +58,7 @@ func parseTarWrappedJavaArchive(virtualPath string, reader io.Reader) ([]*pkg.Pa
} }
func discoverPkgsFromTar(virtualPath, archivePath, contentPath string) ([]*pkg.Package, []artifact.Relationship, error) { func discoverPkgsFromTar(virtualPath, archivePath, contentPath string) ([]*pkg.Package, []artifact.Relationship, error) {
openers, err := file.ExtractGlobsFromTarToUniqueTempFile(archivePath, contentPath, archiveFormatGlobs...) openers, err := archive.ExtractGlobsFromTarToUniqueTempFile(archivePath, contentPath, archiveFormatGlobs...)
if err != nil { if err != nil {
return nil, nil, fmt.Errorf("unable to extract files from tar: %w", err) return nil, nil, fmt.Errorf("unable to extract files from tar: %w", err)
} }

View File

@ -2,11 +2,10 @@ package java
import ( import (
"fmt" "fmt"
"github.com/anchore/syft/internal/archive"
"github.com/anchore/syft/syft/pkg/cataloger/generic" "github.com/anchore/syft/syft/pkg/cataloger/generic"
"io" "io"
"github.com/anchore/syft/internal/file"
"github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
) )
@ -33,7 +32,7 @@ func parseZipWrappedJavaArchive(virtualPath string, reader io.Reader) ([]*pkg.Pa
// functions support zips with shell scripts prepended to the file. Specifically, the helpers use the central // functions support zips with shell scripts prepended to the file. Specifically, the helpers use the central
// header at the end of the file to determine where the beginning of the zip payload is (unlike the standard lib // header at the end of the file to determine where the beginning of the zip payload is (unlike the standard lib
// or archiver). // or archiver).
fileManifest, err := file.NewZipFileManifest(archivePath) fileManifest, err := archive.NewZipFileManifest(archivePath)
if err != nil { if err != nil {
return nil, nil, fmt.Errorf("unable to read files from java archive: %w", err) return nil, nil, fmt.Errorf("unable to read files from java archive: %w", err)
} }

View File

@ -3,11 +3,11 @@ package python
import ( import (
"bufio" "bufio"
"fmt" "fmt"
"github.com/anchore/syft/syft/file"
"io" "io"
"path/filepath" "path/filepath"
"strings" "strings"
"github.com/anchore/syft/internal/file"
"github.com/anchore/syft/internal/log" "github.com/anchore/syft/internal/log"
"github.com/mitchellh/mapstructure" "github.com/mitchellh/mapstructure"

View File

@ -1,11 +1,10 @@
package file package source
import ( import (
"github.com/anchore/syft/internal/log" "github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/source"
) )
func allRegularFiles(resolver source.FileResolver) (locations []source.Location) { func AllRegularFiles(resolver FileResolver) (locations []Location) {
for location := range resolver.AllLocations() { for location := range resolver.AllLocations() {
resolvedLocations, err := resolver.FilesByPath(location.RealPath) resolvedLocations, err := resolver.FilesByPath(location.RealPath)
if err != nil { if err != nil {
@ -20,7 +19,7 @@ func allRegularFiles(resolver source.FileResolver) (locations []source.Location)
continue continue
} }
if metadata.Type != source.RegularFile { if metadata.Type != RegularFile {
continue continue
} }
locations = append(locations, resolvedLocation) locations = append(locations, resolvedLocation)

View File

@ -1,8 +1,7 @@
package file package source
import ( import (
"github.com/anchore/stereoscope/pkg/imagetest" "github.com/anchore/stereoscope/pkg/imagetest"
"github.com/anchore/syft/syft/source"
"github.com/scylladb/go-set/strset" "github.com/scylladb/go-set/strset"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
@ -16,25 +15,19 @@ func Test_allRegularFiles(t *testing.T) {
} }
tests := []struct { tests := []struct {
name string name string
setup func() source.FileResolver setup func() FileResolver
wantRealPaths *strset.Set wantRealPaths *strset.Set
wantVirtualPaths *strset.Set wantVirtualPaths *strset.Set
}{ }{
{ {
name: "image", name: "image",
setup: func() source.FileResolver { setup: func() FileResolver {
testImage := "image-file-type-mix" img := imagetest.GetFixtureImage(t, "docker-archive", "image-file-type-mix")
if *updateImageGoldenFiles { s, err := NewFromImage(img, "---")
imagetest.UpdateGoldenFixtureImage(t, testImage)
}
img := imagetest.GetGoldenFixtureImage(t, testImage)
s, err := source.NewFromImage(img, "---")
require.NoError(t, err) require.NoError(t, err)
r, err := s.FileResolver(source.SquashedScope) r, err := s.FileResolver(SquashedScope)
require.NoError(t, err) require.NoError(t, err)
return r return r
@ -44,10 +37,10 @@ func Test_allRegularFiles(t *testing.T) {
}, },
{ {
name: "directory", name: "directory",
setup: func() source.FileResolver { setup: func() FileResolver {
s, err := source.NewFromDirectory("test-fixtures/symlinked-root/nested/link-root") s, err := NewFromDirectory("test-fixtures/symlinked-root/nested/link-root")
require.NoError(t, err) require.NoError(t, err)
r, err := s.FileResolver(source.SquashedScope) r, err := s.FileResolver(SquashedScope)
require.NoError(t, err) require.NoError(t, err)
return r return r
}, },
@ -58,7 +51,7 @@ func Test_allRegularFiles(t *testing.T) {
for _, tt := range tests { for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) { t.Run(tt.name, func(t *testing.T) {
resolver := tt.setup() resolver := tt.setup()
locations := allRegularFiles(resolver) locations := AllRegularFiles(resolver)
realLocations := strset.New() realLocations := strset.New()
virtualLocations := strset.New() virtualLocations := strset.New()
for _, l := range locations { for _, l := range locations {

View File

@ -0,0 +1,12 @@
FROM busybox:latest
ADD file-1.txt .
RUN chmod 644 file-1.txt
RUN chown 1:2 file-1.txt
RUN ln -s file-1.txt symlink-1
RUN ln file-1.txt hardlink-1
RUN mknod char-device-1 c 89 1
RUN mknod block-device-1 b 0 1
RUN mknod fifo-1 p
RUN mkdir /dir
RUN rm -rf home etc/group etc/localtime etc/mtab etc/network etc/passwd etc/shadow var usr bin/*

View File

@ -0,0 +1 @@
file 1!

View File

@ -3,7 +3,11 @@ package syft
import ( import (
"fmt" "fmt"
"github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/file/cataloger/fileclassifier"
"github.com/anchore/syft/syft/file/cataloger/filecontents"
"github.com/anchore/syft/syft/file/cataloger/filedigests"
"github.com/anchore/syft/syft/file/cataloger/filemetadata"
"github.com/anchore/syft/syft/file/cataloger/secrets"
"github.com/anchore/syft/syft/linux" "github.com/anchore/syft/syft/linux"
"github.com/anchore/syft/syft/pkg/cataloger/packages" "github.com/anchore/syft/syft/pkg/cataloger/packages"
"github.com/anchore/syft/syft/sbom" "github.com/anchore/syft/syft/sbom"
@ -13,7 +17,7 @@ import (
type task func(*sbom.Artifacts, *source.Source) ([]artifact.Relationship, error) type task func(*sbom.Artifacts, *source.Source) ([]artifact.Relationship, error)
type taskGenerator func(CatalogingConfig) (task, error) type taskGenerator func(CatalogingConfig) (task, error)
func generateCatalogPackagesTask(config CatalogingConfig) (task, error) { func generatePackagesCatalogingTask(config CatalogingConfig) (task, error) {
if len(config.PackageCatalogers) == 0 { if len(config.PackageCatalogers) == 0 {
return nil, nil return nil, nil
} }
@ -38,12 +42,12 @@ func generateCatalogPackagesTask(config CatalogingConfig) (task, error) {
}, nil }, nil
} }
func generateCatalogFileMetadataTask(config CatalogingConfig) (task, error) { func generateFileMetadataCatalogingTask(config CatalogingConfig) (task, error) {
if !config.CaptureFileMetadata { if !config.CaptureFileMetadata {
return nil, nil return nil, nil
} }
metadataCataloger := file.NewMetadataCataloger() cataloger := filemetadata.NewCataloger()
return func(results *sbom.Artifacts, src *source.Source) ([]artifact.Relationship, error) { return func(results *sbom.Artifacts, src *source.Source) ([]artifact.Relationship, error) {
resolver, err := src.FileResolver(config.Scope) resolver, err := src.FileResolver(config.Scope)
@ -51,7 +55,7 @@ func generateCatalogFileMetadataTask(config CatalogingConfig) (task, error) {
return nil, err return nil, err
} }
result, err := metadataCataloger.Catalog(resolver) result, err := cataloger.Catalog(resolver)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -61,12 +65,12 @@ func generateCatalogFileMetadataTask(config CatalogingConfig) (task, error) {
} }
func generateCatalogFileDigestsTask(config CatalogingConfig) (task, error) { func generateFileDigestsCatalogingTask(config CatalogingConfig) (task, error) {
if len(config.DigestHashes) == 0 { if len(config.DigestHashes) == 0 {
return nil, nil return nil, nil
} }
digestsCataloger, err := file.NewDigestsCataloger(config.DigestHashes) cataloger, err := filedigests.NewCataloger(config.DigestHashes)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -77,7 +81,7 @@ func generateCatalogFileDigestsTask(config CatalogingConfig) (task, error) {
return nil, err return nil, err
} }
result, err := digestsCataloger.Catalog(resolver) result, err := cataloger.Catalog(resolver)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -87,12 +91,12 @@ func generateCatalogFileDigestsTask(config CatalogingConfig) (task, error) {
} }
func generateCatalogContentsTask(config CatalogingConfig) (task, error) { func generateContentsCatalogingTask(config CatalogingConfig) (task, error) {
if len(config.ContentsConfig.Globs) > 0 { if len(config.ContentsConfig.Globs) == 0 {
return nil, nil return nil, nil
} }
contentsCataloger, err := file.NewContentsCataloger(config.ContentsConfig) cataloger, err := filecontents.NewCataloger(config.ContentsConfig)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -103,7 +107,7 @@ func generateCatalogContentsTask(config CatalogingConfig) (task, error) {
return nil, err return nil, err
} }
result, err := contentsCataloger.Catalog(resolver) result, err := cataloger.Catalog(resolver)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -112,12 +116,12 @@ func generateCatalogContentsTask(config CatalogingConfig) (task, error) {
}, nil }, nil
} }
func generateCatalogSecretsTask(config CatalogingConfig) (task, error) { func generateSecretsCatalogingTask(config CatalogingConfig) (task, error) {
if !config.CaptureSecrets { if !config.CaptureSecrets {
return nil, nil return nil, nil
} }
secretsCataloger, err := file.NewSecretsCataloger(config.SecretsConfig) cataloger, err := secrets.NewCataloger(config.SecretsConfig)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -128,7 +132,7 @@ func generateCatalogSecretsTask(config CatalogingConfig) (task, error) {
return nil, err return nil, err
} }
result, err := secretsCataloger.Catalog(resolver) result, err := cataloger.Catalog(resolver)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -138,12 +142,12 @@ func generateCatalogSecretsTask(config CatalogingConfig) (task, error) {
} }
func generateCatalogFileClassificationsTask(config CatalogingConfig) (task, error) { func generateFileClassifierTask(config CatalogingConfig) (task, error) {
if !config.ClassifyFiles { if !config.ClassifyFiles {
return nil, nil return nil, nil
} }
classifierCataloger, err := file.NewClassificationCataloger(config.FileClassifiers) cataloger, err := fileclassifier.NewCataloger(config.FileClassifiers)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -154,7 +158,7 @@ func generateCatalogFileClassificationsTask(config CatalogingConfig) (task, erro
return nil, err return nil, err
} }
result, err := classifierCataloger.Catalog(resolver) result, err := cataloger.Catalog(resolver)
if err != nil { if err != nil {
return nil, err return nil, err
} }