Mirror of https://github.com/anchore/syft.git (synced 2025-11-17 16:33:21 +01:00)

keep file catalogers separate from file-related definitions

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>

parent 3308079158, commit b3ca75646c
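At a glance, this refactor moves each file cataloger out of the syft/file package into its own package under syft/file/cataloger, each exposing a NewCataloger constructor. A minimal sketch of the resulting call shape, based on the call sites in the diff below (resolver construction is omitted and illustrative only):

    package main

    import (
        "crypto"

        "github.com/anchore/syft/syft/file/cataloger/filedigests"
        "github.com/anchore/syft/syft/file/cataloger/filemetadata"
    )

    func main() {
        // before: file.NewDigestsCataloger(...) / file.NewMetadataCataloger()
        // after:  one package per cataloger, each with its own NewCataloger
        digestsCataloger, err := filedigests.NewCataloger([]crypto.Hash{crypto.SHA256})
        if err != nil {
            panic(err)
        }
        metadataCataloger := filemetadata.NewCataloger()

        // each cataloger exposes Catalog(resolver), as the task generators at
        // the end of this diff show; building a source.FileResolver is omitted
        _ = digestsCataloger
        _ = metadataCataloger
    }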
@@ -1,12 +1,14 @@
package file
package archive

import (
"errors"
"fmt"
"io"

"github.com/anchore/syft/syft/file"
)

const perFileReadLimit = 2 * GB
const perFileReadLimit = 2 * file.GB

// safeCopy limits the copy from the reader. This is useful when extracting files from archives to
// protect against decompression bomb attacks.
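For context, a minimal sketch of what a bounded copy of this shape can look like, assuming the safeCopy(dst, src) signature used at the call sites below and the package's perFileReadLimit constant; the actual implementation may differ:

    // sketch only: cap the bytes copied out of an archive entry so a crafted
    // archive cannot expand without bound
    func safeCopy(dst io.Writer, src io.Reader) error {
        numBytes, err := io.Copy(dst, io.LimitReader(src, perFileReadLimit))
        if err != nil {
            return err
        }
        if numBytes >= perFileReadLimit {
            return fmt.Errorf("read limit of %d bytes reached (possible decompression bomb)", perFileReadLimit)
        }
        return nil
    }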
@@ -1,38 +1,39 @@
package file
package archive

import (
"fmt"
"io/ioutil"
"path/filepath"

"github.com/anchore/syft/syft/file"
"github.com/bmatcuk/doublestar/v4"
"github.com/mholt/archiver/v3"
)

// ExtractGlobsFromTarToUniqueTempFile extracts paths matching the given globs within the given archive to a temporary directory, returning file openers for each file extracted.
func ExtractGlobsFromTarToUniqueTempFile(archivePath, dir string, globs ...string) (map[string]Opener, error) {
results := make(map[string]Opener)
func ExtractGlobsFromTarToUniqueTempFile(archivePath, dir string, globs ...string) (map[string]file.Opener, error) {
results := make(map[string]file.Opener)

// don't allow for full traversal, only select traversal from given paths
if len(globs) == 0 {
return results, nil
}

visitor := func(file archiver.File) error {
defer file.Close()
visitor := func(f archiver.File) error {
defer f.Close()

// ignore directories
if file.FileInfo.IsDir() {
if f.FileInfo.IsDir() {
return nil
}

// ignore any filename that doesn't match the given globs...
if !matchesAnyGlob(file.Name(), globs...) {
if !matchesAnyGlob(f.Name(), globs...) {
return nil
}

// we have a file we want to extract....
tempfilePrefix := filepath.Base(filepath.Clean(file.Name())) + "-"
tempfilePrefix := filepath.Base(filepath.Clean(f.Name())) + "-"
tempFile, err := ioutil.TempFile(dir, tempfilePrefix)
if err != nil {
return fmt.Errorf("unable to create temp file: %w", err)
@@ -42,11 +43,11 @@ func ExtractGlobsFromTarToUniqueTempFile(archivePath, dir string, globs ...strin
// provides a ReadCloser. It is up to the caller to handle closing the file explicitly.
defer tempFile.Close()

if err := safeCopy(tempFile, file.ReadCloser); err != nil {
return fmt.Errorf("unable to copy source=%q for tar=%q: %w", file.Name(), archivePath, err)
if err := safeCopy(tempFile, f.ReadCloser); err != nil {
return fmt.Errorf("unable to copy source=%q for tar=%q: %w", f.Name(), archivePath, err)
}

results[file.Name()] = Opener{path: tempFile.Name()}
results[f.Name()] = file.NewOpener(tempFile.Name())

return nil
}
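A minimal usage sketch for the relocated helper, following the call shape seen at the java cataloger call sites later in this diff; the tar path, temp dir, and glob here are illustrative:

    // sketch only: extract matching entries to temp files, then read each one
    // back through its file.Opener
    func readJarEntries(tarPath, tempDir string) error {
        openers, err := archive.ExtractGlobsFromTarToUniqueTempFile(tarPath, tempDir, "**/*.jar")
        if err != nil {
            return fmt.Errorf("unable to extract files from tar: %w", err)
        }
        for entryName, opener := range openers {
            rc, err := opener.Open() // file.Opener.Open returns an io.ReadCloser
            if err != nil {
                return fmt.Errorf("unable to open extracted file for %q: %w", entryName, err)
            }
            // ... read the extracted contents via rc ...
            rc.Close()
        }
        return nil
    }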
@@ -1,4 +1,4 @@
package file
package archive

import (
"io/ioutil"
@@ -1,7 +1,8 @@
package file
package archive

import (
"fmt"
"github.com/anchore/syft/syft/file"
"os"
"sort"
"strings"
@@ -48,7 +49,7 @@ func (z ZipFileManifest) GlobMatch(patterns ...string) []string {
// so that glob logic is consistent inside and outside of ZIP archives
normalizedEntry := normalizeZipEntryName(entry)

if GlobMatch(pattern, normalizedEntry) {
if file.GlobMatch(pattern, normalizedEntry) {
uniqueMatches.Add(entry)
}
}
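To illustrate the normalization comment above: if zip entry names are given a leading slash before matching (an assumption; the body of normalizeZipEntryName is not shown in this hunk), the same doublestar glob behaves identically for archive entries and absolute filesystem paths. A self-contained sketch:

    package main

    import (
        "fmt"

        "github.com/bmatcuk/doublestar/v4"
    )

    func main() {
        // hypothetical normalization: prefix the zip entry with "/"
        entry := "META-INF/maven/org.example/app/pom.properties"
        normalized := "/" + entry

        matched, err := doublestar.Match("/**/pom.properties", normalized)
        if err != nil {
            panic(err)
        }
        fmt.Println(matched) // true
    }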
@@ -1,7 +1,7 @@
//go:build !windows
// +build !windows

package file
package archive

import (
"encoding/json"
@@ -1,9 +1,10 @@
package file
package archive

import (
"archive/zip"
"bytes"
"fmt"
"github.com/anchore/syft/syft/file"
"io/ioutil"
"os"
"path/filepath"
@@ -12,14 +13,6 @@ import (
"github.com/anchore/syft/internal/log"
)

const (
// represents the order of bytes
_ = iota
KB = 1 << (10 * iota)
MB
GB
)

type errZipSlipDetected struct {
Prefix string
JoinArgs []string
@@ -71,16 +64,16 @@ func TraverseFilesInZip(archivePath string, visitor func(*zip.File) error, paths
}

// ExtractFromZipToUniqueTempFile extracts select paths for the given archive to a temporary directory, returning file openers for each file extracted.
func ExtractFromZipToUniqueTempFile(archivePath, dir string, paths ...string) (map[string]Opener, error) {
results := make(map[string]Opener)
func ExtractFromZipToUniqueTempFile(archivePath, dir string, paths ...string) (map[string]file.Opener, error) {
results := make(map[string]file.Opener)

// don't allow for full traversal, only select traversal from given paths
if len(paths) == 0 {
return results, nil
}

visitor := func(file *zip.File) error {
tempfilePrefix := filepath.Base(filepath.Clean(file.Name)) + "-"
visitor := func(f *zip.File) error {
tempfilePrefix := filepath.Base(filepath.Clean(f.Name)) + "-"

tempFile, err := ioutil.TempFile(dir, tempfilePrefix)
if err != nil {
@@ -91,26 +84,26 @@ func ExtractFromZipToUniqueTempFile(archivePath, dir string, paths ...string) (m
// provides a ReadCloser. It is up to the caller to handle closing the file explicitly.
defer tempFile.Close()

zippedFile, err := file.Open()
zippedFile, err := f.Open()
if err != nil {
return fmt.Errorf("unable to read file=%q from zip=%q: %w", file.Name, archivePath, err)
return fmt.Errorf("unable to read file=%q from zip=%q: %w", f.Name, archivePath, err)
}
defer func() {
err := zippedFile.Close()
if err != nil {
log.Errorf("unable to close source file=%q from zip=%q: %+v", file.Name, archivePath, err)
log.Errorf("unable to close source file=%q from zip=%q: %+v", f.Name, archivePath, err)
}
}()

if file.FileInfo().IsDir() {
return fmt.Errorf("unable to extract directories, only files: %s", file.Name)
if f.FileInfo().IsDir() {
return fmt.Errorf("unable to extract directories, only files: %s", f.Name)
}

if err := safeCopy(tempFile, zippedFile); err != nil {
return fmt.Errorf("unable to copy source=%q for zip=%q: %w", file.Name, archivePath, err)
return fmt.Errorf("unable to copy source=%q for zip=%q: %w", f.Name, archivePath, err)
}

results[file.Name] = Opener{path: tempFile.Name()}
results[f.Name] = file.NewOpener(tempFile.Name())

return nil
}
@@ -1,7 +1,7 @@
//go:build !windows
// +build !windows

package file
package archive

import (
"crypto/sha256"
@@ -1,4 +1,4 @@
package file
package archive

import (
"archive/zip"
@@ -1,7 +1,7 @@
//go:build !windows
// +build !windows

package file
package archive

import (
"os"
@@ -45,7 +45,7 @@ type Application struct {
FileMetadata FileMetadata `yaml:"file-metadata" json:"file-metadata" mapstructure:"file-metadata"`
FileClassification fileClassification `yaml:"file-classification" json:"file-classification" mapstructure:"file-classification"`
FileContents fileContents `yaml:"file-contents" json:"file-contents" mapstructure:"file-contents"`
Secrets secrets `yaml:"secrets" json:"secrets" mapstructure:"secrets"`
Secrets secretsCfg `yaml:"secrets" json:"secrets" mapstructure:"secrets"`
Registry registry `yaml:"registry" json:"registry" mapstructure:"registry"`
Exclusions []string `yaml:"exclude" json:"exclude" mapstructure:"exclude"`
Attest attest `yaml:"attest" json:"attest" mapstructure:"attest"`

@@ -1,8 +1,8 @@
package config

import (
internalFile "github.com/anchore/syft/internal/file"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/file/cataloger/filecontents"
"github.com/anchore/syft/syft/source"
"github.com/spf13/viper"
)
@@ -16,7 +16,7 @@ type fileContents struct {
func (cfg fileContents) loadDefaultValues(v *viper.Viper) {
v.SetDefault("file-contents.cataloger.enabled", catalogerEnabledDefault)
v.SetDefault("file-contents.cataloger.scope", source.SquashedScope)
v.SetDefault("file-contents.skip-files-above-size", 1*internalFile.MB)
v.SetDefault("file-contents.skip-files-above-size", 1*file.MB)
v.SetDefault("file-contents.globs", []string{})
}

@@ -24,8 +24,8 @@ func (cfg *fileContents) parseConfigValues() error {
return cfg.Cataloger.parseConfigValues()
}

func (cfg fileContents) ToConfig() file.ContentsCatalogerConfig {
return file.ContentsCatalogerConfig{
func (cfg fileContents) ToConfig() filecontents.CatalogerConfig {
return filecontents.CatalogerConfig{
Globs: cfg.Globs,
SkipFilesAboveSizeInBytes: cfg.SkipFilesAboveSize,
}

@@ -2,13 +2,13 @@ package config

import (
"fmt"
internalFile "github.com/anchore/syft/internal/file"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/file/cataloger/secrets"
"github.com/anchore/syft/syft/source"
"github.com/spf13/viper"
)

type secrets struct {
type secretsCfg struct {
Cataloger catalogerOptions `yaml:"cataloger" json:"cataloger" mapstructure:"cataloger"`
AdditionalPatterns map[string]string `yaml:"additional-patterns" json:"additional-patterns" mapstructure:"additional-patterns"`
ExcludePatternNames []string `yaml:"exclude-pattern-names" json:"exclude-pattern-names" mapstructure:"exclude-pattern-names"`
@@ -16,25 +16,25 @@ type secrets struct {
SkipFilesAboveSize int64 `yaml:"skip-files-above-size" json:"skip-files-above-size" mapstructure:"skip-files-above-size"`
}

func (cfg secrets) loadDefaultValues(v *viper.Viper) {
func (cfg secretsCfg) loadDefaultValues(v *viper.Viper) {
v.SetDefault("secrets.cataloger.enabled", catalogerEnabledDefault)
v.SetDefault("secrets.cataloger.scope", source.AllLayersScope)
v.SetDefault("secrets.reveal-values", false)
v.SetDefault("secrets.skip-files-above-size", 1*internalFile.MB)
v.SetDefault("secrets.skip-files-above-size", 1*file.MB)
v.SetDefault("secrets.additional-patterns", map[string]string{})
v.SetDefault("secrets.exclude-pattern-names", []string{})
}

func (cfg *secrets) parseConfigValues() error {
func (cfg *secretsCfg) parseConfigValues() error {
return cfg.Cataloger.parseConfigValues()
}

func (cfg secrets) ToConfig() (*file.SecretsCatalogerConfig, error) {
patterns, err := file.GenerateSearchPatterns(file.DefaultSecretsPatterns, cfg.AdditionalPatterns, cfg.ExcludePatternNames)
func (cfg secretsCfg) ToConfig() (*secrets.CatalogerConfig, error) {
patterns, err := file.GenerateSearchPatterns(secrets.DefaultSecretsPatterns, cfg.AdditionalPatterns, cfg.ExcludePatternNames)
if err != nil {
return nil, fmt.Errorf("unable to process secrets config patterns: %w", err)
}
return &file.SecretsCatalogerConfig{
return &secrets.CatalogerConfig{
Patterns: patterns,
RevealValues: cfg.RevealValues,
MaxFileSize: cfg.SkipFilesAboveSize,

@@ -19,12 +19,12 @@ func Catalog(src *source.Source, options ...CatalogingOption) (*sbom.SBOM, error
var tasks []task

generators := []taskGenerator{
generateCatalogPackagesTask,
generateCatalogFileMetadataTask,
generateCatalogFileDigestsTask,
generateCatalogSecretsTask,
generateCatalogFileClassificationsTask,
generateCatalogContentsTask,
generatePackagesCatalogingTask,
generateFileMetadataCatalogingTask,
generateFileDigestsCatalogingTask,
generateSecretsCatalogingTask,
generateFileClassifierTask,
generateContentsCatalogingTask,
}

for _, generator := range generators {

@@ -2,6 +2,8 @@ package syft

import (
"crypto"
"github.com/anchore/syft/syft/file/cataloger/filecontents"
"github.com/anchore/syft/syft/file/cataloger/secrets"

"github.com/anchore/syft/internal"
"github.com/anchore/syft/internal/version"
@@ -25,13 +27,13 @@ type CatalogingConfig struct {
DigestHashes []crypto.Hash
// secrets
CaptureSecrets bool
SecretsConfig file.SecretsCatalogerConfig
SecretsConfig secrets.CatalogerConfig
SecretsScope source.Scope
// file classification
ClassifyFiles bool
FileClassifiers []file.Classifier
// file contents
ContentsConfig file.ContentsCatalogerConfig
ContentsConfig filecontents.CatalogerConfig
}

func DefaultCatalogingConfig() CatalogingConfig {
@@ -40,8 +42,8 @@ func DefaultCatalogingConfig() CatalogingConfig {
ToolName: internal.ApplicationName,
ToolVersion: version.Guess(),
SecretsScope: source.AllLayersScope,
SecretsConfig: file.DefaultSecretsCatalogerConfig(),
SecretsConfig: secrets.DefaultCatalogerConfig(),
FileClassifiers: file.DefaultClassifiers(),
ContentsConfig: file.DefaultContentsCatalogerConfig(),
ContentsConfig: filecontents.DefaultCatalogerConfig(),
}
}

@@ -3,6 +3,7 @@ package syft
import (
"crypto"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/file/cataloger/secrets"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/packages"
"github.com/anchore/syft/syft/source"
@@ -82,7 +83,7 @@ func WithFileDigests(hashes ...crypto.Hash) CatalogingOption {
}
}

func WithSecrets(secretConfig *file.SecretsCatalogerConfig) CatalogingOption {
func WithSecrets(secretConfig *secrets.CatalogerConfig) CatalogingOption {
return func(_ *source.Source, config *CatalogingConfig) error {
config.CaptureSecrets = true
if secretConfig != nil {
syft/file/byte_sizes.go (new file)
@@ -0,0 +1,9 @@
package file

const (
// represents the order of bytes
_ = iota
KB = 1 << (10 * iota)
MB
GB
)
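For reference, the iota expression in the new file shifts 1 left by 10 bits per constant, so each named unit is 1024 times the previous one. A self-contained check:

    package main

    import "fmt"

    const (
        _  = iota             // skip the zero value
        KB = 1 << (10 * iota) // 1 << 10 == 1024
        MB                    // 1 << 20 == 1048576
        GB                    // 1 << 30 == 1073741824
    )

    func main() {
        fmt.Println(KB, MB, GB) // 1024 1048576 1073741824
    }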
@@ -1,25 +1,26 @@
package file
package fileclassifier

import (
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/source"
)

type ClassificationCataloger struct {
classifiers []Classifier
type Cataloger struct {
classifiers []file.Classifier
}

func NewClassificationCataloger(classifiers []Classifier) (*ClassificationCataloger, error) {
return &ClassificationCataloger{
func NewCataloger(classifiers []file.Classifier) (*Cataloger, error) {
return &Cataloger{
classifiers: classifiers,
}, nil
}

func (i *ClassificationCataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates][]Classification, error) {
results := make(map[source.Coordinates][]Classification)
func (i *Cataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates][]file.Classification, error) {
results := make(map[source.Coordinates][]file.Classification)

numResults := 0
for _, location := range allRegularFiles(resolver) {
for _, location := range source.AllRegularFiles(resolver) {
for _, classifier := range i.classifiers {
result, err := classifier.Classify(resolver, location)
if err != nil {
@@ -1,9 +1,10 @@
package file
package fileclassifier

import (
"github.com/anchore/stereoscope/pkg/imagetest"
"testing"

"github.com/anchore/stereoscope/pkg/imagetest"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/source"
"github.com/stretchr/testify/assert"
)
@@ -13,14 +14,14 @@ func TestClassifierCataloger_DefaultClassifiers_PositiveCases(t *testing.T) {
name string
fixtureDir string
location string
expected []Classification
expected []file.Classification
expectedErr func(assert.TestingT, error, ...interface{}) bool
}{
{
name: "positive-libpython3.7.so",
fixtureDir: "test-fixtures/classifiers/positive",
location: "libpython3.7.so",
expected: []Classification{
expected: []file.Classification{
{
Class: "python-binary",
Metadata: map[string]string{
@@ -34,7 +35,7 @@ func TestClassifierCataloger_DefaultClassifiers_PositiveCases(t *testing.T) {
name: "positive-python3.6",
fixtureDir: "test-fixtures/classifiers/positive",
location: "python3.6",
expected: []Classification{
expected: []file.Classification{
{
Class: "python-binary",
Metadata: map[string]string{
@@ -48,7 +49,7 @@ func TestClassifierCataloger_DefaultClassifiers_PositiveCases(t *testing.T) {
name: "positive-patchlevel.h",
fixtureDir: "test-fixtures/classifiers/positive",
location: "patchlevel.h",
expected: []Classification{
expected: []file.Classification{
{
Class: "cpython-source",
Metadata: map[string]string{
@@ -62,7 +63,7 @@ func TestClassifierCataloger_DefaultClassifiers_PositiveCases(t *testing.T) {
name: "positive-go",
fixtureDir: "test-fixtures/classifiers/positive",
location: "go",
expected: []Classification{
expected: []file.Classification{
{
Class: "go-binary",
Metadata: map[string]string{
@@ -76,7 +77,7 @@ func TestClassifierCataloger_DefaultClassifiers_PositiveCases(t *testing.T) {
name: "positive-go-hint",
fixtureDir: "test-fixtures/classifiers/positive",
location: "VERSION",
expected: []Classification{
expected: []file.Classification{
{
Class: "go-binary-hint",
Metadata: map[string]string{
@@ -90,7 +91,7 @@ func TestClassifierCataloger_DefaultClassifiers_PositiveCases(t *testing.T) {
name: "positive-busybox",
fixtureDir: "test-fixtures/classifiers/positive",
location: "[", // note: busybox is a link to [
expected: []Classification{
expected: []file.Classification{
{
Class: "busybox-binary",
Metadata: map[string]string{
@@ -105,7 +106,7 @@ func TestClassifierCataloger_DefaultClassifiers_PositiveCases(t *testing.T) {
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {

c, err := NewClassificationCataloger(DefaultClassifiers)
c, err := NewCataloger(file.DefaultClassifiers())
test.expectedErr(t, err)

src, err := source.NewFromDirectory(test.fixtureDir)
@@ -138,14 +139,14 @@ func TestClassifierCataloger_DefaultClassifiers_PositiveCases_Image(t *testing.T
name string
fixtureImage string
location string
expected []Classification
expected []file.Classification
expectedErr func(assert.TestingT, error, ...interface{}) bool
}{
{
name: "busybox-regression",
fixtureImage: "image-busybox",
location: "/bin/[",
expected: []Classification{
expected: []file.Classification{
{
Class: "busybox-binary",
Metadata: map[string]string{
@@ -160,7 +161,7 @@ func TestClassifierCataloger_DefaultClassifiers_PositiveCases_Image(t *testing.T
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {

c, err := NewClassificationCataloger(DefaultClassifiers)
c, err := NewCataloger(file.DefaultClassifiers())
test.expectedErr(t, err)

img := imagetest.GetFixtureImage(t, "docker-archive", test.fixtureImage)
@@ -191,7 +192,7 @@ func TestClassifierCataloger_DefaultClassifiers_PositiveCases_Image(t *testing.T

func TestClassifierCataloger_DefaultClassifiers_NegativeCases(t *testing.T) {

c, err := NewClassificationCataloger(DefaultClassifiers)
c, err := NewCataloger(file.DefaultClassifiers())
assert.NoError(t, err)

src, err := source.NewFromDirectory("test-fixtures/classifiers/negative")
@@ -1,41 +1,40 @@
package file
package filecontents

import (
"bytes"
"encoding/base64"
"fmt"
"github.com/anchore/syft/internal/file"
"io"

"github.com/anchore/syft/internal"

"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/source"
)

type ContentsCatalogerConfig struct {
type CatalogerConfig struct {
Globs []string
SkipFilesAboveSizeInBytes int64
}

type ContentsCataloger struct {
config ContentsCatalogerConfig
type Cataloger struct {
config CatalogerConfig
}

func DefaultContentsCatalogerConfig() ContentsCatalogerConfig {
return ContentsCatalogerConfig{
func DefaultCatalogerConfig() CatalogerConfig {
return CatalogerConfig{
Globs: nil,
SkipFilesAboveSizeInBytes: 1 * file.MB,
}
}

func NewContentsCataloger(config ContentsCatalogerConfig) (*ContentsCataloger, error) {
return &ContentsCataloger{
func NewCataloger(config CatalogerConfig) (*Cataloger, error) {
return &Cataloger{
config: config,
}, nil
}

func (i *ContentsCataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates]string, error) {
func (i *Cataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates]string, error) {
results := make(map[source.Coordinates]string)
var locations []source.Location

@@ -68,7 +67,7 @@ func (i *ContentsCataloger) Catalog(resolver source.FileResolver) (map[source.Co
return results, nil
}

func (i *ContentsCataloger) catalogLocation(resolver source.FileResolver, location source.Location) (string, error) {
func (i *Cataloger) catalogLocation(resolver source.FileResolver, location source.Location) (string, error) {
contentReader, err := resolver.FileContentsByLocation(location)
if err != nil {
return "", err
@@ -1,4 +1,4 @@
package file
package filecontents

import (
"testing"
@@ -66,7 +66,10 @@ func TestContentsCataloger(t *testing.T) {

for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
c, err := NewContentsCataloger(test.globs, test.maxSize)
c, err := NewCataloger(CatalogerConfig{
Globs: test.globs,
SkipFilesAboveSizeInBytes: test.maxSize,
})
assert.NoError(t, err)

resolver := source.NewMockResolverForPaths(test.files...)
@@ -1,37 +1,37 @@
package file
package filedigests

import (
"crypto"
"errors"
"fmt"
"github.com/anchore/syft/internal"
"github.com/anchore/syft/internal/bus"
"github.com/anchore/syft/syft/event"
"github.com/wagoodman/go-partybus"
"github.com/wagoodman/go-progress"
"hash"
"io"

"github.com/anchore/syft/internal"
"github.com/anchore/syft/internal/bus"
"github.com/anchore/syft/internal/log"

"github.com/anchore/syft/syft/event"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/source"
"github.com/wagoodman/go-partybus"
"github.com/wagoodman/go-progress"
)

var errUndigestableFile = errors.New("undigestable file")

type DigestsCataloger struct {
type Cataloger struct {
hashes []crypto.Hash
}

func NewDigestsCataloger(hashes []crypto.Hash) (*DigestsCataloger, error) {
return &DigestsCataloger{
func NewCataloger(hashes []crypto.Hash) (*Cataloger, error) {
return &Cataloger{
hashes: hashes,
}, nil
}

func (i *DigestsCataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates][]Digest, error) {
results := make(map[source.Coordinates][]Digest)
locations := allRegularFiles(resolver)
func (i *Cataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates][]file.Digest, error) {
results := make(map[source.Coordinates][]file.Digest)
locations := source.AllRegularFiles(resolver)
stage, prog := digestsCatalogingProgress(int64(len(locations)))
for _, location := range locations {
stage.Current = location.RealPath
@@ -57,7 +57,7 @@ func (i *DigestsCataloger) Catalog(resolver source.FileResolver) (map[source.Coo
return results, nil
}

func (i *DigestsCataloger) catalogLocation(resolver source.FileResolver, location source.Location) ([]Digest, error) {
func (i *Cataloger) catalogLocation(resolver source.FileResolver, location source.Location) ([]file.Digest, error) {
meta, err := resolver.FileMetadataByLocation(location)
if err != nil {
return nil, err
@@ -88,16 +88,16 @@ func (i *DigestsCataloger) catalogLocation(resolver source.FileResolver, locatio
}

if size == 0 {
return make([]Digest, 0), nil
return make([]file.Digest, 0), nil
}

result := make([]Digest, len(i.hashes))
result := make([]file.Digest, len(i.hashes))
// only capture digests when there is content. It is important to do this based on SIZE and not
// FILE TYPE. The reasoning is that it is possible for a tar to be crafted with a header-only
// file type but a body is still allowed.
for idx, hasher := range hashers {
result[idx] = Digest{
Algorithm: DigestAlgorithmName(i.hashes[idx]),
result[idx] = file.Digest{
Algorithm: file.DigestAlgorithmName(i.hashes[idx]),
Value: fmt.Sprintf("%+x", hasher.Sum(nil)),
}
}
@@ -1,25 +1,23 @@
package file
package filedigests

import (
"crypto"
"fmt"
"github.com/stretchr/testify/require"
"io/ioutil"
"os"
"path/filepath"
"testing"

"github.com/anchore/stereoscope/pkg/file"

stereoscopeFile "github.com/anchore/stereoscope/pkg/file"
"github.com/anchore/stereoscope/pkg/imagetest"

"github.com/stretchr/testify/assert"

"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/source"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)

func testDigests(t testing.TB, root string, files []string, hashes ...crypto.Hash) map[source.Coordinates][]Digest {
digests := make(map[source.Coordinates][]Digest)
func testDigests(t testing.TB, root string, files []string, hashes ...crypto.Hash) map[source.Coordinates][]file.Digest {
digests := make(map[source.Coordinates][]file.Digest)

for _, f := range files {
fh, err := os.Open(filepath.Join(root, f))
@@ -33,15 +31,15 @@ func testDigests(t testing.TB, root string, files []string, hashes ...crypto.Has

if len(b) == 0 {
// we don't keep digests for empty files
digests[source.NewLocation(f).Coordinates] = []Digest{}
digests[source.NewLocation(f).Coordinates] = []file.Digest{}
continue
}

for _, hash := range hashes {
h := hash.New()
h.Write(b)
digests[source.NewLocation(f).Coordinates] = append(digests[source.NewLocation(f).Coordinates], Digest{
Algorithm: CleanDigestAlgorithmName(hash.String()),
digests[source.NewLocation(f).Coordinates] = append(digests[source.NewLocation(f).Coordinates], file.Digest{
Algorithm: file.CleanDigestAlgorithmName(hash.String()),
Value: fmt.Sprintf("%x", h.Sum(nil)),
})
}
@@ -56,7 +54,7 @@ func TestDigestsCataloger(t *testing.T) {
name string
digests []crypto.Hash
files []string
expected map[source.Coordinates][]Digest
expected map[source.Coordinates][]file.Digest
}{
{
name: "md5",
@@ -74,7 +72,7 @@ func TestDigestsCataloger(t *testing.T) {

for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
c, err := NewDigestsCataloger(test.digests)
c, err := NewCataloger(test.digests)
require.NoError(t, err)

src, err := source.NewFromDirectory("test-fixtures/last/")
@@ -92,14 +90,7 @@ func TestDigestsCataloger(t *testing.T) {
}

func TestDigestsCataloger_MixFileTypes(t *testing.T) {
testImage := "image-file-type-mix"

if *updateImageGoldenFiles {
imagetest.UpdateGoldenFixtureImage(t, testImage)
}

img := imagetest.GetGoldenFixtureImage(t, testImage)

img := imagetest.GetFixtureImage(t, "docker-archive", "image-file-type-mix")
src, err := source.NewFromImage(img, "---")
if err != nil {
t.Fatalf("could not create source: %+v", err)
@@ -140,7 +131,7 @@ func TestDigestsCataloger_MixFileTypes(t *testing.T) {

for _, test := range tests {
t.Run(test.path, func(t *testing.T) {
c, err := NewDigestsCataloger([]crypto.Hash{crypto.MD5})
c, err := NewCataloger([]crypto.Hash{crypto.MD5})
if err != nil {
t.Fatalf("unable to get cataloger: %+v", err)
}
@@ -150,7 +141,7 @@ func TestDigestsCataloger_MixFileTypes(t *testing.T) {
t.Fatalf("could not catalog: %+v", err)
}

_, ref, err := img.SquashedTree().File(file.Path(test.path))
_, ref, err := img.SquashedTree().File(stereoscopeFile.Path(test.path))
if err != nil {
t.Fatalf("unable to get file=%q : %+v", test.path, err)
}
@@ -4,7 +4,6 @@ ADD file-1.txt .
RUN chmod 644 file-1.txt
RUN chown 1:2 file-1.txt
RUN ln -s file-1.txt symlink-1
# note: hard links may behave inconsistently, this should be a golden image
RUN ln file-1.txt hardlink-1
RUN mknod char-device-1 c 89 1
RUN mknod block-device-1 b 0 1
@@ -0,0 +1 @@
test-fixtures/last/path.txt file contents!
@@ -1,4 +1,4 @@
package file
package filemetadata

import (
"github.com/anchore/syft/internal/bus"
@@ -9,14 +9,14 @@ import (
"github.com/wagoodman/go-progress"
)

type MetadataCataloger struct {
type Cataloger struct {
}

func NewMetadataCataloger() *MetadataCataloger {
return &MetadataCataloger{}
func NewCataloger() *Cataloger {
return &Cataloger{}
}

func (i *MetadataCataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates]source.FileMetadata, error) {
func (i *Cataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates]source.FileMetadata, error) {
results := make(map[source.Coordinates]source.FileMetadata)
var locations []source.Location
for location := range resolver.AllLocations() {
@@ -1,7 +1,6 @@
package file
package filemetadata

import (
"flag"
"os"
"testing"

@@ -11,18 +10,10 @@ import (
"github.com/stretchr/testify/assert"
)

var updateImageGoldenFiles = flag.Bool("update-image", false, "update the golden fixture images used for testing")

func TestFileMetadataCataloger(t *testing.T) {
testImage := "image-file-type-mix"
img := imagetest.GetFixtureImage(t, "docker-archive", "image-file-type-mix")

if *updateImageGoldenFiles {
imagetest.UpdateGoldenFixtureImage(t, testImage)
}

img := imagetest.GetGoldenFixtureImage(t, testImage)

c := NewMetadataCataloger()
c := NewCataloger()

src, err := source.NewFromImage(img, "---")
if err != nil {
@@ -0,0 +1,12 @@
FROM busybox:latest

ADD file-1.txt .
RUN chmod 644 file-1.txt
RUN chown 1:2 file-1.txt
RUN ln -s file-1.txt symlink-1
RUN ln file-1.txt hardlink-1
RUN mknod char-device-1 c 89 1
RUN mknod block-device-1 b 0 1
RUN mknod fifo-1 p
RUN mkdir /dir
RUN rm -rf home etc/group etc/localtime etc/mtab etc/network etc/passwd etc/shadow var usr bin/*
@@ -0,0 +1 @@
file 1!
@@ -1,23 +1,22 @@
package file
package secrets

import (
"bytes"
"fmt"
"github.com/anchore/syft/internal/bus"
"github.com/anchore/syft/internal/file"
"github.com/anchore/syft/syft/event"
"github.com/anchore/syft/syft/event/monitor"
"github.com/wagoodman/go-partybus"
"github.com/wagoodman/go-progress"
"io"
"io/ioutil"
"regexp"
"sort"

"github.com/anchore/syft/internal"

"github.com/anchore/syft/internal/bus"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/event"
"github.com/anchore/syft/syft/event/monitor"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/source"
"github.com/wagoodman/go-partybus"
"github.com/wagoodman/go-progress"
)

var DefaultSecretsPatterns = map[string]string{
@@ -28,38 +27,38 @@ var DefaultSecretsPatterns = map[string]string{
"generic-api-key": `(?i)api(-|_)?key["'=:\s]*?(?P<value>[A-Z0-9]{20,60})["']?(\s|$)`,
}

type SecretsCatalogerConfig struct {
type CatalogerConfig struct {
Patterns map[string]*regexp.Regexp
RevealValues bool
MaxFileSize int64
}

type SecretsCataloger struct {
config SecretsCatalogerConfig
type Cataloger struct {
config CatalogerConfig
}

func DefaultSecretsCatalogerConfig() SecretsCatalogerConfig {
patterns, err := GenerateSearchPatterns(DefaultSecretsPatterns, nil, nil)
func DefaultCatalogerConfig() CatalogerConfig {
patterns, err := file.GenerateSearchPatterns(DefaultSecretsPatterns, nil, nil)
if err != nil {
patterns = make(map[string]*regexp.Regexp)
log.Errorf("unable to create default secrets config: %w", err)
}
return SecretsCatalogerConfig{
return CatalogerConfig{
Patterns: patterns,
RevealValues: false,
MaxFileSize: 1 * file.MB,
}
}

func NewSecretsCataloger(config SecretsCatalogerConfig) (*SecretsCataloger, error) {
return &SecretsCataloger{
func NewCataloger(config CatalogerConfig) (*Cataloger, error) {
return &Cataloger{
config: config,
}, nil
}

func (i *SecretsCataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates][]SearchResult, error) {
results := make(map[source.Coordinates][]SearchResult)
locations := allRegularFiles(resolver)
func (i *Cataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates][]file.SearchResult, error) {
results := make(map[source.Coordinates][]file.SearchResult)
locations := source.AllRegularFiles(resolver)
stage, prog, secretsDiscovered := newSecretsCatalogerMonitor(int64(len(locations)))
for _, location := range locations {
stage.Current = location.RealPath
@@ -83,7 +82,7 @@ func (i *SecretsCataloger) Catalog(resolver source.FileResolver) (map[source.Coo
return results, nil
}

func (i *SecretsCataloger) catalogLocation(resolver source.FileResolver, location source.Location) ([]SearchResult, error) {
func (i *Cataloger) catalogLocation(resolver source.FileResolver, location source.Location) ([]file.SearchResult, error) {
metadata, err := resolver.FileMetadataByLocation(location)
if err != nil {
return nil, err
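The DefaultSecretsPatterns entries above rely on a named capture group called value to isolate the secret itself from its surrounding syntax. A self-contained illustration of that convention (the pattern is copied from the generic-api-key entry; the input key is made up):

    package main

    import (
        "fmt"
        "regexp"
    )

    func main() {
        re := regexp.MustCompile(`(?i)api(-|_)?key["'=:\s]*?(?P<value>[A-Z0-9]{20,60})["']?(\s|$)`)
        match := re.FindStringSubmatch(`api_key="ABCD1234EFGH5678IJKL"`)
        if match == nil {
            fmt.Println("no match")
            return
        }
        // extract just the named "value" group
        fmt.Println(match[re.SubexpIndex("value")]) // ABCD1234EFGH5678IJKL
    }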
@@ -1,13 +1,11 @@
package file
package secrets

import (
"regexp"
"testing"

"github.com/anchore/syft/internal/file"

"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/source"

"github.com/stretchr/testify/assert"
)

@@ -18,7 +16,7 @@ func TestSecretsCataloger(t *testing.T) {
reveal bool
maxSize int64
patterns map[string]string
expected []SearchResult
expected []file.SearchResult
constructorErr bool
catalogErr bool
}{
@@ -29,7 +27,7 @@ func TestSecretsCataloger(t *testing.T) {
patterns: map[string]string{
"simple-secret-key": `^secret_key=.*`,
},
expected: []SearchResult{
expected: []file.SearchResult{
{
Classification: "simple-secret-key",
LineNumber: 2,
@@ -47,7 +45,7 @@ func TestSecretsCataloger(t *testing.T) {
patterns: map[string]string{
"simple-secret-key": `^secret_key=.*`,
},
expected: []SearchResult{
expected: []file.SearchResult{
{
Classification: "simple-secret-key",
LineNumber: 2,
@@ -65,7 +63,7 @@ func TestSecretsCataloger(t *testing.T) {
patterns: map[string]string{
"simple-secret-key": `^secret_key=(?P<value>.*)`,
},
expected: []SearchResult{
expected: []file.SearchResult{
{
Classification: "simple-secret-key",
LineNumber: 2,
@@ -83,7 +81,7 @@ func TestSecretsCataloger(t *testing.T) {
patterns: map[string]string{
"simple-secret-key": `secret_key=.*`,
},
expected: []SearchResult{
expected: []file.SearchResult{
{
Classification: "simple-secret-key",
LineNumber: 1,
@@ -126,7 +124,7 @@ func TestSecretsCataloger(t *testing.T) {
patterns: map[string]string{
"simple-secret-key": `secret_key=(?P<value>.*)`,
},
expected: []SearchResult{
expected: []file.SearchResult{
{
Classification: "simple-secret-key",
LineNumber: 1,
@@ -177,7 +175,11 @@ func TestSecretsCataloger(t *testing.T) {
regexObjs[name] = obj
}

c, err := NewSecretsCataloger(regexObjs, test.reveal, test.maxSize)
c, err := NewCataloger(CatalogerConfig{
Patterns: regexObjs,
RevealValues: test.reveal,
MaxFileSize: test.maxSize,
})
if err != nil && !test.constructorErr {
t.Fatalf("could not create cataloger (but should have been able to): %+v", err)
} else if err == nil && test.constructorErr {
@@ -208,18 +210,18 @@ func TestSecretsCataloger(t *testing.T) {
}

func TestSecretsCataloger_DefaultSecrets(t *testing.T) {
regexObjs, err := GenerateSearchPatterns(DefaultSecretsPatterns, nil, nil)
regexObjs, err := file.GenerateSearchPatterns(DefaultSecretsPatterns, nil, nil)
if err != nil {
t.Fatalf("unable to get patterns: %+v", err)
}

tests := []struct {
fixture string
expected []SearchResult
expected []file.SearchResult
}{
{
fixture: "test-fixtures/secrets/default/aws.env",
expected: []SearchResult{
expected: []file.SearchResult{
{
Classification: "aws-access-key",
LineNumber: 2,
@@ -240,7 +242,7 @@ func TestSecretsCataloger_DefaultSecrets(t *testing.T) {
},
{
fixture: "test-fixtures/secrets/default/aws.ini",
expected: []SearchResult{
expected: []file.SearchResult{
{
Classification: "aws-access-key",
LineNumber: 3,
@@ -261,7 +263,7 @@ func TestSecretsCataloger_DefaultSecrets(t *testing.T) {
},
{
fixture: "test-fixtures/secrets/default/private-key.pem",
expected: []SearchResult{
expected: []file.SearchResult{
{
Classification: "pem-private-key",
LineNumber: 2,
@@ -281,7 +283,7 @@ z3P668YfhUbKdRF6S42Cg6zn
},
{
fixture: "test-fixtures/secrets/default/private-key-openssl.pem",
expected: []SearchResult{
expected: []file.SearchResult{
{
Classification: "pem-private-key",
LineNumber: 2,
@@ -303,7 +305,7 @@ z3P668YfhUbKdRF6S42Cg6zn
// note: this test proves that the PEM regex matches the smallest possible match
// since the test catches two adjacent secrets
fixture: "test-fixtures/secrets/default/private-keys.pem",
expected: []SearchResult{
expected: []file.SearchResult{
{
Classification: "pem-private-key",
LineNumber: 1,
@@ -346,7 +348,7 @@ j4f668YfhUbKdRF6S6734856
// 2. a named capture group with the correct line number and line offset case
// 3. the named capture group is in a different line than the match start, and both the match start and the capture group have different line offsets
fixture: "test-fixtures/secrets/default/docker-config.json",
expected: []SearchResult{
expected: []file.SearchResult{
{
Classification: "docker-config-auth",
LineNumber: 5,
@@ -363,7 +365,7 @@ j4f668YfhUbKdRF6S6734856
},
{
fixture: "test-fixtures/secrets/default/api-key.txt",
expected: []SearchResult{
expected: []file.SearchResult{
{
Classification: "generic-api-key",
LineNumber: 2,
@@ -419,7 +421,11 @@ j4f668YfhUbKdRF6S6734856
for _, test := range tests {
t.Run(test.fixture, func(t *testing.T) {

c, err := NewSecretsCataloger(regexObjs, true, 10*file.MB)
c, err := NewCataloger(CatalogerConfig{
Patterns: regexObjs,
RevealValues: true,
MaxFileSize: 10 * file.MB,
})
if err != nil {
t.Fatalf("could not create cataloger: %+v", err)
}
@@ -1,4 +1,4 @@
package file
package secrets

import "io"

@@ -1,4 +1,4 @@
package file
package secrets

import (
"bufio"
@@ -1,9 +1,10 @@
package file
package secrets

import (
"bufio"
"errors"
"fmt"
"github.com/anchore/syft/syft/file"
"io"
"io/ioutil"
"regexp"
@@ -13,7 +14,7 @@ import (
"github.com/anchore/syft/syft/source"
)

func catalogLocationByLine(resolver source.FileResolver, location source.Location, patterns map[string]*regexp.Regexp) ([]SearchResult, error) {
func catalogLocationByLine(resolver source.FileResolver, location source.Location, patterns map[string]*regexp.Regexp) ([]file.SearchResult, error) {
readCloser, err := resolver.FileContentsByLocation(location)
if err != nil {
return nil, fmt.Errorf("unable to fetch reader for location=%q : %w", location, err)
@@ -22,7 +23,7 @@ func catalogLocationByLine(resolver source.FileResolver, location source.Locatio

var scanner = bufio.NewReader(readCloser)
var position int64
var allSecrets []SearchResult
var allSecrets []file.SearchResult
var lineNo int64
var readErr error
for !errors.Is(readErr, io.EOF) {
@@ -45,8 +46,8 @@ func catalogLocationByLine(resolver source.FileResolver, location source.Locatio
return allSecrets, nil
}

func searchForSecretsWithinLine(resolver source.FileResolver, location source.Location, patterns map[string]*regexp.Regexp, line []byte, lineNo int64, position int64) ([]SearchResult, error) {
var secrets []SearchResult
func searchForSecretsWithinLine(resolver source.FileResolver, location source.Location, patterns map[string]*regexp.Regexp, line []byte, lineNo int64, position int64) ([]file.SearchResult, error) {
var secrets []file.SearchResult
for name, pattern := range patterns {
matches := pattern.FindAllIndex(line, -1)
for i, match := range matches {
@@ -91,7 +92,7 @@ func readerAtPosition(resolver source.FileResolver, location source.Location, se
return readCloser, nil
}

func extractSecretFromPosition(readCloser io.ReadCloser, name string, pattern *regexp.Regexp, lineNo, lineOffset, seekPosition int64) *SearchResult {
func extractSecretFromPosition(readCloser io.ReadCloser, name string, pattern *regexp.Regexp, lineNo, lineOffset, seekPosition int64) *file.SearchResult {
reader := &newlineCounter{RuneReader: bufio.NewReader(readCloser)}
positions := pattern.FindReaderSubmatchIndex(reader)
if len(positions) == 0 {
@@ -127,7 +128,7 @@ func extractSecretFromPosition(readCloser io.ReadCloser, name string, pattern *r
lineOffsetOfSecret += lineOffset
}

return &SearchResult{
return &file.SearchResult{
Classification: name,
SeekPosition: start + seekPosition,
Length: stop - start,
syft/file/classification.go (new file)
@@ -0,0 +1,6 @@
package file

type Classification struct {
Class string `json:"class"`
Metadata map[string]string `json:"metadata"`
}
@@ -11,6 +11,12 @@ import (
"github.com/anchore/syft/syft/source"
)

type Classifier struct {
Class string
FilepathPatterns []*regexp.Regexp
EvidencePatternTemplates []string
}

func DefaultClassifiers() []Classifier {
return []Classifier{
{
@@ -62,17 +68,6 @@ func DefaultClassifiers() []Classifier {
}
}

type Classifier struct {
Class string
FilepathPatterns []*regexp.Regexp
EvidencePatternTemplates []string
}

type Classification struct {
Class string `json:"class"`
Metadata map[string]string `json:"metadata"`
}

func (c Classifier) Classify(resolver source.FileResolver, location source.Location) (*Classification, error) {
doesFilepathMatch, filepathNamedGroupValues := filepathMatches(c.FilepathPatterns, location)
if !doesFilepathMatch {

@@ -10,6 +10,12 @@ type Opener struct {
path string
}

func NewOpener(path string) Opener {
return Opener{
path: path,
}
}

// Open the stored path as a io.ReadCloser.
func (o Opener) Open() (io.ReadCloser, error) {
return os.Open(o.path)
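A short usage note on the new constructor: callers outside the file package can no longer build an Opener literal (the path field is unexported), so they go through file.NewOpener, as the archive helpers earlier in this diff now do. A sketch with an illustrative path:

    // sketch only: construct an opener for an already-extracted temp file
    opener := file.NewOpener("/tmp/pom.properties-123456")
    rc, err := opener.Open() // returns an io.ReadCloser
    if err != nil {
        return err
    }
    defer rc.Close()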
Binary file not shown.
@@ -1 +0,0 @@
../real-root
@@ -1 +0,0 @@
contents!
@@ -1 +0,0 @@
more contents!
@@ -1 +0,0 @@
../file1.txt
@@ -2,15 +2,16 @@ package java

import (
"fmt"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
"github.com/anchore/syft/internal/archive"
"io"
"path"
"strings"

"github.com/anchore/syft/internal/file"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
)

// integrity check
@@ -35,7 +36,7 @@ var archiveFormatGlobs = []string{
}

type archiveParser struct {
fileManifest file.ZipFileManifest
fileManifest archive.ZipFileManifest
virtualPath string
archivePath string
contentPath string
@@ -74,7 +75,7 @@ func newJavaArchiveParser(virtualPath string, reader io.Reader, detectNested boo
return nil, cleanupFn, fmt.Errorf("unable to process java archive: %w", err)
}

fileManifest, err := file.NewZipFileManifest(archivePath)
fileManifest, err := archive.NewZipFileManifest(archivePath)
if err != nil {
return nil, cleanupFn, fmt.Errorf("unable to read files from java archive: %w", err)
}
@@ -144,7 +145,7 @@ func (j *archiveParser) discoverMainPackage() (*pkg.Package, error) {
}

// fetch the manifest file
contents, err := file.ContentsFromZip(j.archivePath, manifestMatches...)
contents, err := archive.ContentsFromZip(j.archivePath, manifestMatches...)
if err != nil {
return nil, fmt.Errorf("unable to extract java manifests (%s): %w", j.virtualPath, err)
}
@@ -213,9 +214,9 @@ func (j *archiveParser) discoverPkgsFromNestedArchives(parentPkg *pkg.Package) (

// discoverPkgsFromZip finds Java archives within Java archives, returning all listed Java packages found and
// associating each discovered package to the given parent package.
func discoverPkgsFromZip(virtualPath, archivePath, contentPath string, fileManifest file.ZipFileManifest, parentPkg *pkg.Package) ([]*pkg.Package, []artifact.Relationship, error) {
func discoverPkgsFromZip(virtualPath, archivePath, contentPath string, fileManifest archive.ZipFileManifest, parentPkg *pkg.Package) ([]*pkg.Package, []artifact.Relationship, error) {
// search and parse pom.properties files & fetch the contents
openers, err := file.ExtractFromZipToUniqueTempFile(archivePath, contentPath, fileManifest.GlobMatch(archiveFormatGlobs...)...)
openers, err := archive.ExtractFromZipToUniqueTempFile(archivePath, contentPath, fileManifest.GlobMatch(archiveFormatGlobs...)...)
if err != nil {
return nil, nil, fmt.Errorf("unable to extract files from zip: %w", err)
}
@@ -274,7 +275,7 @@ func discoverPkgsFromOpener(virtualPath, pathWithinArchive string, archiveOpener
}

func pomPropertiesByParentPath(archivePath string, extractPaths []string, virtualPath string) (map[string]pkg.PomProperties, error) {
contentsOfMavenPropertiesFiles, err := file.ContentsFromZip(archivePath, extractPaths...)
contentsOfMavenPropertiesFiles, err := archive.ContentsFromZip(archivePath, extractPaths...)
if err != nil {
return nil, fmt.Errorf("unable to extract maven files: %w", err)
}
@@ -302,7 +303,7 @@ func pomPropertiesByParentPath(archivePath string, extractPaths []string, virtua
}

func pomProjectByParentPath(archivePath string, extractPaths []string, virtualPath string) (map[string]pkg.PomProject, error) {
contentsOfMavenProjectFiles, err := file.ContentsFromZip(archivePath, extractPaths...)
contentsOfMavenProjectFiles, err := archive.ContentsFromZip(archivePath, extractPaths...)
if err != nil {
return nil, fmt.Errorf("unable to extract maven files: %w", err)
}

@@ -2,11 +2,10 @@ package java

import (
"fmt"
"github.com/anchore/syft/internal/archive"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
"io"

"github.com/anchore/syft/internal/file"

"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg"
)
@@ -59,7 +58,7 @@ func parseTarWrappedJavaArchive(virtualPath string, reader io.Reader) ([]*pkg.Pa
}

func discoverPkgsFromTar(virtualPath, archivePath, contentPath string) ([]*pkg.Package, []artifact.Relationship, error) {
openers, err := file.ExtractGlobsFromTarToUniqueTempFile(archivePath, contentPath, archiveFormatGlobs...)
openers, err := archive.ExtractGlobsFromTarToUniqueTempFile(archivePath, contentPath, archiveFormatGlobs...)
if err != nil {
return nil, nil, fmt.Errorf("unable to extract files from tar: %w", err)
}

@@ -2,11 +2,10 @@ package java

import (
"fmt"
"github.com/anchore/syft/internal/archive"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
"io"

"github.com/anchore/syft/internal/file"

"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg"
)
@@ -33,7 +32,7 @@ func parseZipWrappedJavaArchive(virtualPath string, reader io.Reader) ([]*pkg.Pa
// functions support zips with shell scripts prepended to the file. Specifically, the helpers use the central
// header at the end of the file to determine where the beginning of the zip payload is (unlike the standard lib
// or archiver).
fileManifest, err := file.NewZipFileManifest(archivePath)
fileManifest, err := archive.NewZipFileManifest(archivePath)
if err != nil {
return nil, nil, fmt.Errorf("unable to read files from java archive: %w", err)
}

@@ -3,11 +3,11 @@ package python
import (
"bufio"
"fmt"
"github.com/anchore/syft/syft/file"
"io"
"path/filepath"
"strings"

"github.com/anchore/syft/internal/file"
"github.com/anchore/syft/internal/log"

"github.com/mitchellh/mapstructure"

@@ -1,11 +1,10 @@
package file
package source

import (
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/source"
)

func allRegularFiles(resolver source.FileResolver) (locations []source.Location) {
func AllRegularFiles(resolver FileResolver) (locations []Location) {
for location := range resolver.AllLocations() {
resolvedLocations, err := resolver.FilesByPath(location.RealPath)
if err != nil {
@@ -20,7 +19,7 @@ func allRegularFiles(resolver source.FileResolver) (locations []source.Location)
continue
}

if metadata.Type != source.RegularFile {
if metadata.Type != RegularFile {
continue
}
locations = append(locations, resolvedLocation)
@@ -1,8 +1,7 @@
package file
package source

import (
"github.com/anchore/stereoscope/pkg/imagetest"
"github.com/anchore/syft/syft/source"
"github.com/scylladb/go-set/strset"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
@@ -16,25 +15,19 @@ func Test_allRegularFiles(t *testing.T) {
}
tests := []struct {
name string
setup func() source.FileResolver
setup func() FileResolver
wantRealPaths *strset.Set
wantVirtualPaths *strset.Set
}{
{
name: "image",
setup: func() source.FileResolver {
testImage := "image-file-type-mix"
setup: func() FileResolver {
img := imagetest.GetFixtureImage(t, "docker-archive", "image-file-type-mix")

if *updateImageGoldenFiles {
imagetest.UpdateGoldenFixtureImage(t, testImage)
}

img := imagetest.GetGoldenFixtureImage(t, testImage)

s, err := source.NewFromImage(img, "---")
s, err := NewFromImage(img, "---")
require.NoError(t, err)

r, err := s.FileResolver(source.SquashedScope)
r, err := s.FileResolver(SquashedScope)
require.NoError(t, err)

return r
@@ -44,10 +37,10 @@ func Test_allRegularFiles(t *testing.T) {
},
{
name: "directory",
setup: func() source.FileResolver {
s, err := source.NewFromDirectory("test-fixtures/symlinked-root/nested/link-root")
setup: func() FileResolver {
s, err := NewFromDirectory("test-fixtures/symlinked-root/nested/link-root")
require.NoError(t, err)
r, err := s.FileResolver(source.SquashedScope)
r, err := s.FileResolver(SquashedScope)
require.NoError(t, err)
return r
},
@@ -58,7 +51,7 @@ func Test_allRegularFiles(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
resolver := tt.setup()
locations := allRegularFiles(resolver)
locations := AllRegularFiles(resolver)
realLocations := strset.New()
virtualLocations := strset.New()
for _, l := range locations {
syft/source/test-fixtures/image-file-type-mix/Dockerfile (new file)
@@ -0,0 +1,12 @@
FROM busybox:latest

ADD file-1.txt .
RUN chmod 644 file-1.txt
RUN chown 1:2 file-1.txt
RUN ln -s file-1.txt symlink-1
RUN ln file-1.txt hardlink-1
RUN mknod char-device-1 c 89 1
RUN mknod block-device-1 b 0 1
RUN mknod fifo-1 p
RUN mkdir /dir
RUN rm -rf home etc/group etc/localtime etc/mtab etc/network etc/passwd etc/shadow var usr bin/*
syft/source/test-fixtures/image-file-type-mix/file-1.txt (new file)
@@ -0,0 +1 @@
file 1!
@@ -3,7 +3,11 @@ package syft
import (
"fmt"
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/file/cataloger/fileclassifier"
"github.com/anchore/syft/syft/file/cataloger/filecontents"
"github.com/anchore/syft/syft/file/cataloger/filedigests"
"github.com/anchore/syft/syft/file/cataloger/filemetadata"
"github.com/anchore/syft/syft/file/cataloger/secrets"
"github.com/anchore/syft/syft/linux"
"github.com/anchore/syft/syft/pkg/cataloger/packages"
"github.com/anchore/syft/syft/sbom"
@@ -13,7 +17,7 @@ import (
type task func(*sbom.Artifacts, *source.Source) ([]artifact.Relationship, error)
type taskGenerator func(CatalogingConfig) (task, error)

func generateCatalogPackagesTask(config CatalogingConfig) (task, error) {
func generatePackagesCatalogingTask(config CatalogingConfig) (task, error) {
if len(config.PackageCatalogers) == 0 {
return nil, nil
}
@@ -38,12 +42,12 @@ func generateCatalogPackagesTask(config CatalogingConfig) (task, error) {
}, nil
}

func generateCatalogFileMetadataTask(config CatalogingConfig) (task, error) {
func generateFileMetadataCatalogingTask(config CatalogingConfig) (task, error) {
if !config.CaptureFileMetadata {
return nil, nil
}

metadataCataloger := file.NewMetadataCataloger()
cataloger := filemetadata.NewCataloger()

return func(results *sbom.Artifacts, src *source.Source) ([]artifact.Relationship, error) {
resolver, err := src.FileResolver(config.Scope)
@@ -51,7 +55,7 @@ func generateCatalogFileMetadataTask(config CatalogingConfig) (task, error) {
return nil, err
}

result, err := metadataCataloger.Catalog(resolver)
result, err := cataloger.Catalog(resolver)
if err != nil {
return nil, err
}
@@ -61,12 +65,12 @@ func generateCatalogFileMetadataTask(config CatalogingConfig) (task, error) {

}

func generateCatalogFileDigestsTask(config CatalogingConfig) (task, error) {
func generateFileDigestsCatalogingTask(config CatalogingConfig) (task, error) {
if len(config.DigestHashes) == 0 {
return nil, nil
}

digestsCataloger, err := file.NewDigestsCataloger(config.DigestHashes)
cataloger, err := filedigests.NewCataloger(config.DigestHashes)
if err != nil {
return nil, err
}
@@ -77,7 +81,7 @@ func generateCatalogFileDigestsTask(config CatalogingConfig) (task, error) {
return nil, err
}

result, err := digestsCataloger.Catalog(resolver)
result, err := cataloger.Catalog(resolver)
if err != nil {
return nil, err
}
@@ -87,12 +91,12 @@ func generateCatalogFileDigestsTask(config CatalogingConfig) (task, error) {

}

func generateCatalogContentsTask(config CatalogingConfig) (task, error) {
if len(config.ContentsConfig.Globs) > 0 {
func generateContentsCatalogingTask(config CatalogingConfig) (task, error) {
if len(config.ContentsConfig.Globs) == 0 {
return nil, nil
}

contentsCataloger, err := file.NewContentsCataloger(config.ContentsConfig)
cataloger, err := filecontents.NewCataloger(config.ContentsConfig)
if err != nil {
return nil, err
}
@@ -103,7 +107,7 @@ func generateCatalogContentsTask(config CatalogingConfig) (task, error) {
return nil, err
}

result, err := contentsCataloger.Catalog(resolver)
result, err := cataloger.Catalog(resolver)
if err != nil {
return nil, err
}
@@ -112,12 +116,12 @@ func generateCatalogContentsTask(config CatalogingConfig) (task, error) {
}, nil
}

func generateCatalogSecretsTask(config CatalogingConfig) (task, error) {
func generateSecretsCatalogingTask(config CatalogingConfig) (task, error) {
if !config.CaptureSecrets {
return nil, nil
}

secretsCataloger, err := file.NewSecretsCataloger(config.SecretsConfig)
cataloger, err := secrets.NewCataloger(config.SecretsConfig)
if err != nil {
return nil, err
}
@@ -128,7 +132,7 @@ func generateCatalogSecretsTask(config CatalogingConfig) (task, error) {
return nil, err
}

result, err := secretsCataloger.Catalog(resolver)
result, err := cataloger.Catalog(resolver)
if err != nil {
return nil, err
}
@@ -138,12 +142,12 @@ func generateCatalogSecretsTask(config CatalogingConfig) (task, error) {

}

func generateCatalogFileClassificationsTask(config CatalogingConfig) (task, error) {
func generateFileClassifierTask(config CatalogingConfig) (task, error) {
if !config.ClassifyFiles {
return nil, nil
}

classifierCataloger, err := file.NewClassificationCataloger(config.FileClassifiers)
cataloger, err := fileclassifier.NewCataloger(config.FileClassifiers)
if err != nil {
return nil, err
}
@@ -154,7 +158,7 @@ func generateCatalogFileClassificationsTask(config CatalogingConfig) (task, erro
return nil, err
}

result, err := classifierCataloger.Catalog(resolver)
result, err := cataloger.Catalog(resolver)
if err != nil {
return nil, err
}