Power user command support for directory scans (#467)
* Power-user directory source support
  Signed-off-by: Mikey Strauss <mikey@scribe-security.com>
  Signed-off-by: houdini91 <mdstrauss91@gmail.com>
* Remove newline
  Signed-off-by: houdini91 <mdstrauss91@gmail.com>
* Shared filetree (#1)
* Shared directory resolver filetree
  Signed-off-by: houdini91 <mdstrauss91@gmail.com>
* PR - change error ErrObserve to ErrPath
  Signed-off-by: houdini91 <mdstrauss91@gmail.com>
* PR - share directory resolver
* Use pointer to source struct
  Signed-off-by: houdini91 <mdstrauss91@gmail.com>
* Fix Lint
  Signed-off-by: houdini91 <mdstrauss91@gmail.com>
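In short, the change set below lets the power-user subcommand accept directory sources in addition to image sources, sharing one lazily-built directory resolver across all of its catalogers. A hedged usage sketch, based on the dir: arguments exercised by the new CLI tests (the target path here is illustrative, not taken from this commit):

  # scan a local directory with the power-user catalogers (previously image-only)
  syft power-user dir:path/to/project

  # reveal secret values in the report, as the new secrets test does via env config
  SYFT_SECRETS_REVEAL_VALUES=true syft power-user dir:path/to/project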
parent 06dcd3261d
commit 2f99a35f51
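For orientation before the diff: source.New now returns a *Source, and the cataloging entry points take that pointer so the directory resolver created on the first FileResolver call can be reused by later tasks. A minimal caller sketch under the signatures shown in this diff (import paths and SquashedScope are as used by syft at the time; the scanned path and error handling are illustrative assumptions, not part of the commit):

package main

import (
	"fmt"

	"github.com/anchore/syft/syft"
	"github.com/anchore/syft/syft/source"
)

func main() {
	// source.New returns a pointer after this change, so the same Source
	// (and its cached directory resolver) is handed to every cataloging step.
	src, cleanup, err := source.New("dir:.", nil)
	if err != nil {
		panic(err)
	}
	defer cleanup()

	// CatalogPackages now accepts *source.Source.
	catalog, theDistro, err := syft.CatalogPackages(src, source.SquashedScope)
	if err != nil {
		panic(err)
	}
	fmt.Printf("distro=%v, catalog populated=%v\n", theDistro, catalog != nil)
}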
@@ -237,7 +237,7 @@ func packagesExecWorker(userInput string) <-chan error {
     return errs
 }

-func runPackageSbomUpload(src source.Source, s source.Metadata, catalog *pkg.Catalog, d *distro.Distro, scope source.Scope) error {
+func runPackageSbomUpload(src *source.Source, s source.Metadata, catalog *pkg.Catalog, d *distro.Distro, scope source.Scope) error {
     log.Infof("uploading results to %s", appConfig.Anchore.Host)

     if src.Metadata.Scheme != source.ImageScheme {

@@ -102,11 +102,6 @@ func powerUserExecWorker(userInput string) <-chan error {
         }
         defer cleanup()

-        if src.Metadata.Scheme != source.ImageScheme {
-            errs <- fmt.Errorf("the power-user subcommand only allows for 'image' schemes, given %q", src.Metadata.Scheme)
-            return
-        }
-
         analysisResults := poweruser.JSONDocumentConfig{
             SourceMetadata: src.Metadata,
             ApplicationConfig: *appConfig,

@@ -10,7 +10,7 @@ import (
     "github.com/anchore/syft/syft/source"
 )

-type powerUserTask func(*poweruser.JSONDocumentConfig, source.Source) error
+type powerUserTask func(*poweruser.JSONDocumentConfig, *source.Source) error

 func powerUserTasks() ([]powerUserTask, error) {
     var tasks []powerUserTask

@@ -42,7 +42,7 @@ func catalogPackagesTask() (powerUserTask, error) {
         return nil, nil
     }

-    task := func(results *poweruser.JSONDocumentConfig, src source.Source) error {
+    task := func(results *poweruser.JSONDocumentConfig, src *source.Source) error {
         packageCatalog, theDistro, err := syft.CatalogPackages(src, appConfig.Package.Cataloger.ScopeOpt)
         if err != nil {
             return err

@@ -64,7 +64,7 @@ func catalogFileMetadataTask() (powerUserTask, error) {

     metadataCataloger := file.NewMetadataCataloger()

-    task := func(results *poweruser.JSONDocumentConfig, src source.Source) error {
+    task := func(results *poweruser.JSONDocumentConfig, src *source.Source) error {
         resolver, err := src.FileResolver(appConfig.FileMetadata.Cataloger.ScopeOpt)
         if err != nil {
             return err

@@ -110,7 +110,7 @@ func catalogFileDigestsTask() (powerUserTask, error) {
         return nil, err
     }

-    task := func(results *poweruser.JSONDocumentConfig, src source.Source) error {
+    task := func(results *poweruser.JSONDocumentConfig, src *source.Source) error {
         resolver, err := src.FileResolver(appConfig.FileMetadata.Cataloger.ScopeOpt)
         if err != nil {
             return err

@@ -142,7 +142,7 @@ func catalogSecretsTask() (powerUserTask, error) {
         return nil, err
     }

-    task := func(results *poweruser.JSONDocumentConfig, src source.Source) error {
+    task := func(results *poweruser.JSONDocumentConfig, src *source.Source) error {
         resolver, err := src.FileResolver(appConfig.Secrets.Cataloger.ScopeOpt)
         if err != nil {
             return err

@@ -170,7 +170,7 @@ func catalogFileClassificationsTask() (powerUserTask, error) {
         return nil, err
     }

-    task := func(results *poweruser.JSONDocumentConfig, src source.Source) error {
+    task := func(results *poweruser.JSONDocumentConfig, src *source.Source) error {
         resolver, err := src.FileResolver(appConfig.FileClassification.Cataloger.ScopeOpt)
         if err != nil {
             return err

@@ -197,7 +197,7 @@ func catalogContentsTask() (powerUserTask, error) {
         return nil, err
     }

-    task := func(results *poweruser.JSONDocumentConfig, src source.Source) error {
+    task := func(results *poweruser.JSONDocumentConfig, src *source.Source) error {
         resolver, err := src.FileResolver(appConfig.FileContents.Cataloger.ScopeOpt)
         if err != nil {
             return err

@@ -1,7 +1,9 @@
 package internal

 import (
+    "fmt"
     "io"
+    "os"

     "github.com/anchore/syft/internal/log"
 )

@@ -12,3 +14,25 @@ func CloseAndLogError(closer io.Closer, location string) {
         log.Warnf("unable to close file for location=%q: %+v", location, err)
     }
 }
+
+type ErrPath struct {
+    Path string
+    Err error
+}
+
+func (e ErrPath) Error() string {
+    return fmt.Sprintf("unable to observe contents of %+v: %v", e.Path, e.Err)
+}
+
+func IsErrPath(err error) bool {
+    _, ok := err.(ErrPath)
+    return ok
+}
+
+func IsErrPathPermission(err error) bool {
+    pathErr, ok := err.(ErrPath)
+    if ok {
+        return os.IsPermission(pathErr.Err)
+    }
+    return ok
+}

@@ -118,11 +118,18 @@ func TestClassifierCataloger_DefaultClassifiers_PositiveCases(t *testing.T) {

             loc := source.NewLocation(test.location)

-            if _, ok := actualResults[loc]; !ok {
+            ok := false
+            for actual_loc, actual_classification := range actualResults {
+                if loc.RealPath == actual_loc.RealPath {
+                    ok = true
+                    assert.Equal(t, test.expected, actual_classification)
+                }
+            }
+
+            if !ok {
                 t.Fatalf("could not find test location=%q", test.location)
             }

-            assert.Equal(t, test.expected, actualResults[loc])
         })
     }
 }

@@ -3,7 +3,6 @@ package file
 import (
     "bytes"
     "encoding/base64"
-    "fmt"
     "io"

     "github.com/anchore/syft/internal"

@@ -32,7 +31,6 @@ func (i *ContentsCataloger) Catalog(resolver source.FileResolver) (map[source.Lo
     if err != nil {
         return nil, err
     }
-
     for _, location := range locations {
         metadata, err := resolver.FileMetadataByLocation(location)
         if err != nil {

@@ -44,6 +42,10 @@ func (i *ContentsCataloger) Catalog(resolver source.FileResolver) (map[source.Lo
         }

         result, err := i.catalogLocation(resolver, location)
+        if internal.IsErrPathPermission(err) {
+            log.Debugf("file contents cataloger skipping - %+v", err)
+            continue
+        }
         if err != nil {
             return nil, err
         }

@@ -63,7 +65,7 @@ func (i *ContentsCataloger) catalogLocation(resolver source.FileResolver, locati

     buf := &bytes.Buffer{}
     if _, err = io.Copy(base64.NewEncoder(base64.StdEncoding, buf), contentReader); err != nil {
-        return "", fmt.Errorf("unable to observe contents of %+v: %w", location.RealPath, err)
+        return "", internal.ErrPath{Path: location.RealPath, Err: err}
     }

     return buf.String(), nil

@@ -39,6 +39,11 @@ func (i *DigestsCataloger) Catalog(resolver source.FileResolver) (map[source.Loc
     for _, location := range locations {
         stage.Current = location.RealPath
         result, err := i.catalogLocation(resolver, location)
+        if internal.IsErrPathPermission(err) {
+            log.Debugf("file digests cataloger skipping - %+v", err)
+            continue
+        }
+
         if err != nil {
             return nil, err
         }

@@ -67,7 +72,7 @@ func (i *DigestsCataloger) catalogLocation(resolver source.FileResolver, locatio

     size, err := io.Copy(io.MultiWriter(writers...), contentReader)
     if err != nil {
-        return nil, fmt.Errorf("unable to observe contents of %+v: %+v", location.RealPath, err)
+        return nil, internal.ErrPath{Path: location.RealPath, Err: err}
     }

     if size == 0 {

@@ -50,6 +50,11 @@ func (i *SecretsCataloger) Catalog(resolver source.FileResolver) (map[source.Loc
     for _, location := range locations {
         stage.Current = location.RealPath
         result, err := i.catalogLocation(resolver, location)
+        if internal.IsErrPathPermission(err) {
+            log.Debugf("secrets cataloger skipping - %+v", err)
+            continue
+        }
+
         if err != nil {
             return nil, err
         }

@@ -77,7 +82,7 @@ func (i *SecretsCataloger) catalogLocation(resolver source.FileResolver, locatio
     // TODO: in the future we can swap out search strategies here
     secrets, err := catalogLocationByLine(resolver, location, i.patterns)
     if err != nil {
-        return nil, err
+        return nil, internal.ErrPath{Path: location.RealPath, Err: err}
     }

     if i.revealValues {

@@ -32,7 +32,7 @@ import (
 // CatalogPackages takes an inventory of packages from the given image from a particular perspective
 // (e.g. squashed source, all-layers source). Returns the discovered set of packages, the identified Linux
 // distribution, and the source object used to wrap the data source.
-func CatalogPackages(src source.Source, scope source.Scope) (*pkg.Catalog, *distro.Distro, error) {
+func CatalogPackages(src *source.Source, scope source.Scope) (*pkg.Catalog, *distro.Distro, error) {
     resolver, err := src.FileResolver(scope)
     if err != nil {
         return nil, nil, fmt.Errorf("unable to determine resolver while cataloging packages: %w", err)

@@ -8,6 +8,7 @@ import (
     "path"
     "path/filepath"
     "strings"
+    "syscall"

     "github.com/anchore/stereoscope/pkg/file"
     "github.com/anchore/stereoscope/pkg/filetree"

@@ -218,7 +219,12 @@ func (r directoryResolver) FilesByPath(userPaths ...string) ([]Location, error)
             continue
         }

-        references = append(references, NewLocation(r.responsePath(userStrPath)))
+        exists, ref, err := r.fileTree.File(file.Path(userStrPath))
+        if err == nil && exists {
+            references = append(references, NewLocationFromDirectory(r.responsePath(userStrPath), *ref))
+        } else {
+            log.Warnf("path (%s) not found in file tree: Exists: %t Err:%+v", userStrPath, exists, err)
+        }
     }

     return references, nil

@@ -234,7 +240,7 @@ func (r directoryResolver) FilesByGlob(patterns ...string) ([]Location, error) {
             return nil, err
         }
         for _, globResult := range globResults {
-            result = append(result, NewLocation(r.responsePath(string(globResult.MatchPath))))
+            result = append(result, NewLocationFromDirectory(r.responsePath(string(globResult.MatchPath)), globResult.Reference))
         }
     }

@@ -267,7 +273,7 @@ func (r *directoryResolver) AllLocations() <-chan Location {
     go func() {
         defer close(results)
         for _, ref := range r.fileTree.AllFiles() {
-            results <- NewLocation(r.responsePath(string(ref.RealPath)))
+            results <- NewLocationFromDirectory(r.responsePath(string(ref.RealPath)), ref)
         }
     }()
     return results

@@ -276,15 +282,22 @@ func (r *directoryResolver) AllLocations() <-chan Location {
 func (r *directoryResolver) FileMetadataByLocation(location Location) (FileMetadata, error) {
     info, exists := r.infos[location.ref.ID()]
     if !exists {
-        return FileMetadata{}, fmt.Errorf("location: %+v : %w", location, os.ErrExist)
+        return FileMetadata{}, fmt.Errorf("location: %+v : %w", location, os.ErrNotExist)
+    }
+
+    uid := -1
+    gid := -1
+    if stat, ok := info.Sys().(*syscall.Stat_t); ok {
+        uid = int(stat.Uid)
+        gid = int(stat.Gid)
     }

     return FileMetadata{
         Mode: info.Mode(),
         Type: newFileTypeFromMode(info.Mode()),
         // unsupported across platforms
-        UserID: -1,
-        GroupID: -1,
+        UserID: uid,
+        GroupID: gid,
     }, nil
 }

@@ -297,6 +310,8 @@ func indexAllRoots(root string, indexer func(string, *progress.Stage) ([]string,
     // in which case we need to additionally index where the link resolves to. it's for this reason why the filetree
     // must be relative to the root of the filesystem (and not just relative to the given path).
     pathsToIndex := []string{root}
+    fullPathsMap := map[string]struct{}{}
+
     stager, prog := indexingProgress(root)
     defer prog.SetCompleted()
 loop:

@@ -315,7 +330,13 @@ loop:
         if err != nil {
             return fmt.Errorf("unable to index filesystem path=%q: %w", currentPath, err)
         }
-        pathsToIndex = append(pathsToIndex, additionalRoots...)
+
+        for _, newRoot := range additionalRoots {
+            if _, ok := fullPathsMap[newRoot]; !ok {
+                fullPathsMap[newRoot] = struct{}{}
+                pathsToIndex = append(pathsToIndex, newRoot)
+            }
+        }
     }

     return nil

@@ -178,9 +178,17 @@ func TestDirectoryResolverDoesNotIgnoreRelativeSystemPaths(t *testing.T) {
     assert.Len(t, refs, 6)

     // ensure that symlink indexing outside of root worked
-    assert.Contains(t, refs, Location{
-        RealPath: "test-fixtures/system_paths/outside_root/link_target/place",
-    })
+    ok := false
+    test_location := "test-fixtures/system_paths/outside_root/link_target/place"
+    for _, actual_loc := range refs {
+        if test_location == actual_loc.RealPath {
+            ok = true
+        }
+    }
+
+    if !ok {
+        t.Fatalf("could not find test location=%q", test_location)
+    }
 }

 func TestDirectoryResolverUsesPathFilterFunction(t *testing.T) {

@@ -45,6 +45,14 @@ func NewLocationFromImage(virtualPath string, ref file.Reference, img *image.Ima
     }
 }

+// NewLocationFromDirectory creates a new Location representing the given path (extracted from the ref) relative to the given directory.
+func NewLocationFromDirectory(responsePath string, ref file.Reference) Location {
+    return Location{
+        RealPath: responsePath,
+        ref: ref,
+    }
+}
+
 func NewLocationFromReference(ref file.Reference) Location {
     return Location{
         VirtualPath: string(ref.RealPath),

@@ -7,6 +7,7 @@ package source

 import (
     "fmt"
+    "sync"

     "github.com/anchore/stereoscope"
     "github.com/anchore/stereoscope/pkg/image"

@@ -17,57 +18,60 @@ import (
 // in cataloging (based on the data source and configuration)
 type Source struct {
     Image *image.Image // the image object to be cataloged (image only)
+    DirectoryResolver *directoryResolver
     Metadata Metadata
+    Mutex *sync.Mutex
 }

 type sourceDetector func(string) (image.Source, string, error)

 // New produces a Source based on userInput like dir: or image:tag
-func New(userInput string, registryOptions *image.RegistryOptions) (Source, func(), error) {
+func New(userInput string, registryOptions *image.RegistryOptions) (*Source, func(), error) {
     fs := afero.NewOsFs()
     parsedScheme, imageSource, location, err := detectScheme(fs, image.DetectSource, userInput)
     if err != nil {
-        return Source{}, func() {}, fmt.Errorf("unable to parse input=%q: %w", userInput, err)
+        return &Source{}, func() {}, fmt.Errorf("unable to parse input=%q: %w", userInput, err)
     }

     switch parsedScheme {
     case DirectoryScheme:
         fileMeta, err := fs.Stat(location)
         if err != nil {
-            return Source{}, func() {}, fmt.Errorf("unable to stat dir=%q: %w", location, err)
+            return &Source{}, func() {}, fmt.Errorf("unable to stat dir=%q: %w", location, err)
         }

         if !fileMeta.IsDir() {
-            return Source{}, func() {}, fmt.Errorf("given path is not a directory (path=%q): %w", location, err)
+            return &Source{}, func() {}, fmt.Errorf("given path is not a directory (path=%q): %w", location, err)
         }

         s, err := NewFromDirectory(location)
         if err != nil {
-            return Source{}, func() {}, fmt.Errorf("could not populate source from path=%q: %w", location, err)
+            return &Source{}, func() {}, fmt.Errorf("could not populate source from path=%q: %w", location, err)
         }
-        return s, func() {}, nil
+        return &s, func() {}, nil

     case ImageScheme:
         img, err := stereoscope.GetImageFromSource(location, imageSource, registryOptions)
         cleanup := stereoscope.Cleanup

         if err != nil || img == nil {
-            return Source{}, cleanup, fmt.Errorf("could not fetch image '%s': %w", location, err)
+            return &Source{}, cleanup, fmt.Errorf("could not fetch image '%s': %w", location, err)
         }

         s, err := NewFromImage(img, location)
         if err != nil {
-            return Source{}, cleanup, fmt.Errorf("could not populate source with image: %w", err)
+            return &Source{}, cleanup, fmt.Errorf("could not populate source with image: %w", err)
         }
-        return s, cleanup, nil
+        return &s, cleanup, nil
     }

-    return Source{}, func() {}, fmt.Errorf("unable to process input for scanning: '%s'", userInput)
+    return &Source{}, func() {}, fmt.Errorf("unable to process input for scanning: '%s'", userInput)
 }

 // NewFromDirectory creates a new source object tailored to catalog a given filesystem directory recursively.
 func NewFromDirectory(path string) (Source, error) {
     return Source{
+        Mutex: &sync.Mutex{},
         Metadata: Metadata{
             Scheme: DirectoryScheme,
             Path: path,

@@ -91,10 +95,19 @@ func NewFromImage(img *image.Image, userImageStr string) (Source, error) {
     }, nil
 }

-func (s Source) FileResolver(scope Scope) (FileResolver, error) {
+func (s *Source) FileResolver(scope Scope) (FileResolver, error) {
     switch s.Metadata.Scheme {
     case DirectoryScheme:
-        return newDirectoryResolver(s.Metadata.Path)
+        s.Mutex.Lock()
+        defer s.Mutex.Unlock()
+        if s.DirectoryResolver == nil {
+            directoryResolver, err := newDirectoryResolver(s.Metadata.Path)
+            if err != nil {
+                return nil, err
+            }
+            s.DirectoryResolver = directoryResolver
+        }
+        return s.DirectoryResolver, nil
     case ImageScheme:
         switch scope {
         case SquashedScope:

@@ -89,6 +89,68 @@ func TestNewFromDirectory(t *testing.T) {
     }
 }

+func TestNewFromDirectoryShared(t *testing.T) {
+    testCases := []struct {
+        desc string
+        input string
+        expString string
+        notExist string
+        inputPaths []string
+        expRefs int
+    }{
+        {
+            desc: "path detected",
+            input: "test-fixtures",
+            notExist: "foobar/",
+            inputPaths: []string{"test-fixtures/path-detected/.vimrc"},
+            expRefs: 1,
+        },
+        {
+            desc: "directory ignored",
+            input: "test-fixtures",
+            notExist: "foobar/",
+            inputPaths: []string{"test-fixtures/path-detected"},
+            expRefs: 0,
+        },
+        {
+            desc: "no files-by-path detected",
+            input: "test-fixtures",
+            notExist: "foobar/",
+            inputPaths: []string{"test-fixtures/no-path-detected"},
+            expRefs: 0,
+        },
+    }
+    for _, test := range testCases {
+        t.Run(test.desc, func(t *testing.T) {
+            src, err := NewFromDirectory(test.input)
+
+            if err != nil {
+                t.Errorf("could not create NewDirScope: %+v", err)
+            }
+            if src.Metadata.Path != test.input {
+                t.Errorf("mismatched stringer: '%s' != '%s'", src.Metadata.Path, test.input)
+            }
+
+            _, err = src.FileResolver(SquashedScope)
+            assert.NoError(t, err)
+
+            src.Metadata.Path = test.notExist
+            resolver2, err := src.FileResolver(SquashedScope)
+            assert.NoError(t, err)
+
+            refs, err := resolver2.FilesByPath(test.inputPaths...)
+            if err != nil {
+                t.Errorf("FilesByPath call produced an error: %+v", err)
+            }
+            if len(refs) != test.expRefs {
+                t.Errorf("unexpected number of refs returned: %d != %d", len(refs), test.expRefs)
+
+            }
+
+        })
+    }
+}
+
 func TestFilesByPathDoesNotExist(t *testing.T) {
     testCases := []struct {
         desc string

@@ -71,6 +71,29 @@ func TestPowerUserCmdFlags(t *testing.T) {
                 assertSuccessfulReturnCode,
             },
         },
+        {
+            name: "default-dir-results-w-pkg-coverage",
+            args: []string{"power-user", "dir:test-fixtures/image-pkg-coverage"},
+            assertions: []traitAssertion{
+                assertNotInOutput(" command is deprecated"), // only the root command should be deprecated
+                assertInOutput(`"type": "RegularFile"`), // proof of file-metadata data
+                assertInOutput(`"algorithm": "sha256"`), // proof of file-metadata default digest algorithm of sha256
+                assertInOutput(`"metadataType": "ApkMetadata"`), // proof of package artifacts data
+                assertSuccessfulReturnCode,
+            },
+        },
+        {
+            name: "defaut-secrets-dir-results-w-reveal-values",
+            env: map[string]string{
+                "SYFT_SECRETS_REVEAL_VALUES": "true",
+            },
+            args: []string{"power-user", "dir:test-fixtures/image-secrets"},
+            assertions: []traitAssertion{
+                assertInOutput(`"classification": "generic-api-key"`), // proof of the secrets cataloger finding something
+                assertInOutput(`"12345A7a901b345678901234567890123456789012345678901234567890"`), // proof of the secrets cataloger finding the api key
+                assertSuccessfulReturnCode,
+            },
+        },
     }

     for _, test := range tests {

@@ -10,7 +10,7 @@ import (
     "github.com/anchore/syft/syft/source"
 )

-func catalogFixtureImage(t *testing.T, fixtureImageName string) (*pkg.Catalog, *distro.Distro, source.Source) {
+func catalogFixtureImage(t *testing.T, fixtureImageName string) (*pkg.Catalog, *distro.Distro, *source.Source) {
     imagetest.GetFixtureImage(t, "docker-archive", fixtureImageName)
     tarPath := imagetest.GetFixtureImageTarPath(t, fixtureImageName)

@@ -28,7 +28,7 @@ func catalogFixtureImage(t *testing.T, fixtureImageName string) (*pkg.Catalog, *
     return pkgCatalog, actualDistro, theSource
 }

-func catalogDirectory(t *testing.T, dir string) (*pkg.Catalog, *distro.Distro, source.Source) {
+func catalogDirectory(t *testing.T, dir string) (*pkg.Catalog, *distro.Distro, *source.Source) {
     theSource, cleanupSource, err := source.New("dir:"+dir, nil)
     t.Cleanup(cleanupSource)
     if err != nil {