diff --git a/cmd/attest.go b/cmd/attest.go index 3a79bc86a..d4c7a33e1 100644 --- a/cmd/attest.go +++ b/cmd/attest.go @@ -136,12 +136,12 @@ func attestExec(ctx context.Context, _ *cobra.Command, args []string) error { return fmt.Errorf("could not generate source input for attest command: %w", err) } - switch si.Scheme { - case source.ImageScheme, source.UnknownScheme: + switch si.Type { + case source.ImageType, source.UnknownType: // at this point we know that it cannot be dir: or file: schemes, so we will assume that the unknown scheme could represent an image - si.Scheme = source.ImageScheme + si.Type = source.ImageType default: - return fmt.Errorf("attest command can only be used with image sources but discovered %q when given %q", si.Scheme, userInput) + return fmt.Errorf("attest command can only be used with image sources but discovered %q when given %q", si.Type, userInput) } // if the original detection was from a local daemon we want to short circuit diff --git a/cmd/packages.go b/cmd/packages.go index fcaf89eb4..c99d0b99e 100644 --- a/cmd/packages.go +++ b/cmd/packages.go @@ -312,7 +312,7 @@ func packagesExecWorker(si source.Input, writer sbom.Writer) <-chan error { func runPackageSbomUpload(src *source.Source, s sbom.SBOM) error { log.Infof("uploading results to %s", appConfig.Anchore.Host) - if src.Metadata.Scheme != source.ImageScheme { + if src.Metadata.Scheme != source.ImageType { return fmt.Errorf("unable to upload results: only images are supported") } diff --git a/internal/anchore/import.go b/internal/anchore/import.go index 3b1e4c1a2..aab399662 100644 --- a/internal/anchore/import.go +++ b/internal/anchore/import.go @@ -4,11 +4,12 @@ import ( "context" "errors" "fmt" + "time" + "github.com/anchore/syft/internal/bus" "github.com/anchore/syft/syft/event" "github.com/wagoodman/go-partybus" "github.com/wagoodman/go-progress" - "time" "github.com/anchore/client-go/pkg/external" "github.com/anchore/stereoscope/pkg/image" diff --git 
a/internal/anchore/import_package_sbom_test.go b/internal/anchore/import_package_sbom_test.go index c6939c91f..75025bb0d 100644 --- a/internal/anchore/import_package_sbom_test.go +++ b/internal/anchore/import_package_sbom_test.go @@ -4,6 +4,7 @@ import ( "context" "encoding/json" "fmt" + "github.com/anchore/syft/syft/file" "net/http" "strings" "testing" @@ -56,9 +57,9 @@ func sbomFixture() sbom.SBOM { Name: "name", Version: "version", FoundBy: "foundBy", - Locations: []source.Location{ + Locations: []file.Location{ { - Coordinates: source.Coordinates{ + Coordinates: file.Coordinates{ RealPath: "path", FileSystemID: "layerID", }, @@ -102,13 +103,13 @@ func sbomFixture() sbom.SBOM { }, Relationships: []artifact.Relationship{ { - From: source.NewLocation("/place1"), - To: source.NewLocation("/place2"), + From: file.NewLocation("/place1"), + To: file.NewLocation("/place2"), Type: artifact.ContainsRelationship, }, }, Source: source.Metadata{ - Scheme: source.ImageScheme, + Scheme: source.ImageType, ImageMetadata: source.ImageMetadata{ UserInput: "user-in", Layers: nil, diff --git a/internal/archive/zip_file_traversal.go b/internal/archive/zip_file_traversal.go index 2a4eac716..ebb4f05fb 100644 --- a/internal/archive/zip_file_traversal.go +++ b/internal/archive/zip_file_traversal.go @@ -4,12 +4,13 @@ import ( "archive/zip" "bytes" "fmt" - "github.com/anchore/syft/syft/file" "io/ioutil" "os" "path/filepath" "strings" + "github.com/anchore/syft/syft/file" + "github.com/anchore/syft/internal/log" ) diff --git a/internal/config/application.go b/internal/config/application.go index 253b179e6..81b66e49c 100644 --- a/internal/config/application.go +++ b/internal/config/application.go @@ -3,13 +3,15 @@ package config import ( "errors" "fmt" - "github.com/anchore/syft/internal/version" - "github.com/anchore/syft/syft" - "github.com/anchore/syft/syft/file" "path" "reflect" "strings" + "github.com/anchore/syft/internal/version" + "github.com/anchore/syft/syft" + 
"github.com/anchore/syft/syft/file" + "github.com/anchore/syft/syft/file/cataloger/fileclassifier" + "github.com/adrg/xdg" "github.com/anchore/syft/internal" "github.com/mitchellh/go-homedir" @@ -76,7 +78,7 @@ func (cfg Application) ToCatalogingConfig() (*syft.CatalogingConfig, error) { SecretsConfig: *secretsConfig, SecretsScope: cfg.Secrets.Cataloger.ScopeOpt, ClassifyFiles: cfg.FileClassification.Cataloger.Enabled, - FileClassifiers: file.DefaultClassifiers(), + FileClassifiers: fileclassifier.DefaultClassifiers(), ContentsConfig: cfg.FileContents.ToConfig(), }, nil } diff --git a/internal/config/secrets.go b/internal/config/secrets.go index 7fb112bd8..c11dcef38 100644 --- a/internal/config/secrets.go +++ b/internal/config/secrets.go @@ -2,6 +2,7 @@ package config import ( "fmt" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/file/cataloger/secrets" "github.com/anchore/syft/syft/source" diff --git a/internal/formats/common/cyclonedxhelpers/component.go b/internal/formats/common/cyclonedxhelpers/component.go index 27b26d236..0345e3214 100644 --- a/internal/formats/common/cyclonedxhelpers/component.go +++ b/internal/formats/common/cyclonedxhelpers/component.go @@ -3,11 +3,12 @@ package cyclonedxhelpers import ( "reflect" + "github.com/anchore/syft/syft/file" + "github.com/CycloneDX/cyclonedx-go" "github.com/anchore/syft/internal/formats/common" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) func encodeComponent(p pkg.Package) cyclonedx.Component { @@ -71,9 +72,9 @@ func decodeComponent(c *cyclonedx.Component) *pkg.Package { return p } -func decodeLocations(vals map[string]string) []source.Location { - v := common.Decode(reflect.TypeOf([]source.Location{}), vals, "syft:location", CycloneDXFields) - out, _ := v.([]source.Location) +func decodeLocations(vals map[string]string) []file.Location { + v := common.Decode(reflect.TypeOf([]file.Location{}), vals, "syft:location", CycloneDXFields) + out, _ := 
v.([]file.Location) return out } diff --git a/internal/formats/common/cyclonedxhelpers/component_test.go b/internal/formats/common/cyclonedxhelpers/component_test.go index b267abe6d..b0cce0fd2 100644 --- a/internal/formats/common/cyclonedxhelpers/component_test.go +++ b/internal/formats/common/cyclonedxhelpers/component_test.go @@ -1,13 +1,13 @@ package cyclonedxhelpers import ( + "github.com/anchore/syft/syft/file" "testing" "github.com/CycloneDX/cyclonedx-go" "github.com/stretchr/testify/assert" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) func Test_encodeComponentProperties(t *testing.T) { @@ -26,8 +26,8 @@ func Test_encodeComponentProperties(t *testing.T) { name: "from apk", input: pkg.Package{ FoundBy: "cataloger", - Locations: []source.Location{ - {Coordinates: source.Coordinates{RealPath: "test"}}, + Locations: []file.Location{ + {Coordinates: file.Coordinates{RealPath: "test"}}, }, Metadata: pkg.ApkMetadata{ Package: "libc-utils", diff --git a/internal/formats/common/cyclonedxhelpers/decoder.go b/internal/formats/common/cyclonedxhelpers/decoder.go index 2c2a0e684..eb56eba6c 100644 --- a/internal/formats/common/cyclonedxhelpers/decoder.go +++ b/internal/formats/common/cyclonedxhelpers/decoder.go @@ -199,7 +199,7 @@ func decodeMetadata(component *cyclonedx.Component) source.Metadata { switch component.Type { case cyclonedx.ComponentTypeContainer: return source.Metadata{ - Scheme: source.ImageScheme, + Scheme: source.ImageType, ImageMetadata: source.ImageMetadata{ UserInput: component.Name, ID: component.BOMRef, @@ -208,7 +208,7 @@ func decodeMetadata(component *cyclonedx.Component) source.Metadata { } case cyclonedx.ComponentTypeFile: return source.Metadata{ - Scheme: source.FileScheme, // or source.DirectoryScheme + Scheme: source.FileType, // or source.DirectoryType Path: component.Name, ImageMetadata: source.ImageMetadata{ UserInput: component.Name, diff --git a/internal/formats/common/cyclonedxhelpers/format.go 
b/internal/formats/common/cyclonedxhelpers/format.go index 1df56aae2..ded8be88d 100644 --- a/internal/formats/common/cyclonedxhelpers/format.go +++ b/internal/formats/common/cyclonedxhelpers/format.go @@ -155,7 +155,7 @@ func toDependencies(relationships []artifact.Relationship) []cyclonedx.Dependenc func toBomDescriptorComponent(srcMetadata source.Metadata) *cyclonedx.Component { switch srcMetadata.Scheme { - case source.ImageScheme: + case source.ImageType: bomRef, err := artifact.IDByHash(srcMetadata.ImageMetadata.ID) if err != nil { log.Warnf("unable to get fingerprint of image metadata=%s: %+v", srcMetadata.ImageMetadata.ID, err) @@ -166,7 +166,7 @@ func toBomDescriptorComponent(srcMetadata source.Metadata) *cyclonedx.Component Name: srcMetadata.ImageMetadata.UserInput, Version: srcMetadata.ImageMetadata.ManifestDigest, } - case source.DirectoryScheme, source.FileScheme: + case source.DirectoryType, source.FileType: bomRef, err := artifact.IDByHash(srcMetadata.Path) if err != nil { log.Warnf("unable to get fingerprint of source metadata path=%s: %+v", srcMetadata.Path, err) diff --git a/internal/formats/common/spdxhelpers/document_name.go b/internal/formats/common/spdxhelpers/document_name.go index af80a52ed..142761eba 100644 --- a/internal/formats/common/spdxhelpers/document_name.go +++ b/internal/formats/common/spdxhelpers/document_name.go @@ -10,9 +10,9 @@ import ( func DocumentName(srcMetadata source.Metadata) (string, error) { switch srcMetadata.Scheme { - case source.ImageScheme: + case source.ImageType: return cleanName(srcMetadata.ImageMetadata.UserInput), nil - case source.DirectoryScheme, source.FileScheme: + case source.DirectoryType, source.FileType: return cleanName(srcMetadata.Path), nil } diff --git a/internal/formats/common/spdxhelpers/document_name_test.go b/internal/formats/common/spdxhelpers/document_name_test.go index ab1095e46..aaa91bda1 100644 --- a/internal/formats/common/spdxhelpers/document_name_test.go +++ 
b/internal/formats/common/spdxhelpers/document_name_test.go @@ -14,7 +14,7 @@ import ( func Test_DocumentName(t *testing.T) { allSchemes := strset.New() - for _, s := range source.AllSchemes { + for _, s := range source.AllTypes { allSchemes.Add(string(s)) } testedSchemes := strset.New() @@ -29,7 +29,7 @@ func Test_DocumentName(t *testing.T) { name: "image", inputName: "my-name", srcMetadata: source.Metadata{ - Scheme: source.ImageScheme, + Scheme: source.ImageType, ImageMetadata: source.ImageMetadata{ UserInput: "image-repo/name:tag", ID: "id", @@ -42,7 +42,7 @@ func Test_DocumentName(t *testing.T) { name: "directory", inputName: "my-name", srcMetadata: source.Metadata{ - Scheme: source.DirectoryScheme, + Scheme: source.DirectoryType, Path: "some/path/to/place", }, expected: "some/path/to/place", @@ -51,7 +51,7 @@ func Test_DocumentName(t *testing.T) { name: "file", inputName: "my-name", srcMetadata: source.Metadata{ - Scheme: source.FileScheme, + Scheme: source.FileType, Path: "some/path/to/place", }, expected: "some/path/to/place", diff --git a/internal/formats/common/spdxhelpers/document_namespace.go b/internal/formats/common/spdxhelpers/document_namespace.go index d3c438a74..6c99e4682 100644 --- a/internal/formats/common/spdxhelpers/document_namespace.go +++ b/internal/formats/common/spdxhelpers/document_namespace.go @@ -21,11 +21,11 @@ func DocumentNameAndNamespace(srcMetadata source.Metadata) (string, string, erro func DocumentNamespace(name string, srcMetadata source.Metadata) string { input := "unknown-source-type" switch srcMetadata.Scheme { - case source.ImageScheme: + case source.ImageType: input = "image" - case source.DirectoryScheme: + case source.DirectoryType: input = "dir" - case source.FileScheme: + case source.FileType: input = "file" } diff --git a/internal/formats/common/spdxhelpers/document_namespace_test.go b/internal/formats/common/spdxhelpers/document_namespace_test.go index 12030100e..86087a057 100644 --- 
a/internal/formats/common/spdxhelpers/document_namespace_test.go +++ b/internal/formats/common/spdxhelpers/document_namespace_test.go @@ -12,7 +12,7 @@ import ( func Test_documentNamespace(t *testing.T) { allSchemes := strset.New() - for _, s := range source.AllSchemes { + for _, s := range source.AllTypes { allSchemes.Add(string(s)) } testedSchemes := strset.New() @@ -27,7 +27,7 @@ func Test_documentNamespace(t *testing.T) { name: "image", inputName: "my-name", srcMetadata: source.Metadata{ - Scheme: source.ImageScheme, + Scheme: source.ImageType, ImageMetadata: source.ImageMetadata{ UserInput: "image-repo/name:tag", ID: "id", @@ -40,7 +40,7 @@ func Test_documentNamespace(t *testing.T) { name: "directory", inputName: "my-name", srcMetadata: source.Metadata{ - Scheme: source.DirectoryScheme, + Scheme: source.DirectoryType, Path: "some/path/to/place", }, expected: "https://anchore.com/syft/dir/my-name-", @@ -49,7 +49,7 @@ func Test_documentNamespace(t *testing.T) { name: "file", inputName: "my-name", srcMetadata: source.Metadata{ - Scheme: source.FileScheme, + Scheme: source.FileType, Path: "some/path/to/place", }, expected: "https://anchore.com/syft/file/my-name-", diff --git a/internal/formats/common/spdxhelpers/source_info_test.go b/internal/formats/common/spdxhelpers/source_info_test.go index 05e99d1bf..87220c67e 100644 --- a/internal/formats/common/spdxhelpers/source_info_test.go +++ b/internal/formats/common/spdxhelpers/source_info_test.go @@ -1,10 +1,10 @@ package spdxhelpers import ( + "github.com/anchore/syft/syft/file" "testing" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" "github.com/stretchr/testify/assert" ) @@ -18,9 +18,9 @@ func Test_SourceInfo(t *testing.T) { name: "locations are captured", input: pkg.Package{ // note: no type given - Locations: []source.Location{ - source.NewVirtualLocation("/a-place", "/b-place"), - source.NewVirtualLocation("/c-place", "/d-place"), + Locations: []file.Location{ + 
file.NewVirtualLocation("/a-place", "/b-place"), + file.NewVirtualLocation("/c-place", "/d-place"), }, }, expected: []string{ diff --git a/internal/formats/common/spdxhelpers/to_syft_model.go b/internal/formats/common/spdxhelpers/to_syft_model.go index 63ed039a8..4f5db9944 100644 --- a/internal/formats/common/spdxhelpers/to_syft_model.go +++ b/internal/formats/common/spdxhelpers/to_syft_model.go @@ -13,7 +13,6 @@ import ( "github.com/anchore/syft/syft/linux" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/sbom" - "github.com/anchore/syft/syft/source" ) func ToSyftModel(doc *spdx.Document2_2) (*sbom.SBOM, error) { @@ -22,8 +21,8 @@ func ToSyftModel(doc *spdx.Document2_2) (*sbom.SBOM, error) { s := &sbom.SBOM{ Artifacts: sbom.Artifacts{ PackageCatalog: pkg.NewCatalog(), - FileMetadata: map[source.Coordinates]source.FileMetadata{}, - FileDigests: map[source.Coordinates][]file.Digest{}, + FileMetadata: map[file.Coordinates]file.Metadata{}, + FileDigests: map[file.Coordinates][]file.Digest{}, LinuxDistribution: findLinuxReleaseByPURL(doc), }, } @@ -98,7 +97,7 @@ func toFileDigests(f *spdx.File2_2) (digests []file.Digest) { return digests } -func toFileMetadata(f *spdx.File2_2) (meta source.FileMetadata) { +func toFileMetadata(f *spdx.File2_2) (meta file.Metadata) { // FIXME Syft is currently lossy due to the SPDX 2.2.1 spec not supporting arbitrary mimetypes for _, typ := range f.FileType { switch FileType(typ) { @@ -132,7 +131,7 @@ func toSyftRelationships(spdxIDMap map[string]interface{}, doc *spdx.Document2_2 b := spdxIDMap[string(r.RefB.ElementRefID)] from, fromOk := a.(*pkg.Package) toPackage, toPackageOk := b.(*pkg.Package) - toLocation, toLocationOk := b.(*source.Location) + toLocation, toLocationOk := b.(*file.Location) if !fromOk || !(toPackageOk || toLocationOk) { log.Debugf("unable to find valid relationship mapping from SPDX 2.2 JSON, ignoring: (from: %+v) (to: %+v)", a, b) continue @@ -174,7 +173,7 @@ func toSyftRelationships(spdxIDMap 
map[string]interface{}, doc *spdx.Document2_2 return out } -func toSyftCoordinates(f *spdx.File2_2) source.Coordinates { +func toSyftCoordinates(f *spdx.File2_2) file.Coordinates { const layerIDPrefix = "layerID: " var fileSystemID string if strings.Index(f.FileComment, layerIDPrefix) == 0 { @@ -183,14 +182,14 @@ func toSyftCoordinates(f *spdx.File2_2) source.Coordinates { if strings.Index(string(f.FileSPDXIdentifier), layerIDPrefix) == 0 { fileSystemID = strings.TrimPrefix(string(f.FileSPDXIdentifier), layerIDPrefix) } - return source.Coordinates{ + return file.Coordinates{ RealPath: f.FileName, FileSystemID: fileSystemID, } } -func toSyftLocation(f *spdx.File2_2) *source.Location { - return &source.Location{ +func toSyftLocation(f *spdx.File2_2) *file.Location { + return &file.Location{ Coordinates: toSyftCoordinates(f), VirtualPath: f.FileName, } diff --git a/internal/formats/common/testutils/utils.go b/internal/formats/common/testutils/utils.go index 6b5b5f670..7369f6e3f 100644 --- a/internal/formats/common/testutils/utils.go +++ b/internal/formats/common/testutils/utils.go @@ -5,6 +5,8 @@ import ( "strings" "testing" + "github.com/anchore/syft/syft/file" + "github.com/anchore/go-testutils" "github.com/anchore/stereoscope/pkg/filetree" "github.com/anchore/stereoscope/pkg/image" @@ -157,8 +159,8 @@ func populateImageCatalog(catalog *pkg.Catalog, img *image.Image) { catalog.Add(pkg.Package{ Name: "package-1", Version: "1.0.1", - Locations: []source.Location{ - source.NewLocationFromImage(string(ref1.RealPath), *ref1, img), + Locations: []file.Location{ + file.NewLocationFromImage(string(ref1.RealPath), *ref1, img), }, Type: pkg.PythonPkg, FoundBy: "the-cataloger-1", @@ -177,8 +179,8 @@ func populateImageCatalog(catalog *pkg.Catalog, img *image.Image) { catalog.Add(pkg.Package{ Name: "package-2", Version: "2.0.1", - Locations: []source.Location{ - source.NewLocationFromImage(string(ref2.RealPath), *ref2, img), + Locations: []file.Location{ + 
file.NewLocationFromImage(string(ref2.RealPath), *ref2, img), }, Type: pkg.DebPkg, FoundBy: "the-cataloger-2", @@ -234,8 +236,8 @@ func newDirectoryCatalog() *pkg.Catalog { Version: "1.0.1", Type: pkg.PythonPkg, FoundBy: "the-cataloger-1", - Locations: []source.Location{ - source.NewLocation("/some/path/pkg1"), + Locations: []file.Location{ + file.NewLocation("/some/path/pkg1"), }, Language: pkg.Python, MetadataType: pkg.PythonPackageMetadataType, @@ -259,8 +261,8 @@ func newDirectoryCatalog() *pkg.Catalog { Version: "2.0.1", Type: pkg.DebPkg, FoundBy: "the-cataloger-2", - Locations: []source.Location{ - source.NewLocation("/some/path/pkg1"), + Locations: []file.Location{ + file.NewLocation("/some/path/pkg1"), }, MetadataType: pkg.DpkgMetadataType, Metadata: pkg.DpkgMetadata{ diff --git a/internal/formats/spdx22json/to_format_model.go b/internal/formats/spdx22json/to_format_model.go index 761993b3e..adfa7543c 100644 --- a/internal/formats/spdx22json/to_format_model.go +++ b/internal/formats/spdx22json/to_format_model.go @@ -16,7 +16,6 @@ import ( "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/sbom" - "github.com/anchore/syft/syft/source" ) // toFormatModel creates and populates a new JSON document struct that follows the SPDX 2.2 spec from the given cataloging results. 
@@ -94,7 +93,7 @@ func fileIDsForPackage(packageSpdxID string, relationships []artifact.Relationsh continue } - if _, ok := relationship.To.(source.Coordinates); !ok { + if _, ok := relationship.To.(file.Coordinates); !ok { continue } @@ -110,7 +109,7 @@ func toFiles(s sbom.SBOM) []model.File { artifacts := s.Artifacts for _, coordinates := range sbom.AllCoordinates(s) { - var metadata *source.FileMetadata + var metadata *file.Metadata if metadataForLocation, exists := artifacts.FileMetadata[coordinates]; exists { metadata = &metadataForLocation } @@ -165,7 +164,7 @@ func toChecksumAlgorithm(algorithm string) string { return strings.ToUpper(algorithm) } -func toFileTypes(metadata *source.FileMetadata) (ty []string) { +func toFileTypes(metadata *file.Metadata) (ty []string) { if metadata == nil { return nil } diff --git a/internal/formats/spdx22json/to_format_model_test.go b/internal/formats/spdx22json/to_format_model_test.go index d1f5da9a8..8036730c9 100644 --- a/internal/formats/spdx22json/to_format_model_test.go +++ b/internal/formats/spdx22json/to_format_model_test.go @@ -11,7 +11,6 @@ import ( "github.com/anchore/syft/internal/formats/common/spdxhelpers" "github.com/anchore/syft/internal/formats/spdx22json/model" - "github.com/anchore/syft/syft/source" "github.com/stretchr/testify/assert" ) @@ -19,12 +18,12 @@ func Test_toFileTypes(t *testing.T) { tests := []struct { name string - metadata source.FileMetadata + metadata file.Metadata expected []string }{ { name: "application", - metadata: source.FileMetadata{ + metadata: file.Metadata{ MIMEType: "application/vnd.unknown", }, expected: []string{ @@ -33,7 +32,7 @@ func Test_toFileTypes(t *testing.T) { }, { name: "archive", - metadata: source.FileMetadata{ + metadata: file.Metadata{ MIMEType: "application/zip", }, expected: []string{ @@ -43,7 +42,7 @@ func Test_toFileTypes(t *testing.T) { }, { name: "audio", - metadata: source.FileMetadata{ + metadata: file.Metadata{ MIMEType: "audio/ogg", }, expected: []string{ 
@@ -52,7 +51,7 @@ func Test_toFileTypes(t *testing.T) { }, { name: "video", - metadata: source.FileMetadata{ + metadata: file.Metadata{ MIMEType: "video/3gpp", }, expected: []string{ @@ -61,7 +60,7 @@ func Test_toFileTypes(t *testing.T) { }, { name: "text", - metadata: source.FileMetadata{ + metadata: file.Metadata{ MIMEType: "text/html", }, expected: []string{ @@ -70,7 +69,7 @@ func Test_toFileTypes(t *testing.T) { }, { name: "image", - metadata: source.FileMetadata{ + metadata: file.Metadata{ MIMEType: "image/png", }, expected: []string{ @@ -79,7 +78,7 @@ func Test_toFileTypes(t *testing.T) { }, { name: "binary", - metadata: source.FileMetadata{ + metadata: file.Metadata{ MIMEType: "application/x-sharedlib", }, expected: []string{ @@ -175,7 +174,7 @@ func Test_fileIDsForPackage(t *testing.T) { Name: "bogus", } - c := source.Coordinates{ + c := file.Coordinates{ RealPath: "/path", FileSystemID: "nowhere", } diff --git a/internal/formats/syftjson/encoder_test.go b/internal/formats/syftjson/encoder_test.go index 10d2576ca..5fd8edc96 100644 --- a/internal/formats/syftjson/encoder_test.go +++ b/internal/formats/syftjson/encoder_test.go @@ -42,9 +42,9 @@ func TestEncodeFullJSONDocument(t *testing.T) { p1 := pkg.Package{ Name: "package-1", Version: "1.0.1", - Locations: []source.Location{ + Locations: []file.Location{ { - Coordinates: source.Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/a/place/a", }, }, @@ -68,9 +68,9 @@ func TestEncodeFullJSONDocument(t *testing.T) { p2 := pkg.Package{ Name: "package-2", Version: "2.0.1", - Locations: []source.Location{ + Locations: []file.Location{ { - Coordinates: source.Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/b/place/b", }, }, @@ -95,49 +95,49 @@ func TestEncodeFullJSONDocument(t *testing.T) { s := sbom.SBOM{ Artifacts: sbom.Artifacts{ PackageCatalog: catalog, - FileMetadata: map[source.Coordinates]source.FileMetadata{ - source.NewLocation("/a/place").Coordinates: { + FileMetadata: 
map[file.Coordinates]file.Metadata{ + file.NewLocation("/a/place").Coordinates: { Mode: 0775, Type: "directory", UserID: 0, GroupID: 0, }, - source.NewLocation("/a/place/a").Coordinates: { + file.NewLocation("/a/place/a").Coordinates: { Mode: 0775, Type: "regularFile", UserID: 0, GroupID: 0, }, - source.NewLocation("/b").Coordinates: { + file.NewLocation("/b").Coordinates: { Mode: 0775, Type: "symbolicLink", LinkDestination: "/c", UserID: 0, GroupID: 0, }, - source.NewLocation("/b/place/b").Coordinates: { + file.NewLocation("/b/place/b").Coordinates: { Mode: 0644, Type: "regularFile", UserID: 1, GroupID: 2, }, }, - FileDigests: map[source.Coordinates][]file.Digest{ - source.NewLocation("/a/place/a").Coordinates: { + FileDigests: map[file.Coordinates][]file.Digest{ + file.NewLocation("/a/place/a").Coordinates: { { Algorithm: "sha256", Value: "366a3f5653e34673b875891b021647440d0127c2ef041e3b1a22da2a7d4f3703", }, }, - source.NewLocation("/b/place/b").Coordinates: { + file.NewLocation("/b/place/b").Coordinates: { { Algorithm: "sha256", Value: "1b3722da2a7d90d033b87581a2a3f12021647445653e34666ef041e3b4f3707c", }, }, }, - FileContents: map[source.Coordinates]string{ - source.NewLocation("/a/place/a").Coordinates: "the-contents", + FileContents: map[file.Coordinates]string{ + file.NewLocation("/a/place/a").Coordinates: "the-contents", }, LinuxDistribution: &linux.Release{ ID: "redhat", @@ -159,7 +159,7 @@ func TestEncodeFullJSONDocument(t *testing.T) { }, }, Source: source.Metadata{ - Scheme: source.ImageScheme, + Scheme: source.ImageType, ImageMetadata: source.ImageMetadata{ UserInput: "user-image-input", ID: "sha256:c2b46b4eb06296933b7cf0722683964e9ecbd93265b9ef6ae9642e3952afbba0", diff --git a/internal/formats/syftjson/model/file.go b/internal/formats/syftjson/model/file.go index be2c88df3..3d7736477 100644 --- a/internal/formats/syftjson/model/file.go +++ b/internal/formats/syftjson/model/file.go @@ -2,13 +2,11 @@ package model import ( 
"github.com/anchore/syft/syft/file" - - "github.com/anchore/syft/syft/source" ) type File struct { ID string `json:"id"` - Location source.Coordinates `json:"location"` + Location file.Coordinates `json:"location"` Metadata *FileMetadataEntry `json:"metadata,omitempty"` Contents string `json:"contents,omitempty"` Digests []file.Digest `json:"digests,omitempty"` @@ -16,10 +14,10 @@ type File struct { } type FileMetadataEntry struct { - Mode int `json:"mode"` - Type source.FileType `json:"type"` - LinkDestination string `json:"linkDestination,omitempty"` - UserID int `json:"userID"` - GroupID int `json:"groupID"` - MIMEType string `json:"mimeType"` + Mode int `json:"mode"` + Type file.Type `json:"type"` + LinkDestination string `json:"linkDestination,omitempty"` + UserID int `json:"userID"` + GroupID int `json:"groupID"` + MIMEType string `json:"mimeType"` } diff --git a/internal/formats/syftjson/model/package.go b/internal/formats/syftjson/model/package.go index f5000350d..1b9416468 100644 --- a/internal/formats/syftjson/model/package.go +++ b/internal/formats/syftjson/model/package.go @@ -4,7 +4,7 @@ import ( "encoding/json" "fmt" - "github.com/anchore/syft/syft/source" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/pkg" @@ -18,16 +18,16 @@ type Package struct { // PackageBasicData contains non-ambiguous values (type-wise) from pkg.Package. 
type PackageBasicData struct { - ID string `json:"id"` - Name string `json:"name"` - Version string `json:"version"` - Type pkg.Type `json:"type"` - FoundBy string `json:"foundBy"` - Locations []source.Coordinates `json:"locations"` - Licenses []string `json:"licenses"` - Language pkg.Language `json:"language"` - CPEs []string `json:"cpes"` - PURL string `json:"purl"` + ID string `json:"id"` + Name string `json:"name"` + Version string `json:"version"` + Type pkg.Type `json:"type"` + FoundBy string `json:"foundBy"` + Locations []file.Coordinates `json:"locations"` + Licenses []string `json:"licenses"` + Language pkg.Language `json:"language"` + CPEs []string `json:"cpes"` + PURL string `json:"purl"` } // PackageCustomData contains ambiguous values (type-wise) from pkg.Package. diff --git a/internal/formats/syftjson/model/secrets.go b/internal/formats/syftjson/model/secrets.go index c5f468576..5562b76bb 100644 --- a/internal/formats/syftjson/model/secrets.go +++ b/internal/formats/syftjson/model/secrets.go @@ -2,10 +2,9 @@ package model import ( "github.com/anchore/syft/syft/file" - "github.com/anchore/syft/syft/source" ) type Secrets struct { - Location source.Coordinates `json:"location"` + Location file.Coordinates `json:"location"` Secrets []file.SearchResult `json:"secrets"` } diff --git a/internal/formats/syftjson/to_format_model.go b/internal/formats/syftjson/to_format_model.go index 7d4feee4a..99bd388c7 100644 --- a/internal/formats/syftjson/to_format_model.go +++ b/internal/formats/syftjson/to_format_model.go @@ -73,7 +73,7 @@ func toDescriptor(d sbom.Descriptor) model.Descriptor { } } -func toSecrets(data map[source.Coordinates][]file.SearchResult) []model.Secrets { +func toSecrets(data map[file.Coordinates][]file.SearchResult) []model.Secrets { results := make([]model.Secrets, 0) for coordinates, secrets := range data { results = append(results, model.Secrets{ @@ -94,7 +94,7 @@ func toFile(s sbom.SBOM) []model.File { artifacts := s.Artifacts for _, 
coordinates := range sbom.AllCoordinates(s) { - var metadata *source.FileMetadata + var metadata *file.Metadata if metadataForLocation, exists := artifacts.FileMetadata[coordinates]; exists { metadata = &metadataForLocation } @@ -131,7 +131,7 @@ func toFile(s sbom.SBOM) []model.File { return results } -func toFileMetadataEntry(coordinates source.Coordinates, metadata *source.FileMetadata) *model.FileMetadataEntry { +func toFileMetadataEntry(coordinates file.Coordinates, metadata *file.Metadata) *model.FileMetadataEntry { if metadata == nil { return nil } @@ -175,7 +175,7 @@ func toPackageModel(p pkg.Package) model.Package { licenses = p.Licenses } - var coordinates = make([]source.Coordinates, len(p.Locations)) + var coordinates = make([]file.Coordinates, len(p.Locations)) for i, l := range p.Locations { coordinates[i] = l.Coordinates } @@ -216,7 +216,7 @@ func toRelationshipModel(relationships []artifact.Relationship) []model.Relation // toSourceModel creates a new source object to be represented into JSON. 
func toSourceModel(src source.Metadata) (model.Source, error) { switch src.Scheme { - case source.ImageScheme: + case source.ImageType: metadata := src.ImageMetadata // ensure that empty collections are not shown as null if metadata.RepoDigests == nil { @@ -229,12 +229,12 @@ func toSourceModel(src source.Metadata) (model.Source, error) { Type: "image", Target: metadata, }, nil - case source.DirectoryScheme: + case source.DirectoryType: return model.Source{ Type: "directory", Target: src.Path, }, nil - case source.FileScheme: + case source.FileType: return model.Source{ Type: "file", Target: src.Path, diff --git a/internal/formats/syftjson/to_format_model_test.go b/internal/formats/syftjson/to_format_model_test.go index 18d67936d..97f56dbd1 100644 --- a/internal/formats/syftjson/to_format_model_test.go +++ b/internal/formats/syftjson/to_format_model_test.go @@ -13,7 +13,7 @@ import ( func Test_toSourceModel(t *testing.T) { allSchemes := strset.New() - for _, s := range source.AllSchemes { + for _, s := range source.AllTypes { allSchemes.Add(string(s)) } testedSchemes := strset.New() @@ -26,7 +26,7 @@ func Test_toSourceModel(t *testing.T) { { name: "directory", src: source.Metadata{ - Scheme: source.DirectoryScheme, + Scheme: source.DirectoryType, Path: "some/path", }, expected: model.Source{ @@ -37,7 +37,7 @@ func Test_toSourceModel(t *testing.T) { { name: "file", src: source.Metadata{ - Scheme: source.FileScheme, + Scheme: source.FileType, Path: "some/path", }, expected: model.Source{ @@ -48,7 +48,7 @@ func Test_toSourceModel(t *testing.T) { { name: "image", src: source.Metadata{ - Scheme: source.ImageScheme, + Scheme: source.ImageType, ImageMetadata: source.ImageMetadata{ UserInput: "user-input", ID: "id...", diff --git a/internal/formats/syftjson/to_syft_model.go b/internal/formats/syftjson/to_syft_model.go index 9b6f0aa64..b235b90f8 100644 --- a/internal/formats/syftjson/to_syft_model.go +++ b/internal/formats/syftjson/to_syft_model.go @@ -4,6 +4,7 @@ import ( 
"github.com/anchore/syft/internal/formats/syftjson/model" "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/linux" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/sbom" @@ -111,17 +112,17 @@ func toSyftSourceData(s model.Source) *source.Metadata { switch s.Type { case "directory": return &source.Metadata{ - Scheme: source.DirectoryScheme, + Scheme: source.DirectoryType, Path: s.Target.(string), } case "file": return &source.Metadata{ - Scheme: source.FileScheme, + Scheme: source.FileType, Path: s.Target.(string), } case "image": return &source.Metadata{ - Scheme: source.ImageScheme, + Scheme: source.ImageType, ImageMetadata: s.Target.(source.ImageMetadata), } } @@ -148,9 +149,9 @@ func toSyftPackage(p model.Package) pkg.Package { cpes = append(cpes, value) } - var locations = make([]source.Location, len(p.Locations)) + var locations = make([]file.Location, len(p.Locations)) for i, c := range p.Locations { - locations[i] = source.NewLocationFromCoordinates(c) + locations[i] = file.NewLocationFromCoordinates(c) } return pkg.Package{ diff --git a/internal/formats/syftjson/to_syft_model_test.go b/internal/formats/syftjson/to_syft_model_test.go index 7c852189d..a1e951bec 100644 --- a/internal/formats/syftjson/to_syft_model_test.go +++ b/internal/formats/syftjson/to_syft_model_test.go @@ -11,7 +11,7 @@ import ( func Test_toSyftSourceData(t *testing.T) { allSchemes := strset.New() - for _, s := range source.AllSchemes { + for _, s := range source.AllTypes { allSchemes.Add(string(s)) } testedSchemes := strset.New() @@ -24,7 +24,7 @@ func Test_toSyftSourceData(t *testing.T) { { name: "directory", expected: source.Metadata{ - Scheme: source.DirectoryScheme, + Scheme: source.DirectoryType, Path: "some/path", }, src: model.Source{ @@ -35,7 +35,7 @@ func Test_toSyftSourceData(t *testing.T) { { name: "file", expected: source.Metadata{ - Scheme: source.FileScheme, + 
Scheme: source.FileType, Path: "some/path", }, src: model.Source{ @@ -46,7 +46,7 @@ func Test_toSyftSourceData(t *testing.T) { { name: "image", expected: source.Metadata{ - Scheme: source.ImageScheme, + Scheme: source.ImageType, ImageMetadata: source.ImageMetadata{ UserInput: "user-input", ID: "id...", diff --git a/internal/formats/text/encoder.go b/internal/formats/text/encoder.go index ee97abd4a..33c1cefa8 100644 --- a/internal/formats/text/encoder.go +++ b/internal/formats/text/encoder.go @@ -16,9 +16,9 @@ func encoder(output io.Writer, s sbom.SBOM) error { w.Init(output, 0, 8, 0, '\t', tabwriter.AlignRight) switch s.Source.Scheme { - case source.DirectoryScheme, source.FileScheme: + case source.DirectoryType, source.FileType: fmt.Fprintf(w, "[Path: %s]\n", s.Source.Path) - case source.ImageScheme: + case source.ImageType: fmt.Fprintln(w, "[Image]") for idx, l := range s.Source.ImageMetadata.Layers { diff --git a/internal/version/guess.go b/internal/version/guess.go index b9ebf5fef..8062dc965 100644 --- a/internal/version/guess.go +++ b/internal/version/guess.go @@ -1,9 +1,10 @@ package version import ( - "github.com/anchore/syft/internal/log" "runtime/debug" "strings" + + "github.com/anchore/syft/internal/log" ) func Guess() string { diff --git a/syft/catalog.go b/syft/catalog.go index 6d7c63aca..c9f568da8 100644 --- a/syft/catalog.go +++ b/syft/catalog.go @@ -2,6 +2,7 @@ package syft import ( "fmt" + "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/sbom" "github.com/anchore/syft/syft/source" diff --git a/syft/cataloging_config.go b/syft/cataloging_config.go index ee04b0183..041008d35 100644 --- a/syft/cataloging_config.go +++ b/syft/cataloging_config.go @@ -2,12 +2,13 @@ package syft import ( "crypto" + + "github.com/anchore/syft/syft/file/cataloger/fileclassifier" "github.com/anchore/syft/syft/file/cataloger/filecontents" "github.com/anchore/syft/syft/file/cataloger/secrets" "github.com/anchore/syft/internal" 
"github.com/anchore/syft/internal/version" - "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/source" ) @@ -31,7 +32,7 @@ type CatalogingConfig struct { SecretsScope source.Scope // file classification ClassifyFiles bool - FileClassifiers []file.Classifier + FileClassifiers []fileclassifier.Classifier // file contents ContentsConfig filecontents.CatalogerConfig } @@ -43,7 +44,7 @@ func DefaultCatalogingConfig() CatalogingConfig { ToolVersion: version.Guess(), SecretsScope: source.AllLayersScope, SecretsConfig: secrets.DefaultCatalogerConfig(), - FileClassifiers: file.DefaultClassifiers(), + FileClassifiers: fileclassifier.DefaultClassifiers(), ContentsConfig: filecontents.DefaultCatalogerConfig(), } } diff --git a/syft/cataloging_option.go b/syft/cataloging_option.go index 913668730..61beaff38 100644 --- a/syft/cataloging_option.go +++ b/syft/cataloging_option.go @@ -2,7 +2,8 @@ package syft import ( "crypto" - "github.com/anchore/syft/syft/file" + + "github.com/anchore/syft/syft/file/cataloger/fileclassifier" "github.com/anchore/syft/syft/file/cataloger/secrets" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/packages" @@ -100,7 +101,7 @@ func WithFileClassification() CatalogingOption { } } -func WithFileClassifiers(classifiers ...file.Classifier) CatalogingOption { +func WithFileClassifiers(classifiers ...fileclassifier.Classifier) CatalogingOption { return func(_ *source.Source, config *CatalogingConfig) error { config.ClassifyFiles = !(len(classifiers) > 0) config.FileClassifiers = classifiers diff --git a/syft/event/parsers/parsers.go b/syft/event/parsers/parsers.go index 22f188772..9a16689a3 100644 --- a/syft/event/parsers/parsers.go +++ b/syft/event/parsers/parsers.go @@ -5,6 +5,7 @@ package parsers import ( "fmt" + "github.com/anchore/syft/syft/event/monitor" "github.com/anchore/syft/syft/event" diff --git a/syft/file/cataloger/fileclassifier/cataloger.go 
b/syft/file/cataloger/fileclassifier/cataloger.go index f44f4bea6..f564a43ee 100644 --- a/syft/file/cataloger/fileclassifier/cataloger.go +++ b/syft/file/cataloger/fileclassifier/cataloger.go @@ -7,17 +7,17 @@ import ( ) type Cataloger struct { - classifiers []file.Classifier + classifiers []Classifier } -func NewCataloger(classifiers []file.Classifier) (*Cataloger, error) { +func NewCataloger(classifiers []Classifier) (*Cataloger, error) { return &Cataloger{ classifiers: classifiers, }, nil } -func (i *Cataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates][]file.Classification, error) { - results := make(map[source.Coordinates][]file.Classification) +func (i *Cataloger) Catalog(resolver source.FileResolver) (map[file.Coordinates][]file.Classification, error) { + results := make(map[file.Coordinates][]file.Classification) numResults := 0 for _, location := range source.AllRegularFiles(resolver) { diff --git a/syft/file/cataloger/fileclassifier/cataloger_test.go b/syft/file/cataloger/fileclassifier/cataloger_test.go index 3cdd5ab4e..e6c78c515 100644 --- a/syft/file/cataloger/fileclassifier/cataloger_test.go +++ b/syft/file/cataloger/fileclassifier/cataloger_test.go @@ -106,7 +106,7 @@ func TestClassifierCataloger_DefaultClassifiers_PositiveCases(t *testing.T) { for _, test := range tests { t.Run(test.name, func(t *testing.T) { - c, err := NewCataloger(file.DefaultClassifiers()) + c, err := NewCataloger(DefaultClassifiers()) test.expectedErr(t, err) src, err := source.NewFromDirectory(test.fixtureDir) @@ -161,7 +161,7 @@ func TestClassifierCataloger_DefaultClassifiers_PositiveCases_Image(t *testing.T for _, test := range tests { t.Run(test.name, func(t *testing.T) { - c, err := NewCataloger(file.DefaultClassifiers()) + c, err := NewCataloger(DefaultClassifiers()) test.expectedErr(t, err) img := imagetest.GetFixtureImage(t, "docker-archive", test.fixtureImage) @@ -192,7 +192,7 @@ func TestClassifierCataloger_DefaultClassifiers_PositiveCases_Image(t 
*testing.T func TestClassifierCataloger_DefaultClassifiers_NegativeCases(t *testing.T) { - c, err := NewCataloger(file.DefaultClassifiers()) + c, err := NewCataloger(DefaultClassifiers()) assert.NoError(t, err) src, err := source.NewFromDirectory("test-fixtures/classifiers/negative") diff --git a/syft/file/classifier.go b/syft/file/cataloger/fileclassifier/classifier.go similarity index 92% rename from syft/file/classifier.go rename to syft/file/cataloger/fileclassifier/classifier.go index 7efb15a5b..c4d39d3b3 100644 --- a/syft/file/classifier.go +++ b/syft/file/cataloger/fileclassifier/classifier.go @@ -1,4 +1,4 @@ -package file +package fileclassifier import ( "bytes" @@ -7,6 +7,8 @@ import ( "regexp" "text/template" + "github.com/anchore/syft/syft/file" + "github.com/anchore/syft/internal" "github.com/anchore/syft/syft/source" ) @@ -68,7 +70,7 @@ func DefaultClassifiers() []Classifier { } } -func (c Classifier) Classify(resolver source.FileResolver, location source.Location) (*Classification, error) { +func (c Classifier) Classify(resolver source.FileResolver, location file.Location) (*file.Classification, error) { doesFilepathMatch, filepathNamedGroupValues := filepathMatches(c.FilepathPatterns, location) if !doesFilepathMatch { return nil, nil @@ -86,7 +88,7 @@ func (c Classifier) Classify(resolver source.FileResolver, location source.Locat return nil, err } - var result *Classification + var result *file.Classification for _, patternTemplate := range c.EvidencePatternTemplates { tmpl, err := template.New("").Parse(patternTemplate) if err != nil { @@ -110,7 +112,7 @@ func (c Classifier) Classify(resolver source.FileResolver, location source.Locat matchMetadata := internal.MatchNamedCaptureGroups(pattern, string(contents)) if result == nil { - result = &Classification{ + result = &file.Classification{ Class: c.Class, Metadata: matchMetadata, } @@ -123,7 +125,7 @@ func (c Classifier) Classify(resolver source.FileResolver, location source.Locat return result, nil 
} -func filepathMatches(patterns []*regexp.Regexp, location source.Location) (bool, map[string]string) { +func filepathMatches(patterns []*regexp.Regexp, location file.Location) (bool, map[string]string) { for _, path := range []string{location.RealPath, location.VirtualPath} { if path == "" { continue diff --git a/syft/file/classifier_test.go b/syft/file/cataloger/fileclassifier/classifier_test.go similarity index 81% rename from syft/file/classifier_test.go rename to syft/file/cataloger/fileclassifier/classifier_test.go index 9151f3f5b..475a221d3 100644 --- a/syft/file/classifier_test.go +++ b/syft/file/cataloger/fileclassifier/classifier_test.go @@ -1,25 +1,25 @@ -package file +package fileclassifier import ( + "github.com/anchore/syft/syft/file" "regexp" "testing" - "github.com/anchore/syft/syft/source" "github.com/stretchr/testify/assert" ) func TestFilepathMatches(t *testing.T) { tests := []struct { name string - location source.Location + location file.Location patterns []string expectedMatches bool expectedNamedGroups map[string]string }{ { name: "simple-filename-match", - location: source.Location{ - Coordinates: source.Coordinates{ + location: file.Location{ + Coordinates: file.Coordinates{ RealPath: "python2.7", }, }, @@ -30,8 +30,8 @@ func TestFilepathMatches(t *testing.T) { }, { name: "filepath-match", - location: source.Location{ - Coordinates: source.Coordinates{ + location: file.Location{ + Coordinates: file.Coordinates{ RealPath: "/usr/bin/python2.7", }, }, @@ -42,7 +42,7 @@ func TestFilepathMatches(t *testing.T) { }, { name: "virtual-filepath-match", - location: source.Location{ + location: file.Location{ VirtualPath: "/usr/bin/python2.7", }, patterns: []string{ @@ -52,7 +52,7 @@ func TestFilepathMatches(t *testing.T) { }, { name: "full-filepath-match", - location: source.Location{ + location: file.Location{ VirtualPath: "/usr/bin/python2.7", }, patterns: []string{ @@ -62,8 +62,8 @@ func TestFilepathMatches(t *testing.T) { }, { name: 
"anchored-filename-match-FAILS", - location: source.Location{ - Coordinates: source.Coordinates{ + location: file.Location{ + Coordinates: file.Coordinates{ RealPath: "/usr/bin/python2.7", }, }, @@ -74,7 +74,7 @@ func TestFilepathMatches(t *testing.T) { }, { name: "empty-filename-match-FAILS", - location: source.Location{}, + location: file.Location{}, patterns: []string{ `^python([0-9]+\.[0-9]+)$`, }, diff --git a/syft/file/cataloger/filecontents/cataloger.go b/syft/file/cataloger/filecontents/cataloger.go index 85ce3d097..fb0f15e43 100644 --- a/syft/file/cataloger/filecontents/cataloger.go +++ b/syft/file/cataloger/filecontents/cataloger.go @@ -34,9 +34,9 @@ func NewCataloger(config CatalogerConfig) (*Cataloger, error) { }, nil } -func (i *Cataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates]string, error) { - results := make(map[source.Coordinates]string) - var locations []source.Location +func (i *Cataloger) Catalog(resolver source.FileResolver) (map[file.Coordinates]string, error) { + results := make(map[file.Coordinates]string) + var locations []file.Location locations, err := resolver.FilesByGlob(i.config.Globs...) 
if err != nil { @@ -67,7 +67,7 @@ func (i *Cataloger) Catalog(resolver source.FileResolver) (map[source.Coordinate return results, nil } -func (i *Cataloger) catalogLocation(resolver source.FileResolver, location source.Location) (string, error) { +func (i *Cataloger) catalogLocation(resolver source.FileResolver, location file.Location) (string, error) { contentReader, err := resolver.FileContentsByLocation(location) if err != nil { return "", err diff --git a/syft/file/cataloger/filecontents/cataloger_test.go b/syft/file/cataloger/filecontents/cataloger_test.go index f7286278f..45f05500f 100644 --- a/syft/file/cataloger/filecontents/cataloger_test.go +++ b/syft/file/cataloger/filecontents/cataloger_test.go @@ -1,6 +1,7 @@ package filecontents import ( + "github.com/anchore/syft/syft/file" "testing" "github.com/anchore/syft/syft/source" @@ -15,41 +16,41 @@ func TestContentsCataloger(t *testing.T) { globs []string maxSize int64 files []string - expected map[source.Coordinates]string + expected map[file.Coordinates]string }{ { name: "multi-pattern", globs: []string{"test-fixtures/last/*.txt", "test-fixtures/*.txt"}, files: allFiles, - expected: map[source.Coordinates]string{ - source.NewLocation("test-fixtures/last/path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9sYXN0L3BhdGgudHh0IGZpbGUgY29udGVudHMh", - source.NewLocation("test-fixtures/another-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hbm90aGVyLXBhdGgudHh0IGZpbGUgY29udGVudHMh", - source.NewLocation("test-fixtures/a-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hLXBhdGgudHh0IGZpbGUgY29udGVudHMh", + expected: map[file.Coordinates]string{ + file.NewLocation("test-fixtures/last/path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9sYXN0L3BhdGgudHh0IGZpbGUgY29udGVudHMh", + file.NewLocation("test-fixtures/another-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hbm90aGVyLXBhdGgudHh0IGZpbGUgY29udGVudHMh", + file.NewLocation("test-fixtures/a-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hLXBhdGgudHh0IGZpbGUgY29udGVudHMh", }, }, { name: 
"no-patterns", globs: []string{}, files: []string{"test-fixtures/last/path.txt", "test-fixtures/another-path.txt", "test-fixtures/a-path.txt"}, - expected: map[source.Coordinates]string{}, + expected: map[file.Coordinates]string{}, }, { name: "all-txt", globs: []string{"**/*.txt"}, files: allFiles, - expected: map[source.Coordinates]string{ - source.NewLocation("test-fixtures/last/path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9sYXN0L3BhdGgudHh0IGZpbGUgY29udGVudHMh", - source.NewLocation("test-fixtures/another-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hbm90aGVyLXBhdGgudHh0IGZpbGUgY29udGVudHMh", - source.NewLocation("test-fixtures/a-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hLXBhdGgudHh0IGZpbGUgY29udGVudHMh", + expected: map[file.Coordinates]string{ + file.NewLocation("test-fixtures/last/path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9sYXN0L3BhdGgudHh0IGZpbGUgY29udGVudHMh", + file.NewLocation("test-fixtures/another-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hbm90aGVyLXBhdGgudHh0IGZpbGUgY29udGVudHMh", + file.NewLocation("test-fixtures/a-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hLXBhdGgudHh0IGZpbGUgY29udGVudHMh", }, }, { name: "subpath", globs: []string{"test-fixtures/*.txt"}, files: allFiles, - expected: map[source.Coordinates]string{ - source.NewLocation("test-fixtures/another-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hbm90aGVyLXBhdGgudHh0IGZpbGUgY29udGVudHMh", - source.NewLocation("test-fixtures/a-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hLXBhdGgudHh0IGZpbGUgY29udGVudHMh", + expected: map[file.Coordinates]string{ + file.NewLocation("test-fixtures/another-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hbm90aGVyLXBhdGgudHh0IGZpbGUgY29udGVudHMh", + file.NewLocation("test-fixtures/a-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hLXBhdGgudHh0IGZpbGUgY29udGVudHMh", }, }, { @@ -57,9 +58,9 @@ func TestContentsCataloger(t *testing.T) { maxSize: 42, globs: []string{"**/*.txt"}, files: allFiles, - expected: map[source.Coordinates]string{ - 
source.NewLocation("test-fixtures/last/path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9sYXN0L3BhdGgudHh0IGZpbGUgY29udGVudHMh", - source.NewLocation("test-fixtures/a-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hLXBhdGgudHh0IGZpbGUgY29udGVudHMh", + expected: map[file.Coordinates]string{ + file.NewLocation("test-fixtures/last/path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9sYXN0L3BhdGgudHh0IGZpbGUgY29udGVudHMh", + file.NewLocation("test-fixtures/a-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hLXBhdGgudHh0IGZpbGUgY29udGVudHMh", }, }, } diff --git a/syft/file/cataloger/filedigests/cataloger.go b/syft/file/cataloger/filedigests/cataloger.go index 7529912d4..45c11ae9a 100644 --- a/syft/file/cataloger/filedigests/cataloger.go +++ b/syft/file/cataloger/filedigests/cataloger.go @@ -29,8 +29,8 @@ func NewCataloger(hashes []crypto.Hash) (*Cataloger, error) { }, nil } -func (i *Cataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates][]file.Digest, error) { - results := make(map[source.Coordinates][]file.Digest) +func (i *Cataloger) Catalog(resolver source.FileResolver) (map[file.Coordinates][]file.Digest, error) { + results := make(map[file.Coordinates][]file.Digest) locations := source.AllRegularFiles(resolver) stage, prog := digestsCatalogingProgress(int64(len(locations))) for _, location := range locations { @@ -57,14 +57,14 @@ func (i *Cataloger) Catalog(resolver source.FileResolver) (map[source.Coordinate return results, nil } -func (i *Cataloger) catalogLocation(resolver source.FileResolver, location source.Location) ([]file.Digest, error) { +func (i *Cataloger) catalogLocation(resolver source.FileResolver, location file.Location) ([]file.Digest, error) { meta, err := resolver.FileMetadataByLocation(location) if err != nil { return nil, err } // we should only attempt to report digests for files that are regular files (don't attempt to resolve links) - if meta.Type != source.RegularFile { + if meta.Type != file.RegularFile { return nil, errUndigestableFile } diff 
--git a/syft/file/cataloger/filedigests/cataloger_test.go b/syft/file/cataloger/filedigests/cataloger_test.go index 521fea712..030a47fcb 100644 --- a/syft/file/cataloger/filedigests/cataloger_test.go +++ b/syft/file/cataloger/filedigests/cataloger_test.go @@ -16,8 +16,8 @@ import ( "github.com/stretchr/testify/require" ) -func testDigests(t testing.TB, root string, files []string, hashes ...crypto.Hash) map[source.Coordinates][]file.Digest { - digests := make(map[source.Coordinates][]file.Digest) +func testDigests(t testing.TB, root string, files []string, hashes ...crypto.Hash) map[file.Coordinates][]file.Digest { + digests := make(map[file.Coordinates][]file.Digest) for _, f := range files { fh, err := os.Open(filepath.Join(root, f)) @@ -31,14 +31,14 @@ func testDigests(t testing.TB, root string, files []string, hashes ...crypto.Has if len(b) == 0 { // we don't keep digests for empty files - digests[source.NewLocation(f).Coordinates] = []file.Digest{} + digests[file.NewLocation(f).Coordinates] = []file.Digest{} continue } for _, hash := range hashes { h := hash.New() h.Write(b) - digests[source.NewLocation(f).Coordinates] = append(digests[source.NewLocation(f).Coordinates], file.Digest{ + digests[file.NewLocation(f).Coordinates] = append(digests[file.NewLocation(f).Coordinates], file.Digest{ Algorithm: file.CleanDigestAlgorithmName(hash.String()), Value: fmt.Sprintf("%x", h.Sum(nil)), }) @@ -54,7 +54,7 @@ func TestDigestsCataloger(t *testing.T) { name string digests []crypto.Hash files []string - expected map[source.Coordinates][]file.Digest + expected map[file.Coordinates][]file.Digest }{ { name: "md5", @@ -145,7 +145,7 @@ func TestDigestsCataloger_MixFileTypes(t *testing.T) { if err != nil { t.Fatalf("unable to get file=%q : %+v", test.path, err) } - l := source.NewLocationFromImage(test.path, *ref, img) + l := file.NewLocationFromImage(test.path, *ref, img) if len(actual[l.Coordinates]) == 0 { if test.expected != "" { diff --git 
a/syft/file/cataloger/filemetadata/cataloger.go b/syft/file/cataloger/filemetadata/cataloger.go index 634363cf9..756051dc7 100644 --- a/syft/file/cataloger/filemetadata/cataloger.go +++ b/syft/file/cataloger/filemetadata/cataloger.go @@ -4,6 +4,7 @@ import ( "github.com/anchore/syft/internal/bus" "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/event" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/source" "github.com/wagoodman/go-partybus" "github.com/wagoodman/go-progress" @@ -16,9 +17,9 @@ func NewCataloger() *Cataloger { return &Cataloger{} } -func (i *Cataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates]source.FileMetadata, error) { - results := make(map[source.Coordinates]source.FileMetadata) - var locations []source.Location +func (i *Cataloger) Catalog(resolver source.FileResolver) (map[file.Coordinates]file.Metadata, error) { + results := make(map[file.Coordinates]file.Metadata) + var locations []file.Location for location := range resolver.AllLocations() { locations = append(locations, location) } diff --git a/syft/file/cataloger/filemetadata/cataloger_test.go b/syft/file/cataloger/filemetadata/cataloger_test.go index 243195c10..165d13151 100644 --- a/syft/file/cataloger/filemetadata/cataloger_test.go +++ b/syft/file/cataloger/filemetadata/cataloger_test.go @@ -4,8 +4,9 @@ import ( "os" "testing" - "github.com/anchore/stereoscope/pkg/file" + stereoscopeFile "github.com/anchore/stereoscope/pkg/file" "github.com/anchore/stereoscope/pkg/imagetest" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/source" "github.com/stretchr/testify/assert" ) @@ -33,13 +34,13 @@ func TestFileMetadataCataloger(t *testing.T) { tests := []struct { path string exists bool - expected source.FileMetadata + expected file.Metadata err bool }{ { path: "/file-1.txt", exists: true, - expected: source.FileMetadata{ + expected: file.Metadata{ Mode: 0644, Type: "RegularFile", UserID: 1, @@ -51,7 +52,7 @@ func 
TestFileMetadataCataloger(t *testing.T) { { path: "/hardlink-1", exists: true, - expected: source.FileMetadata{ + expected: file.Metadata{ Mode: 0644, Type: "HardLink", LinkDestination: "file-1.txt", @@ -63,7 +64,7 @@ func TestFileMetadataCataloger(t *testing.T) { { path: "/symlink-1", exists: true, - expected: source.FileMetadata{ + expected: file.Metadata{ Mode: 0777 | os.ModeSymlink, Type: "SymbolicLink", LinkDestination: "file-1.txt", @@ -75,7 +76,7 @@ func TestFileMetadataCataloger(t *testing.T) { { path: "/char-device-1", exists: true, - expected: source.FileMetadata{ + expected: file.Metadata{ Mode: 0644 | os.ModeDevice | os.ModeCharDevice, Type: "CharacterDevice", UserID: 0, @@ -86,7 +87,7 @@ func TestFileMetadataCataloger(t *testing.T) { { path: "/block-device-1", exists: true, - expected: source.FileMetadata{ + expected: file.Metadata{ Mode: 0644 | os.ModeDevice, Type: "BlockDevice", UserID: 0, @@ -97,7 +98,7 @@ func TestFileMetadataCataloger(t *testing.T) { { path: "/fifo-1", exists: true, - expected: source.FileMetadata{ + expected: file.Metadata{ Mode: 0644 | os.ModeNamedPipe, Type: "FIFONode", UserID: 0, @@ -108,7 +109,7 @@ func TestFileMetadataCataloger(t *testing.T) { { path: "/bin", exists: true, - expected: source.FileMetadata{ + expected: file.Metadata{ Mode: 0755 | os.ModeDir, Type: "Directory", UserID: 0, @@ -120,12 +121,12 @@ func TestFileMetadataCataloger(t *testing.T) { for _, test := range tests { t.Run(test.path, func(t *testing.T) { - _, ref, err := img.SquashedTree().File(file.Path(test.path)) + _, ref, err := img.SquashedTree().File(stereoscopeFile.Path(test.path)) if err != nil { t.Fatalf("unable to get file: %+v", err) } - l := source.NewLocationFromImage(test.path, *ref, img) + l := file.NewLocationFromImage(test.path, *ref, img) assert.Equal(t, test.expected, actual[l.Coordinates], "mismatched metadata") diff --git a/syft/file/cataloger/secrets/cataloger.go b/syft/file/cataloger/secrets/cataloger.go index 0d6031b03..9b43d7d8f 100644 
--- a/syft/file/cataloger/secrets/cataloger.go +++ b/syft/file/cataloger/secrets/cataloger.go @@ -56,8 +56,8 @@ func NewCataloger(config CatalogerConfig) (*Cataloger, error) { }, nil } -func (i *Cataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates][]file.SearchResult, error) { - results := make(map[source.Coordinates][]file.SearchResult) +func (i *Cataloger) Catalog(resolver source.FileResolver) (map[file.Coordinates][]file.SearchResult, error) { + results := make(map[file.Coordinates][]file.SearchResult) locations := source.AllRegularFiles(resolver) stage, prog, secretsDiscovered := newSecretsCatalogerMonitor(int64(len(locations))) for _, location := range locations { @@ -82,7 +82,7 @@ func (i *Cataloger) Catalog(resolver source.FileResolver) (map[source.Coordinate return results, nil } -func (i *Cataloger) catalogLocation(resolver source.FileResolver, location source.Location) ([]file.SearchResult, error) { +func (i *Cataloger) catalogLocation(resolver source.FileResolver, location file.Location) ([]file.SearchResult, error) { metadata, err := resolver.FileMetadataByLocation(location) if err != nil { return nil, err @@ -120,7 +120,7 @@ func (i *Cataloger) catalogLocation(resolver source.FileResolver, location sourc return secrets, nil } -func extractValue(resolver source.FileResolver, location source.Location, start, length int64) (string, error) { +func extractValue(resolver source.FileResolver, location file.Location, start, length int64) (string, error) { readCloser, err := resolver.FileContentsByLocation(location) if err != nil { return "", fmt.Errorf("unable to fetch reader for location=%q : %w", location, err) diff --git a/syft/file/cataloger/secrets/cataloger_test.go b/syft/file/cataloger/secrets/cataloger_test.go index 36fb25df9..94631030b 100644 --- a/syft/file/cataloger/secrets/cataloger_test.go +++ b/syft/file/cataloger/secrets/cataloger_test.go @@ -199,7 +199,7 @@ func TestSecretsCataloger(t *testing.T) { return } - loc := 
source.NewLocation(test.fixture) + loc := file.NewLocation(test.fixture) if _, exists := actualResults[loc.Coordinates]; !exists { t.Fatalf("could not find location=%q in results", loc) } @@ -437,7 +437,7 @@ j4f668YfhUbKdRF6S6734856 t.Fatalf("could not catalog: %+v", err) } - loc := source.NewLocation(test.fixture) + loc := file.NewLocation(test.fixture) if _, exists := actualResults[loc.Coordinates]; !exists && test.expected != nil { t.Fatalf("could not find location=%q in results", loc) } else if !exists && test.expected == nil { diff --git a/syft/file/cataloger/secrets/secrets_search_by_line_strategy.go b/syft/file/cataloger/secrets/secrets_search_by_line_strategy.go index c6cc01667..bfd2507d3 100644 --- a/syft/file/cataloger/secrets/secrets_search_by_line_strategy.go +++ b/syft/file/cataloger/secrets/secrets_search_by_line_strategy.go @@ -4,17 +4,18 @@ import ( "bufio" "errors" "fmt" - "github.com/anchore/syft/syft/file" "io" "io/ioutil" "regexp" + "github.com/anchore/syft/syft/file" + "github.com/anchore/syft/internal" "github.com/anchore/syft/syft/source" ) -func catalogLocationByLine(resolver source.FileResolver, location source.Location, patterns map[string]*regexp.Regexp) ([]file.SearchResult, error) { +func catalogLocationByLine(resolver source.FileResolver, location file.Location, patterns map[string]*regexp.Regexp) ([]file.SearchResult, error) { readCloser, err := resolver.FileContentsByLocation(location) if err != nil { return nil, fmt.Errorf("unable to fetch reader for location=%q : %w", location, err) @@ -46,7 +47,7 @@ func catalogLocationByLine(resolver source.FileResolver, location source.Locatio return allSecrets, nil } -func searchForSecretsWithinLine(resolver source.FileResolver, location source.Location, patterns map[string]*regexp.Regexp, line []byte, lineNo int64, position int64) ([]file.SearchResult, error) { +func searchForSecretsWithinLine(resolver source.FileResolver, location file.Location, patterns map[string]*regexp.Regexp, line 
[]byte, lineNo int64, position int64) ([]file.SearchResult, error) { var secrets []file.SearchResult for name, pattern := range patterns { matches := pattern.FindAllIndex(line, -1) @@ -75,7 +76,7 @@ func searchForSecretsWithinLine(resolver source.FileResolver, location source.Lo return secrets, nil } -func readerAtPosition(resolver source.FileResolver, location source.Location, seekPosition int64) (io.ReadCloser, error) { +func readerAtPosition(resolver source.FileResolver, location file.Location, seekPosition int64) (io.ReadCloser, error) { readCloser, err := resolver.FileContentsByLocation(location) if err != nil { return nil, fmt.Errorf("unable to fetch reader for location=%q : %w", location, err) diff --git a/syft/source/coordinates.go b/syft/file/coordinates.go similarity index 99% rename from syft/source/coordinates.go rename to syft/file/coordinates.go index 653670792..009f633df 100644 --- a/syft/source/coordinates.go +++ b/syft/file/coordinates.go @@ -1,4 +1,4 @@ -package source +package file import ( "fmt" diff --git a/syft/source/coordinates_test.go b/syft/file/coordinates_test.go similarity index 98% rename from syft/source/coordinates_test.go rename to syft/file/coordinates_test.go index e9f8a4a30..81609a033 100644 --- a/syft/source/coordinates_test.go +++ b/syft/file/coordinates_test.go @@ -1,4 +1,4 @@ -package source +package file import ( "testing" diff --git a/syft/source/file_details.go b/syft/file/get_xid.go similarity index 95% rename from syft/source/file_details.go rename to syft/file/get_xid.go index f656c071d..af459dec1 100644 --- a/syft/source/file_details.go +++ b/syft/file/get_xid.go @@ -1,7 +1,7 @@ //go:build linux || darwin // +build linux darwin -package source +package file import ( "os" diff --git a/syft/source/file_details_win.go b/syft/file/get_xid_win.go similarity index 92% rename from syft/source/file_details_win.go rename to syft/file/get_xid_win.go index 31fd05063..ddf39467b 100644 --- a/syft/source/file_details_win.go +++ 
b/syft/file/get_xid_win.go @@ -1,7 +1,7 @@ //go:build windows // +build windows -package source +package file import ( "os" diff --git a/syft/source/location.go b/syft/file/location.go similarity index 98% rename from syft/source/location.go rename to syft/file/location.go index 3f14ed413..cfde75cc7 100644 --- a/syft/source/location.go +++ b/syft/file/location.go @@ -1,4 +1,4 @@ -package source +package file import ( "fmt" @@ -109,3 +109,7 @@ func (l Location) String() string { } return fmt.Sprintf("Location<%s>", str) } + +func (l Location) Ref() file.Reference { + return l.ref +} diff --git a/syft/source/location_test.go b/syft/file/location_test.go similarity index 98% rename from syft/source/location_test.go rename to syft/file/location_test.go index eb3532058..59d927b07 100644 --- a/syft/source/location_test.go +++ b/syft/file/location_test.go @@ -1,4 +1,4 @@ -package source +package file import ( "testing" diff --git a/syft/source/file_metadata.go b/syft/file/metadata.go similarity index 74% rename from syft/source/file_metadata.go rename to syft/file/metadata.go index 432a1d8f4..fb8a72044 100644 --- a/syft/source/file_metadata.go +++ b/syft/file/metadata.go @@ -1,18 +1,16 @@ -package source +package file import ( "os" "github.com/anchore/stereoscope/pkg/file" - - "github.com/anchore/syft/internal/log" - "github.com/anchore/stereoscope/pkg/image" + "github.com/anchore/syft/internal/log" ) -type FileMetadata struct { +type Metadata struct { Mode os.FileMode - Type FileType + Type Type UserID int GroupID int LinkDestination string @@ -20,15 +18,15 @@ type FileMetadata struct { MIMEType string } -func fileMetadataByLocation(img *image.Image, location Location) (FileMetadata, error) { +func MetadataByLocation(img *image.Image, location Location) (Metadata, error) { entry, err := img.FileCatalog.Get(location.ref) if err != nil { - return FileMetadata{}, err + return Metadata{}, err } - return FileMetadata{ + return Metadata{ Mode: entry.Metadata.Mode, - Type: 
newFileTypeFromTarHeaderTypeFlag(entry.Metadata.TypeFlag), + Type: NewFileTypeFromTarHeaderTypeFlag(entry.Metadata.TypeFlag), UserID: entry.Metadata.UserID, GroupID: entry.Metadata.GroupID, LinkDestination: entry.Metadata.Linkname, @@ -37,7 +35,7 @@ func fileMetadataByLocation(img *image.Image, location Location) (FileMetadata, }, nil } -func fileMetadataFromPath(path string, info os.FileInfo, withMIMEType bool) FileMetadata { +func MetadataFromPath(path string, info os.FileInfo, withMIMEType bool) Metadata { var mimeType string uid, gid := GetXid(info) @@ -57,9 +55,9 @@ func fileMetadataFromPath(path string, info os.FileInfo, withMIMEType bool) File mimeType = file.MIMEType(f) } - return FileMetadata{ + return Metadata{ Mode: info.Mode(), - Type: newFileTypeFromMode(info.Mode()), + Type: NewFileTypeFromMode(info.Mode()), // unsupported across platforms UserID: uid, GroupID: gid, diff --git a/syft/source/file_metadata_test.go b/syft/file/metadata_test.go similarity index 93% rename from syft/source/file_metadata_test.go rename to syft/file/metadata_test.go index 3bdedb42f..5ec67cf5c 100644 --- a/syft/source/file_metadata_test.go +++ b/syft/file/metadata_test.go @@ -1,7 +1,7 @@ //go:build !windows // +build !windows -package source +package file import ( "os" @@ -49,7 +49,7 @@ func Test_fileMetadataFromPath(t *testing.T) { info, err := os.Lstat(test.path) require.NoError(t, err) - actual := fileMetadataFromPath(test.path, info, test.withMIMEType) + actual := MetadataFromPath(test.path, info, test.withMIMEType) assert.Equal(t, test.expectedMIMEType, actual.MIMEType) assert.Equal(t, test.expectedType, string(actual.Type)) }) diff --git a/syft/source/file_type.go b/syft/file/type.go similarity index 69% rename from syft/source/file_type.go rename to syft/file/type.go index 370ea0f8d..bd162fb07 100644 --- a/syft/source/file_type.go +++ b/syft/file/type.go @@ -1,4 +1,4 @@ -package source +package file import ( "archive/tar" @@ -6,23 +6,23 @@ import ( ) const ( - 
RegularFile FileType = "RegularFile" + RegularFile Type = "RegularFile" // IrregularFile is how syft defines files that are neither regular, symbolic or directory. // For ref: the seven standard Unix file types are regular, directory, symbolic link, // FIFO special, block special, character special, and socket as defined by POSIX. - IrregularFile FileType = "IrregularFile" - HardLink FileType = "HardLink" - SymbolicLink FileType = "SymbolicLink" - CharacterDevice FileType = "CharacterDevice" - BlockDevice FileType = "BlockDevice" - Directory FileType = "Directory" - FIFONode FileType = "FIFONode" - Socket FileType = "Socket" + IrregularFile Type = "IrregularFile" + HardLink Type = "HardLink" + SymbolicLink Type = "SymbolicLink" + CharacterDevice Type = "CharacterDevice" + BlockDevice Type = "BlockDevice" + Directory Type = "Directory" + FIFONode Type = "FIFONode" + Socket Type = "Socket" ) -type FileType string +type Type string -func newFileTypeFromTarHeaderTypeFlag(flag byte) FileType { +func NewFileTypeFromTarHeaderTypeFlag(flag byte) Type { switch flag { case tar.TypeReg, tar.TypeRegA: return RegularFile @@ -42,7 +42,7 @@ func newFileTypeFromTarHeaderTypeFlag(flag byte) FileType { return IrregularFile } -func newFileTypeFromMode(mode os.FileMode) FileType { +func NewFileTypeFromMode(mode os.FileMode) Type { switch { case isSet(mode, os.ModeSymlink): return SymbolicLink diff --git a/syft/pkg/catalog_test.go b/syft/pkg/catalog_test.go index 005714c0c..366f896f5 100644 --- a/syft/pkg/catalog_test.go +++ b/syft/pkg/catalog_test.go @@ -1,13 +1,12 @@ package pkg import ( + "github.com/anchore/syft/syft/file" "testing" "github.com/stretchr/testify/assert" "github.com/scylladb/go-set/strset" - - "github.com/anchore/syft/syft/source" ) type expectedIndexes struct { @@ -19,16 +18,16 @@ func TestCatalogAddPopulatesIndex(t *testing.T) { var pkgs = []Package{ { - Locations: []source.Location{ - source.NewVirtualLocation("/a/path", "/another/path"), - 
source.NewVirtualLocation("/b/path", "/bee/path"), + Locations: []file.Location{ + file.NewVirtualLocation("/a/path", "/another/path"), + file.NewVirtualLocation("/b/path", "/bee/path"), }, Type: RpmPkg, }, { - Locations: []source.Location{ - source.NewVirtualLocation("/c/path", "/another/path"), - source.NewVirtualLocation("/d/path", "/another/path"), + Locations: []file.Location{ + file.NewVirtualLocation("/c/path", "/another/path"), + file.NewVirtualLocation("/d/path", "/another/path"), }, Type: NpmPkg, }, @@ -106,17 +105,17 @@ func assertIndexes(t *testing.T, c *Catalog, expectedIndexes expectedIndexes) { func TestCatalog_PathIndexDeduplicatesRealVsVirtualPaths(t *testing.T) { p1 := Package{ - Locations: []source.Location{ - source.NewVirtualLocation("/b/path", "/another/path"), - source.NewVirtualLocation("/b/path", "/b/path"), + Locations: []file.Location{ + file.NewVirtualLocation("/b/path", "/another/path"), + file.NewVirtualLocation("/b/path", "/b/path"), }, Type: RpmPkg, Name: "Package-1", } p2 := Package{ - Locations: []source.Location{ - source.NewVirtualLocation("/b/path", "/b/path"), + Locations: []file.Location{ + file.NewVirtualLocation("/b/path", "/b/path"), }, Type: RpmPkg, Name: "Package-2", diff --git a/syft/pkg/cataloger/apkdb/parse_apk_db.go b/syft/pkg/cataloger/apkdb/parse_apk_db.go index bc6fb442e..5aae0f94b 100644 --- a/syft/pkg/cataloger/apkdb/parse_apk_db.go +++ b/syft/pkg/cataloger/apkdb/parse_apk_db.go @@ -3,12 +3,13 @@ package apkdb import ( "bufio" "fmt" - "github.com/anchore/syft/syft/pkg/cataloger/generic" "io" "path" "strconv" "strings" + "github.com/anchore/syft/syft/pkg/cataloger/generic" + "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" diff --git a/syft/pkg/cataloger/deb/cataloger.go b/syft/pkg/cataloger/deb/cataloger.go index f0c3b3ea8..9181626f6 100644 --- a/syft/pkg/cataloger/deb/cataloger.go +++ b/syft/pkg/cataloger/deb/cataloger.go @@ -10,6 +10,8 @@ import ( "path/filepath" "sort" + 
"github.com/anchore/syft/syft/file" + "github.com/anchore/syft/internal" "github.com/anchore/syft/internal/log" @@ -60,7 +62,7 @@ func (c *Cataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []arti for i := range pkgs { p := &pkgs[i] p.FoundBy = c.Name() - p.Locations = []source.Location{dbLocation} + p.Locations = []file.Location{dbLocation} // the current entry only has what may have been listed in the status file, however, there are additional // files that are listed in multiple other locations. We should retrieve them all and merge the file lists @@ -78,7 +80,7 @@ func (c *Cataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []arti return allPackages, nil, nil } -func addLicenses(resolver source.FileResolver, dbLocation source.Location, p *pkg.Package) { +func addLicenses(resolver source.FileResolver, dbLocation file.Location, p *pkg.Package) { // get license information from the copyright file copyrightReader, copyrightLocation := fetchCopyrightContents(resolver, dbLocation, p) @@ -92,7 +94,7 @@ func addLicenses(resolver source.FileResolver, dbLocation source.Location, p *pk } } -func mergeFileListing(resolver source.FileResolver, dbLocation source.Location, p *pkg.Package) { +func mergeFileListing(resolver source.FileResolver, dbLocation file.Location, p *pkg.Package) { metadata := p.Metadata.(pkg.DpkgMetadata) // get file listing (package files + additional config files) @@ -120,10 +122,10 @@ loopNewFiles: p.Locations = append(p.Locations, infoLocations...) 
} -func getAdditionalFileListing(resolver source.FileResolver, dbLocation source.Location, p *pkg.Package) ([]pkg.DpkgFileRecord, []source.Location) { +func getAdditionalFileListing(resolver source.FileResolver, dbLocation file.Location, p *pkg.Package) ([]pkg.DpkgFileRecord, []file.Location) { // ensure the default value for a collection is never nil since this may be shown as JSON var files = make([]pkg.DpkgFileRecord, 0) - var locations []source.Location + var locations []file.Location md5Reader, md5Location := fetchMd5Contents(resolver, dbLocation, p) @@ -150,7 +152,7 @@ func getAdditionalFileListing(resolver source.FileResolver, dbLocation source.Lo return files, locations } -func fetchMd5Contents(resolver source.FileResolver, dbLocation source.Location, p *pkg.Package) (io.ReadCloser, *source.Location) { +func fetchMd5Contents(resolver source.FileResolver, dbLocation file.Location, p *pkg.Package) (io.ReadCloser, *file.Location) { var md5Reader io.ReadCloser var err error @@ -177,7 +179,7 @@ func fetchMd5Contents(resolver source.FileResolver, dbLocation source.Location, return md5Reader, location } -func fetchConffileContents(resolver source.FileResolver, dbLocation source.Location, p *pkg.Package) (io.ReadCloser, *source.Location) { +func fetchConffileContents(resolver source.FileResolver, dbLocation file.Location, p *pkg.Package) (io.ReadCloser, *file.Location) { var reader io.ReadCloser var err error @@ -204,7 +206,7 @@ func fetchConffileContents(resolver source.FileResolver, dbLocation source.Locat return reader, location } -func fetchCopyrightContents(resolver source.FileResolver, dbLocation source.Location, p *pkg.Package) (io.ReadCloser, *source.Location) { +func fetchCopyrightContents(resolver source.FileResolver, dbLocation file.Location, p *pkg.Package) (io.ReadCloser, *file.Location) { // look for /usr/share/docs/NAME/copyright files name := p.Name copyrightPath := path.Join(docsPath, name, "copyright") diff --git 
a/syft/pkg/cataloger/generic/cataloger.go b/syft/pkg/cataloger/generic/cataloger.go index f0ee4460a..320306ecf 100644 --- a/syft/pkg/cataloger/generic/cataloger.go +++ b/syft/pkg/cataloger/generic/cataloger.go @@ -6,6 +6,8 @@ package generic import ( "fmt" + "github.com/anchore/syft/syft/file" + "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/internal" @@ -70,8 +72,8 @@ func (c *Cataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []arti } // SelectFiles takes a set of file trees and resolves and file references of interest for future cataloging -func (c *Cataloger) selectFiles(resolver source.FilePathResolver) map[source.Location]Parser { - var parserByLocation = make(map[source.Location]Parser) +func (c *Cataloger) selectFiles(resolver source.FilePathResolver) map[file.Location]Parser { + var parserByLocation = make(map[file.Location]Parser) // select by exact path for path, parser := range c.pathParsers { diff --git a/syft/pkg/cataloger/generic/parser.go b/syft/pkg/cataloger/generic/parser.go index af77dfbb6..3f4948c29 100644 --- a/syft/pkg/cataloger/generic/parser.go +++ b/syft/pkg/cataloger/generic/parser.go @@ -1,9 +1,10 @@ package generic import ( - "github.com/anchore/syft/syft/pkg" "io" + "github.com/anchore/syft/syft/pkg" + "github.com/anchore/syft/syft/artifact" ) diff --git a/syft/pkg/cataloger/golang/parse_go_bin.go b/syft/pkg/cataloger/golang/parse_go_bin.go index bcb33abda..c01707d32 100644 --- a/syft/pkg/cataloger/golang/parse_go_bin.go +++ b/syft/pkg/cataloger/golang/parse_go_bin.go @@ -6,8 +6,9 @@ import ( "io" "strings" + "github.com/anchore/syft/syft/file" + "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) const ( @@ -17,13 +18,13 @@ const ( type exeOpener func(file io.ReadCloser) ([]exe, error) -func newGoBinaryPackage(name, version, h1Digest, goVersion, architecture string, location source.Location) pkg.Package { +func newGoBinaryPackage(name, version, h1Digest, goVersion, architecture 
string, location file.Location) pkg.Package { p := pkg.Package{ Name: name, Version: version, Language: pkg.Go, Type: pkg.GoModulePkg, - Locations: []source.Location{ + Locations: []file.Location{ location, }, MetadataType: pkg.GolangBinMetadataType, @@ -39,7 +40,7 @@ func newGoBinaryPackage(name, version, h1Digest, goVersion, architecture string, return p } -func parseGoBin(location source.Location, reader io.ReadCloser, opener exeOpener) (pkgs []pkg.Package, err error) { +func parseGoBin(location file.Location, reader io.ReadCloser, opener exeOpener) (pkgs []pkg.Package, err error) { var exes []exe // it has been found that there are stdlib paths within openExe that can panic. We want to prevent this behavior // bubbling up and halting execution. For this reason we try to recover from any panic and return an error. @@ -62,7 +63,7 @@ func parseGoBin(location source.Location, reader io.ReadCloser, opener exeOpener return pkgs, err } -func buildGoPkgInfo(location source.Location, mod, goVersion, arch string) []pkg.Package { +func buildGoPkgInfo(location file.Location, mod, goVersion, arch string) []pkg.Package { pkgsSlice := make([]pkg.Package, 0) scanner := bufio.NewScanner(strings.NewReader(mod)) diff --git a/syft/pkg/cataloger/golang/parse_go_bin_test.go b/syft/pkg/cataloger/golang/parse_go_bin_test.go index 7bd53b35c..8c8707324 100644 --- a/syft/pkg/cataloger/golang/parse_go_bin_test.go +++ b/syft/pkg/cataloger/golang/parse_go_bin_test.go @@ -1,11 +1,11 @@ package golang import ( + "github.com/anchore/syft/syft/file" "io" "testing" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" "github.com/stretchr/testify/assert" ) @@ -35,9 +35,9 @@ func TestBuildGoPkgInfo(t *testing.T) { Version: "v0.2.1", Language: pkg.Go, Type: pkg.GoModulePkg, - Locations: []source.Location{ + Locations: []file.Location{ { - Coordinates: source.Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/a-path", FileSystemID: "layer-id", }, @@ -55,9 +55,9 @@ func 
TestBuildGoPkgInfo(t *testing.T) { Version: "v0.0.0-20210222170800-9c70f9b80bcf", Language: pkg.Go, Type: pkg.GoModulePkg, - Locations: []source.Location{ + Locations: []file.Location{ { - Coordinates: source.Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/a-path", FileSystemID: "layer-id", }, @@ -86,9 +86,9 @@ func TestBuildGoPkgInfo(t *testing.T) { Version: "v0.0.0-20211006190231-62292e806868", Language: pkg.Go, Type: pkg.GoModulePkg, - Locations: []source.Location{ + Locations: []file.Location{ { - Coordinates: source.Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/a-path", FileSystemID: "layer-id", }, @@ -106,9 +106,9 @@ func TestBuildGoPkgInfo(t *testing.T) { Version: "v0.0.0-20211006194710-c8a6f5223071", Language: pkg.Go, Type: pkg.GoModulePkg, - Locations: []source.Location{ + Locations: []file.Location{ { - Coordinates: source.Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/a-path", FileSystemID: "layer-id", }, @@ -126,9 +126,9 @@ func TestBuildGoPkgInfo(t *testing.T) { Version: "v0.0.0-20210916214954-140adaaadfaf", Language: pkg.Go, Type: pkg.GoModulePkg, - Locations: []source.Location{ + Locations: []file.Location{ { - Coordinates: source.Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/a-path", FileSystemID: "layer-id", }, @@ -151,8 +151,8 @@ func TestBuildGoPkgInfo(t *testing.T) { p := &test.expected[i] p.SetID() } - location := source.Location{ - Coordinates: source.Coordinates{ + location := file.Location{ + Coordinates: file.Coordinates{ RealPath: "/a-path", FileSystemID: "layer-id", }, @@ -178,7 +178,7 @@ func Test_parseGoBin_recoversFromPanic(t *testing.T) { } for _, test := range tests { t.Run(test.name, func(t *testing.T) { - pkgs, err := parseGoBin(source.NewLocation("some/path"), nil, freakOut) + pkgs, err := parseGoBin(file.NewLocation("some/path"), nil, freakOut) assert.Error(t, err) assert.Nil(t, pkgs) }) diff --git a/syft/pkg/cataloger/java/archive_parser.go b/syft/pkg/cataloger/java/archive_parser.go 
index 584bc9df4..270044f65 100644 --- a/syft/pkg/cataloger/java/archive_parser.go +++ b/syft/pkg/cataloger/java/archive_parser.go @@ -2,11 +2,12 @@ package java import ( "fmt" - "github.com/anchore/syft/internal/archive" "io" "path" "strings" + "github.com/anchore/syft/internal/archive" + "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" diff --git a/syft/pkg/cataloger/java/tar_wrapped_archive_parser.go b/syft/pkg/cataloger/java/tar_wrapped_archive_parser.go index 03e5a845e..868f5dd1d 100644 --- a/syft/pkg/cataloger/java/tar_wrapped_archive_parser.go +++ b/syft/pkg/cataloger/java/tar_wrapped_archive_parser.go @@ -2,9 +2,10 @@ package java import ( "fmt" + "io" + "github.com/anchore/syft/internal/archive" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "io" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/pkg" diff --git a/syft/pkg/cataloger/java/zip_wrapped_archive_parser.go b/syft/pkg/cataloger/java/zip_wrapped_archive_parser.go index 9346f83f2..366ebd05b 100644 --- a/syft/pkg/cataloger/java/zip_wrapped_archive_parser.go +++ b/syft/pkg/cataloger/java/zip_wrapped_archive_parser.go @@ -2,9 +2,10 @@ package java import ( "fmt" + "io" + "github.com/anchore/syft/internal/archive" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "io" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/pkg" diff --git a/syft/pkg/cataloger/javascript/parse_package_json.go b/syft/pkg/cataloger/javascript/parse_package_json.go index c715e909a..f3781492d 100644 --- a/syft/pkg/cataloger/javascript/parse_package_json.go +++ b/syft/pkg/cataloger/javascript/parse_package_json.go @@ -4,10 +4,11 @@ import ( "encoding/json" "errors" "fmt" - "github.com/anchore/syft/syft/pkg/cataloger/generic" "io" "regexp" + "github.com/anchore/syft/syft/pkg/cataloger/generic" + "github.com/anchore/syft/internal/log" "github.com/anchore/syft/internal" diff --git 
a/syft/pkg/cataloger/javascript/parse_package_lock.go b/syft/pkg/cataloger/javascript/parse_package_lock.go index 19a6396c7..273b63614 100644 --- a/syft/pkg/cataloger/javascript/parse_package_lock.go +++ b/syft/pkg/cataloger/javascript/parse_package_lock.go @@ -3,9 +3,10 @@ package javascript import ( "encoding/json" "fmt" - "github.com/anchore/syft/syft/pkg/cataloger/generic" "io" + "github.com/anchore/syft/syft/pkg/cataloger/generic" + "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/pkg" ) diff --git a/syft/pkg/cataloger/javascript/parse_yarn_lock.go b/syft/pkg/cataloger/javascript/parse_yarn_lock.go index 16b38c59b..36da82696 100644 --- a/syft/pkg/cataloger/javascript/parse_yarn_lock.go +++ b/syft/pkg/cataloger/javascript/parse_yarn_lock.go @@ -3,10 +3,11 @@ package javascript import ( "bufio" "fmt" - "github.com/anchore/syft/syft/pkg/cataloger/generic" "io" "regexp" + "github.com/anchore/syft/syft/pkg/cataloger/generic" + "github.com/anchore/syft/internal" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/pkg" diff --git a/syft/pkg/cataloger/packages/catalog.go b/syft/pkg/cataloger/packages/catalog.go index ca7413edd..2e0271700 100644 --- a/syft/pkg/cataloger/packages/catalog.go +++ b/syft/pkg/cataloger/packages/catalog.go @@ -2,6 +2,7 @@ package packages import ( "fmt" + "github.com/anchore/syft/internal/bus" "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" diff --git a/syft/pkg/cataloger/packages/catalogers.go b/syft/pkg/cataloger/packages/catalogers.go index 090007308..8b6420c30 100644 --- a/syft/pkg/cataloger/packages/catalogers.go +++ b/syft/pkg/cataloger/packages/catalogers.go @@ -69,13 +69,13 @@ func IndexCatalogers(cfg SearchConfig) []pkg.Cataloger { } } -func CatalogersBySourceScheme(scheme source.Scheme, cfg SearchConfig) []pkg.Cataloger { +func CatalogersBySourceScheme(scheme source.Type, cfg SearchConfig) []pkg.Cataloger { switch scheme { - case source.ImageScheme: + case 
source.ImageType: return InstalledCatalogers(cfg) - case source.FileScheme: + case source.FileType: return AllCatalogers(cfg) - case source.DirectoryScheme: + case source.DirectoryType: return IndexCatalogers(cfg) } return nil diff --git a/syft/pkg/cataloger/php/parse_installed_json.go b/syft/pkg/cataloger/php/parse_installed_json.go index 42c997acc..8fcc54ca2 100644 --- a/syft/pkg/cataloger/php/parse_installed_json.go +++ b/syft/pkg/cataloger/php/parse_installed_json.go @@ -3,9 +3,10 @@ package php import ( "encoding/json" "fmt" - "github.com/anchore/syft/syft/pkg/cataloger/generic" "io" + "github.com/anchore/syft/syft/pkg/cataloger/generic" + "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/pkg" ) diff --git a/syft/pkg/cataloger/python/package_cataloger.go b/syft/pkg/cataloger/python/package_cataloger.go index ac0a214bc..c546c73f9 100644 --- a/syft/pkg/cataloger/python/package_cataloger.go +++ b/syft/pkg/cataloger/python/package_cataloger.go @@ -7,6 +7,8 @@ import ( "io/ioutil" "path/filepath" + "github.com/anchore/syft/syft/file" + "github.com/anchore/syft/internal" "github.com/anchore/syft/syft/artifact" @@ -35,7 +37,7 @@ func (c *PackageCataloger) Name() string { // Catalog is given an object to resolve file references and content, this function returns any discovered Packages after analyzing python egg and wheel installations. func (c *PackageCataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []artifact.Relationship, error) { - var fileMatches []source.Location + var fileMatches []file.Location for _, glob := range []string{eggMetadataGlob, wheelMetadataGlob, eggFileMetadataGlob} { matches, err := resolver.FilesByGlob(glob) @@ -59,7 +61,7 @@ func (c *PackageCataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, } // catalogEggOrWheel takes the primary metadata file reference and returns the python package it represents. 
-func (c *PackageCataloger) catalogEggOrWheel(resolver source.FileResolver, metadataLocation source.Location) (*pkg.Package, error) { +func (c *PackageCataloger) catalogEggOrWheel(resolver source.FileResolver, metadataLocation file.Location) (*pkg.Package, error) { metadata, sources, err := c.assembleEggOrWheelMetadata(resolver, metadataLocation) if err != nil { return nil, err @@ -94,7 +96,7 @@ func (c *PackageCataloger) catalogEggOrWheel(resolver source.FileResolver, metad } // fetchRecordFiles finds a corresponding RECORD file for the given python package metadata file and returns the set of file records contained. -func (c *PackageCataloger) fetchRecordFiles(resolver source.FileResolver, metadataLocation source.Location) (files []pkg.PythonFileRecord, sources []source.Location, err error) { +func (c *PackageCataloger) fetchRecordFiles(resolver source.FileResolver, metadataLocation file.Location) (files []pkg.PythonFileRecord, sources []file.Location, err error) { // we've been given a file reference to a specific wheel METADATA file. note: this may be for a directory // or for an image... for an image the METADATA file may be present within multiple layers, so it is important // to reconcile the RECORD path to the same layer (or the next adjacent lower layer). @@ -124,7 +126,7 @@ func (c *PackageCataloger) fetchRecordFiles(resolver source.FileResolver, metada } // fetchTopLevelPackages finds a corresponding top_level.txt file for the given python package metadata file and returns the set of package names contained. 
-func (c *PackageCataloger) fetchTopLevelPackages(resolver source.FileResolver, metadataLocation source.Location) (pkgs []string, sources []source.Location, err error) { +func (c *PackageCataloger) fetchTopLevelPackages(resolver source.FileResolver, metadataLocation file.Location) (pkgs []string, sources []file.Location, err error) { // a top_level.txt file specifies the python top-level packages (provided by this python package) installed into site-packages parentDir := filepath.Dir(metadataLocation.RealPath) topLevelPath := filepath.Join(parentDir, "top_level.txt") @@ -154,7 +156,7 @@ func (c *PackageCataloger) fetchTopLevelPackages(resolver source.FileResolver, m return pkgs, sources, nil } -func (c *PackageCataloger) fetchDirectURLData(resolver source.FileResolver, metadataLocation source.Location) (d *pkg.PythonDirectURLOriginInfo, sources []source.Location, err error) { +func (c *PackageCataloger) fetchDirectURLData(resolver source.FileResolver, metadataLocation file.Location) (d *pkg.PythonDirectURLOriginInfo, sources []file.Location, err error) { parentDir := filepath.Dir(metadataLocation.RealPath) directURLPath := filepath.Join(parentDir, "direct_url.json") directURLLocation := resolver.RelativeFileByPath(metadataLocation, directURLPath) @@ -189,8 +191,8 @@ func (c *PackageCataloger) fetchDirectURLData(resolver source.FileResolver, meta } // assembleEggOrWheelMetadata discovers and accumulates python package metadata from multiple file sources and returns a single metadata object as well as a list of files where the metadata was derived from. 
-func (c *PackageCataloger) assembleEggOrWheelMetadata(resolver source.FileResolver, metadataLocation source.Location) (*pkg.PythonPackageMetadata, []source.Location, error) { - var sources = []source.Location{metadataLocation} +func (c *PackageCataloger) assembleEggOrWheelMetadata(resolver source.FileResolver, metadataLocation file.Location) (*pkg.PythonPackageMetadata, []file.Location, error) { + var sources = []file.Location{metadataLocation} metadataContents, err := resolver.FileContentsByLocation(metadataLocation) if err != nil { diff --git a/syft/pkg/cataloger/python/parse_pipfile_lock.go b/syft/pkg/cataloger/python/parse_pipfile_lock.go index 9362395cd..ecf044760 100644 --- a/syft/pkg/cataloger/python/parse_pipfile_lock.go +++ b/syft/pkg/cataloger/python/parse_pipfile_lock.go @@ -3,11 +3,12 @@ package python import ( "encoding/json" "fmt" - "github.com/anchore/syft/syft/pkg/cataloger/generic" "io" "sort" "strings" + "github.com/anchore/syft/syft/pkg/cataloger/generic" + "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/pkg" ) diff --git a/syft/pkg/cataloger/python/parse_poetry_lock.go b/syft/pkg/cataloger/python/parse_poetry_lock.go index 3bb863cde..c3beef4b4 100644 --- a/syft/pkg/cataloger/python/parse_poetry_lock.go +++ b/syft/pkg/cataloger/python/parse_poetry_lock.go @@ -2,9 +2,10 @@ package python import ( "fmt" - "github.com/anchore/syft/syft/pkg/cataloger/generic" "io" + "github.com/anchore/syft/syft/pkg/cataloger/generic" + "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/pkg" "github.com/pelletier/go-toml" diff --git a/syft/pkg/cataloger/python/parse_requirements.go b/syft/pkg/cataloger/python/parse_requirements.go index 4e90be0af..422a0fcd1 100644 --- a/syft/pkg/cataloger/python/parse_requirements.go +++ b/syft/pkg/cataloger/python/parse_requirements.go @@ -3,10 +3,11 @@ package python import ( "bufio" "fmt" - "github.com/anchore/syft/syft/pkg/cataloger/generic" "io" "strings" + 
"github.com/anchore/syft/syft/pkg/cataloger/generic" + "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/pkg" ) diff --git a/syft/pkg/cataloger/python/parse_setup.go b/syft/pkg/cataloger/python/parse_setup.go index 9e70b3986..ecddda95c 100644 --- a/syft/pkg/cataloger/python/parse_setup.go +++ b/syft/pkg/cataloger/python/parse_setup.go @@ -2,11 +2,12 @@ package python import ( "bufio" - "github.com/anchore/syft/syft/pkg/cataloger/generic" "io" "regexp" "strings" + "github.com/anchore/syft/syft/pkg/cataloger/generic" + "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/pkg" ) diff --git a/syft/pkg/cataloger/python/parse_wheel_egg_metadata.go b/syft/pkg/cataloger/python/parse_wheel_egg_metadata.go index 0955d0172..de419550e 100644 --- a/syft/pkg/cataloger/python/parse_wheel_egg_metadata.go +++ b/syft/pkg/cataloger/python/parse_wheel_egg_metadata.go @@ -3,11 +3,12 @@ package python import ( "bufio" "fmt" - "github.com/anchore/syft/internal" "io" "path/filepath" "strings" + "github.com/anchore/syft/internal" + "github.com/anchore/syft/internal/log" "github.com/mitchellh/mapstructure" diff --git a/syft/pkg/cataloger/rpmdb/parse_rpmdb.go b/syft/pkg/cataloger/rpmdb/parse_rpmdb.go index 74f694c09..3c4245b8a 100644 --- a/syft/pkg/cataloger/rpmdb/parse_rpmdb.go +++ b/syft/pkg/cataloger/rpmdb/parse_rpmdb.go @@ -16,7 +16,7 @@ import ( ) // parseApkDb parses an "Packages" RPM DB and returns the Packages listed within it. 
-func parseRpmDB(resolver source.FilePathResolver, dbLocation source.Location, reader io.Reader) ([]pkg.Package, error) { +func parseRpmDB(resolver source.FilePathResolver, dbLocation file.Location, reader io.Reader) ([]pkg.Package, error) { f, err := ioutil.TempFile("", internal.ApplicationName+"-rpmdb") if err != nil { return nil, fmt.Errorf("failed to create temp rpmdb file: %w", err) @@ -63,7 +63,7 @@ func parseRpmDB(resolver source.FilePathResolver, dbLocation source.Location, re p := pkg.Package{ Name: entry.Name, Version: toELVersion(metadata), - Locations: []source.Location{dbLocation}, + Locations: []file.Location{dbLocation}, FoundBy: catalogerName, Type: pkg.RpmPkg, MetadataType: pkg.RpmdbMetadataType, diff --git a/syft/pkg/cataloger/rpmdb/parse_rpmdb_test.go b/syft/pkg/cataloger/rpmdb/parse_rpmdb_test.go index 0bad62396..573aa7936 100644 --- a/syft/pkg/cataloger/rpmdb/parse_rpmdb_test.go +++ b/syft/pkg/cataloger/rpmdb/parse_rpmdb_test.go @@ -9,8 +9,6 @@ import ( "github.com/anchore/syft/syft/file" - "github.com/anchore/syft/syft/source" - "github.com/anchore/syft/syft/pkg" "github.com/go-test/deep" ) @@ -29,34 +27,34 @@ func (r rpmdbTestFileResolverMock) HasPath(path string) bool { return !r.ignorePaths } -func (r *rpmdbTestFileResolverMock) FilesByPath(paths ...string) ([]source.Location, error) { +func (r *rpmdbTestFileResolverMock) FilesByPath(paths ...string) ([]file.Location, error) { if r.ignorePaths { // act as if no paths exist return nil, nil } // act as if all files exist - var locations = make([]source.Location, len(paths)) + var locations = make([]file.Location, len(paths)) for i, p := range paths { - locations[i] = source.NewLocation(p) + locations[i] = file.NewLocation(p) } return locations, nil } -func (r *rpmdbTestFileResolverMock) FilesByGlob(...string) ([]source.Location, error) { +func (r *rpmdbTestFileResolverMock) FilesByGlob(...string) ([]file.Location, error) { return nil, fmt.Errorf("not implemented") } -func (r 
*rpmdbTestFileResolverMock) RelativeFileByPath(source.Location, string) *source.Location { +func (r *rpmdbTestFileResolverMock) RelativeFileByPath(file.Location, string) *file.Location { panic(fmt.Errorf("not implemented")) return nil } -func (r *rpmdbTestFileResolverMock) FilesByMIMEType(...string) ([]source.Location, error) { +func (r *rpmdbTestFileResolverMock) FilesByMIMEType(...string) ([]file.Location, error) { return nil, fmt.Errorf("not implemented") } func TestParseRpmDB(t *testing.T) { - dbLocation := source.NewLocation("test-path") + dbLocation := file.NewLocation("test-path") tests := []struct { fixture string @@ -71,7 +69,7 @@ func TestParseRpmDB(t *testing.T) { "dive": { Name: "dive", Version: "0.9.2-1", - Locations: []source.Location{dbLocation}, + Locations: []file.Location{dbLocation}, FoundBy: catalogerName, Type: pkg.RpmPkg, MetadataType: pkg.RpmdbMetadataType, @@ -98,7 +96,7 @@ func TestParseRpmDB(t *testing.T) { "dive": { Name: "dive", Version: "0.9.2-1", - Locations: []source.Location{dbLocation}, + Locations: []file.Location{dbLocation}, FoundBy: catalogerName, Type: pkg.RpmPkg, MetadataType: pkg.RpmdbMetadataType, diff --git a/syft/pkg/cataloger/ruby/parse_gemfile_lock.go b/syft/pkg/cataloger/ruby/parse_gemfile_lock.go index 5fb50884e..bf351739c 100644 --- a/syft/pkg/cataloger/ruby/parse_gemfile_lock.go +++ b/syft/pkg/cataloger/ruby/parse_gemfile_lock.go @@ -2,10 +2,11 @@ package ruby import ( "bufio" - "github.com/anchore/syft/syft/pkg/cataloger/generic" "io" "strings" + "github.com/anchore/syft/syft/pkg/cataloger/generic" + "github.com/anchore/syft/internal" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/pkg" diff --git a/syft/pkg/cataloger/ruby/parse_gemspec.go b/syft/pkg/cataloger/ruby/parse_gemspec.go index 6bfc8ea0a..3e7680139 100644 --- a/syft/pkg/cataloger/ruby/parse_gemspec.go +++ b/syft/pkg/cataloger/ruby/parse_gemspec.go @@ -4,11 +4,12 @@ import ( "bufio" "encoding/json" "fmt" - 
"github.com/anchore/syft/syft/pkg/cataloger/generic" "io" "regexp" "strings" + "github.com/anchore/syft/syft/pkg/cataloger/generic" + "github.com/anchore/syft/internal" "github.com/mitchellh/mapstructure" diff --git a/syft/pkg/cataloger/rust/parse_cargo_lock.go b/syft/pkg/cataloger/rust/parse_cargo_lock.go index f9a8c57ed..ac6f286be 100644 --- a/syft/pkg/cataloger/rust/parse_cargo_lock.go +++ b/syft/pkg/cataloger/rust/parse_cargo_lock.go @@ -2,9 +2,10 @@ package rust import ( "fmt" - "github.com/anchore/syft/syft/pkg/cataloger/generic" "io" + "github.com/anchore/syft/syft/pkg/cataloger/generic" + "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/pkg" "github.com/pelletier/go-toml" diff --git a/syft/pkg/package.go b/syft/pkg/package.go index 6215bf594..efd353021 100644 --- a/syft/pkg/package.go +++ b/syft/pkg/package.go @@ -6,26 +6,27 @@ package pkg import ( "fmt" + "github.com/anchore/syft/syft/file" + "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" - "github.com/anchore/syft/syft/source" ) // Package represents an application or library that has been bundled into a distributable format. // TODO: if we ignore FoundBy for ID generation should we merge the field to show it was found in two places? type Package struct { - id artifact.ID `hash:"ignore"` - Name string // the package name - Version string // the version of the package - FoundBy string `cyclonedx:"foundBy"` // the specific cataloger that discovered this package - Locations []source.Location // the locations that lead to the discovery of this package (note: this is not necessarily the locations that make up this package) - Licenses []string // licenses discovered with the package metadata - Language Language `cyclonedx:"language"` // the language ecosystem this package belongs to (e.g. JavaScript, Python, etc) - Type Type `cyclonedx:"type"` // the package type (e.g. 
Npm, Yarn, Python, Rpm, Deb, etc) - CPEs []CPE `hash:"ignore"` // all possible Common Platform Enumerators (note: this is NOT included in the definition of the ID since all fields on a CPE are derived from other fields) - PURL string `hash:"ignore"` // the Package URL (see https://github.com/package-url/purl-spec) (note: this is NOT included in the definition of the ID since all fields on a pURL are derived from other fields) - MetadataType MetadataType `cyclonedx:"metadataType"` // the shape of the additional data in the "metadata" field - Metadata interface{} // additional data found while parsing the package source + id artifact.ID `hash:"ignore"` + Name string // the package name + Version string // the version of the package + FoundBy string `cyclonedx:"foundBy"` // the specific cataloger that discovered this package + Locations []file.Location // the locations that lead to the discovery of this package (note: this is not necessarily the locations that make up this package) + Licenses []string // licenses discovered with the package metadata + Language Language `cyclonedx:"language"` // the language ecosystem this package belongs to (e.g. JavaScript, Python, etc) + Type Type `cyclonedx:"type"` // the package type (e.g. 
Npm, Yarn, Python, Rpm, Deb, etc) + CPEs []CPE `hash:"ignore"` // all possible Common Platform Enumerators (note: this is NOT included in the definition of the ID since all fields on a CPE are derived from other fields) + PURL string `hash:"ignore"` // the Package URL (see https://github.com/package-url/purl-spec) (note: this is NOT included in the definition of the ID since all fields on a pURL are derived from other fields) + MetadataType MetadataType `cyclonedx:"metadataType"` // the shape of the additional data in the "metadata" field + Metadata interface{} // additional data found while parsing the package source } func (p *Package) SetID() { diff --git a/syft/pkg/package_test.go b/syft/pkg/package_test.go index 228726a37..fc3332721 100644 --- a/syft/pkg/package_test.go +++ b/syft/pkg/package_test.go @@ -1,9 +1,9 @@ package pkg import ( + "github.com/anchore/syft/syft/file" "testing" - "github.com/anchore/syft/syft/source" "github.com/stretchr/testify/assert" ) @@ -12,9 +12,9 @@ func TestFingerprint(t *testing.T) { Name: "pi", Version: "3.14", FoundBy: "Archimedes", - Locations: []source.Location{ + Locations: []file.Location{ { - Coordinates: source.Coordinates{ + Coordinates: file.Coordinates{ RealPath: "39.0742° N, 21.8243° E", FileSystemID: "Earth", }, diff --git a/syft/pkg/relationships_by_file_ownership_test.go b/syft/pkg/relationships_by_file_ownership_test.go index 69867f270..c2aacf3b1 100644 --- a/syft/pkg/relationships_by_file_ownership_test.go +++ b/syft/pkg/relationships_by_file_ownership_test.go @@ -1,10 +1,10 @@ package pkg import ( + "github.com/anchore/syft/syft/file" "testing" "github.com/anchore/syft/syft/artifact" - "github.com/anchore/syft/syft/source" "github.com/stretchr/testify/assert" ) @@ -18,9 +18,9 @@ func TestOwnershipByFilesRelationship(t *testing.T) { name: "owns-by-real-path", setup: func(t testing.TB) ([]Package, []artifact.Relationship) { parent := Package{ - Locations: []source.Location{ - source.NewVirtualLocation("/a/path", 
"/another/path"), - source.NewVirtualLocation("/b/path", "/bee/path"), + Locations: []file.Location{ + file.NewVirtualLocation("/a/path", "/another/path"), + file.NewVirtualLocation("/b/path", "/bee/path"), }, Type: RpmPkg, MetadataType: RpmdbMetadataType, @@ -35,9 +35,9 @@ func TestOwnershipByFilesRelationship(t *testing.T) { parent.SetID() child := Package{ - Locations: []source.Location{ - source.NewVirtualLocation("/c/path", "/another/path"), - source.NewVirtualLocation("/d/path", "/another/path"), + Locations: []file.Location{ + file.NewVirtualLocation("/c/path", "/another/path"), + file.NewVirtualLocation("/d/path", "/another/path"), }, Type: NpmPkg, } @@ -61,9 +61,9 @@ func TestOwnershipByFilesRelationship(t *testing.T) { name: "owns-by-virtual-path", setup: func(t testing.TB) ([]Package, []artifact.Relationship) { parent := Package{ - Locations: []source.Location{ - source.NewVirtualLocation("/a/path", "/some/other/path"), - source.NewVirtualLocation("/b/path", "/bee/path"), + Locations: []file.Location{ + file.NewVirtualLocation("/a/path", "/some/other/path"), + file.NewVirtualLocation("/b/path", "/bee/path"), }, Type: RpmPkg, MetadataType: RpmdbMetadataType, @@ -78,9 +78,9 @@ func TestOwnershipByFilesRelationship(t *testing.T) { parent.SetID() child := Package{ - Locations: []source.Location{ - source.NewVirtualLocation("/c/path", "/another/path"), - source.NewLocation("/d/path"), + Locations: []file.Location{ + file.NewVirtualLocation("/c/path", "/another/path"), + file.NewLocation("/d/path"), }, Type: NpmPkg, } @@ -103,9 +103,9 @@ func TestOwnershipByFilesRelationship(t *testing.T) { name: "ignore-empty-path", setup: func(t testing.TB) ([]Package, []artifact.Relationship) { parent := Package{ - Locations: []source.Location{ - source.NewVirtualLocation("/a/path", "/some/other/path"), - source.NewVirtualLocation("/b/path", "/bee/path"), + Locations: []file.Location{ + file.NewVirtualLocation("/a/path", "/some/other/path"), + 
file.NewVirtualLocation("/b/path", "/bee/path"), }, Type: RpmPkg, MetadataType: RpmdbMetadataType, @@ -121,9 +121,9 @@ func TestOwnershipByFilesRelationship(t *testing.T) { parent.SetID() child := Package{ - Locations: []source.Location{ - source.NewVirtualLocation("/c/path", "/another/path"), - source.NewLocation("/d/path"), + Locations: []file.Location{ + file.NewVirtualLocation("/c/path", "/another/path"), + file.NewLocation("/d/path"), }, Type: NpmPkg, } diff --git a/syft/sbom/sbom.go b/syft/sbom/sbom.go index 76daf9d47..07e40c0c0 100644 --- a/syft/sbom/sbom.go +++ b/syft/sbom/sbom.go @@ -17,11 +17,11 @@ type SBOM struct { type Artifacts struct { PackageCatalog *pkg.Catalog - FileMetadata map[source.Coordinates]source.FileMetadata - FileDigests map[source.Coordinates][]file.Digest - FileClassifications map[source.Coordinates][]file.Classification - FileContents map[source.Coordinates]string - Secrets map[source.Coordinates][]file.SearchResult + FileMetadata map[file.Coordinates]file.Metadata + FileDigests map[file.Coordinates][]file.Digest + FileClassifications map[file.Coordinates][]file.Classification + FileContents map[file.Coordinates]string + Secrets map[file.Coordinates][]file.SearchResult LinuxDistribution *linux.Release } @@ -31,8 +31,8 @@ type Descriptor struct { Configuration interface{} } -func AllCoordinates(sbom SBOM) []source.Coordinates { - set := source.NewCoordinateSet() +func AllCoordinates(sbom SBOM) []file.Coordinates { + set := file.NewCoordinateSet() for coordinates := range sbom.Artifacts.FileMetadata { set.Add(coordinates) } @@ -53,12 +53,12 @@ func AllCoordinates(sbom SBOM) []source.Coordinates { return set.ToSlice() } -func extractCoordinates(relationship artifact.Relationship) (results []source.Coordinates) { - if coordinates, exists := relationship.From.(source.Coordinates); exists { +func extractCoordinates(relationship artifact.Relationship) (results []file.Coordinates) { + if coordinates, exists := 
relationship.From.(file.Coordinates); exists { results = append(results, coordinates) } - if coordinates, exists := relationship.To.(source.Coordinates); exists { + if coordinates, exists := relationship.To.(file.Coordinates); exists { results = append(results, coordinates) } diff --git a/syft/source/all_layers_resolver.go b/syft/source/all_layers_resolver.go index 4293dc825..9d540ce44 100644 --- a/syft/source/all_layers_resolver.go +++ b/syft/source/all_layers_resolver.go @@ -5,10 +5,11 @@ import ( "fmt" "io" - "github.com/anchore/stereoscope/pkg/file" + stereoscopeFile "github.com/anchore/stereoscope/pkg/file" "github.com/anchore/stereoscope/pkg/filetree" "github.com/anchore/stereoscope/pkg/image" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/syft/file" ) var _ FileResolver = (*allLayersResolver)(nil) @@ -37,7 +38,7 @@ func newAllLayersResolver(img *image.Image) (*allLayersResolver, error) { // HasPath indicates if the given path exists in the underlying source. func (r *allLayersResolver) HasPath(path string) bool { - p := file.Path(path) + p := stereoscopeFile.Path(path) for _, layerIdx := range r.layers { tree := r.img.Layers[layerIdx].Tree if tree.HasPath(p) { @@ -47,8 +48,8 @@ func (r *allLayersResolver) HasPath(path string) bool { return false } -func (r *allLayersResolver) fileByRef(ref file.Reference, uniqueFileIDs file.ReferenceSet, layerIdx int) ([]file.Reference, error) { - uniqueFiles := make([]file.Reference, 0) +func (r *allLayersResolver) fileByRef(ref stereoscopeFile.Reference, uniqueFileIDs stereoscopeFile.ReferenceSet, layerIdx int) ([]stereoscopeFile.Reference, error) { + uniqueFiles := make([]stereoscopeFile.Reference, 0) // since there is potentially considerable work for each symlink/hardlink that needs to be resolved, let's check to see if this is a symlink/hardlink first entry, err := r.img.FileCatalog.Get(ref) @@ -77,15 +78,15 @@ func (r *allLayersResolver) fileByRef(ref file.Reference, uniqueFileIDs file.Ref return 
uniqueFiles, nil } -// FilesByPath returns all file.References that match the given paths from any layer in the image. -func (r *allLayersResolver) FilesByPath(paths ...string) ([]Location, error) { - uniqueFileIDs := file.NewFileReferenceSet() - uniqueLocations := make([]Location, 0) +// FilesByPath returns all stereoscopeFile.References that match the given paths from any layer in the image. +func (r *allLayersResolver) FilesByPath(paths ...string) ([]file.Location, error) { + uniqueFileIDs := stereoscopeFile.NewFileReferenceSet() + uniqueLocations := make([]file.Location, 0) for _, path := range paths { for idx, layerIdx := range r.layers { tree := r.img.Layers[layerIdx].Tree - _, ref, err := tree.File(file.Path(path), filetree.FollowBasenameLinks, filetree.DoNotFollowDeadBasenameLinks) + _, ref, err := tree.File(stereoscopeFile.Path(path), filetree.FollowBasenameLinks, filetree.DoNotFollowDeadBasenameLinks) if err != nil { return nil, err } @@ -112,17 +113,17 @@ func (r *allLayersResolver) FilesByPath(paths ...string) ([]Location, error) { return nil, err } for _, result := range results { - uniqueLocations = append(uniqueLocations, NewLocationFromImage(path, result, r.img)) + uniqueLocations = append(uniqueLocations, file.NewLocationFromImage(path, result, r.img)) } } } return uniqueLocations, nil } -// FilesByGlob returns all file.References that match the given path glob pattern from any layer in the image. -func (r *allLayersResolver) FilesByGlob(patterns ...string) ([]Location, error) { - uniqueFileIDs := file.NewFileReferenceSet() - uniqueLocations := make([]Location, 0) +// FilesByGlob returns all stereoscopeFile.References that match the given path glob pattern from any layer in the image. 
+func (r *allLayersResolver) FilesByGlob(patterns ...string) ([]file.Location, error) { + uniqueFileIDs := stereoscopeFile.NewFileReferenceSet() + uniqueLocations := make([]file.Location, 0) for _, pattern := range patterns { for idx, layerIdx := range r.layers { @@ -150,7 +151,7 @@ func (r *allLayersResolver) FilesByGlob(patterns ...string) ([]Location, error) return nil, err } for _, refResult := range refResults { - uniqueLocations = append(uniqueLocations, NewLocationFromImage(string(result.MatchPath), refResult, r.img)) + uniqueLocations = append(uniqueLocations, file.NewLocationFromImage(string(result.MatchPath), refResult, r.img)) } } } @@ -160,14 +161,14 @@ func (r *allLayersResolver) FilesByGlob(patterns ...string) ([]Location, error) } // RelativeFileByPath fetches a single file at the given path relative to the layer squash of the given reference. -// This is helpful when attempting to find a file that is in the same layer or lower as another file. -func (r *allLayersResolver) RelativeFileByPath(location Location, path string) *Location { - entry, err := r.img.FileCatalog.Get(location.ref) +// This is helpful when attempting to find a file that is in the same layer or lower as another file.
+func (r *allLayersResolver) RelativeFileByPath(location file.Location, path string) *file.Location { + entry, err := r.img.FileCatalog.Get(location.Ref()) if err != nil { return nil } - exists, relativeRef, err := entry.Layer.SquashedTree.File(file.Path(path), filetree.FollowBasenameLinks) + exists, relativeRef, err := entry.Layer.SquashedTree.File(stereoscopeFile.Path(path), filetree.FollowBasenameLinks) if err != nil { log.Errorf("failed to find path=%q in squash: %+w", path, err) return nil @@ -176,15 +177,15 @@ func (r *allLayersResolver) RelativeFileByPath(location Location, path string) * return nil } - relativeLocation := NewLocationFromImage(path, *relativeRef, r.img) + relativeLocation := file.NewLocationFromImage(path, *relativeRef, r.img) return &relativeLocation } // FileContentsByLocation fetches file contents for a single file reference, irregardless of the source layer. // If the path does not exist an error is returned. -func (r *allLayersResolver) FileContentsByLocation(location Location) (io.ReadCloser, error) { - entry, err := r.img.FileCatalog.Get(location.ref) +func (r *allLayersResolver) FileContentsByLocation(location file.Location) (io.ReadCloser, error) { + entry, err := r.img.FileCatalog.Get(location.Ref()) if err != nil { return nil, fmt.Errorf("unable to get metadata for path=%q from file catalog: %w", location.RealPath, err) } @@ -200,11 +201,11 @@ func (r *allLayersResolver) FileContentsByLocation(location Location) (io.ReadCl location = *newLocation } - return r.img.FileContentsByRef(location.ref) + return r.img.FileContentsByRef(location.Ref()) } -func (r *allLayersResolver) FilesByMIMEType(types ...string) ([]Location, error) { - var locations []Location +func (r *allLayersResolver) FilesByMIMEType(types ...string) ([]file.Location, error) { + var locations []file.Location for _, layerIdx := range r.layers { layer := r.img.Layers[layerIdx] @@ -214,27 +215,27 @@ func (r *allLayersResolver) FilesByMIMEType(types ...string) 
([]Location, error) } for _, ref := range refs { - locations = append(locations, NewLocationFromImage(string(ref.RealPath), ref, r.img)) + locations = append(locations, file.NewLocationFromImage(string(ref.RealPath), ref, r.img)) } } return locations, nil } -func (r *allLayersResolver) AllLocations() <-chan Location { - results := make(chan Location) +func (r *allLayersResolver) AllLocations() <-chan file.Location { + results := make(chan file.Location) go func() { defer close(results) for _, layerIdx := range r.layers { tree := r.img.Layers[layerIdx].Tree - for _, ref := range tree.AllFiles(file.AllTypes...) { - results <- NewLocationFromImage(string(ref.RealPath), ref, r.img) + for _, ref := range tree.AllFiles(stereoscopeFile.AllTypes...) { + results <- file.NewLocationFromImage(string(ref.RealPath), ref, r.img) } } }() return results } -func (r *allLayersResolver) FileMetadataByLocation(location Location) (FileMetadata, error) { - return fileMetadataByLocation(r.img, location) +func (r *allLayersResolver) FileMetadataByLocation(location file.Location) (file.Metadata, error) { + return file.MetadataByLocation(r.img, location) } diff --git a/syft/source/all_layers_resolver_test.go b/syft/source/all_layers_resolver_test.go index e9e078012..317e24e12 100644 --- a/syft/source/all_layers_resolver_test.go +++ b/syft/source/all_layers_resolver_test.go @@ -1,6 +1,7 @@ package source import ( + "github.com/anchore/syft/syft/file" "github.com/stretchr/testify/require" "io" "testing" @@ -116,15 +117,15 @@ func TestAllLayersResolver_FilesByPath(t *testing.T) { for idx, actual := range refs { expected := c.resolutions[idx] - if string(actual.ref.RealPath) != expected.path { - t.Errorf("bad resolve path: '%s'!='%s'", string(actual.ref.RealPath), expected.path) + if string(actual.Ref().RealPath) != expected.path { + t.Errorf("bad resolve path: '%s'!='%s'", string(actual.Ref().RealPath), expected.path) } - if expected.path != "" && string(actual.ref.RealPath) != actual.RealPath 
{ + if expected.path != "" && string(actual.Ref().RealPath) != actual.RealPath { t.Errorf("we should always prefer real paths over ones with links") } - entry, err := img.FileCatalog.Get(actual.ref) + entry, err := img.FileCatalog.Get(actual.Ref()) if err != nil { t.Fatalf("failed to get metadata: %+v", err) } @@ -223,15 +224,15 @@ func TestAllLayersResolver_FilesByGlob(t *testing.T) { for idx, actual := range refs { expected := c.resolutions[idx] - if string(actual.ref.RealPath) != expected.path { - t.Errorf("bad resolve path: '%s'!='%s'", string(actual.ref.RealPath), expected.path) + if string(actual.Ref().RealPath) != expected.path { + t.Errorf("bad resolve path: '%s'!='%s'", string(actual.Ref().RealPath), expected.path) } - if expected.path != "" && string(actual.ref.RealPath) != actual.RealPath { + if expected.path != "" && string(actual.Ref().RealPath) != actual.RealPath { t.Errorf("we should always prefer real paths over ones with links") } - entry, err := img.FileCatalog.Get(actual.ref) + entry, err := img.FileCatalog.Get(actual.Ref()) if err != nil { t.Fatalf("failed to get metadata: %+v", err) } @@ -364,72 +365,72 @@ func TestAllLayersImageResolver_FilesContents(t *testing.T) { func Test_imageAllLayersResolver_resolvesLinks(t *testing.T) { tests := []struct { name string - runner func(FileResolver) []Location - expected []Location + runner func(FileResolver) []file.Location + expected []file.Location }{ { name: "by mimetype", - runner: func(resolver FileResolver) []Location { + runner: func(resolver FileResolver) []file.Location { // links should not show up when searching mimetype actualLocations, err := resolver.FilesByMIMEType("text/plain") assert.NoError(t, err) return actualLocations }, - expected: []Location{ + expected: []file.Location{ { - Coordinates: Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/etc/group", }, VirtualPath: "/etc/group", }, { - Coordinates: Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/etc/passwd", }, 
VirtualPath: "/etc/passwd", }, { - Coordinates: Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/etc/shadow", }, VirtualPath: "/etc/shadow", }, { - Coordinates: Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/file-1.txt", }, VirtualPath: "/file-1.txt", }, // copy 1 { - Coordinates: Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/file-2.txt", }, VirtualPath: "/file-2.txt", }, { - Coordinates: Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/file-3.txt", }, VirtualPath: "/file-3.txt", }, // copy 2 { - Coordinates: Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/file-2.txt", }, VirtualPath: "/file-2.txt", }, // copy 1 { - Coordinates: Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/parent/file-4.txt", }, VirtualPath: "/parent/file-4.txt", }, // copy 2 { - Coordinates: Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/parent/file-4.txt", }, VirtualPath: "/parent/file-4.txt", @@ -438,35 +439,35 @@ func Test_imageAllLayersResolver_resolvesLinks(t *testing.T) { }, { name: "by glob", - runner: func(resolver FileResolver) []Location { + runner: func(resolver FileResolver) []file.Location { // links are searched, but resolve to the real files actualLocations, err := resolver.FilesByGlob("*ink-*") assert.NoError(t, err) return actualLocations }, - expected: []Location{ + expected: []file.Location{ { - Coordinates: Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/file-1.txt", }, VirtualPath: "/link-1", }, // copy 1 { - Coordinates: Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/file-2.txt", }, VirtualPath: "/link-2", }, // copy 2 { - Coordinates: Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/file-2.txt", }, VirtualPath: "/link-2", }, { - Coordinates: Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/file-3.txt", }, VirtualPath: "/link-within", @@ -475,22 +476,22 @@ func Test_imageAllLayersResolver_resolvesLinks(t *testing.T) { }, { name: "by path to degree 1 link", - 
runner: func(resolver FileResolver) []Location { + runner: func(resolver FileResolver) []file.Location { // links resolve to the final file actualLocations, err := resolver.FilesByPath("/link-2") assert.NoError(t, err) return actualLocations }, - expected: []Location{ + expected: []file.Location{ // we have multiple copies across layers { - Coordinates: Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/file-2.txt", }, VirtualPath: "/link-2", }, { - Coordinates: Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/file-2.txt", }, VirtualPath: "/link-2", @@ -499,22 +500,22 @@ func Test_imageAllLayersResolver_resolvesLinks(t *testing.T) { }, { name: "by path to degree 2 link", - runner: func(resolver FileResolver) []Location { + runner: func(resolver FileResolver) []file.Location { // multiple links resolves to the final file actualLocations, err := resolver.FilesByPath("/link-indirect") assert.NoError(t, err) return actualLocations }, - expected: []Location{ + expected: []file.Location{ // we have multiple copies across layers { - Coordinates: Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/file-2.txt", }, VirtualPath: "/link-indirect", }, { - Coordinates: Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/file-2.txt", }, VirtualPath: "/link-indirect", diff --git a/syft/source/all_regular_files.go b/syft/source/all_regular_files.go index 18d225549..be1cd5bd6 100644 --- a/syft/source/all_regular_files.go +++ b/syft/source/all_regular_files.go @@ -2,9 +2,10 @@ package source import ( "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/syft/file" ) -func AllRegularFiles(resolver FileResolver) (locations []Location) { +func AllRegularFiles(resolver FileResolver) (locations []file.Location) { for location := range resolver.AllLocations() { resolvedLocations, err := resolver.FilesByPath(location.RealPath) if err != nil { @@ -19,7 +20,7 @@ func AllRegularFiles(resolver FileResolver) (locations []Location) { continue } - if 
metadata.Type != RegularFile { + if metadata.Type != file.RegularFile { continue } locations = append(locations, resolvedLocation) diff --git a/syft/source/directory_resolver.go b/syft/source/directory_resolver.go index 64ac1ef74..4b6f30d4a 100644 --- a/syft/source/directory_resolver.go +++ b/syft/source/directory_resolver.go @@ -11,12 +11,13 @@ import ( "runtime" "strings" - "github.com/anchore/stereoscope/pkg/file" + stereoscopeFile "github.com/anchore/stereoscope/pkg/file" "github.com/anchore/stereoscope/pkg/filetree" "github.com/anchore/syft/internal" "github.com/anchore/syft/internal/bus" "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/event" + "github.com/anchore/syft/syft/file" "github.com/wagoodman/go-partybus" "github.com/wagoodman/go-progress" ) @@ -39,10 +40,10 @@ type directoryResolver struct { currentWdRelativeToRoot string currentWd string fileTree *filetree.FileTree - metadata map[file.ID]FileMetadata + metadata map[stereoscopeFile.ID]file.Metadata // TODO: wire up to report these paths in the json report pathFilterFns []pathFilterFn - refsByMIMEType map[string][]file.Reference + refsByMIMEType map[string][]stereoscopeFile.Reference errPaths map[string]error } @@ -78,9 +79,9 @@ func newDirectoryResolver(root string, pathFilters ...pathFilterFn) (*directoryR currentWd: cleanCWD, currentWdRelativeToRoot: currentWdRelRoot, fileTree: filetree.NewFileTree(), - metadata: make(map[file.ID]FileMetadata), + metadata: make(map[stereoscopeFile.ID]file.Metadata), pathFilterFns: append([]pathFilterFn{isUnallowableFileType, isUnixSystemRuntimePath}, pathFilters...), - refsByMIMEType: make(map[string][]file.Reference), + refsByMIMEType: make(map[string][]stereoscopeFile.Reference), errPaths: make(map[string]error), } @@ -180,12 +181,12 @@ func (r *directoryResolver) isFileAccessErr(path string, err error) bool { } func (r directoryResolver) addPathToIndex(p string, info os.FileInfo) (string, error) { - switch t := 
newFileTypeFromMode(info.Mode()); t { - case SymbolicLink: + switch t := file.NewFileTypeFromMode(info.Mode()); t { + case file.SymbolicLink: return r.addSymlinkToIndex(p, info) - case Directory: + case file.Directory: return "", r.addDirectoryToIndex(p, info) - case RegularFile: + case file.RegularFile: return "", r.addFileToIndex(p, info) default: return "", fmt.Errorf("unsupported file type: %s", t) @@ -193,7 +194,7 @@ func (r directoryResolver) addPathToIndex(p string, info os.FileInfo) (string, e } func (r directoryResolver) hasBeenIndexed(p string) bool { - filePath := file.Path(p) + filePath := stereoscopeFile.Path(p) if !r.fileTree.HasPath(filePath) { return false } @@ -210,26 +211,26 @@ func (r directoryResolver) hasBeenIndexed(p string) bool { } func (r directoryResolver) addDirectoryToIndex(p string, info os.FileInfo) error { - ref, err := r.fileTree.AddDir(file.Path(p)) + ref, err := r.fileTree.AddDir(stereoscopeFile.Path(p)) if err != nil { return err } - location := NewLocationFromDirectory(p, *ref) - metadata := fileMetadataFromPath(p, info, r.isInIndex(location)) + location := file.NewLocationFromDirectory(p, *ref) + metadata := file.MetadataFromPath(p, info, r.isInIndex(location)) r.addFileMetadataToIndex(ref, metadata) return nil } func (r directoryResolver) addFileToIndex(p string, info os.FileInfo) error { - ref, err := r.fileTree.AddFile(file.Path(p)) + ref, err := r.fileTree.AddFile(stereoscopeFile.Path(p)) if err != nil { return err } - location := NewLocationFromDirectory(p, *ref) - metadata := fileMetadataFromPath(p, info, r.isInIndex(location)) + location := file.NewLocationFromDirectory(p, *ref) + metadata := file.MetadataFromPath(p, info, r.isInIndex(location)) r.addFileMetadataToIndex(ref, metadata) return nil @@ -249,7 +250,7 @@ func (r directoryResolver) addSymlinkToIndex(p string, info os.FileInfo) (string linkTarget = filepath.Join(filepath.Dir(p), linkTarget) } - ref, err := r.fileTree.AddSymLink(file.Path(p), 
file.Path(linkTarget)) + ref, err := r.fileTree.AddSymLink(stereoscopeFile.Path(p), stereoscopeFile.Path(linkTarget)) if err != nil { return "", err } @@ -259,16 +260,16 @@ func (r directoryResolver) addSymlinkToIndex(p string, info os.FileInfo) (string targetAbsPath = filepath.Clean(filepath.Join(path.Dir(p), linkTarget)) } - location := NewLocationFromDirectory(p, *ref) + location := file.NewLocationFromDirectory(p, *ref) location.VirtualPath = p - metadata := fileMetadataFromPath(p, usedInfo, r.isInIndex(location)) + metadata := file.MetadataFromPath(p, usedInfo, r.isInIndex(location)) metadata.LinkDestination = linkTarget r.addFileMetadataToIndex(ref, metadata) return targetAbsPath, nil } -func (r directoryResolver) addFileMetadataToIndex(ref *file.Reference, metadata FileMetadata) { +func (r directoryResolver) addFileMetadataToIndex(ref *stereoscopeFile.Reference, metadata file.Metadata) { if ref != nil { if metadata.MIMEType != "" { r.refsByMIMEType[metadata.MIMEType] = append(r.refsByMIMEType[metadata.MIMEType], *ref) @@ -315,7 +316,7 @@ func (r *directoryResolver) HasPath(userPath string) bool { if err != nil { return false } - return r.fileTree.HasPath(file.Path(requestPath)) + return r.fileTree.HasPath(stereoscopeFile.Path(requestPath)) } // Stringer to represent a directory path data source @@ -323,9 +324,9 @@ func (r directoryResolver) String() string { return fmt.Sprintf("dir:%s", r.path) } -// FilesByPath returns all file.References that match the given paths from the directory. -func (r directoryResolver) FilesByPath(userPaths ...string) ([]Location, error) { - var references = make([]Location, 0) +// FilesByPath returns all stereoscopeFile.References that match the given paths from the directory. 
+func (r directoryResolver) FilesByPath(userPaths ...string) ([]file.Location, error) { + var references = make([]file.Location, 0) for _, userPath := range userPaths { userStrPath, err := r.requestPath(userPath) @@ -367,9 +368,9 @@ func (r directoryResolver) FilesByPath(userPaths ...string) ([]Location, error) userStrPath = windowsToPosix(userStrPath) } - exists, ref, err := r.fileTree.File(file.Path(userStrPath), filetree.FollowBasenameLinks) + exists, ref, err := r.fileTree.File(stereoscopeFile.Path(userStrPath), filetree.FollowBasenameLinks) if err == nil && exists { - loc := NewVirtualLocationFromDirectory( + loc := file.NewVirtualLocationFromDirectory( r.responsePath(string(ref.RealPath)), // the actual path relative to the resolver root r.responsePath(userStrPath), // the path used to access this file, relative to the resolver root *ref, @@ -381,9 +382,9 @@ func (r directoryResolver) FilesByPath(userPaths ...string) ([]Location, error) return references, nil } -// FilesByGlob returns all file.References that match the given path glob pattern from any layer in the image. -func (r directoryResolver) FilesByGlob(patterns ...string) ([]Location, error) { - result := make([]Location, 0) +// FilesByGlob returns all stereoscopeFile.References that match the given path glob pattern from any layer in the image. 
+func (r directoryResolver) FilesByGlob(patterns ...string) ([]file.Location, error) { + result := make([]file.Location, 0) for _, pattern := range patterns { globResults, err := r.fileTree.FilesByGlob(pattern, filetree.FollowBasenameLinks) @@ -391,7 +392,7 @@ func (r directoryResolver) FilesByGlob(patterns ...string) ([]Location, error) { return nil, err } for _, globResult := range globResults { - loc := NewVirtualLocationFromDirectory( + loc := file.NewVirtualLocationFromDirectory( r.responsePath(string(globResult.Reference.RealPath)), // the actual path relative to the resolver root r.responsePath(string(globResult.MatchPath)), // the path used to access this file, relative to the resolver root globResult.Reference, @@ -404,9 +405,9 @@ func (r directoryResolver) FilesByGlob(patterns ...string) ([]Location, error) { } // RelativeFileByPath fetches a single file at the given path relative to the layer squash of the given reference. -// This is helpful when attempting to find a file that is in the same layer or lower as another file. For the +// This is helpful when attempting to find a file that is in the same layer or lower as another file. For the // directoryResolver, this is a simple path lookup. -func (r *directoryResolver) RelativeFileByPath(_ Location, path string) *Location { +func (r *directoryResolver) RelativeFileByPath(_ file.Location, path string) *file.Location { paths, err := r.FilesByPath(path) if err != nil { return nil @@ -420,59 +421,60 @@ func (r *directoryResolver) RelativeFileByPath(_ Location, path string) *Locatio // FileContentsByLocation fetches file contents for a single file reference relative to a directory. // If the path does not exist an error is returned.
-func (r directoryResolver) FileContentsByLocation(location Location) (io.ReadCloser, error) { - if location.ref.RealPath == "" { +func (r directoryResolver) FileContentsByLocation(location file.Location) (io.ReadCloser, error) { + if location.Ref().RealPath == "" { return nil, errors.New("empty path given") } if !r.isInIndex(location) { // this is in cases where paths have been explicitly excluded from the tree index. In which case // we should DENY all content requests. Why? These paths have been indicated to be inaccessible (either // by preference or these files are not readable by the current user). - return nil, fmt.Errorf("file content is inaccessible path=%q", location.ref.RealPath) + return nil, fmt.Errorf("file content is inaccessible path=%q", location.Ref().RealPath) } // RealPath is posix so for windows directory resolver we need to translate // to its true on disk path. - filePath := string(location.ref.RealPath) + filePath := string(location.Ref().RealPath) if runtime.GOOS == WindowsOS { filePath = posixToWindows(filePath) } - return file.NewLazyReadCloser(filePath), nil + return stereoscopeFile.NewLazyReadCloser(filePath), nil } -func (r directoryResolver) isInIndex(location Location) bool { - if location.ref.RealPath == "" { +func (r directoryResolver) isInIndex(location file.Location) bool { + if location.Ref().RealPath == "" { return false } - return r.fileTree.HasPath(location.ref.RealPath, filetree.FollowBasenameLinks) + return r.fileTree.HasPath(location.Ref().RealPath, filetree.FollowBasenameLinks) } -func (r *directoryResolver) AllLocations() <-chan Location { - results := make(chan Location) +func (r *directoryResolver) AllLocations() <-chan file.Location { + results := make(chan file.Location) go func() { defer close(results) // this should be all non-directory types - for _, ref := range r.fileTree.AllFiles(file.TypeReg, file.TypeSymlink, file.TypeHardLink, file.TypeBlockDevice, file.TypeCharacterDevice, file.TypeFifo) { - results <- 
NewLocationFromDirectory(r.responsePath(string(ref.RealPath)), ref) + for _, ref := range r.fileTree.AllFiles(stereoscopeFile.TypeReg, stereoscopeFile.TypeSymlink, stereoscopeFile.TypeHardLink, stereoscopeFile.TypeBlockDevice, stereoscopeFile.TypeCharacterDevice, stereoscopeFile.TypeFifo) { + results <- file.NewLocationFromDirectory(r.responsePath(string(ref.RealPath)), ref) } }() return results } -func (r *directoryResolver) FileMetadataByLocation(location Location) (FileMetadata, error) { - metadata, exists := r.metadata[location.ref.ID()] +func (r *directoryResolver) FileMetadataByLocation(location file.Location) (file.Metadata, error) { + ref := location.Ref() + metadata, exists := r.metadata[ref.ID()] if !exists { - return FileMetadata{}, fmt.Errorf("location: %+v : %w", location, os.ErrNotExist) + return file.Metadata{}, fmt.Errorf("location: %+v : %w", location, os.ErrNotExist) } return metadata, nil } -func (r *directoryResolver) FilesByMIMEType(types ...string) ([]Location, error) { - var locations []Location +func (r *directoryResolver) FilesByMIMEType(types ...string) ([]file.Location, error) { + var locations []file.Location for _, ty := range types { if refs, ok := r.refsByMIMEType[ty]; ok { for _, ref := range refs { - locations = append(locations, NewLocationFromDirectory(r.responsePath(string(ref.RealPath)), ref)) + locations = append(locations, file.NewLocationFromDirectory(r.responsePath(string(ref.RealPath)), ref)) } } } @@ -515,8 +517,8 @@ func isUnallowableFileType(_ string, info os.FileInfo) bool { // we can't filter out by filetype for non-existent files return false } - switch newFileTypeFromMode(info.Mode()) { - case CharacterDevice, Socket, BlockDevice, FIFONode, IrregularFile: + switch file.NewFileTypeFromMode(info.Mode()) { + case file.CharacterDevice, file.Socket, file.BlockDevice, file.FIFONode, file.IrregularFile: return true // note: symlinks that point to these files may still get by. 
// We handle this later in processing to help prevent against infinite links traversal. diff --git a/syft/source/directory_resolver_test.go b/syft/source/directory_resolver_test.go index 795ef0b79..94bc5094b 100644 --- a/syft/source/directory_resolver_test.go +++ b/syft/source/directory_resolver_test.go @@ -15,12 +15,11 @@ import ( "testing" "time" - "github.com/stretchr/testify/require" - + stereoscopeFile "github.com/anchore/stereoscope/pkg/file" + "github.com/anchore/syft/syft/file" "github.com/scylladb/go-set/strset" - - "github.com/anchore/stereoscope/pkg/file" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" "github.com/wagoodman/go-progress" ) @@ -466,7 +465,7 @@ func Test_directoryResolver_index(t *testing.T) { cwd, err := os.Getwd() require.NoError(t, err) - p := file.Path(path.Join(cwd, test.path)) + p := stereoscopeFile.Path(path.Join(cwd, test.path)) assert.Equal(t, true, r.fileTree.HasPath(p)) exists, ref, err := r.fileTree.File(p) assert.Equal(t, true, exists) @@ -735,20 +734,20 @@ func Test_directoryResolver_FileContentsByLocation(t *testing.T) { tests := []struct { name string - location Location + location file.Location expects string err bool }{ { name: "use file reference for content requests", - location: NewLocationFromDirectory("some/place", file.Reference{ - RealPath: file.Path(filepath.Join(cwd, "test-fixtures/image-simple/file-1.txt")), + location: file.NewLocationFromDirectory("some/place", stereoscopeFile.Reference{ + RealPath: stereoscopeFile.Path(filepath.Join(cwd, "test-fixtures/image-simple/file-1.txt")), }), expects: "this file has contents", }, { name: "error on empty file reference", - location: NewLocationFromDirectory("doesn't matter", file.Reference{}), + location: file.NewLocationFromDirectory("doesn't matter", stereoscopeFile.Reference{}), err: true, }, } @@ -823,12 +822,12 @@ func Test_SymlinkLoopWithGlobsShouldResolve(t *testing.T) { resolver, err := 
newDirectoryResolver("./test-fixtures/symlinks-loop") require.NoError(t, err) - locations, err := resolver.FilesByGlob("**/file.target") + locations, err := resolver.FilesByGlob("**/file.target") require.NoError(t, err) // Note: I'm not certain that this behavior is correct, but it is not an infinite loop (which is the point of the test) - // - block/loop0/file.target - // - devices/loop0/file.target - // - devices/loop0/subsystem/loop0/file.target + // - block/loop0/file.target + // - devices/loop0/file.target + // - devices/loop0/subsystem/loop0/file.target assert.Len(t, locations, 3) } @@ -857,7 +856,7 @@ func Test_IncludeRootPathInIndex(t *testing.T) { resolver, err := newDirectoryResolver("/", filterFn) require.NoError(t, err) - exists, ref, err := resolver.fileTree.File(file.Path("/")) + exists, ref, err := resolver.fileTree.File(stereoscopeFile.Path("/")) require.NoError(t, err) require.NotNil(t, ref) assert.True(t, exists) diff --git a/syft/source/excluding_file_resolver.go b/syft/source/excluding_file_resolver.go index 50969116a..9c20ec61b 100644 --- a/syft/source/excluding_file_resolver.go +++ b/syft/source/excluding_file_resolver.go @@ -3,6 +3,8 @@ package source import ( "fmt" "io" + + "github.com/anchore/syft/syft/file" ) type excludeFn func(string) bool @@ -23,16 +25,16 @@ func NewExcludingResolver(delegate FileResolver, excludeFn excludeFn) FileResolv } } -func (r *excludingResolver) FileContentsByLocation(location Location) (io.ReadCloser, error) { +func (r *excludingResolver) FileContentsByLocation(location file.Location) (io.ReadCloser, error) { if locationMatches(&location, r.excludeFn) { return nil, fmt.Errorf("no such location: %+v", location.RealPath) } return r.delegate.FileContentsByLocation(location) } -func (r *excludingResolver) FileMetadataByLocation(location Location) (FileMetadata, error) { +func (r *excludingResolver) FileMetadataByLocation(location file.Location) (file.Metadata, error) { if
locationMatches(&location, r.excludeFn) { - return FileMetadata{}, fmt.Errorf("no such location: %+v", location.RealPath) + return file.Metadata{}, fmt.Errorf("no such location: %+v", location.RealPath) } return r.delegate.FileMetadataByLocation(location) } @@ -44,22 +46,22 @@ func (r *excludingResolver) HasPath(path string) bool { return r.delegate.HasPath(path) } -func (r *excludingResolver) FilesByPath(paths ...string) ([]Location, error) { +func (r *excludingResolver) FilesByPath(paths ...string) ([]file.Location, error) { locations, err := r.delegate.FilesByPath(paths...) return filterLocations(locations, err, r.excludeFn) } -func (r *excludingResolver) FilesByGlob(patterns ...string) ([]Location, error) { +func (r *excludingResolver) FilesByGlob(patterns ...string) ([]file.Location, error) { locations, err := r.delegate.FilesByGlob(patterns...) return filterLocations(locations, err, r.excludeFn) } -func (r *excludingResolver) FilesByMIMEType(types ...string) ([]Location, error) { +func (r *excludingResolver) FilesByMIMEType(types ...string) ([]file.Location, error) { locations, err := r.delegate.FilesByMIMEType(types...) 
return filterLocations(locations, err, r.excludeFn) } -func (r *excludingResolver) RelativeFileByPath(location Location, path string) *Location { +func (r *excludingResolver) RelativeFileByPath(location file.Location, path string) *file.Location { l := r.delegate.RelativeFileByPath(location, path) if l != nil && locationMatches(l, r.excludeFn) { return nil @@ -67,8 +69,8 @@ func (r *excludingResolver) RelativeFileByPath(location Location, path string) * return l } -func (r *excludingResolver) AllLocations() <-chan Location { - c := make(chan Location) +func (r *excludingResolver) AllLocations() <-chan file.Location { + c := make(chan file.Location) go func() { defer close(c) for location := range r.delegate.AllLocations() { @@ -80,11 +82,11 @@ func (r *excludingResolver) AllLocations() <-chan Location { return c } -func locationMatches(location *Location, exclusionFn excludeFn) bool { +func locationMatches(location *file.Location, exclusionFn excludeFn) bool { return exclusionFn(location.RealPath) || exclusionFn(location.VirtualPath) } -func filterLocations(locations []Location, err error, exclusionFn excludeFn) ([]Location, error) { +func filterLocations(locations []file.Location, err error, exclusionFn excludeFn) ([]file.Location, error) { if err != nil { return nil, err } diff --git a/syft/source/excluding_file_resolver_test.go b/syft/source/excluding_file_resolver_test.go index 4cfe18727..e45082c0f 100644 --- a/syft/source/excluding_file_resolver_test.go +++ b/syft/source/excluding_file_resolver_test.go @@ -5,8 +5,7 @@ import ( "strings" "testing" - "github.com/anchore/stereoscope/pkg/file" - + "github.com/anchore/syft/syft/file" "github.com/stretchr/testify/assert" ) @@ -67,7 +66,7 @@ func TestExcludingResolver(t *testing.T) { locations, _ = excludingResolver.FilesByMIMEType() assert.ElementsMatch(t, locationPaths(locations), test.expected) - locations = []Location{} + locations = []file.Location{} channel := excludingResolver.AllLocations() for location := 
range channel { @@ -119,18 +118,17 @@ func difference(a, b []string) []string { return diff } -func makeLocation(path string) Location { - return Location{ - Coordinates: Coordinates{ +func makeLocation(path string) file.Location { + return file.Location{ + Coordinates: file.Coordinates{ RealPath: path, FileSystemID: "", }, VirtualPath: "", - ref: file.Reference{}, } } -func locationPaths(locations []Location) []string { +func locationPaths(locations []file.Location) []string { paths := []string{} for _, l := range locations { paths = append(paths, l.RealPath) @@ -142,20 +140,20 @@ type mockResolver struct { locations []string } -func (r *mockResolver) getLocations() ([]Location, error) { - out := []Location{} +func (r *mockResolver) getLocations() ([]file.Location, error) { + out := []file.Location{} for _, path := range r.locations { out = append(out, makeLocation(path)) } return out, nil } -func (r *mockResolver) FileContentsByLocation(_ Location) (io.ReadCloser, error) { +func (r *mockResolver) FileContentsByLocation(_ file.Location) (io.ReadCloser, error) { return io.NopCloser(strings.NewReader("Hello, world!")), nil } -func (r *mockResolver) FileMetadataByLocation(_ Location) (FileMetadata, error) { - return FileMetadata{ +func (r *mockResolver) FileMetadataByLocation(_ file.Location) (file.Metadata, error) { + return file.Metadata{ LinkDestination: "MOCK", }, nil } @@ -164,28 +162,28 @@ func (r *mockResolver) HasPath(_ string) bool { return true } -func (r *mockResolver) FilesByPath(_ ...string) ([]Location, error) { +func (r *mockResolver) FilesByPath(_ ...string) ([]file.Location, error) { return r.getLocations() } -func (r *mockResolver) FilesByGlob(_ ...string) ([]Location, error) { +func (r *mockResolver) FilesByGlob(_ ...string) ([]file.Location, error) { return r.getLocations() } -func (r *mockResolver) FilesByMIMEType(_ ...string) ([]Location, error) { +func (r *mockResolver) FilesByMIMEType(_ ...string) ([]file.Location, error) { return 
r.getLocations() } -func (r *mockResolver) RelativeFileByPath(_ Location, path string) *Location { - return &Location{ - Coordinates: Coordinates{ +func (r *mockResolver) RelativeFileByPath(_ file.Location, path string) *file.Location { + return &file.Location{ + Coordinates: file.Coordinates{ RealPath: path, }, } } -func (r *mockResolver) AllLocations() <-chan Location { - c := make(chan Location) +func (r *mockResolver) AllLocations() <-chan file.Location { + c := make(chan file.Location) go func() { defer close(c) locations, _ := r.getLocations() diff --git a/syft/source/file_resolver.go b/syft/source/file_resolver.go index b6ccb4815..d86e8a25e 100644 --- a/syft/source/file_resolver.go +++ b/syft/source/file_resolver.go @@ -2,6 +2,8 @@ package source import ( "io" + + "github.com/anchore/syft/syft/file" ) // FileResolver is an interface that encompasses how to get specific file references and file contents for a generic data source. @@ -14,11 +16,11 @@ type FileResolver interface { // FileContentResolver knows how to get file content for a given Location type FileContentResolver interface { - FileContentsByLocation(Location) (io.ReadCloser, error) + FileContentsByLocation(file.Location) (io.ReadCloser, error) } type FileMetadataResolver interface { - FileMetadataByLocation(Location) (FileMetadata, error) + FileMetadataByLocation(file.Location) (file.Metadata, error) } // FilePathResolver knows how to get a Location for given string paths and globs @@ -26,16 +28,16 @@ type FilePathResolver interface { // HasPath indicates if the given path exists in the underlying source. 
HasPath(string) bool // FilesByPath fetches a set of file references which have the given path (for an image, there may be multiple matches) - FilesByPath(paths ...string) ([]Location, error) + FilesByPath(paths ...string) ([]file.Location, error) // FilesByGlob fetches a set of file references which the given glob matches - FilesByGlob(patterns ...string) ([]Location, error) + FilesByGlob(patterns ...string) ([]file.Location, error) // FilesByMIMEType fetches a set of file references which the contents have been classified as one of the given MIME Types - FilesByMIMEType(types ...string) ([]Location, error) + FilesByMIMEType(types ...string) ([]file.Location, error) // RelativeFileByPath fetches a single file at the given path relative to the layer squash of the given reference. // This is helpful when attempting to find a file that is in the same layer or lower as another file. - RelativeFileByPath(_ Location, path string) *Location + RelativeFileByPath(_ file.Location, path string) *file.Location } type FileLocationResolver interface { - AllLocations() <-chan Location + AllLocations() <-chan file.Location } diff --git a/syft/source/image_squash_resolver.go b/syft/source/image_squash_resolver.go index ba584897e..d3c52d308 100644 --- a/syft/source/image_squash_resolver.go +++ b/syft/source/image_squash_resolver.go @@ -5,9 +5,10 @@ import ( "fmt" "io" - "github.com/anchore/stereoscope/pkg/file" + stereoscopeFile "github.com/anchore/stereoscope/pkg/file" "github.com/anchore/stereoscope/pkg/filetree" "github.com/anchore/stereoscope/pkg/image" + "github.com/anchore/syft/syft/file" ) var _ FileResolver = (*imageSquashResolver)(nil) @@ -30,17 +31,17 @@ func newImageSquashResolver(img *image.Image) (*imageSquashResolver, error) { // HasPath indicates if the given path exists in the underlying source. 
func (r *imageSquashResolver) HasPath(path string) bool { - return r.img.SquashedTree().HasPath(file.Path(path)) + return r.img.SquashedTree().HasPath(stereoscopeFile.Path(path)) } -// FilesByPath returns all file.References that match the given paths within the squashed representation of the image. -func (r *imageSquashResolver) FilesByPath(paths ...string) ([]Location, error) { - uniqueFileIDs := file.NewFileReferenceSet() - uniqueLocations := make([]Location, 0) +// FilesByPath returns all stereoscopeFile.References that match the given paths within the squashed representation of the image. +func (r *imageSquashResolver) FilesByPath(paths ...string) ([]file.Location, error) { + uniqueFileIDs := stereoscopeFile.NewFileReferenceSet() + uniqueLocations := make([]file.Location, 0) for _, path := range paths { tree := r.img.SquashedTree() - _, ref, err := tree.File(file.Path(path), filetree.FollowBasenameLinks) + _, ref, err := tree.File(stereoscopeFile.Path(path), filetree.FollowBasenameLinks) if err != nil { return nil, err } @@ -70,17 +71,17 @@ func (r *imageSquashResolver) FilesByPath(paths ...string) ([]Location, error) { if resolvedRef != nil && !uniqueFileIDs.Contains(*resolvedRef) { uniqueFileIDs.Add(*resolvedRef) - uniqueLocations = append(uniqueLocations, NewLocationFromImage(path, *resolvedRef, r.img)) + uniqueLocations = append(uniqueLocations, file.NewLocationFromImage(path, *resolvedRef, r.img)) } } return uniqueLocations, nil } -// FilesByGlob returns all file.References that match the given path glob pattern within the squashed representation of the image. -func (r *imageSquashResolver) FilesByGlob(patterns ...string) ([]Location, error) { - uniqueFileIDs := file.NewFileReferenceSet() - uniqueLocations := make([]Location, 0) +// FilesByGlob returns all stereoscopeFile.References that match the given path glob pattern within the squashed representation of the image. 
+func (r *imageSquashResolver) FilesByGlob(patterns ...string) ([]file.Location, error) { + uniqueFileIDs := stereoscopeFile.NewFileReferenceSet() + uniqueLocations := make([]file.Location, 0) for _, pattern := range patterns { results, err := r.img.SquashedTree().FilesByGlob(pattern, filetree.FollowBasenameLinks) @@ -109,8 +110,8 @@ func (r *imageSquashResolver) FilesByGlob(patterns ...string) ([]Location, error return nil, fmt.Errorf("failed to find files by path (result=%+v): %w", result, err) } for _, resolvedLocation := range resolvedLocations { - if !uniqueFileIDs.Contains(resolvedLocation.ref) { - uniqueFileIDs.Add(resolvedLocation.ref) + if !uniqueFileIDs.Contains(resolvedLocation.Ref()) { + uniqueFileIDs.Add(resolvedLocation.Ref()) uniqueLocations = append(uniqueLocations, resolvedLocation) } } @@ -121,9 +122,9 @@ func (r *imageSquashResolver) FilesByGlob(patterns ...string) ([]Location, error } // RelativeFileByPath fetches a single file at the given path relative to the layer squash of the given reference. -// This is helpful when attempting to find a file that is in the same layer or lower as another file. For the +// This is helpful when attempting to find a file that is in the same layer or lower as another file. For the // imageSquashResolver, this is a simple path lookup. -func (r *imageSquashResolver) RelativeFileByPath(_ Location, path string) *Location { +func (r *imageSquashResolver) RelativeFileByPath(_ file.Location, path string) *file.Location { paths, err := r.FilesByPath(path) if err != nil { return nil @@ -137,8 +138,8 @@ func (r *imageSquashResolver) RelativeFileByPath(_ Location, path string) *Locat // FileContentsByLocation fetches file contents for a single file reference, irregardless of the source layer. // If the path does not exist an error is returned.
-func (r *imageSquashResolver) FileContentsByLocation(location Location) (io.ReadCloser, error) { - entry, err := r.img.FileCatalog.Get(location.ref) +func (r *imageSquashResolver) FileContentsByLocation(location file.Location) (io.ReadCloser, error) { + entry, err := r.img.FileCatalog.Get(location.Ref()) if err != nil { return nil, fmt.Errorf("unable to get metadata for path=%q from file catalog: %w", location.RealPath, err) } @@ -161,34 +162,34 @@ func (r *imageSquashResolver) FileContentsByLocation(location Location) (io.Read } } - return r.img.FileContentsByRef(location.ref) + return r.img.FileContentsByRef(location.Ref()) } -func (r *imageSquashResolver) AllLocations() <-chan Location { - results := make(chan Location) +func (r *imageSquashResolver) AllLocations() <-chan file.Location { + results := make(chan file.Location) go func() { defer close(results) - for _, ref := range r.img.SquashedTree().AllFiles(file.AllTypes...) { - results <- NewLocationFromImage(string(ref.RealPath), ref, r.img) + for _, ref := range r.img.SquashedTree().AllFiles(stereoscopeFile.AllTypes...) { + results <- file.NewLocationFromImage(string(ref.RealPath), ref, r.img) } }() return results } -func (r *imageSquashResolver) FilesByMIMEType(types ...string) ([]Location, error) { +func (r *imageSquashResolver) FilesByMIMEType(types ...string) ([]file.Location, error) { refs, err := r.img.FilesByMIMETypeFromSquash(types...) 
if err != nil { return nil, err } - var locations []Location + var locations []file.Location for _, ref := range refs { - locations = append(locations, NewLocationFromImage(string(ref.RealPath), ref, r.img)) + locations = append(locations, file.NewLocationFromImage(string(ref.RealPath), ref, r.img)) } return locations, nil } -func (r *imageSquashResolver) FileMetadataByLocation(location Location) (FileMetadata, error) { - return fileMetadataByLocation(r.img, location) +func (r *imageSquashResolver) FileMetadataByLocation(location file.Location) (file.Metadata, error) { + return file.MetadataByLocation(r.img, location) } diff --git a/syft/source/image_squash_resolver_test.go b/syft/source/image_squash_resolver_test.go index 7f0819b9e..0ffc1fd64 100644 --- a/syft/source/image_squash_resolver_test.go +++ b/syft/source/image_squash_resolver_test.go @@ -1,6 +1,7 @@ package source import ( + "github.com/anchore/syft/syft/file" "github.com/stretchr/testify/require" "io" "testing" @@ -106,15 +107,15 @@ func TestImageSquashResolver_FilesByPath(t *testing.T) { actual := refs[0] - if string(actual.ref.RealPath) != c.resolvePath { - t.Errorf("bad resolve path: '%s'!='%s'", string(actual.ref.RealPath), c.resolvePath) + if string(actual.Ref().RealPath) != c.resolvePath { + t.Errorf("bad resolve path: '%s'!='%s'", string(actual.Ref().RealPath), c.resolvePath) } - if c.resolvePath != "" && string(actual.ref.RealPath) != actual.RealPath { + if c.resolvePath != "" && string(actual.Ref().RealPath) != actual.RealPath { t.Errorf("we should always prefer real paths over ones with links") } - entry, err := img.FileCatalog.Get(actual.ref) + entry, err := img.FileCatalog.Get(actual.Ref()) if err != nil { t.Fatalf("failed to get metadata: %+v", err) } @@ -211,15 +212,15 @@ func TestImageSquashResolver_FilesByGlob(t *testing.T) { actual := refs[0] - if string(actual.ref.RealPath) != c.resolvePath { - t.Errorf("bad resolve path: '%s'!='%s'", string(actual.ref.RealPath), c.resolvePath) + if 
string(actual.Ref().RealPath) != c.resolvePath { + t.Errorf("bad resolve path: '%s'!='%s'", string(actual.Ref().RealPath), c.resolvePath) } - if c.resolvePath != "" && string(actual.ref.RealPath) != actual.RealPath { + if c.resolvePath != "" && string(actual.Ref().RealPath) != actual.RealPath { t.Errorf("we should always prefer real paths over ones with links") } - entry, err := img.FileCatalog.Get(actual.ref) + entry, err := img.FileCatalog.Get(actual.Ref()) if err != nil { t.Fatalf("failed to get metadata: %+v", err) } @@ -347,56 +348,56 @@ func TestSquashImageResolver_FilesContents(t *testing.T) { func Test_imageSquashResolver_resolvesLinks(t *testing.T) { tests := []struct { name string - runner func(FileResolver) []Location - expected []Location + runner func(FileResolver) []file.Location + expected []file.Location }{ { name: "by mimetype", - runner: func(resolver FileResolver) []Location { + runner: func(resolver FileResolver) []file.Location { // links should not show up when searching mimetype actualLocations, err := resolver.FilesByMIMEType("text/plain") assert.NoError(t, err) return actualLocations }, - expected: []Location{ + expected: []file.Location{ { - Coordinates: Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/etc/group", }, VirtualPath: "/etc/group", }, { - Coordinates: Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/etc/passwd", }, VirtualPath: "/etc/passwd", }, { - Coordinates: Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/etc/shadow", }, VirtualPath: "/etc/shadow", }, { - Coordinates: Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/file-1.txt", }, VirtualPath: "/file-1.txt", }, { - Coordinates: Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/file-3.txt", }, VirtualPath: "/file-3.txt", }, { - Coordinates: Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/file-2.txt", }, VirtualPath: "/file-2.txt", }, { - Coordinates: Coordinates{ + Coordinates: file.Coordinates{ RealPath: 
"/parent/file-4.txt", }, VirtualPath: "/parent/file-4.txt", @@ -405,27 +406,27 @@ func Test_imageSquashResolver_resolvesLinks(t *testing.T) { }, { name: "by glob", - runner: func(resolver FileResolver) []Location { + runner: func(resolver FileResolver) []file.Location { // links are searched, but resolve to the real files actualLocations, err := resolver.FilesByGlob("*ink-*") assert.NoError(t, err) return actualLocations }, - expected: []Location{ + expected: []file.Location{ { - Coordinates: Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/file-3.txt", }, VirtualPath: "/link-within", }, { - Coordinates: Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/file-2.txt", }, VirtualPath: "/link-2", }, { - Coordinates: Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/file-1.txt", }, VirtualPath: "/link-1", @@ -434,16 +435,16 @@ func Test_imageSquashResolver_resolvesLinks(t *testing.T) { }, { name: "by path to degree 1 link", - runner: func(resolver FileResolver) []Location { + runner: func(resolver FileResolver) []file.Location { // links resolve to the final file actualLocations, err := resolver.FilesByPath("/link-2") assert.NoError(t, err) return actualLocations }, - expected: []Location{ + expected: []file.Location{ // we have multiple copies across layers { - Coordinates: Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/file-2.txt", }, VirtualPath: "/link-2", @@ -452,16 +453,16 @@ func Test_imageSquashResolver_resolvesLinks(t *testing.T) { }, { name: "by path to degree 2 link", - runner: func(resolver FileResolver) []Location { + runner: func(resolver FileResolver) []file.Location { // multiple links resolves to the final file actualLocations, err := resolver.FilesByPath("/link-indirect") assert.NoError(t, err) return actualLocations }, - expected: []Location{ + expected: []file.Location{ // we have multiple copies across layers { - Coordinates: Coordinates{ + Coordinates: file.Coordinates{ RealPath: "/file-2.txt", }, VirtualPath: 
"/link-indirect", diff --git a/syft/source/metadata.go b/syft/source/metadata.go index b9747362e..137ebdb80 100644 --- a/syft/source/metadata.go +++ b/syft/source/metadata.go @@ -2,7 +2,7 @@ package source // Metadata represents any static source data that helps describe "what" was cataloged. type Metadata struct { - Scheme Scheme // the source data scheme type (directory or image) + Scheme Type // the source data scheme type (directory or image) ImageMetadata ImageMetadata // all image info (image only) Path string // the root path to be cataloged (directory only) } diff --git a/syft/source/mock_resolver.go b/syft/source/mock_resolver.go index 51d7edc49..549c7d091 100644 --- a/syft/source/mock_resolver.go +++ b/syft/source/mock_resolver.go @@ -5,6 +5,7 @@ import ( "io" "os" + "github.com/anchore/syft/syft/file" "github.com/bmatcuk/doublestar/v4" ) @@ -14,28 +15,28 @@ var _ FileResolver = (*MockResolver)(nil) // It provides an implementation that can resolve local filesystem paths using only a provided discrete list of file // paths, which are typically paths to test fixtures. type MockResolver struct { - locations []Location - metadata map[Location]FileMetadata - mimeTypeIndex map[string][]Location + locations []file.Location + metadata map[file.Location]file.Metadata + mimeTypeIndex map[string][]file.Location } // NewMockResolverForPaths creates a new MockResolver, where the only resolvable // files are those specified by the supplied paths. 
func NewMockResolverForPaths(paths ...string) *MockResolver { - var locations []Location + var locations []file.Location for _, p := range paths { - locations = append(locations, NewLocation(p)) + locations = append(locations, file.NewLocation(p)) } return &MockResolver{ locations: locations, - metadata: make(map[Location]FileMetadata), + metadata: make(map[file.Location]file.Metadata), } } -func NewMockResolverForPathsWithMetadata(metadata map[Location]FileMetadata) *MockResolver { - var locations []Location - var mimeTypeIndex = make(map[string][]Location) +func NewMockResolverForPathsWithMetadata(metadata map[file.Location]file.Metadata) *MockResolver { + var locations []file.Location + var mimeTypeIndex = make(map[string][]file.Location) for l, m := range metadata { locations = append(locations, l) mimeTypeIndex[m.MIMEType] = append(mimeTypeIndex[m.MIMEType], l) @@ -65,7 +66,7 @@ func (r MockResolver) String() string { // FileContentsByLocation fetches file contents for a single location. If the // path does not exist, an error is returned. -func (r MockResolver) FileContentsByLocation(location Location) (io.ReadCloser, error) { +func (r MockResolver) FileContentsByLocation(location file.Location) (io.ReadCloser, error) { for _, l := range r.locations { if l == location { return os.Open(location.RealPath) @@ -76,12 +77,12 @@ func (r MockResolver) FileContentsByLocation(location Location) (io.ReadCloser, } // FilesByPath returns all Locations that match the given paths. 
-func (r MockResolver) FilesByPath(paths ...string) ([]Location, error) { - var results []Location +func (r MockResolver) FilesByPath(paths ...string) ([]file.Location, error) { + var results []file.Location for _, p := range paths { for _, location := range r.locations { if p == location.RealPath { - results = append(results, NewLocation(p)) + results = append(results, file.NewLocation(p)) } } } @@ -90,8 +91,8 @@ func (r MockResolver) FilesByPath(paths ...string) ([]Location, error) { } // FilesByGlob returns all Locations that match the given path glob pattern. -func (r MockResolver) FilesByGlob(patterns ...string) ([]Location, error) { - var results []Location +func (r MockResolver) FilesByGlob(patterns ...string) ([]file.Location, error) { + var results []file.Location for _, pattern := range patterns { for _, location := range r.locations { matches, err := doublestar.Match(pattern, location.RealPath) @@ -108,7 +109,7 @@ func (r MockResolver) FilesByGlob(patterns ...string) ([]Location, error) { } // RelativeFileByPath returns a single Location for the given path. 
-func (r MockResolver) RelativeFileByPath(_ Location, path string) *Location { +func (r MockResolver) RelativeFileByPath(_ file.Location, path string) *file.Location { paths, err := r.FilesByPath(path) if err != nil { return nil @@ -121,8 +122,8 @@ func (r MockResolver) RelativeFileByPath(_ Location, path string) *Location { return &paths[0] } -func (r MockResolver) AllLocations() <-chan Location { - results := make(chan Location) +func (r MockResolver) AllLocations() <-chan file.Location { + results := make(chan file.Location) go func() { defer close(results) for _, l := range r.locations { @@ -132,19 +133,19 @@ func (r MockResolver) AllLocations() <-chan Location { return results } -func (r MockResolver) FileMetadataByLocation(l Location) (FileMetadata, error) { +func (r MockResolver) FileMetadataByLocation(l file.Location) (file.Metadata, error) { info, err := os.Stat(l.RealPath) if err != nil { - return FileMetadata{}, err + return file.Metadata{}, err } // other types not supported - ty := RegularFile + ty := file.RegularFile if info.IsDir() { - ty = Directory + ty = file.Directory } - return FileMetadata{ + return file.Metadata{ Mode: info.Mode(), Type: ty, UserID: 0, // not supported @@ -153,8 +154,8 @@ func (r MockResolver) FileMetadataByLocation(l Location) (FileMetadata, error) { }, nil } -func (r MockResolver) FilesByMIMEType(types ...string) ([]Location, error) { - var locations []Location +func (r MockResolver) FilesByMIMEType(types ...string) ([]file.Location, error) { + var locations []file.Location for _, ty := range types { locations = append(r.mimeTypeIndex[ty], locations...) 
} diff --git a/syft/source/scheme.go b/syft/source/scheme.go deleted file mode 100644 index 1c90dba03..000000000 --- a/syft/source/scheme.go +++ /dev/null @@ -1,73 +0,0 @@ -package source - -import ( - "fmt" - "strings" - - "github.com/anchore/stereoscope/pkg/image" - "github.com/mitchellh/go-homedir" - "github.com/spf13/afero" -) - -// Scheme represents the optional prefixed string at the beginning of a user request (e.g. "docker:"). -type Scheme string - -const ( - // UnknownScheme is the default scheme - UnknownScheme Scheme = "UnknownScheme" - // DirectoryScheme indicates the source being cataloged is a directory on the root filesystem - DirectoryScheme Scheme = "DirectoryScheme" - // ImageScheme indicates the source being cataloged is a container image - ImageScheme Scheme = "ImageScheme" - // FileScheme indicates the source being cataloged is a single file - FileScheme Scheme = "FileScheme" -) - -var AllSchemes = []Scheme{ - DirectoryScheme, - ImageScheme, - FileScheme, -} - -func DetectScheme(fs afero.Fs, imageDetector sourceDetector, userInput string) (Scheme, image.Source, string, error) { - switch { - case strings.HasPrefix(userInput, "dir:"): - dirLocation, err := homedir.Expand(strings.TrimPrefix(userInput, "dir:")) - if err != nil { - return UnknownScheme, image.UnknownSource, "", fmt.Errorf("unable to expand directory path: %w", err) - } - return DirectoryScheme, image.UnknownSource, dirLocation, nil - - case strings.HasPrefix(userInput, "file:"): - fileLocation, err := homedir.Expand(strings.TrimPrefix(userInput, "file:")) - if err != nil { - return UnknownScheme, image.UnknownSource, "", fmt.Errorf("unable to expand directory path: %w", err) - } - return FileScheme, image.UnknownSource, fileLocation, nil - } - - // try the most specific sources first and move out towards more generic sources. 
- - // first: let's try the image detector, which has more scheme parsing internal to stereoscope - source, imageSpec, err := imageDetector(userInput) - if err == nil && source != image.UnknownSource { - return ImageScheme, source, imageSpec, nil - } - - // next: let's try more generic sources (dir, file, etc.) - location, err := homedir.Expand(userInput) - if err != nil { - return UnknownScheme, image.UnknownSource, "", fmt.Errorf("unable to expand potential directory path: %w", err) - } - - fileMeta, err := fs.Stat(location) - if err != nil { - return UnknownScheme, source, "", nil - } - - if fileMeta.IsDir() { - return DirectoryScheme, source, location, nil - } - - return FileScheme, source, location, nil -} diff --git a/syft/source/source.go b/syft/source/source.go index 6e36a1779..d60cc693b 100644 --- a/syft/source/source.go +++ b/syft/source/source.go @@ -37,7 +37,7 @@ type Source struct { // It acts as a struct input for some source constructors. type Input struct { UserInput string - Scheme Scheme + Type Type ImageSource image.Source Location string Platform string @@ -48,7 +48,7 @@ type Input struct { // from specific providers including a registry. 
func ParseInput(userInput string, platform string, detectAvailableImageSources bool) (*Input, error) { fs := afero.NewOsFs() - scheme, source, location, err := DetectScheme(fs, image.DetectSource, userInput) + sType, source, location, err := DetectTypeFromScheme(fs, image.DetectSource, userInput) if err != nil { return nil, err } @@ -56,11 +56,11 @@ func ParseInput(userInput string, platform string, detectAvailableImageSources b if source == image.UnknownSource { // only run for these two scheme // only check on packages command, attest we automatically try to pull from userInput - switch scheme { - case ImageScheme, UnknownScheme: + switch sType { + case ImageType, UnknownType: if detectAvailableImageSources { if imagePullSource := image.DetermineDefaultImagePullSource(userInput); imagePullSource != image.UnknownSource { - scheme = ImageScheme + sType = ImageType source = imagePullSource location = userInput } @@ -72,14 +72,14 @@ func ParseInput(userInput string, platform string, detectAvailableImageSources b } } - if scheme != ImageScheme && platform != "" { + if sType != ImageType && platform != "" { return nil, fmt.Errorf("cannot specify a platform for a non-image source") } // collect user input for downstream consumption return &Input{ UserInput: userInput, - Scheme: scheme, + Type: sType, ImageSource: source, Location: location, Platform: platform, @@ -104,12 +104,12 @@ func New(in Input, registryOptions *image.RegistryOptions, exclusions []string) var source *Source cleanupFn := func() {} - switch in.Scheme { - case FileScheme: + switch in.Type { + case FileType: source, cleanupFn, err = generateFileSource(fs, in.Location) - case DirectoryScheme: + case DirectoryType: source, cleanupFn, err = generateDirectorySource(fs, in.Location) - case ImageScheme: + case ImageType: source, cleanupFn, err = generateImageSource(in, registryOptions) default: err = fmt.Errorf("unable to process input for scanning: %q", in.UserInput) @@ -241,7 +241,7 @@ func 
NewFromDirectory(path string) (Source, error) { return Source{ mutex: &sync.Mutex{}, Metadata: Metadata{ - Scheme: DirectoryScheme, + Scheme: DirectoryType, Path: path, }, path: path, @@ -255,7 +255,7 @@ func NewFromFile(path string) (Source, func()) { return Source{ mutex: &sync.Mutex{}, Metadata: Metadata{ - Scheme: FileScheme, + Scheme: FileType, Path: path, }, path: analysisPath, @@ -298,7 +298,7 @@ func NewFromImage(img *image.Image, userImageStr string) (Source, error) { return Source{ Image: img, Metadata: Metadata{ - Scheme: ImageScheme, + Scheme: ImageType, ImageMetadata: NewImageMetadata(img, userImageStr), }, }, nil @@ -306,7 +306,7 @@ func NewFromImage(img *image.Image, userImageStr string) (Source, error) { func (s *Source) FileResolver(scope Scope) (FileResolver, error) { switch s.Metadata.Scheme { - case DirectoryScheme, FileScheme: + case DirectoryType, FileType: s.mutex.Lock() defer s.mutex.Unlock() if s.directoryResolver == nil { @@ -321,7 +321,7 @@ func (s *Source) FileResolver(scope Scope) (FileResolver, error) { s.directoryResolver = resolver } return s.directoryResolver, nil - case ImageScheme: + case ImageType: var resolver FileResolver var err error switch scope { diff --git a/syft/source/source_test.go b/syft/source/source_test.go index 8945e36cd..12bf9378f 100644 --- a/syft/source/source_test.go +++ b/syft/source/source_test.go @@ -27,13 +27,13 @@ func TestParseInput(t *testing.T) { name string input string platform string - expected Scheme + expected Type errFn require.ErrorAssertionFunc }{ { name: "ParseInput parses a file input", input: "test-fixtures/image-simple/file-1.txt", - expected: FileScheme, + expected: FileType, }, { name: "errors out when using platform for non-image scheme", @@ -52,7 +52,7 @@ func TestParseInput(t *testing.T) { test.errFn(t, err) if test.expected != "" { require.NotNil(t, sourceInput) - assert.Equal(t, sourceInput.Scheme, test.expected) + assert.Equal(t, sourceInput.Type, test.expected) } }) } diff --git 
a/syft/source/type.go b/syft/source/type.go new file mode 100644 index 000000000..17a417f76 --- /dev/null +++ b/syft/source/type.go @@ -0,0 +1,73 @@ +package source + +import ( + "fmt" + "strings" + + "github.com/anchore/stereoscope/pkg/image" + "github.com/mitchellh/go-homedir" + "github.com/spf13/afero" +) + +// Type represents the optional prefixed string at the beginning of a user request (e.g. "docker:"). +type Type string + +const ( + // UnknownType is the default scheme + UnknownType Type = "UnknownType" + // DirectoryType indicates the source being cataloged is a directory on the root filesystem + DirectoryType Type = "directory" + // ImageType indicates the source being cataloged is a container image + ImageType Type = "image" + // FileType indicates the source being cataloged is a single file + FileType Type = "file" +) + +var AllTypes = []Type{ + DirectoryType, + ImageType, + FileType, +} + +func DetectTypeFromScheme(fs afero.Fs, imageDetector sourceDetector, userInput string) (Type, image.Source, string, error) { + switch { + case strings.HasPrefix(userInput, "dir:"): + dirLocation, err := homedir.Expand(strings.TrimPrefix(userInput, "dir:")) + if err != nil { + return UnknownType, image.UnknownSource, "", fmt.Errorf("unable to expand directory path: %w", err) + } + return DirectoryType, image.UnknownSource, dirLocation, nil + + case strings.HasPrefix(userInput, "file:"): + fileLocation, err := homedir.Expand(strings.TrimPrefix(userInput, "file:")) + if err != nil { + return UnknownType, image.UnknownSource, "", fmt.Errorf("unable to expand directory path: %w", err) + } + return FileType, image.UnknownSource, fileLocation, nil + } + + // try the most specific sources first and move out towards more generic sources. 
+ + // first: let's try the image detector, which has more scheme parsing internal to stereoscope + source, imageSpec, err := imageDetector(userInput) + if err == nil && source != image.UnknownSource { + return ImageType, source, imageSpec, nil + } + + // next: let's try more generic sources (dir, file, etc.) + location, err := homedir.Expand(userInput) + if err != nil { + return UnknownType, image.UnknownSource, "", fmt.Errorf("unable to expand potential directory path: %w", err) + } + + fileMeta, err := fs.Stat(location) + if err != nil { + return UnknownType, source, "", nil + } + + if fileMeta.IsDir() { + return DirectoryType, source, location, nil + } + + return FileType, source, location, nil +} diff --git a/syft/source/scheme_test.go b/syft/source/type_test.go similarity index 89% rename from syft/source/scheme_test.go rename to syft/source/type_test.go index 4e94f602c..61719b3ae 100644 --- a/syft/source/scheme_test.go +++ b/syft/source/type_test.go @@ -23,7 +23,7 @@ func TestDetectScheme(t *testing.T) { dirs []string files []string detection detectorResult - expectedScheme Scheme + expectedScheme Type expectedLocation string }{ { @@ -33,7 +33,7 @@ func TestDetectScheme(t *testing.T) { src: image.DockerDaemonSource, ref: "wagoodman/dive:latest", }, - expectedScheme: ImageScheme, + expectedScheme: ImageType, expectedLocation: "wagoodman/dive:latest", }, { @@ -43,7 +43,7 @@ func TestDetectScheme(t *testing.T) { src: image.DockerDaemonSource, ref: "wagoodman/dive", }, - expectedScheme: ImageScheme, + expectedScheme: ImageType, expectedLocation: "wagoodman/dive", }, { @@ -53,7 +53,7 @@ func TestDetectScheme(t *testing.T) { src: image.OciRegistrySource, ref: "wagoodman/dive:latest", }, - expectedScheme: ImageScheme, + expectedScheme: ImageType, expectedLocation: "wagoodman/dive:latest", }, { @@ -63,7 +63,7 @@ func TestDetectScheme(t *testing.T) { src: image.DockerDaemonSource, ref: "wagoodman/dive:latest", }, - expectedScheme: ImageScheme, + expectedScheme: 
ImageType, expectedLocation: "wagoodman/dive:latest", }, { @@ -73,7 +73,7 @@ func TestDetectScheme(t *testing.T) { src: image.DockerDaemonSource, ref: "wagoodman/dive", }, - expectedScheme: ImageScheme, + expectedScheme: ImageType, expectedLocation: "wagoodman/dive", }, { @@ -83,7 +83,7 @@ func TestDetectScheme(t *testing.T) { src: image.DockerDaemonSource, ref: "latest", }, - expectedScheme: ImageScheme, + expectedScheme: ImageType, // we expected to be able to handle this case better, however, I don't see a way to do this // the user will need to provide more explicit input (docker:docker:latest) expectedLocation: "latest", @@ -95,7 +95,7 @@ func TestDetectScheme(t *testing.T) { src: image.DockerDaemonSource, ref: "docker:latest", }, - expectedScheme: ImageScheme, + expectedScheme: ImageType, // we expected to be able to handle this case better, however, I don't see a way to do this // the user will need to provide more explicit input (docker:docker:latest) expectedLocation: "docker:latest", @@ -107,7 +107,7 @@ func TestDetectScheme(t *testing.T) { src: image.OciTarballSource, ref: "some/path-to-file", }, - expectedScheme: ImageScheme, + expectedScheme: ImageType, expectedLocation: "some/path-to-file", }, { @@ -118,7 +118,7 @@ func TestDetectScheme(t *testing.T) { ref: "some/path-to-dir", }, dirs: []string{"some/path-to-dir"}, - expectedScheme: ImageScheme, + expectedScheme: ImageType, expectedLocation: "some/path-to-dir", }, { @@ -129,7 +129,7 @@ func TestDetectScheme(t *testing.T) { ref: "", }, dirs: []string{"some/path-to-dir"}, - expectedScheme: DirectoryScheme, + expectedScheme: DirectoryType, expectedLocation: "some/path-to-dir", }, { @@ -139,7 +139,7 @@ func TestDetectScheme(t *testing.T) { src: image.DockerDaemonSource, ref: "some/path-to-dir", }, - expectedScheme: ImageScheme, + expectedScheme: ImageType, expectedLocation: "some/path-to-dir", }, { @@ -149,7 +149,7 @@ func TestDetectScheme(t *testing.T) { src: image.PodmanDaemonSource, ref: 
"something:latest", }, - expectedScheme: ImageScheme, + expectedScheme: ImageType, expectedLocation: "something:latest", }, { @@ -160,7 +160,7 @@ func TestDetectScheme(t *testing.T) { ref: "", }, dirs: []string{"some/path-to-dir"}, - expectedScheme: DirectoryScheme, + expectedScheme: DirectoryType, expectedLocation: "some/path-to-dir", }, { @@ -171,7 +171,7 @@ func TestDetectScheme(t *testing.T) { ref: "", }, files: []string{"some/path-to-file"}, - expectedScheme: FileScheme, + expectedScheme: FileType, expectedLocation: "some/path-to-file", }, { @@ -182,7 +182,7 @@ func TestDetectScheme(t *testing.T) { ref: "", }, files: []string{"some/path-to-file"}, - expectedScheme: FileScheme, + expectedScheme: FileType, expectedLocation: "some/path-to-file", }, { @@ -192,7 +192,7 @@ func TestDetectScheme(t *testing.T) { src: image.UnknownSource, ref: "", }, - expectedScheme: DirectoryScheme, + expectedScheme: DirectoryType, expectedLocation: ".", }, { @@ -202,7 +202,7 @@ func TestDetectScheme(t *testing.T) { src: image.UnknownSource, ref: "", }, - expectedScheme: DirectoryScheme, + expectedScheme: DirectoryType, expectedLocation: ".", }, // we should support tilde expansion @@ -213,7 +213,7 @@ func TestDetectScheme(t *testing.T) { src: image.OciDirectorySource, ref: "~/some-path", }, - expectedScheme: ImageScheme, + expectedScheme: ImageType, expectedLocation: "~/some-path", }, { @@ -224,26 +224,26 @@ func TestDetectScheme(t *testing.T) { ref: "", }, dirs: []string{"~/some-path"}, - expectedScheme: DirectoryScheme, + expectedScheme: DirectoryType, expectedLocation: "~/some-path", }, { name: "tilde-expansion-dir-explicit-exists", userInput: "dir:~/some-path", dirs: []string{"~/some-path"}, - expectedScheme: DirectoryScheme, + expectedScheme: DirectoryType, expectedLocation: "~/some-path", }, { name: "tilde-expansion-dir-explicit-dne", userInput: "dir:~/some-path", - expectedScheme: DirectoryScheme, + expectedScheme: DirectoryType, expectedLocation: "~/some-path", }, { name: 
"tilde-expansion-dir-implicit-dne", userInput: "~/some-path", - expectedScheme: UnknownScheme, + expectedScheme: UnknownType, expectedLocation: "", }, } @@ -287,7 +287,7 @@ func TestDetectScheme(t *testing.T) { } } - actualScheme, actualSource, actualLocation, err := DetectScheme(fs, imageDetector, test.userInput) + actualScheme, actualSource, actualLocation, err := DetectTypeFromScheme(fs, imageDetector, test.userInput) if err != nil { t.Fatalf("unexpected err : %+v", err) } diff --git a/syft/tasks.go b/syft/tasks.go index 20fac37bc..1d5c7f23c 100644 --- a/syft/tasks.go +++ b/syft/tasks.go @@ -2,6 +2,7 @@ package syft import ( "fmt" + "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file/cataloger/fileclassifier" "github.com/anchore/syft/syft/file/cataloger/filecontents" @@ -62,7 +63,6 @@ func generateFileMetadataCatalogingTask(config CatalogingConfig) (task, error) { results.FileMetadata = result return nil, nil }, nil - } func generateFileDigestsCatalogingTask(config CatalogingConfig) (task, error) { @@ -88,7 +88,6 @@ func generateFileDigestsCatalogingTask(config CatalogingConfig) (task, error) { results.FileDigests = result return nil, nil }, nil - } func generateContentsCatalogingTask(config CatalogingConfig) (task, error) { @@ -139,7 +138,6 @@ func generateSecretsCatalogingTask(config CatalogingConfig) (task, error) { results.Secrets = result return nil, nil }, nil - } func generateFileClassifierTask(config CatalogingConfig) (task, error) {