commit 9f60d32369 (parent a3dc0fa97d)

    migrate source.FileResolver to the file package

    Signed-off-by: Alex Goodman <alex.goodman@anchore.com>
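At a glance, this commit renames the resolver abstraction from source.FileResolver to file.Resolver (and source.FilePathResolver to file.PathResolver) and moves the mock and excluding resolvers into the syft/file package. As a rough, illustrative sketch of the caller-side effect — the helper below is invented for illustration and is not part of this commit — code that consumes the interface now only needs the syft/file import:

package example

import (
    "github.com/anchore/syft/syft/file"
)

// countLocations is a hypothetical helper that depends on file.Resolver
// (previously source.FileResolver) and counts every location the resolver
// can enumerate via AllLocations.
func countLocations(resolver file.Resolver) int {
    count := 0
    for range resolver.AllLocations() {
        count++
    }
    return count
}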
@@ -16,7 +16,7 @@ func NewCataloger(classifiers []Classifier) (*Cataloger, error) {
     }, nil
 }
 
-func (i *Cataloger) Catalog(resolver source.FileResolver) (map[file.Coordinates][]file.Classification, error) {
+func (i *Cataloger) Catalog(resolver file.Resolver) (map[file.Coordinates][]file.Classification, error) {
     results := make(map[file.Coordinates][]file.Classification)
 
     numResults := 0
@@ -10,7 +10,6 @@ import (
     "github.com/anchore/syft/syft/file"
 
     "github.com/anchore/syft/internal"
-    "github.com/anchore/syft/syft/source"
 )
 
 type Classifier struct {
@@ -70,7 +69,7 @@ func DefaultClassifiers() []Classifier {
     }
 }
 
-func (c Classifier) Classify(resolver source.FileResolver, location file.Location) (*file.Classification, error) {
+func (c Classifier) Classify(resolver file.Resolver, location file.Location) (*file.Classification, error) {
     doesFilepathMatch, filepathNamedGroupValues := filepathMatches(c.FilepathPatterns, location)
     if !doesFilepathMatch {
         return nil, nil
@@ -9,7 +9,6 @@ import (
     "github.com/anchore/syft/internal"
     "github.com/anchore/syft/internal/log"
     "github.com/anchore/syft/syft/file"
-    "github.com/anchore/syft/syft/source"
 )
 
 type CatalogerConfig struct {
@@ -34,7 +33,7 @@ func NewCataloger(config CatalogerConfig) (*Cataloger, error) {
     }, nil
 }
 
-func (i *Cataloger) Catalog(resolver source.FileResolver) (map[file.Coordinates]string, error) {
+func (i *Cataloger) Catalog(resolver file.Resolver) (map[file.Coordinates]string, error) {
     results := make(map[file.Coordinates]string)
     var locations []file.Location
 
@@ -67,7 +66,7 @@ func (i *Cataloger) Catalog(resolver source.FileResolver) (map[file.Coordinates]
     return results, nil
 }
 
-func (i *Cataloger) catalogLocation(resolver source.FileResolver, location file.Location) (string, error) {
+func (i *Cataloger) catalogLocation(resolver file.Resolver, location file.Location) (string, error) {
     contentReader, err := resolver.FileContentsByLocation(location)
     if err != nil {
         return "", err
@@ -4,7 +4,6 @@ import (
     "github.com/anchore/syft/syft/file"
     "testing"
 
-    "github.com/anchore/syft/syft/source"
     "github.com/stretchr/testify/assert"
 )
 
@@ -73,7 +72,7 @@ func TestContentsCataloger(t *testing.T) {
             })
             assert.NoError(t, err)
 
-            resolver := source.NewMockResolverForPaths(test.files...)
+            resolver := file.NewMockResolverForPaths(test.files...)
             actual, err := c.Catalog(resolver)
             assert.NoError(t, err)
             assert.Equal(t, test.expected, actual, "mismatched contents")
@@ -29,7 +29,7 @@ func NewCataloger(hashes []crypto.Hash) (*Cataloger, error) {
     }, nil
 }
 
-func (i *Cataloger) Catalog(resolver source.FileResolver) (map[file.Coordinates][]file.Digest, error) {
+func (i *Cataloger) Catalog(resolver file.Resolver) (map[file.Coordinates][]file.Digest, error) {
     results := make(map[file.Coordinates][]file.Digest)
     locations := source.AllRegularFiles(resolver)
     stage, prog := digestsCatalogingProgress(int64(len(locations)))
@@ -57,7 +57,7 @@ func (i *Cataloger) Catalog(resolver source.FileResolver) (map[file.Coordinates]
     return results, nil
 }
 
-func (i *Cataloger) catalogLocation(resolver source.FileResolver, location file.Location) ([]file.Digest, error) {
+func (i *Cataloger) catalogLocation(resolver file.Resolver, location file.Location) ([]file.Digest, error) {
     meta, err := resolver.FileMetadataByLocation(location)
     if err != nil {
         return nil, err
@@ -5,7 +5,6 @@ import (
     "github.com/anchore/syft/internal/log"
     "github.com/anchore/syft/syft/event"
     "github.com/anchore/syft/syft/file"
-    "github.com/anchore/syft/syft/source"
     "github.com/wagoodman/go-partybus"
     "github.com/wagoodman/go-progress"
 )
@@ -17,7 +16,7 @@ func NewCataloger() *Cataloger {
     return &Cataloger{}
 }
 
-func (i *Cataloger) Catalog(resolver source.FileResolver) (map[file.Coordinates]file.Metadata, error) {
+func (i *Cataloger) Catalog(resolver file.Resolver) (map[file.Coordinates]file.Metadata, error) {
     results := make(map[file.Coordinates]file.Metadata)
     var locations []file.Location
     for location := range resolver.AllLocations() {
@@ -56,7 +56,7 @@ func NewCataloger(config CatalogerConfig) (*Cataloger, error) {
     }, nil
 }
 
-func (i *Cataloger) Catalog(resolver source.FileResolver) (map[file.Coordinates][]file.SearchResult, error) {
+func (i *Cataloger) Catalog(resolver file.Resolver) (map[file.Coordinates][]file.SearchResult, error) {
     results := make(map[file.Coordinates][]file.SearchResult)
     locations := source.AllRegularFiles(resolver)
     stage, prog, secretsDiscovered := newSecretsCatalogerMonitor(int64(len(locations)))
@@ -82,7 +82,7 @@ func (i *Cataloger) Catalog(resolver source.FileResolver) (map[file.Coordinates]
     return results, nil
 }
 
-func (i *Cataloger) catalogLocation(resolver source.FileResolver, location file.Location) ([]file.SearchResult, error) {
+func (i *Cataloger) catalogLocation(resolver file.Resolver, location file.Location) ([]file.SearchResult, error) {
     metadata, err := resolver.FileMetadataByLocation(location)
     if err != nil {
         return nil, err
@@ -120,7 +120,7 @@ func (i *Cataloger) catalogLocation(resolver source.FileResolver, location file.
     return secrets, nil
 }
 
-func extractValue(resolver source.FileResolver, location file.Location, start, length int64) (string, error) {
+func extractValue(resolver file.Resolver, location file.Location, start, length int64) (string, error) {
     readCloser, err := resolver.FileContentsByLocation(location)
     if err != nil {
         return "", fmt.Errorf("unable to fetch reader for location=%q : %w", location, err)
@@ -5,7 +5,6 @@ import (
     "testing"
 
     "github.com/anchore/syft/syft/file"
-    "github.com/anchore/syft/syft/source"
     "github.com/stretchr/testify/assert"
 )
 
@@ -188,7 +187,7 @@ func TestSecretsCataloger(t *testing.T) {
                 return
             }
 
-            resolver := source.NewMockResolverForPaths(test.fixture)
+            resolver := file.NewMockResolverForPaths(test.fixture)
 
             actualResults, err := c.Catalog(resolver)
             if err != nil && !test.catalogErr {
@@ -430,7 +429,7 @@ j4f668YfhUbKdRF6S6734856
                 t.Fatalf("could not create cataloger: %+v", err)
             }
 
-            resolver := source.NewMockResolverForPaths(test.fixture)
+            resolver := file.NewMockResolverForPaths(test.fixture)
 
             actualResults, err := c.Catalog(resolver)
             if err != nil {
@@ -11,11 +11,9 @@ import (
     "github.com/anchore/syft/syft/file"
 
     "github.com/anchore/syft/internal"
-
-    "github.com/anchore/syft/syft/source"
 )
 
-func catalogLocationByLine(resolver source.FileResolver, location file.Location, patterns map[string]*regexp.Regexp) ([]file.SearchResult, error) {
+func catalogLocationByLine(resolver file.Resolver, location file.Location, patterns map[string]*regexp.Regexp) ([]file.SearchResult, error) {
     readCloser, err := resolver.FileContentsByLocation(location)
     if err != nil {
         return nil, fmt.Errorf("unable to fetch reader for location=%q : %w", location, err)
@@ -47,7 +45,7 @@ func catalogLocationByLine(resolver source.FileResolver, location file.Location,
     return allSecrets, nil
 }
 
-func searchForSecretsWithinLine(resolver source.FileResolver, location file.Location, patterns map[string]*regexp.Regexp, line []byte, lineNo int64, position int64) ([]file.SearchResult, error) {
+func searchForSecretsWithinLine(resolver file.Resolver, location file.Location, patterns map[string]*regexp.Regexp, line []byte, lineNo int64, position int64) ([]file.SearchResult, error) {
     var secrets []file.SearchResult
     for name, pattern := range patterns {
         matches := pattern.FindAllIndex(line, -1)
@@ -76,7 +74,7 @@ func searchForSecretsWithinLine(resolver source.FileResolver, location file.Loca
     return secrets, nil
 }
 
-func readerAtPosition(resolver source.FileResolver, location file.Location, seekPosition int64) (io.ReadCloser, error) {
+func readerAtPosition(resolver file.Resolver, location file.Location, seekPosition int64) (io.ReadCloser, error) {
     readCloser, err := resolver.FileContentsByLocation(location)
     if err != nil {
         return nil, fmt.Errorf("unable to fetch reader for location=%q : %w", location, err)
@@ -1,10 +1,8 @@
-package source
+package file
 
 import (
     "fmt"
     "io"
-
-    "github.com/anchore/syft/syft/file"
 )
 
 type excludeFn func(string) bool
@@ -12,29 +10,29 @@ type excludeFn func(string) bool
 // excludingResolver decorates a resolver with an exclusion function that is used to
 // filter out entries in the delegate resolver
 type excludingResolver struct {
-    delegate  FileResolver
+    delegate  Resolver
     excludeFn excludeFn
 }
 
 // NewExcludingResolver create a new resolver which wraps the provided delegate and excludes
 // entries based on a provided path exclusion function
-func NewExcludingResolver(delegate FileResolver, excludeFn excludeFn) FileResolver {
+func NewExcludingResolver(delegate Resolver, excludeFn excludeFn) Resolver {
     return &excludingResolver{
         delegate,
         excludeFn,
     }
 }
 
-func (r *excludingResolver) FileContentsByLocation(location file.Location) (io.ReadCloser, error) {
+func (r *excludingResolver) FileContentsByLocation(location Location) (io.ReadCloser, error) {
     if locationMatches(&location, r.excludeFn) {
         return nil, fmt.Errorf("no such location: %+v", location.RealPath)
     }
     return r.delegate.FileContentsByLocation(location)
 }
 
-func (r *excludingResolver) FileMetadataByLocation(location file.Location) (file.Metadata, error) {
+func (r *excludingResolver) FileMetadataByLocation(location Location) (Metadata, error) {
     if locationMatches(&location, r.excludeFn) {
-        return file.Metadata{}, fmt.Errorf("no such location: %+v", location.RealPath)
+        return Metadata{}, fmt.Errorf("no such location: %+v", location.RealPath)
     }
     return r.delegate.FileMetadataByLocation(location)
 }
@@ -46,22 +44,22 @@ func (r *excludingResolver) HasPath(path string) bool {
     return r.delegate.HasPath(path)
 }
 
-func (r *excludingResolver) FilesByPath(paths ...string) ([]file.Location, error) {
+func (r *excludingResolver) FilesByPath(paths ...string) ([]Location, error) {
     locations, err := r.delegate.FilesByPath(paths...)
     return filterLocations(locations, err, r.excludeFn)
 }
 
-func (r *excludingResolver) FilesByGlob(patterns ...string) ([]file.Location, error) {
+func (r *excludingResolver) FilesByGlob(patterns ...string) ([]Location, error) {
     locations, err := r.delegate.FilesByGlob(patterns...)
     return filterLocations(locations, err, r.excludeFn)
 }
 
-func (r *excludingResolver) FilesByMIMEType(types ...string) ([]file.Location, error) {
+func (r *excludingResolver) FilesByMIMEType(types ...string) ([]Location, error) {
     locations, err := r.delegate.FilesByMIMEType(types...)
     return filterLocations(locations, err, r.excludeFn)
 }
 
-func (r *excludingResolver) RelativeFileByPath(location file.Location, path string) *file.Location {
+func (r *excludingResolver) RelativeFileByPath(location Location, path string) *Location {
     l := r.delegate.RelativeFileByPath(location, path)
     if l != nil && locationMatches(l, r.excludeFn) {
         return nil
@@ -69,8 +67,8 @@ func (r *excludingResolver) RelativeFileByPath(location file.Location, path stri
     return l
 }
 
-func (r *excludingResolver) AllLocations() <-chan file.Location {
-    c := make(chan file.Location)
+func (r *excludingResolver) AllLocations() <-chan Location {
+    c := make(chan Location)
     go func() {
         defer close(c)
         for location := range r.delegate.AllLocations() {
@@ -82,11 +80,11 @@ func (r *excludingResolver) AllLocations() <-chan file.Location {
     return c
 }
 
-func locationMatches(location *file.Location, exclusionFn excludeFn) bool {
+func locationMatches(location *Location, exclusionFn excludeFn) bool {
     return exclusionFn(location.RealPath) || exclusionFn(location.AccessPath)
 }
 
-func filterLocations(locations []file.Location, err error, exclusionFn excludeFn) ([]file.Location, error) {
+func filterLocations(locations []Location, err error, exclusionFn excludeFn) ([]Location, error) {
     if err != nil {
         return nil, err
     }
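Because NewExcludingResolver now lives in the file package and both accepts and returns file.Resolver, callers can wrap any resolver with a path-exclusion rule. A minimal usage sketch, assuming some existing file.Resolver value (the wrapper name and the /proc rule are illustrative, not from this commit):

package example

import (
    "strings"

    "github.com/anchore/syft/syft/file"
)

// withoutProc wraps a file.Resolver so that any path under /proc is hidden
// from downstream catalogers; paths for which the function returns true are
// filtered out of every lookup on the returned resolver.
func withoutProc(delegate file.Resolver) file.Resolver {
    return file.NewExcludingResolver(delegate, func(path string) bool {
        return strings.HasPrefix(path, "/proc/")
    })
}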
@@ -1,11 +1,10 @@
-package source
+package file
 
 import (
     "io"
     "strings"
     "testing"
 
-    "github.com/anchore/syft/syft/file"
     "github.com/stretchr/testify/assert"
 )
 
@@ -66,7 +65,7 @@ func TestExcludingResolver(t *testing.T) {
             locations, _ = excludingResolver.FilesByMIMEType()
             assert.ElementsMatch(t, locationPaths(locations), test.expected)
 
-            locations = []file.Location{}
+            locations = []Location{}
 
             channel := excludingResolver.AllLocations()
             for location := range channel {
@@ -118,9 +117,9 @@ func difference(a, b []string) []string {
     return diff
 }
 
-func makeLocation(path string) file.Location {
-    return file.Location{
-        Coordinates: file.Coordinates{
+func makeLocation(path string) Location {
+    return Location{
+        Coordinates: Coordinates{
             RealPath:     path,
             FileSystemID: "",
         },
@@ -128,7 +127,7 @@ func makeLocation(path string) file.Location {
     }
 }
 
-func locationPaths(locations []file.Location) []string {
+func locationPaths(locations []Location) []string {
     paths := []string{}
     for _, l := range locations {
         paths = append(paths, l.RealPath)
@@ -140,20 +139,20 @@ type mockResolver struct {
     locations []string
 }
 
-func (r *mockResolver) getLocations() ([]file.Location, error) {
-    out := []file.Location{}
+func (r *mockResolver) getLocations() ([]Location, error) {
+    out := []Location{}
     for _, path := range r.locations {
         out = append(out, makeLocation(path))
     }
     return out, nil
 }
 
-func (r *mockResolver) FileContentsByLocation(_ file.Location) (io.ReadCloser, error) {
+func (r *mockResolver) FileContentsByLocation(_ Location) (io.ReadCloser, error) {
     return io.NopCloser(strings.NewReader("Hello, world!")), nil
 }
 
-func (r *mockResolver) FileMetadataByLocation(_ file.Location) (file.Metadata, error) {
-    return file.Metadata{
+func (r *mockResolver) FileMetadataByLocation(_ Location) (Metadata, error) {
+    return Metadata{
        LinkDestination: "MOCK",
     }, nil
 }
@@ -162,28 +161,28 @@ func (r *mockResolver) HasPath(_ string) bool {
     return true
 }
 
-func (r *mockResolver) FilesByPath(_ ...string) ([]file.Location, error) {
+func (r *mockResolver) FilesByPath(_ ...string) ([]Location, error) {
     return r.getLocations()
 }
 
-func (r *mockResolver) FilesByGlob(_ ...string) ([]file.Location, error) {
+func (r *mockResolver) FilesByGlob(_ ...string) ([]Location, error) {
     return r.getLocations()
 }
 
-func (r *mockResolver) FilesByMIMEType(_ ...string) ([]file.Location, error) {
+func (r *mockResolver) FilesByMIMEType(_ ...string) ([]Location, error) {
     return r.getLocations()
 }
 
-func (r *mockResolver) RelativeFileByPath(_ file.Location, path string) *file.Location {
-    return &file.Location{
-        Coordinates: file.Coordinates{
+func (r *mockResolver) RelativeFileByPath(_ Location, path string) *Location {
+    return &Location{
+        Coordinates: Coordinates{
             RealPath: path,
         },
     }
 }
 
-func (r *mockResolver) AllLocations() <-chan file.Location {
-    c := make(chan file.Location)
+func (r *mockResolver) AllLocations() <-chan Location {
+    c := make(chan Location)
     go func() {
         defer close(c)
         locations, _ := r.getLocations()
@@ -1,13 +0,0 @@
-//go:build windows
-// +build windows
-
-package file
-
-import (
-    "os"
-)
-
-// GetXid is a placeholder for windows file information
-func GetXid(info os.FileInfo) (uid, gid int) {
-    return -1, -1
-}
@@ -79,15 +79,15 @@ func NewLocationFromDirectory(responsePath string, ref file.Reference) Location
 }
 
 // NewVirtualLocationFromDirectory creates a new Location representing the given path (extracted from the ref) relative to the given directory with a separate virtual access path.
-func NewVirtualLocationFromDirectory(responsePath, virtualResponsePath string, ref file.Reference) Location {
-    if responsePath == virtualResponsePath {
+func NewVirtualLocationFromDirectory(responsePath, accessResponsePath string, ref file.Reference) Location {
+    if responsePath == accessResponsePath {
         return NewLocationFromDirectory(responsePath, ref)
     }
     return Location{
         Coordinates: Coordinates{
             RealPath: responsePath,
         },
-        AccessPath: virtualResponsePath,
+        AccessPath: accessResponsePath,
         ref:        ref,
     }
 }
@@ -1,12 +1,6 @@
 package file
 
-import (
-    "os"
-
-    "github.com/anchore/stereoscope/pkg/file"
-    "github.com/anchore/stereoscope/pkg/image"
-    "github.com/anchore/syft/internal/log"
-)
+import "os"
 
 type Metadata struct {
     Mode os.FileMode
@@ -17,51 +11,3 @@ type Metadata struct {
     Size     int64
     MIMEType string
 }
-
-func MetadataByLocation(img *image.Image, location Location) (Metadata, error) {
-    entry, err := img.FileCatalog.Get(location.ref)
-    if err != nil {
-        return Metadata{}, err
-    }
-
-    return Metadata{
-        Mode:            entry.Metadata.Mode,
-        Type:            NewFileTypeFromTarHeaderTypeFlag(entry.Metadata.TypeFlag),
-        UserID:          entry.Metadata.UserID,
-        GroupID:         entry.Metadata.GroupID,
-        LinkDestination: entry.Metadata.Linkname,
-        Size:            entry.Metadata.Size,
-        MIMEType:        entry.Metadata.MIMEType,
-    }, nil
-}
-
-func MetadataFromPath(path string, info os.FileInfo, withMIMEType bool) Metadata {
-    var mimeType string
-    uid, gid := GetXid(info)
-
-    if withMIMEType {
-        f, err := os.Open(path)
-        if err != nil {
-            // TODO: it may be that the file is inaccessible, however, this is not an error or a warning. In the future we need to track these as known-unknowns
-            f = nil
-        } else {
-            defer func() {
-                if err := f.Close(); err != nil {
-                    log.Warnf("unable to close file while obtaining metadata: %s", path)
-                }
-            }()
-        }
-
-        mimeType = file.MIMEType(f)
-    }
-
-    return Metadata{
-        Mode: info.Mode(),
-        Type: NewFileTypeFromMode(info.Mode()),
-        // unsupported across platforms
-        UserID:   uid,
-        GroupID:  gid,
-        Size:     info.Size(),
-        MIMEType: mimeType,
-    }
-}
@@ -1,42 +1,41 @@
-package source
+package file
 
 import (
     "fmt"
     "io"
     "os"
 
-    "github.com/anchore/syft/syft/file"
     "github.com/bmatcuk/doublestar/v4"
 )
 
-var _ FileResolver = (*MockResolver)(nil)
+var _ Resolver = (*MockResolver)(nil)
 
-// MockResolver implements the FileResolver interface and is intended for use *only in test code*.
+// MockResolver implements the Resolver interface and is intended for use *only in test code*.
 // It provides an implementation that can resolve local filesystem paths using only a provided discrete list of file
 // paths, which are typically paths to test fixtures.
 type MockResolver struct {
-    locations     []file.Location
-    metadata      map[file.Location]file.Metadata
-    mimeTypeIndex map[string][]file.Location
+    locations     []Location
+    metadata      map[Location]Metadata
+    mimeTypeIndex map[string][]Location
 }
 
 // NewMockResolverForPaths creates a new MockResolver, where the only resolvable
 // files are those specified by the supplied paths.
 func NewMockResolverForPaths(paths ...string) *MockResolver {
-    var locations []file.Location
+    var locations []Location
     for _, p := range paths {
-        locations = append(locations, file.NewLocation(p))
+        locations = append(locations, NewLocation(p))
     }
 
     return &MockResolver{
         locations: locations,
-        metadata:  make(map[file.Location]file.Metadata),
+        metadata:  make(map[Location]Metadata),
     }
 }
 
-func NewMockResolverForPathsWithMetadata(metadata map[file.Location]file.Metadata) *MockResolver {
-    var locations []file.Location
-    var mimeTypeIndex = make(map[string][]file.Location)
+func NewMockResolverForPathsWithMetadata(metadata map[Location]Metadata) *MockResolver {
+    var locations []Location
+    var mimeTypeIndex = make(map[string][]Location)
     for l, m := range metadata {
         locations = append(locations, l)
         mimeTypeIndex[m.MIMEType] = append(mimeTypeIndex[m.MIMEType], l)
@@ -66,7 +65,7 @@ func (r MockResolver) String() string {
 
 // FileContentsByLocation fetches file contents for a single location. If the
 // path does not exist, an error is returned.
-func (r MockResolver) FileContentsByLocation(location file.Location) (io.ReadCloser, error) {
+func (r MockResolver) FileContentsByLocation(location Location) (io.ReadCloser, error) {
     for _, l := range r.locations {
         if l == location {
             return os.Open(location.RealPath)
@@ -77,12 +76,12 @@ func (r MockResolver) FileContentsByLocation(location file.Location) (io.ReadClo
 }
 
 // FilesByPath returns all Locations that match the given paths.
-func (r MockResolver) FilesByPath(paths ...string) ([]file.Location, error) {
-    var results []file.Location
+func (r MockResolver) FilesByPath(paths ...string) ([]Location, error) {
+    var results []Location
     for _, p := range paths {
         for _, location := range r.locations {
             if p == location.RealPath {
-                results = append(results, file.NewLocation(p))
+                results = append(results, NewLocation(p))
             }
         }
     }
@@ -91,8 +90,8 @@ func (r MockResolver) FilesByPath(paths ...string) ([]file.Location, error) {
 }
 
 // FilesByGlob returns all Locations that match the given path glob pattern.
-func (r MockResolver) FilesByGlob(patterns ...string) ([]file.Location, error) {
-    var results []file.Location
+func (r MockResolver) FilesByGlob(patterns ...string) ([]Location, error) {
+    var results []Location
     for _, pattern := range patterns {
         for _, location := range r.locations {
             matches, err := doublestar.Match(pattern, location.RealPath)
@@ -109,7 +108,7 @@ func (r MockResolver) FilesByGlob(patterns ...string) ([]file.Location, error) {
 }
 
 // RelativeFileByPath returns a single Location for the given path.
-func (r MockResolver) RelativeFileByPath(_ file.Location, path string) *file.Location {
+func (r MockResolver) RelativeFileByPath(_ Location, path string) *Location {
     paths, err := r.FilesByPath(path)
     if err != nil {
         return nil
@@ -122,8 +121,8 @@ func (r MockResolver) RelativeFileByPath(_ file.Location, path string) *file.Loc
     return &paths[0]
 }
 
-func (r MockResolver) AllLocations() <-chan file.Location {
-    results := make(chan file.Location)
+func (r MockResolver) AllLocations() <-chan Location {
+    results := make(chan Location)
     go func() {
         defer close(results)
         for _, l := range r.locations {
@@ -133,19 +132,19 @@ func (r MockResolver) AllLocations() <-chan file.Location {
     return results
 }
 
-func (r MockResolver) FileMetadataByLocation(l file.Location) (file.Metadata, error) {
+func (r MockResolver) FileMetadataByLocation(l Location) (Metadata, error) {
     info, err := os.Stat(l.RealPath)
     if err != nil {
-        return file.Metadata{}, err
+        return Metadata{}, err
     }
 
     // other types not supported
-    ty := file.RegularFile
+    ty := RegularFile
     if info.IsDir() {
-        ty = file.Directory
+        ty = Directory
     }
 
-    return file.Metadata{
+    return Metadata{
         Mode: info.Mode(),
         Type: ty,
         UserID: 0, // not supported
@@ -154,8 +153,8 @@ func (r MockResolver) FileMetadataByLocation(l file.Location) (file.Metadata, er
     }, nil
 }
 
-func (r MockResolver) FilesByMIMEType(types ...string) ([]file.Location, error) {
-    var locations []file.Location
+func (r MockResolver) FilesByMIMEType(types ...string) ([]Location, error) {
+    var locations []Location
     for _, ty := range types {
         locations = append(r.mimeTypeIndex[ty], locations...)
     }
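With MockResolver now in the file package, test code constructs it as file.NewMockResolverForPaths, as the cataloger tests in this diff do. A hedged sketch of a standalone test using the relocated constructor (the test name, fixture path, and assertions are placeholders, not part of this commit):

package example

import (
    "testing"

    "github.com/anchore/syft/syft/file"
    "github.com/stretchr/testify/assert"
)

// TestResolvesFixture is illustrative only: the mock resolver can resolve
// exactly the paths it was constructed with, and nothing else.
func TestResolvesFixture(t *testing.T) {
    resolver := file.NewMockResolverForPaths("test-fixtures/a-path.txt")

    locations, err := resolver.FilesByPath("test-fixtures/a-path.txt")
    assert.NoError(t, err)
    assert.Len(t, locations, 1)
}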
syft/file/resolver.go (new file, 41 lines)
@@ -0,0 +1,41 @@
+package file
+
+import (
+    "io"
+)
+
+// Resolver is an interface that encompasses how to get specific file references and file contents for a generic data source.
+type Resolver interface {
+    ContentResolver
+    PathResolver
+    LocationResolver
+    MetadataResolver
+}
+
+// ContentResolver knows how to get file content for a given Location
+type ContentResolver interface {
+    FileContentsByLocation(Location) (io.ReadCloser, error)
+}
+
+type MetadataResolver interface {
+    FileMetadataByLocation(Location) (Metadata, error)
+}
+
+// PathResolver knows how to get a Location for given string paths and globs
+type PathResolver interface {
+    // HasPath indicates if the given path exists in the underlying source.
+    HasPath(string) bool
+    // FilesByPath fetches a set of file references which have the given path (for an image, there may be multiple matches)
+    FilesByPath(paths ...string) ([]Location, error)
+    // FilesByGlob fetches a set of file references which the given glob matches
+    FilesByGlob(patterns ...string) ([]Location, error)
+    // FilesByMIMEType fetches a set of file references which the contents have been classified as one of the given MIME Types
+    FilesByMIMEType(types ...string) ([]Location, error)
+    // RelativeFileByPath fetches a single file at the given path relative to the layer squash of the given reference.
+    // This is helpful when attempting to find a file that is in the same layer or lower as another file.
+    RelativeFileByPath(_ Location, path string) *Location
+}
+
+type LocationResolver interface {
+    AllLocations() <-chan Location
+}
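Because the new Resolver interface is composed of smaller interfaces, a helper that only needs path lookups can accept the narrower file.PathResolver, the same way packageFileOwnershipRelationships does later in this diff. A minimal sketch under that assumption (the helper name and glob are illustrative):

package example

import "github.com/anchore/syft/syft/file"

// findLicenses only needs glob lookups, so it depends on file.PathResolver
// rather than the full file.Resolver.
func findLicenses(resolver file.PathResolver) ([]file.Location, error) {
    return resolver.FilesByGlob("**/LICENSE*")
}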
@@ -2,6 +2,7 @@ package linux
 
 import (
     "fmt"
+    "github.com/anchore/syft/syft/file"
     "io/ioutil"
     "regexp"
     "strings"
@@ -9,7 +10,6 @@ import (
     "github.com/acobaugh/osrelease"
     "github.com/anchore/syft/internal"
     "github.com/anchore/syft/internal/log"
-    "github.com/anchore/syft/syft/source"
     "github.com/google/go-cmp/cmp"
 )
 
@@ -53,7 +53,7 @@ var identityFiles = []parseEntry{
 }
 
 // IdentifyRelease parses distro-specific files to discover and raise linux distribution release details.
-func IdentifyRelease(resolver source.FileResolver) *Release {
+func IdentifyRelease(resolver file.Resolver) *Release {
     for _, entry := range identityFiles {
         locations, err := resolver.FilesByPath(entry.path)
         if err != nil {
@@ -2,7 +2,7 @@ package pkg
 
 import (
     "github.com/anchore/syft/syft/artifact"
-    "github.com/anchore/syft/syft/source"
+    "github.com/anchore/syft/syft/file"
 )
 
 // Cataloger describes behavior for an object to participate in parsing container image or file system
@@ -12,5 +12,5 @@ type Cataloger interface {
     // Name returns a string that uniquely describes a cataloger
     Name() string
     // Catalog is given an object to resolve file references and content, this function returns any discovered Packages after analyzing the catalog source.
-    Catalog(resolver source.FileResolver) ([]Package, []artifact.Relationship, error)
+    Catalog(resolver file.Resolver) ([]Package, []artifact.Relationship, error)
 }
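Given the pkg.Cataloger interface change above, a custom cataloger now satisfies the interface by accepting file.Resolver in its Catalog method. A rough sketch of a no-op implementation under that assumption (the type and its behavior are invented purely to show the post-migration method set):

package example

import (
    "github.com/anchore/syft/syft/artifact"
    "github.com/anchore/syft/syft/file"
    "github.com/anchore/syft/syft/pkg"
)

// noopCataloger discovers nothing; it exists only to illustrate the updated
// pkg.Cataloger signatures after the resolver move.
type noopCataloger struct{}

func (n noopCataloger) Name() string { return "noop-cataloger" }

func (n noopCataloger) Catalog(_ file.Resolver) ([]pkg.Package, []artifact.Relationship, error) {
    return nil, nil, nil
}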
@ -17,7 +17,6 @@ import (
|
|||||||
"github.com/anchore/syft/internal/log"
|
"github.com/anchore/syft/internal/log"
|
||||||
"github.com/anchore/syft/syft/artifact"
|
"github.com/anchore/syft/syft/artifact"
|
||||||
"github.com/anchore/syft/syft/pkg"
|
"github.com/anchore/syft/syft/pkg"
|
||||||
"github.com/anchore/syft/syft/source"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
const (
|
const (
|
||||||
@ -39,7 +38,7 @@ func (c *Cataloger) Name() string {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Catalog is given an object to resolve file references and content, this function returns any discovered Packages after analyzing dpkg support files.
|
// Catalog is given an object to resolve file references and content, this function returns any discovered Packages after analyzing dpkg support files.
|
||||||
func (c *Cataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []artifact.Relationship, error) {
|
func (c *Cataloger) Catalog(resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) {
|
||||||
dbFileMatches, err := resolver.FilesByGlob(pkg.DpkgDBGlob)
|
dbFileMatches, err := resolver.FilesByGlob(pkg.DpkgDBGlob)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, fmt.Errorf("failed to find dpkg status files's by glob: %w", err)
|
return nil, nil, fmt.Errorf("failed to find dpkg status files's by glob: %w", err)
|
||||||
@ -80,7 +79,7 @@ func (c *Cataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []arti
|
|||||||
return allPackages, nil, nil
|
return allPackages, nil, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func addLicenses(resolver source.FileResolver, dbLocation file.Location, p *pkg.Package) {
|
func addLicenses(resolver file.Resolver, dbLocation file.Location, p *pkg.Package) {
|
||||||
// get license information from the copyright file
|
// get license information from the copyright file
|
||||||
copyrightReader, copyrightLocation := fetchCopyrightContents(resolver, dbLocation, p)
|
copyrightReader, copyrightLocation := fetchCopyrightContents(resolver, dbLocation, p)
|
||||||
|
|
||||||
@ -94,7 +93,7 @@ func addLicenses(resolver source.FileResolver, dbLocation file.Location, p *pkg.
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func mergeFileListing(resolver source.FileResolver, dbLocation file.Location, p *pkg.Package) {
|
func mergeFileListing(resolver file.Resolver, dbLocation file.Location, p *pkg.Package) {
|
||||||
metadata := p.Metadata.(pkg.DpkgMetadata)
|
metadata := p.Metadata.(pkg.DpkgMetadata)
|
||||||
|
|
||||||
// get file listing (package files + additional config files)
|
// get file listing (package files + additional config files)
|
||||||
@ -122,7 +121,7 @@ loopNewFiles:
|
|||||||
p.Locations = append(p.Locations, infoLocations...)
|
p.Locations = append(p.Locations, infoLocations...)
|
||||||
}
|
}
|
||||||
|
|
||||||
func getAdditionalFileListing(resolver source.FileResolver, dbLocation file.Location, p *pkg.Package) ([]pkg.DpkgFileRecord, []file.Location) {
|
func getAdditionalFileListing(resolver file.Resolver, dbLocation file.Location, p *pkg.Package) ([]pkg.DpkgFileRecord, []file.Location) {
|
||||||
// ensure the default value for a collection is never nil since this may be shown as JSON
|
// ensure the default value for a collection is never nil since this may be shown as JSON
|
||||||
var files = make([]pkg.DpkgFileRecord, 0)
|
var files = make([]pkg.DpkgFileRecord, 0)
|
||||||
var locations []file.Location
|
var locations []file.Location
|
||||||
@ -152,7 +151,7 @@ func getAdditionalFileListing(resolver source.FileResolver, dbLocation file.Loca
|
|||||||
return files, locations
|
return files, locations
|
||||||
}
|
}
|
||||||
|
|
||||||
func fetchMd5Contents(resolver source.FileResolver, dbLocation file.Location, p *pkg.Package) (io.ReadCloser, *file.Location) {
|
func fetchMd5Contents(resolver file.Resolver, dbLocation file.Location, p *pkg.Package) (io.ReadCloser, *file.Location) {
|
||||||
var md5Reader io.ReadCloser
|
var md5Reader io.ReadCloser
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
@ -179,7 +178,7 @@ func fetchMd5Contents(resolver source.FileResolver, dbLocation file.Location, p
|
|||||||
return md5Reader, location
|
return md5Reader, location
|
||||||
}
|
}
|
||||||
|
|
||||||
func fetchConffileContents(resolver source.FileResolver, dbLocation file.Location, p *pkg.Package) (io.ReadCloser, *file.Location) {
|
func fetchConffileContents(resolver file.Resolver, dbLocation file.Location, p *pkg.Package) (io.ReadCloser, *file.Location) {
|
||||||
var reader io.ReadCloser
|
var reader io.ReadCloser
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
@ -206,7 +205,7 @@ func fetchConffileContents(resolver source.FileResolver, dbLocation file.Locatio
|
|||||||
return reader, location
|
return reader, location
|
||||||
}
|
}
|
||||||
|
|
||||||
func fetchCopyrightContents(resolver source.FileResolver, dbLocation file.Location, p *pkg.Package) (io.ReadCloser, *file.Location) {
|
func fetchCopyrightContents(resolver file.Resolver, dbLocation file.Location, p *pkg.Package) (io.ReadCloser, *file.Location) {
|
||||||
// look for /usr/share/docs/NAME/copyright files
|
// look for /usr/share/docs/NAME/copyright files
|
||||||
name := p.Name
|
name := p.Name
|
||||||
copyrightPath := path.Join(docsPath, name, "copyright")
|
copyrightPath := path.Join(docsPath, name, "copyright")
|
||||||
|
|||||||
@ -13,7 +13,6 @@ import (
|
|||||||
"github.com/anchore/syft/internal"
|
"github.com/anchore/syft/internal"
|
||||||
"github.com/anchore/syft/internal/log"
|
"github.com/anchore/syft/internal/log"
|
||||||
"github.com/anchore/syft/syft/pkg"
|
"github.com/anchore/syft/syft/pkg"
|
||||||
"github.com/anchore/syft/syft/source"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// Cataloger implements the Catalog interface and is responsible for dispatching the proper parser function for
|
// Cataloger implements the Catalog interface and is responsible for dispatching the proper parser function for
|
||||||
@ -39,7 +38,7 @@ func (c *Cataloger) Name() string {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Catalog is given an object to resolve file references and content, this function returns any discovered Packages after analyzing the catalog source.
|
// Catalog is given an object to resolve file references and content, this function returns any discovered Packages after analyzing the catalog source.
|
||||||
func (c *Cataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []artifact.Relationship, error) {
|
func (c *Cataloger) Catalog(resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) {
|
||||||
var packages []pkg.Package
|
var packages []pkg.Package
|
||||||
var relationships []artifact.Relationship
|
var relationships []artifact.Relationship
|
||||||
|
|
||||||
@ -72,7 +71,7 @@ func (c *Cataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []arti
|
|||||||
}
|
}
|
||||||
|
|
||||||
// SelectFiles takes a set of file trees and resolves and file references of interest for future cataloging
|
// SelectFiles takes a set of file trees and resolves and file references of interest for future cataloging
|
||||||
func (c *Cataloger) selectFiles(resolver source.FilePathResolver) map[file.Location]Parser {
|
func (c *Cataloger) selectFiles(resolver file.PathResolver) map[file.Location]Parser {
|
||||||
var parserByLocation = make(map[file.Location]Parser)
|
var parserByLocation = make(map[file.Location]Parser)
|
||||||
|
|
||||||
// select by exact path
|
// select by exact path
|
||||||
|
|||||||
@ -2,6 +2,7 @@ package generic
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"github.com/anchore/syft/syft/file"
|
||||||
"io"
|
"io"
|
||||||
"io/ioutil"
|
"io/ioutil"
|
||||||
"testing"
|
"testing"
|
||||||
@ -10,7 +11,6 @@ import (
|
|||||||
|
|
||||||
"github.com/anchore/syft/syft/artifact"
|
"github.com/anchore/syft/syft/artifact"
|
||||||
"github.com/anchore/syft/syft/pkg"
|
"github.com/anchore/syft/syft/pkg"
|
||||||
"github.com/anchore/syft/syft/source"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func parser(_ string, reader io.Reader) ([]*pkg.Package, []artifact.Relationship, error) {
|
func parser(_ string, reader io.Reader) ([]*pkg.Package, []artifact.Relationship, error) {
|
||||||
@ -37,7 +37,7 @@ func TestGenericCataloger(t *testing.T) {
|
|||||||
upstream := "some-other-cataloger"
|
upstream := "some-other-cataloger"
|
||||||
|
|
||||||
expectedSelection := []string{"test-fixtures/last/path.txt", "test-fixtures/another-path.txt", "test-fixtures/a-path.txt"}
|
expectedSelection := []string{"test-fixtures/last/path.txt", "test-fixtures/another-path.txt", "test-fixtures/a-path.txt"}
|
||||||
resolver := source.NewMockResolverForPaths(expectedSelection...)
|
resolver := file.NewMockResolverForPaths(expectedSelection...)
|
||||||
cataloger := NewCataloger(pathParsers, globParsers, upstream)
|
cataloger := NewCataloger(pathParsers, globParsers, upstream)
|
||||||
|
|
||||||
expectedPkgs := make(map[string]pkg.Package)
|
expectedPkgs := make(map[string]pkg.Package)
|
||||||
|
|||||||
@ -5,13 +5,13 @@ package golang
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"github.com/anchore/syft/syft/file"
|
||||||
|
|
||||||
"github.com/anchore/syft/internal"
|
"github.com/anchore/syft/internal"
|
||||||
|
|
||||||
"github.com/anchore/syft/internal/log"
|
"github.com/anchore/syft/internal/log"
|
||||||
"github.com/anchore/syft/syft/artifact"
|
"github.com/anchore/syft/syft/artifact"
|
||||||
"github.com/anchore/syft/syft/pkg"
|
"github.com/anchore/syft/syft/pkg"
|
||||||
"github.com/anchore/syft/syft/source"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
const catalogerName = "go-module-binary-cataloger"
|
const catalogerName = "go-module-binary-cataloger"
|
||||||
@ -29,7 +29,7 @@ func (c *Cataloger) Name() string {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Catalog is given an object to resolve file references and content, this function returns any discovered Packages after analyzing rpm db installation.
|
// Catalog is given an object to resolve file references and content, this function returns any discovered Packages after analyzing rpm db installation.
|
||||||
func (c *Cataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []artifact.Relationship, error) {
|
func (c *Cataloger) Catalog(resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) {
|
||||||
var pkgs []pkg.Package
|
var pkgs []pkg.Package
|
||||||
|
|
||||||
fileMatches, err := resolver.FilesByMIMEType(internal.ExecutableMIMETypeSet.List()...)
|
fileMatches, err := resolver.FilesByMIMEType(internal.ExecutableMIMETypeSet.List()...)
|
||||||
|
|||||||
@ -2,6 +2,7 @@ package packages
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"github.com/anchore/syft/syft/file"
|
||||||
|
|
||||||
"github.com/anchore/syft/internal/bus"
|
"github.com/anchore/syft/internal/bus"
|
||||||
"github.com/anchore/syft/internal/log"
|
"github.com/anchore/syft/internal/log"
|
||||||
@ -11,7 +12,6 @@ import (
|
|||||||
"github.com/anchore/syft/syft/event/monitor"
|
"github.com/anchore/syft/syft/event/monitor"
|
||||||
"github.com/anchore/syft/syft/linux"
|
"github.com/anchore/syft/syft/linux"
|
||||||
"github.com/anchore/syft/syft/pkg"
|
"github.com/anchore/syft/syft/pkg"
|
||||||
"github.com/anchore/syft/syft/source"
|
|
||||||
"github.com/hashicorp/go-multierror"
|
"github.com/hashicorp/go-multierror"
|
||||||
"github.com/wagoodman/go-partybus"
|
"github.com/wagoodman/go-partybus"
|
||||||
"github.com/wagoodman/go-progress"
|
"github.com/wagoodman/go-progress"
|
||||||
@ -21,7 +21,7 @@ import (
|
|||||||
// In order to efficiently retrieve contents from an underlying container image the content fetch requests are
|
// In order to efficiently retrieve contents from an underlying container image the content fetch requests are
|
||||||
// done in bulk. Specifically, all files of interest are collected from each cataloger and accumulated into a single
|
// done in bulk. Specifically, all files of interest are collected from each cataloger and accumulated into a single
|
||||||
// request.
|
// request.
|
||||||
func Catalog(resolver source.FileResolver, release *linux.Release, catalogers ...pkg.Cataloger) (*pkg.Catalog, []artifact.Relationship, error) {
|
func Catalog(resolver file.Resolver, release *linux.Release, catalogers ...pkg.Cataloger) (*pkg.Catalog, []artifact.Relationship, error) {
|
||||||
catalog := pkg.NewCatalog()
|
catalog := pkg.NewCatalog()
|
||||||
var allRelationships []artifact.Relationship
|
var allRelationships []artifact.Relationship
|
||||||
|
|
||||||
@ -77,7 +77,7 @@ func Catalog(resolver source.FileResolver, release *linux.Release, catalogers ..
|
|||||||
return catalog, allRelationships, nil
|
return catalog, allRelationships, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func packageFileOwnershipRelationships(p pkg.Package, resolver source.FilePathResolver) ([]artifact.Relationship, error) {
|
func packageFileOwnershipRelationships(p pkg.Package, resolver file.PathResolver) ([]artifact.Relationship, error) {
|
||||||
fileOwner, ok := p.Metadata.(pkg.FileOwner)
|
fileOwner, ok := p.Metadata.(pkg.FileOwner)
|
||||||
if !ok {
|
if !ok {
|
||||||
return nil, nil
|
return nil, nil
|
||||||
|
|||||||
@@ -13,8 +13,6 @@ import (

    "github.com/anchore/syft/syft/artifact"
    "github.com/anchore/syft/syft/pkg"
-
-    "github.com/anchore/syft/syft/source"
)

const (
@@ -36,7 +34,7 @@ func (c *PackageCataloger) Name() string {
}

// Catalog is given an object to resolve file references and content, this function returns any discovered Packages after analyzing python egg and wheel installations.
-func (c *PackageCataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []artifact.Relationship, error) {
+func (c *PackageCataloger) Catalog(resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) {
    var fileMatches []file.Location

    for _, glob := range []string{eggMetadataGlob, wheelMetadataGlob, eggFileMetadataGlob} {
@@ -61,7 +59,7 @@ func (c *PackageCataloger) Catalog(resolver source.FileResolver) ([]pkg.Package,
}

// catalogEggOrWheel takes the primary metadata file reference and returns the python package it represents.
-func (c *PackageCataloger) catalogEggOrWheel(resolver source.FileResolver, metadataLocation file.Location) (*pkg.Package, error) {
+func (c *PackageCataloger) catalogEggOrWheel(resolver file.Resolver, metadataLocation file.Location) (*pkg.Package, error) {
    metadata, sources, err := c.assembleEggOrWheelMetadata(resolver, metadataLocation)
    if err != nil {
        return nil, err
@@ -96,7 +94,7 @@ func (c *PackageCataloger) catalogEggOrWheel(resolver source.FileResolver, metad
}

// fetchRecordFiles finds a corresponding RECORD file for the given python package metadata file and returns the set of file records contained.
-func (c *PackageCataloger) fetchRecordFiles(resolver source.FileResolver, metadataLocation file.Location) (files []pkg.PythonFileRecord, sources []file.Location, err error) {
+func (c *PackageCataloger) fetchRecordFiles(resolver file.Resolver, metadataLocation file.Location) (files []pkg.PythonFileRecord, sources []file.Location, err error) {
    // we've been given a file reference to a specific wheel METADATA file. note: this may be for a directory
    // or for an image... for an image the METADATA file may be present within multiple layers, so it is important
    // to reconcile the RECORD path to the same layer (or the next adjacent lower layer).
@@ -126,7 +124,7 @@ func (c *PackageCataloger) fetchRecordFiles(resolver source.FileResolver, metada
}

// fetchTopLevelPackages finds a corresponding top_level.txt file for the given python package metadata file and returns the set of package names contained.
-func (c *PackageCataloger) fetchTopLevelPackages(resolver source.FileResolver, metadataLocation file.Location) (pkgs []string, sources []file.Location, err error) {
+func (c *PackageCataloger) fetchTopLevelPackages(resolver file.Resolver, metadataLocation file.Location) (pkgs []string, sources []file.Location, err error) {
    // a top_level.txt file specifies the python top-level packages (provided by this python package) installed into site-packages
    parentDir := filepath.Dir(metadataLocation.RealPath)
    topLevelPath := filepath.Join(parentDir, "top_level.txt")
@@ -156,7 +154,7 @@ func (c *PackageCataloger) fetchTopLevelPackages(resolver source.FileResolver, m
    return pkgs, sources, nil
}

-func (c *PackageCataloger) fetchDirectURLData(resolver source.FileResolver, metadataLocation file.Location) (d *pkg.PythonDirectURLOriginInfo, sources []file.Location, err error) {
+func (c *PackageCataloger) fetchDirectURLData(resolver file.Resolver, metadataLocation file.Location) (d *pkg.PythonDirectURLOriginInfo, sources []file.Location, err error) {
    parentDir := filepath.Dir(metadataLocation.RealPath)
    directURLPath := filepath.Join(parentDir, "direct_url.json")
    directURLLocation := resolver.RelativeFileByPath(metadataLocation, directURLPath)
@@ -191,7 +189,7 @@ func (c *PackageCataloger) fetchDirectURLData(resolver source.FileResolver, meta
}

// assembleEggOrWheelMetadata discovers and accumulates python package metadata from multiple file sources and returns a single metadata object as well as a list of files where the metadata was derived from.
-func (c *PackageCataloger) assembleEggOrWheelMetadata(resolver source.FileResolver, metadataLocation file.Location) (*pkg.PythonPackageMetadata, []file.Location, error) {
+func (c *PackageCataloger) assembleEggOrWheelMetadata(resolver file.Resolver, metadataLocation file.Location) (*pkg.PythonPackageMetadata, []file.Location, error) {
    var sources = []file.Location{metadataLocation}

    metadataContents, err := resolver.FileContentsByLocation(metadataLocation)

@@ -1,10 +1,10 @@
package python

import (
+    "github.com/anchore/syft/syft/file"
    "testing"

    "github.com/anchore/syft/syft/pkg"
-    "github.com/anchore/syft/syft/source"
    "github.com/go-test/deep"
)

@@ -137,7 +137,7 @@ func TestPythonPackageWheelCataloger(t *testing.T) {

    for _, test := range tests {
        t.Run(test.name, func(t *testing.T) {
-            resolver := source.NewMockResolverForPaths(test.fixtures...)
+            resolver := file.NewMockResolverForPaths(test.fixtures...)

            locations, err := resolver.FilesByPath(test.fixtures...)
            if err != nil {
@@ -173,7 +173,7 @@ func TestIgnorePackage(t *testing.T) {

    for _, test := range tests {
        t.Run(test.MetadataFixture, func(t *testing.T) {
-            resolver := source.NewMockResolverForPaths(test.MetadataFixture)
+            resolver := file.NewMockResolverForPaths(test.MetadataFixture)

            actual, _, err := NewPythonPackageCataloger().Catalog(resolver)
            if err != nil {

@@ -5,12 +5,12 @@ package rpmdb

import (
    "fmt"
+    "github.com/anchore/syft/syft/file"

    "github.com/anchore/syft/internal"

    "github.com/anchore/syft/syft/artifact"
    "github.com/anchore/syft/syft/pkg"
-    "github.com/anchore/syft/syft/source"
)

const catalogerName = "rpmdb-cataloger"
@@ -28,7 +28,7 @@ func (c *Cataloger) Name() string {
}

// Catalog is given an object to resolve file references and content, this function returns any discovered Packages after analyzing rpm db installation.
-func (c *Cataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []artifact.Relationship, error) {
+func (c *Cataloger) Catalog(resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) {
    fileMatches, err := resolver.FilesByGlob(pkg.RpmDBGlob)
    if err != nil {
        return nil, nil, fmt.Errorf("failed to find rpmdb's by glob: %w", err)

@@ -12,11 +12,10 @@ import (
    "github.com/anchore/syft/internal"
    "github.com/anchore/syft/internal/log"
    "github.com/anchore/syft/syft/pkg"
-    "github.com/anchore/syft/syft/source"
)

// parseApkDb parses an "Packages" RPM DB and returns the Packages listed within it.
-func parseRpmDB(resolver source.FilePathResolver, dbLocation file.Location, reader io.Reader) ([]pkg.Package, error) {
+func parseRpmDB(resolver file.PathResolver, dbLocation file.Location, reader io.Reader) ([]pkg.Package, error) {
    f, err := ioutil.TempFile("", internal.ApplicationName+"-rpmdb")
    if err != nil {
        return nil, fmt.Errorf("failed to create temp rpmdb file: %w", err)
@@ -91,7 +90,7 @@ func toELVersion(metadata pkg.RpmdbMetadata) string {
    return fmt.Sprintf("%s-%s", metadata.Version, metadata.Release)
}

-func extractRpmdbFileRecords(resolver source.FilePathResolver, entry *rpmdb.PackageInfo) []pkg.RpmdbFileRecord {
+func extractRpmdbFileRecords(resolver file.PathResolver, entry *rpmdb.PackageInfo) []pkg.RpmdbFileRecord {
    var records = make([]pkg.RpmdbFileRecord, 0)

    for _, record := range entry.Files {

@@ -12,7 +12,7 @@ import (
    "github.com/anchore/syft/syft/file"
)

-var _ FileResolver = (*allLayersResolver)(nil)
+var _ file.Resolver = (*allLayersResolver)(nil)

// allLayersResolver implements path and content access for the AllLayers source option for container image data sources.
type allLayersResolver struct {
@@ -237,5 +237,5 @@ func (r *allLayersResolver) AllLocations() <-chan file.Location {
}

func (r *allLayersResolver) FileMetadataByLocation(location file.Location) (file.Metadata, error) {
-    return file.MetadataByLocation(r.img, location)
+    return fileMetadataByImageLocation(r.img, location)
}

@@ -365,12 +365,12 @@ func TestAllLayersImageResolver_FilesContents(t *testing.T) {
func Test_imageAllLayersResolver_resolvesLinks(t *testing.T) {
    tests := []struct {
        name string
-        runner func(FileResolver) []file.Location
+        runner func(file.Resolver) []file.Location
        expected []file.Location
    }{
        {
            name: "by mimetype",
-            runner: func(resolver FileResolver) []file.Location {
+            runner: func(resolver file.Resolver) []file.Location {
                // links should not show up when searching mimetype
                actualLocations, err := resolver.FilesByMIMEType("text/plain")
                assert.NoError(t, err)
@@ -439,7 +439,7 @@ func Test_imageAllLayersResolver_resolvesLinks(t *testing.T) {
        },
        {
            name: "by glob",
-            runner: func(resolver FileResolver) []file.Location {
+            runner: func(resolver file.Resolver) []file.Location {
                // links are searched, but resolve to the real files
                actualLocations, err := resolver.FilesByGlob("*ink-*")
                assert.NoError(t, err)
@@ -476,7 +476,7 @@ func Test_imageAllLayersResolver_resolvesLinks(t *testing.T) {
        },
        {
            name: "by path to degree 1 link",
-            runner: func(resolver FileResolver) []file.Location {
+            runner: func(resolver file.Resolver) []file.Location {
                // links resolve to the final file
                actualLocations, err := resolver.FilesByPath("/link-2")
                assert.NoError(t, err)
@@ -500,7 +500,7 @@ func Test_imageAllLayersResolver_resolvesLinks(t *testing.T) {
        },
        {
            name: "by path to degree 2 link",
-            runner: func(resolver FileResolver) []file.Location {
+            runner: func(resolver file.Resolver) []file.Location {
                // multiple links resolves to the final file
                actualLocations, err := resolver.FilesByPath("/link-indirect")
                assert.NoError(t, err)

@@ -5,7 +5,7 @@ import (
    "github.com/anchore/syft/syft/file"
)

-func AllRegularFiles(resolver FileResolver) (locations []file.Location) {
+func AllRegularFiles(resolver file.Resolver) (locations []file.Location) {
    for location := range resolver.AllLocations() {
        resolvedLocations, err := resolver.FilesByPath(location.RealPath)
        if err != nil {

@@ -2,6 +2,7 @@ package source

import (
    "github.com/anchore/stereoscope/pkg/imagetest"
+    "github.com/anchore/syft/syft/file"
    "github.com/scylladb/go-set/strset"
    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
@@ -15,13 +16,13 @@ func Test_allRegularFiles(t *testing.T) {
    }
    tests := []struct {
        name string
-        setup func() FileResolver
+        setup func() file.Resolver
        wantRealPaths *strset.Set
        wantVirtualPaths *strset.Set
    }{
        {
            name: "image",
-            setup: func() FileResolver {
+            setup: func() file.Resolver {
                img := imagetest.GetFixtureImage(t, "docker-archive", "image-file-type-mix")

                s, err := NewFromImage(img, "---")
@@ -37,7 +38,7 @@ func Test_allRegularFiles(t *testing.T) {
        },
        {
            name: "directory",
-            setup: func() FileResolver {
+            setup: func() file.Resolver {
                s, err := NewFromDirectory("test-fixtures/symlinked-root/nested/link-root")
                require.NoError(t, err)
                r, err := s.FileResolver(SquashedScope)

@@ -30,7 +30,7 @@ var unixSystemRuntimePrefixes = []string{
    "/sys",
}

-var _ FileResolver = (*directoryResolver)(nil)
+var _ file.Resolver = (*directoryResolver)(nil)

type pathFilterFn func(string, os.FileInfo) bool

@@ -217,7 +217,7 @@ func (r directoryResolver) addDirectoryToIndex(p string, info os.FileInfo) error
    }

    location := file.NewLocationFromDirectory(p, *ref)
-    metadata := file.MetadataFromPath(p, info, r.isInIndex(location))
+    metadata := fileMetadataFromPath(p, info, r.isInIndex(location))
    r.addFileMetadataToIndex(ref, metadata)

    return nil
@@ -230,7 +230,7 @@ func (r directoryResolver) addFileToIndex(p string, info os.FileInfo) error {
    }

    location := file.NewLocationFromDirectory(p, *ref)
-    metadata := file.MetadataFromPath(p, info, r.isInIndex(location))
+    metadata := fileMetadataFromPath(p, info, r.isInIndex(location))
    r.addFileMetadataToIndex(ref, metadata)

    return nil
@@ -262,7 +262,7 @@ func (r directoryResolver) addSymlinkToIndex(p string, info os.FileInfo) (string

    location := file.NewLocationFromDirectory(p, *ref)
    location.AccessPath = p
-    metadata := file.MetadataFromPath(p, usedInfo, r.isInIndex(location))
+    metadata := fileMetadataFromPath(p, usedInfo, r.isInIndex(location))
    metadata.LinkDestination = linkTarget
    r.addFileMetadataToIndex(ref, metadata)


syft/source/file_metadata_helpers.go (new file, 58 lines)
@@ -0,0 +1,58 @@
+package source
+
+import (
+    "os"
+
+    stereoscopeFile "github.com/anchore/stereoscope/pkg/file"
+    "github.com/anchore/stereoscope/pkg/image"
+    "github.com/anchore/syft/internal/log"
+    "github.com/anchore/syft/syft/file"
+)
+
+func fileMetadataByImageLocation(img *image.Image, location file.Location) (file.Metadata, error) {
+    entry, err := img.FileCatalog.Get(location.Ref())
+    if err != nil {
+        return file.Metadata{}, err
+    }
+
+    return file.Metadata{
+        Mode: entry.Metadata.Mode,
+        Type: file.NewFileTypeFromTarHeaderTypeFlag(entry.Metadata.TypeFlag),
+        UserID: entry.Metadata.UserID,
+        GroupID: entry.Metadata.GroupID,
+        LinkDestination: entry.Metadata.Linkname,
+        Size: entry.Metadata.Size,
+        MIMEType: entry.Metadata.MIMEType,
+    }, nil
+}
+
+func fileMetadataFromPath(path string, info os.FileInfo, withMIMEType bool) file.Metadata {
+    var mimeType string
+    uid, gid := getFileXid(info)
+
+    if withMIMEType {
+        f, err := os.Open(path)
+        if err != nil {
+            // TODO: it may be that the file is inaccessible, however, this is not an error or a warning. In the future we need to track these as known-unknowns
+            f = nil
+        } else {
+            defer func() {
+                if err := f.Close(); err != nil {
+                    log.Warnf("unable to close file while obtaining metadata: %s", path)
+                }
+            }()
+        }
+
+        mimeType = stereoscopeFile.MIMEType(f)
+    }
+
+    return file.Metadata{
+        Mode: info.Mode(),
+        Type: file.NewFileTypeFromMode(info.Mode()),
+        // unsupported across platforms
+        UserID: uid,
+        GroupID: gid,
+        Size: info.Size(),
+        MIMEType: mimeType,
+    }
+}

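Editor's note, for orientation only (this snippet is not part of the commit): the relocated test below drives the new fileMetadataFromPath helper from os.Lstat output. A minimal sketch of the same call pattern, written as if it lived inside the source package, might look like this; the helper name and the Metadata fields used are taken from the diff above, everything else is illustrative.

//go:build !windows
// +build !windows

package source

import (
    "fmt"
    "os"
)

// exampleFileMetadataFromPath is an illustrative sketch: stat a path without
// following symlinks, then build file.Metadata with MIME detection enabled.
func exampleFileMetadataFromPath(path string) error {
    info, err := os.Lstat(path)
    if err != nil {
        return err
    }

    // withMIMEType=true also opens the file and sniffs its MIME type.
    metadata := fileMetadataFromPath(path, info, true)
    fmt.Println(metadata.Type, metadata.MIMEType, metadata.Size)
    return nil
}
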
@@ -1,7 +1,7 @@
//go:build !windows
// +build !windows

-package file
+package source

import (
    "os"
@@ -49,7 +49,7 @@ func Test_fileMetadataFromPath(t *testing.T) {
            info, err := os.Lstat(test.path)
            require.NoError(t, err)

-            actual := MetadataFromPath(test.path, info, test.withMIMEType)
+            actual := fileMetadataFromPath(test.path, info, test.withMIMEType)
            assert.Equal(t, test.expectedMIMEType, actual.MIMEType)
            assert.Equal(t, test.expectedType, string(actual.Type))
        })

@@ -1,43 +0,0 @@
-package source
-
-import (
-    "io"
-
-    "github.com/anchore/syft/syft/file"
-)
-
-// FileResolver is an interface that encompasses how to get specific file references and file contents for a generic data source.
-type FileResolver interface {
-    FileContentResolver
-    FilePathResolver
-    FileLocationResolver
-    FileMetadataResolver
-}
-
-// FileContentResolver knows how to get file content for a given Location
-type FileContentResolver interface {
-    FileContentsByLocation(file.Location) (io.ReadCloser, error)
-}
-
-type FileMetadataResolver interface {
-    FileMetadataByLocation(file.Location) (file.Metadata, error)
-}
-
-// FilePathResolver knows how to get a Location for given string paths and globs
-type FilePathResolver interface {
-    // HasPath indicates if the given path exists in the underlying source.
-    HasPath(string) bool
-    // FilesByPath fetches a set of file references which have the given path (for an image, there may be multiple matches)
-    FilesByPath(paths ...string) ([]file.Location, error)
-    // FilesByGlob fetches a set of file references which the given glob matches
-    FilesByGlob(patterns ...string) ([]file.Location, error)
-    // FilesByMIMEType fetches a set of file references which the contents have been classified as one of the given MIME Types
-    FilesByMIMEType(types ...string) ([]file.Location, error)
-    // RelativeFileByPath fetches a single file at the given path relative to the layer squash of the given reference.
-    // This is helpful when attempting to find a file that is in the same layer or lower as another file.
-    RelativeFileByPath(_ file.Location, path string) *file.Location
-}
-
-type FileLocationResolver interface {
-    AllLocations() <-chan file.Location
-}

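Editor's note, for orientation only (the receiving file is not shown in this diff): the contract deleted above presumably reappears in the file package under shortened names. Only file.Resolver and file.PathResolver are confirmed by the rest of this commit; the other sub-interface names in the sketch below are assumptions made for illustration.

// Hedged sketch of the migrated interface set in github.com/anchore/syft/syft/file.
// ContentResolver, LocationResolver, and MetadataResolver are assumed names.
package file

import "io"

// Resolver encompasses how to get specific file references and file contents
// for a generic data source (formerly source.FileResolver).
type Resolver interface {
    ContentResolver
    PathResolver
    LocationResolver
    MetadataResolver
}

type ContentResolver interface {
    FileContentsByLocation(Location) (io.ReadCloser, error)
}

type MetadataResolver interface {
    FileMetadataByLocation(Location) (Metadata, error)
}

// PathResolver knows how to get a Location for given string paths and globs.
type PathResolver interface {
    HasPath(string) bool
    FilesByPath(paths ...string) ([]Location, error)
    FilesByGlob(patterns ...string) ([]Location, error)
    FilesByMIMEType(types ...string) ([]Location, error)
    RelativeFileByPath(_ Location, path string) *Location
}

type LocationResolver interface {
    AllLocations() <-chan Location
}
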
@@ -1,15 +1,15 @@
//go:build linux || darwin
// +build linux darwin

-package file
+package source

import (
    "os"
    "syscall"
)

-// GetXid is the UID GID system info for unix
+// getFileXid is the UID GID system info for unix
-func GetXid(info os.FileInfo) (uid, gid int) {
+func getFileXid(info os.FileInfo) (uid, gid int) {
    uid = -1
    gid = -1
    if stat, ok := info.Sys().(*syscall.Stat_t); ok {

syft/source/get_file_xid_win.go (new file, 13 lines)
@@ -0,0 +1,13 @@
+//go:build windows
+// +build windows
+
+package source
+
+import (
+    "os"
+)
+
+// getFileXid is a placeholder for windows file information
+func getFileXid(info os.FileInfo) (uid, gid int) {
+    return -1, -1
+}

@@ -11,7 +11,7 @@ import (
    "github.com/anchore/syft/syft/file"
)

-var _ FileResolver = (*imageSquashResolver)(nil)
+var _ file.Resolver = (*imageSquashResolver)(nil)

// imageSquashResolver implements path and content access for the Squashed source option for container image data sources.
type imageSquashResolver struct {
@@ -191,5 +191,5 @@ func (r *imageSquashResolver) FilesByMIMEType(types ...string) ([]file.Location,
}

func (r *imageSquashResolver) FileMetadataByLocation(location file.Location) (file.Metadata, error) {
-    return file.MetadataByLocation(r.img, location)
+    return fileMetadataByImageLocation(r.img, location)
}

@@ -348,12 +348,12 @@ func TestSquashImageResolver_FilesContents(t *testing.T) {
func Test_imageSquashResolver_resolvesLinks(t *testing.T) {
    tests := []struct {
        name string
-        runner func(FileResolver) []file.Location
+        runner func(file.Resolver) []file.Location
        expected []file.Location
    }{
        {
            name: "by mimetype",
-            runner: func(resolver FileResolver) []file.Location {
+            runner: func(resolver file.Resolver) []file.Location {
                // links should not show up when searching mimetype
                actualLocations, err := resolver.FilesByMIMEType("text/plain")
                assert.NoError(t, err)
@@ -406,7 +406,7 @@ func Test_imageSquashResolver_resolvesLinks(t *testing.T) {
        },
        {
            name: "by glob",
-            runner: func(resolver FileResolver) []file.Location {
+            runner: func(resolver file.Resolver) []file.Location {
                // links are searched, but resolve to the real files
                actualLocations, err := resolver.FilesByGlob("*ink-*")
                assert.NoError(t, err)
@@ -435,7 +435,7 @@ func Test_imageSquashResolver_resolvesLinks(t *testing.T) {
        },
        {
            name: "by path to degree 1 link",
-            runner: func(resolver FileResolver) []file.Location {
+            runner: func(resolver file.Resolver) []file.Location {
                // links resolve to the final file
                actualLocations, err := resolver.FilesByPath("/link-2")
                assert.NoError(t, err)
@@ -453,7 +453,7 @@ func Test_imageSquashResolver_resolvesLinks(t *testing.T) {
        },
        {
            name: "by path to degree 2 link",
-            runner: func(resolver FileResolver) []file.Location {
+            runner: func(resolver file.Resolver) []file.Location {
                // multiple links resolves to the final file
                actualLocations, err := resolver.FilesByPath("/link-indirect")
                assert.NoError(t, err)

@@ -8,6 +8,7 @@ package source
import (
    "context"
    "fmt"
+    "github.com/anchore/syft/syft/file"
    "io/ioutil"
    "os"
    "path/filepath"
@@ -304,7 +305,7 @@ func NewFromImage(img *image.Image, userImageStr string) (Source, error) {
    }, nil
}

-func (s *Source) FileResolver(scope Scope) (FileResolver, error) {
+func (s *Source) FileResolver(scope Scope) (file.Resolver, error) {
    switch s.Metadata.Scheme {
    case DirectoryType, FileType:
        s.mutex.Lock()
@@ -322,7 +323,7 @@ func (s *Source) FileResolver(scope Scope) (FileResolver, error) {
        }
        return s.directoryResolver, nil
    case ImageType:
-        var resolver FileResolver
+        var resolver file.Resolver
        var err error
        switch scope {
        case SquashedScope:
@@ -337,11 +338,11 @@ func (s *Source) FileResolver(scope Scope) (FileResolver, error) {
        }
        // image tree contains all paths, so we filter out the excluded entries afterwards
        if len(s.Exclusions) > 0 {
-            resolver = NewExcludingResolver(resolver, getImageExclusionFunction(s.Exclusions))
+            resolver = file.NewExcludingResolver(resolver, getImageExclusionFunction(s.Exclusions))
        }
        return resolver, nil
    }
-    return nil, fmt.Errorf("unable to determine FilePathResolver with current scheme=%q", s.Metadata.Scheme)
+    return nil, fmt.Errorf("unable to determine PathResolver with current scheme=%q", s.Metadata.Scheme)
}

func unarchiveToTmp(path string, unarchiver archiver.Unarchiver) (string, func(), error) {

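Editor's note, for orientation only (not part of the commit): taken together, the end-to-end flow after this migration presumably looks like the sketch below. A Source hands out a file.Resolver, and any cataloger that accepts that interface can consume it. NewFromDirectory, FileResolver(SquashedScope), and NewPythonPackageCataloger all appear in this diff; the import paths and the surrounding main wiring are assumptions made for illustration.

package main

import (
    "fmt"

    "github.com/anchore/syft/syft/pkg/cataloger/python" // assumed import path
    "github.com/anchore/syft/syft/source"
)

func main() {
    // Build a directory source, as the tests in this diff do.
    src, err := source.NewFromDirectory("/some/dir")
    if err != nil {
        panic(err)
    }

    // FileResolver now returns a file.Resolver instead of a source.FileResolver.
    resolver, err := src.FileResolver(source.SquashedScope)
    if err != nil {
        panic(err)
    }

    // Any cataloger that takes a file.Resolver can consume it directly.
    pkgs, relationships, err := python.NewPythonPackageCataloger().Catalog(resolver)
    if err != nil {
        panic(err)
    }
    fmt.Println(len(pkgs), len(relationships))
}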