mirror of https://github.com/anchore/syft.git (synced 2025-11-19 01:13:18 +01:00)
rename gem parsers and catalogers
Signed-off-by: Alex Goodman <alex.goodman@anchore.com>
parent 10b44f5311
commit abdd00cd24
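For context, here is a minimal usage sketch (not part of the commit) showing the renamed constructors from the hunks below. The bundler import path is an assumption inferred from the ".../syft/cataloger/common" import visible in the diff; the Name() values come from the strings passed to common.NewGenericCataloger.

package main

import (
	"fmt"

	// assumed import path, inferred from the common-package import shown in the diff
	"github.com/anchore/syft/syft/cataloger/bundler"
)

func main() {
	// renamed from NewGemspecCataloger: detects installed gems via **/specification/*.gemspec
	gemSpec := bundler.NewGemSpecCataloger()

	// renamed from NewGemfileLockCataloger: parses index files matching **/Gemfile.lock
	gemFileLock := bundler.NewGemFileLockCataloger()

	// Name() reports the upstream cataloger string passed to common.NewGenericCataloger
	fmt.Println(gemSpec.Name())     // ruby-gemspec-cataloger
	fmt.Println(gemFileLock.Name()) // ruby-gemfile-cataloger
}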
@@ -7,19 +7,19 @@ import (
 	"github.com/anchore/syft/syft/cataloger/common"
 )
 
-// NewGemfileLockCataloger returns a new Bundler cataloger object tailored for parsing index-oriented files (e.g. Gemfile.lock).
-func NewGemfileLockCataloger() *common.GenericCataloger {
+// NewGemFileLockCataloger returns a new Bundler cataloger object tailored for parsing index-oriented files (e.g. Gemfile.lock).
+func NewGemFileLockCataloger() *common.GenericCataloger {
 	globParsers := map[string]common.ParserFn{
-		"**/Gemfile.lock": parseGemfileLockEntries,
+		"**/Gemfile.lock": parseGemFileLockEntries,
 	}
 
 	return common.NewGenericCataloger(nil, globParsers, "ruby-gemfile-cataloger")
 }
 
-// NewGemspecCataloger returns a new Bundler cataloger object tailored for detecting installations of gems (e.g. Gemspec).
-func NewGemspecCataloger() *common.GenericCataloger {
+// NewGemSpecCataloger returns a new Bundler cataloger object tailored for detecting installations of gems (e.g. Gemspec).
+func NewGemSpecCataloger() *common.GenericCataloger {
 	globParsers := map[string]common.ParserFn{
-		"**/specification/*.gemspec": parseGemspecEntries,
+		"**/specification/*.gemspec": parseGemSpecEntries,
 	}
 
 	return common.NewGenericCataloger(nil, globParsers, "ruby-gemspec-cataloger")
@@ -11,12 +11,12 @@ import (
 )
 
 // integrity check
-var _ common.ParserFn = parseGemfileLockEntries
+var _ common.ParserFn = parseGemFileLockEntries
 
 var sectionsOfInterest = internal.NewStringSetFromSlice([]string{"GEM"})
 
-// parseGemfileLockEntries is a parser function for Gemfile.lock contents, returning all Gems discovered.
-func parseGemfileLockEntries(_ string, reader io.Reader) ([]pkg.Package, error) {
+// parseGemFileLockEntries is a parser function for Gemfile.lock contents, returning all Gems discovered.
+func parseGemFileLockEntries(_ string, reader io.Reader) ([]pkg.Package, error) {
 	pkgs := make([]pkg.Package, 0)
 	scanner := bufio.NewScanner(reader)
 
@@ -68,7 +68,7 @@ func TestParseGemfileLockEntries(t *testing.T) {
 		t.Fatalf("failed to open fixture: %+v", err)
 	}
 
-	actual, err := parseGemfileLockEntries(fixture.Name(), fixture)
+	actual, err := parseGemFileLockEntries(fixture.Name(), fixture)
 	if err != nil {
 		t.Fatalf("failed to parse gemfile lock: %+v", err)
 	}
@@ -14,7 +14,7 @@ import (
 )
 
 // integrity check
-var _ common.ParserFn = parseGemfileLockEntries
+var _ common.ParserFn = parseGemFileLockEntries
 
 // for line in gem.splitlines():
 //     line = line.strip()
@@ -65,7 +65,7 @@ var postProcessors = map[string]listProcessor{
 	//},
 }
 
-func parseGemspecEntries(_ string, reader io.Reader) ([]pkg.Package, error) {
+func parseGemSpecEntries(_ string, reader io.Reader) ([]pkg.Package, error) {
 	var pkgs []pkg.Package
 	var fields = make(map[string]interface{})
 	scanner := bufio.NewScanner(reader)
@@ -17,7 +17,7 @@ func TestParseGemspec(t *testing.T) {
 		t.Fatalf("failed to open fixture: %+v", err)
 	}
 
-	actual, err := parseGemspecEntries(fixture.Name(), fixture)
+	actual, err := parseGemSpecEntries(fixture.Name(), fixture)
 	if err != nil {
 		t.Fatalf("failed to parse gemspec: %+v", err)
 	}
@@ -36,7 +36,7 @@ type Cataloger interface {
 // ImageCatalogers returns a slice of locally implemented catalogers that are fit for detecting installations of packages.
 func ImageCatalogers() []Cataloger {
 	return []Cataloger{
-		bundler.NewGemspecCataloger(),
+		bundler.NewGemSpecCataloger(),
 		python.NewPythonCataloger(), // TODO: split and replace me
 		javascript.NewJavascriptCataloger(), // TODO: split and replace me
 		deb.NewDpkgdbCataloger(),
@@ -50,7 +50,7 @@ func ImageCatalogers() []Cataloger {
 // DirectoryCatalogers returns a slice of locally implemented catalogers that are fit for detecting packages from index files (and select installations)
 func DirectoryCatalogers() []Cataloger {
 	return []Cataloger{
-		bundler.NewGemfileLockCataloger(),
+		bundler.NewGemFileLockCataloger(),
 		python.NewPythonCataloger(), // TODO: split and replace me
 		javascript.NewJavascriptCataloger(), // TODO: split and replace me
 		deb.NewDpkgdbCataloger(),
@@ -15,27 +15,27 @@ import (
 // GenericCataloger implements the Catalog interface and is responsible for dispatching the proper parser function for
 // a given path or glob pattern. This is intended to be reusable across many package cataloger types.
 type GenericCataloger struct {
 	globParsers   map[string]ParserFn
 	pathParsers   map[string]ParserFn
 	selectedFiles []file.Reference
 	parsers       map[file.Reference]ParserFn
-	upstreamMatcher string
+	upstreamCataloger string
 }
 
 // NewGenericCataloger if provided path-to-parser-function and glob-to-parser-function lookups creates a GenericCataloger
-func NewGenericCataloger(pathParsers map[string]ParserFn, globParsers map[string]ParserFn, upstreamMatcher string) *GenericCataloger {
+func NewGenericCataloger(pathParsers map[string]ParserFn, globParsers map[string]ParserFn, upstreamCataloger string) *GenericCataloger {
 	return &GenericCataloger{
 		globParsers:   globParsers,
 		pathParsers:   pathParsers,
 		selectedFiles: make([]file.Reference, 0),
 		parsers:       make(map[file.Reference]ParserFn),
-		upstreamMatcher: upstreamMatcher,
+		upstreamCataloger: upstreamCataloger,
 	}
 }
 
 // Name returns a string that uniquely describes the upstream cataloger that this Generic Cataloger represents.
 func (a *GenericCataloger) Name() string {
-	return a.upstreamMatcher
+	return a.upstreamCataloger
 }
 
 // register pairs a set of file references with a parser function for future cataloging (when the file contents are resolved)
@@ -88,19 +88,19 @@ func (a *GenericCataloger) Catalog(contents map[file.Reference]string) ([]pkg.Pa
 	for reference, parser := range a.parsers {
 		content, ok := contents[reference]
 		if !ok {
-			log.Errorf("cataloger '%s' missing file content: %+v", a.upstreamMatcher, reference)
+			log.Errorf("cataloger '%s' missing file content: %+v", a.upstreamCataloger, reference)
 			continue
 		}
 
 		entries, err := parser(string(reference.Path), strings.NewReader(content))
 		if err != nil {
 			// TODO: should we fail? or only log?
-			log.Errorf("cataloger '%s' failed to parse entries (reference=%+v): %+v", a.upstreamMatcher, reference, err)
+			log.Errorf("cataloger '%s' failed to parse entries (reference=%+v): %+v", a.upstreamCataloger, reference, err)
 			continue
 		}
 
 		for _, entry := range entries {
-			entry.FoundBy = a.upstreamMatcher
+			entry.FoundBy = a.upstreamCataloger
 			entry.Source = []file.Reference{reference}
 
 			packages = append(packages, entry)
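The GenericCataloger hunks above show the reusable dispatch pattern this rename touches: glob patterns map to ParserFn values, and the upstreamCataloger string becomes both Name() and each package's FoundBy. Below is a minimal sketch of a hypothetical cataloger built the same way; the parser name, glob pattern, cataloger name, and the pkg.Package Name field are illustrative assumptions, not part of the commit.

package example

import (
	"bufio"
	"io"

	"github.com/anchore/syft/syft/cataloger/common"
	"github.com/anchore/syft/syft/pkg"
)

// integrity check: the parser must satisfy the ParserFn signature seen in the diff
var _ common.ParserFn = parseExampleEntries

// parseExampleEntries is a hypothetical parser: one package name per non-empty input line.
// FoundBy and Source are filled in later by GenericCataloger.Catalog, as the last hunk shows.
func parseExampleEntries(_ string, reader io.Reader) ([]pkg.Package, error) {
	pkgs := make([]pkg.Package, 0)
	scanner := bufio.NewScanner(reader)
	for scanner.Scan() {
		if line := scanner.Text(); line != "" {
			pkgs = append(pkgs, pkg.Package{Name: line}) // Name field assumed for illustration
		}
	}
	return pkgs, scanner.Err()
}

// NewExampleCataloger mirrors NewGemFileLockCataloger: nil path parsers, one glob parser,
// and an upstream cataloger name that Catalog stamps onto every discovered package.
func NewExampleCataloger() *common.GenericCataloger {
	globParsers := map[string]common.ParserFn{
		"**/example.list": parseExampleEntries,
	}
	return common.NewGenericCataloger(nil, globParsers, "example-cataloger")
}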