Mirror of https://github.com/anchore/syft.git (synced 2025-11-17 16:33:21 +01:00)

Merge pull request #60 from anchore/rename-analyzers

Rename analyzers to catalogers

Commit 0d66cb8a1c
@@ -19,7 +19,7 @@ func setCliOptions() {
 	flag := "scope"
 	rootCmd.Flags().StringP(
 		"scope", "s", scope.AllLayersScope.String(),
-		fmt.Sprintf("selection of layers to analyze, options=%v", scope.Options))
+		fmt.Sprintf("selection of layers to catalog, options=%v", scope.Options))
 	if err := viper.BindPFlag(flag, rootCmd.Flags().Lookup(flag)); err != nil {
 		fmt.Printf("unable to bind flag '%s': %+v", flag, err)
 		os.Exit(1)
@@ -1,34 +0,0 @@
package bundler

import (
	"github.com/anchore/imgbom/imgbom/analyzer/common"
	"github.com/anchore/imgbom/imgbom/pkg"
	"github.com/anchore/stereoscope/pkg/file"
	"github.com/anchore/stereoscope/pkg/tree"
)

type Analyzer struct {
	analyzer common.GenericAnalyzer
}

func NewAnalyzer() *Analyzer {
	globParserDispatch := map[string]common.ParserFn{
		"*/Gemfile.lock": parseGemfileLockEntries,
	}

	return &Analyzer{
		analyzer: common.NewGenericAnalyzer(nil, globParserDispatch),
	}
}

func (a *Analyzer) Name() string {
	return "bundler-analyzer"
}

func (a *Analyzer) SelectFiles(trees []tree.FileTreeReader) []file.Reference {
	return a.analyzer.SelectFiles(trees)
}

func (a *Analyzer) Analyze(contents map[file.Reference]string) ([]pkg.Package, error) {
	return a.analyzer.Analyze(contents, a.Name())
}
@@ -1,94 +0,0 @@
package common

import (
	"strings"

	"github.com/anchore/imgbom/imgbom/pkg"
	"github.com/anchore/imgbom/internal/log"
	"github.com/anchore/stereoscope/pkg/file"
	"github.com/anchore/stereoscope/pkg/tree"
)

// TODO: put under test...

type GenericAnalyzer struct {
	globParserDispatch map[string]ParserFn
	pathParserDispatch map[string]ParserFn
	selectedFiles      []file.Reference
	parsers            map[file.Reference]ParserFn
}

func NewGenericAnalyzer(pathParserDispatch map[string]ParserFn, globParserDispatch map[string]ParserFn) GenericAnalyzer {
	return GenericAnalyzer{
		globParserDispatch: globParserDispatch,
		pathParserDispatch: pathParserDispatch,
		selectedFiles:      make([]file.Reference, 0),
		parsers:            make(map[file.Reference]ParserFn),
	}
}

func (a *GenericAnalyzer) register(files []file.Reference, parser ParserFn) {
	a.selectedFiles = append(a.selectedFiles, files...)
	for _, f := range files {
		a.parsers[f] = parser
	}
}

func (a *GenericAnalyzer) clear() {
	a.selectedFiles = make([]file.Reference, 0)
	a.parsers = make(map[file.Reference]ParserFn)
}

func (a *GenericAnalyzer) SelectFiles(trees []tree.FileTreeReader) []file.Reference {
	for _, tree := range trees {
		// select by exact path
		for path, parser := range a.globParserDispatch {
			f := tree.File(file.Path(path))
			if f != nil {
				a.register([]file.Reference{*f}, parser)
			}
		}

		// select by pattern
		for globPattern, parser := range a.globParserDispatch {
			fileMatches, err := tree.FilesByGlob(globPattern)
			if err != nil {
				log.Errorf("failed to find files by glob: %s", globPattern)
			}
			if fileMatches != nil {
				a.register(fileMatches, parser)
			}
		}
	}

	return a.selectedFiles
}

func (a *GenericAnalyzer) Analyze(contents map[file.Reference]string, upstreamMatcher string) ([]pkg.Package, error) {
	defer a.clear()

	packages := make([]pkg.Package, 0)

	for reference, parser := range a.parsers {
		content, ok := contents[reference]
		if !ok {
			log.Errorf("analyzer '%s' missing file content: %+v", upstreamMatcher, reference)
			continue
		}

		entries, err := parser(strings.NewReader(content))
		if err != nil {
			log.Errorf("analyzer '%s' failed to parse entries (reference=%+v): %w", upstreamMatcher, reference, err)
			continue
		}

		for _, entry := range entries {
			entry.FoundBy = upstreamMatcher
			entry.Source = []file.Reference{reference}

			packages = append(packages, entry)
		}
	}

	return packages, nil
}
@@ -1,9 +0,0 @@
package common

import (
	"io"

	"github.com/anchore/imgbom/imgbom/pkg"
)

type ParserFn func(io.Reader) ([]pkg.Package, error)
@@ -1,34 +0,0 @@
package dpkg

import (
	"github.com/anchore/imgbom/imgbom/analyzer/common"
	"github.com/anchore/imgbom/imgbom/pkg"
	"github.com/anchore/stereoscope/pkg/file"
	"github.com/anchore/stereoscope/pkg/tree"
)

type Analyzer struct {
	analyzer common.GenericAnalyzer
}

func NewAnalyzer() *Analyzer {
	pathParserDispatch := map[string]common.ParserFn{
		"/var/lib/dpkg/status": parseDpkgStatus,
	}

	return &Analyzer{
		analyzer: common.NewGenericAnalyzer(pathParserDispatch, nil),
	}
}

func (a *Analyzer) Name() string {
	return "dpkg-analyzer"
}

func (a *Analyzer) SelectFiles(trees []tree.FileTreeReader) []file.Reference {
	return a.analyzer.SelectFiles(trees)
}

func (a *Analyzer) Analyze(contents map[file.Reference]string) ([]pkg.Package, error) {
	return a.analyzer.Analyze(contents, a.Name())
}
@@ -1,35 +0,0 @@
package python

import (
	"github.com/anchore/imgbom/imgbom/analyzer/common"
	"github.com/anchore/imgbom/imgbom/pkg"
	"github.com/anchore/stereoscope/pkg/file"
	"github.com/anchore/stereoscope/pkg/tree"
)

type Analyzer struct {
	analyzer common.GenericAnalyzer
}

func NewAnalyzer() *Analyzer {
	globParserDispatch := map[string]common.ParserFn{
		"*egg-info/PKG-INFO":  parseEggMetadata,
		"*dist-info/METADATA": parseWheelMetadata,
	}

	return &Analyzer{
		analyzer: common.NewGenericAnalyzer(nil, globParserDispatch),
	}
}

func (a *Analyzer) Name() string {
	return "python-analyzer"
}

func (a *Analyzer) SelectFiles(trees []tree.FileTreeReader) []file.Reference {
	return a.analyzer.SelectFiles(trees)
}

func (a *Analyzer) Analyze(contents map[file.Reference]string) ([]pkg.Package, error) {
	return a.analyzer.Analyze(contents, a.Name())
}
imgbom/cataloger/bundler/cataloger.go (new file, 34 lines)
@@ -0,0 +1,34 @@
package bundler

import (
	"github.com/anchore/imgbom/imgbom/cataloger/common"
	"github.com/anchore/imgbom/imgbom/pkg"
	"github.com/anchore/stereoscope/pkg/file"
	"github.com/anchore/stereoscope/pkg/tree"
)

type Cataloger struct {
	cataloger common.GenericCataloger
}

func NewCataloger() *Cataloger {
	globParsers := map[string]common.ParserFn{
		"*/Gemfile.lock": parseGemfileLockEntries,
	}

	return &Cataloger{
		cataloger: common.NewGenericCataloger(nil, globParsers),
	}
}

func (a *Cataloger) Name() string {
	return "bundler-cataloger"
}

func (a *Cataloger) SelectFiles(trees []tree.FileTreeReader) []file.Reference {
	return a.cataloger.SelectFiles(trees)
}

func (a *Cataloger) Catalog(contents map[file.Reference]string) ([]pkg.Package, error) {
	return a.cataloger.Catalog(contents, a.Name())
}
@@ -1,4 +1,4 @@
-package analyzer
+package cataloger

 import (
 	"github.com/anchore/imgbom/imgbom/pkg"
@@ -6,11 +6,11 @@ import (
 	"github.com/anchore/stereoscope/pkg/tree"
 )

-type Analyzer interface {
+type Cataloger interface {
 	Name() string
-	// TODO: add ID / Name for analyze for uniquely identifying this analyzer type
+	// TODO: add ID / Name for catalog for uniquely identifying this cataloger type
 	SelectFiles([]tree.FileTreeReader) []file.Reference
 	// NOTE: one of the errors which is returned is "IterationNeeded", which indicates to the driver to
-	// continue with another Select/Analyze pass
-	Analyze(map[file.Reference]string) ([]pkg.Package, error)
+	// continue with another Select/Catalog pass
+	Catalog(map[file.Reference]string) ([]pkg.Package, error)
 }
imgbom/cataloger/common/generic_cataloger.go (new file, 101 lines)
@@ -0,0 +1,101 @@
package common

import (
	"strings"

	"github.com/anchore/imgbom/imgbom/pkg"
	"github.com/anchore/imgbom/internal/log"
	"github.com/anchore/stereoscope/pkg/file"
	"github.com/anchore/stereoscope/pkg/tree"
)

// TODO: put under test...

// GenericCataloger implements the Catalog interface and is responsible for dispatching the proper parser function for
// a given path or glob pattern. This is intended to be reusable across many package cataloger types.
type GenericCataloger struct {
	globParsers   map[string]ParserFn
	pathParsers   map[string]ParserFn
	selectedFiles []file.Reference
	parsers       map[file.Reference]ParserFn
}

// NewGenericCataloger creates a GenericCataloger from the provided path-to-parser-function and glob-to-parser-function lookups
func NewGenericCataloger(pathParsers map[string]ParserFn, globParsers map[string]ParserFn) GenericCataloger {
	return GenericCataloger{
		globParsers:   globParsers,
		pathParsers:   pathParsers,
		selectedFiles: make([]file.Reference, 0),
		parsers:       make(map[file.Reference]ParserFn),
	}
}

// register pairs a set of file references with a parser function for future cataloging (when the file contents are resolved)
func (a *GenericCataloger) register(files []file.Reference, parser ParserFn) {
	a.selectedFiles = append(a.selectedFiles, files...)
	for _, f := range files {
		a.parsers[f] = parser
	}
}

// clear deletes all registered file-reference-to-parser-function pairings from former SelectFiles() and register() calls
func (a *GenericCataloger) clear() {
	a.selectedFiles = make([]file.Reference, 0)
	a.parsers = make(map[file.Reference]ParserFn)
}

// SelectFiles takes a set of file trees and resolves any file references of interest for future cataloging
func (a *GenericCataloger) SelectFiles(trees []tree.FileTreeReader) []file.Reference {
	for _, t := range trees {
		// select by exact path
		for path, parser := range a.pathParsers {
			f := t.File(file.Path(path))
			if f != nil {
				a.register([]file.Reference{*f}, parser)
			}
		}

		// select by pattern
		for globPattern, parser := range a.globParsers {
			fileMatches, err := t.FilesByGlob(globPattern)
			if err != nil {
				log.Errorf("failed to find files by glob: %s", globPattern)
			}
			if fileMatches != nil {
				a.register(fileMatches, parser)
			}
		}
	}

	return a.selectedFiles
}

// Catalog takes a set of file contents and uses any configured parser functions to resolve and return discovered packages
func (a *GenericCataloger) Catalog(contents map[file.Reference]string, upstreamMatcher string) ([]pkg.Package, error) {
	defer a.clear()

	packages := make([]pkg.Package, 0)

	for reference, parser := range a.parsers {
		content, ok := contents[reference]
		if !ok {
			log.Errorf("cataloger '%s' missing file content: %+v", upstreamMatcher, reference)
			continue
		}

		entries, err := parser(strings.NewReader(content))
		if err != nil {
			log.Errorf("cataloger '%s' failed to parse entries (reference=%+v): %w", upstreamMatcher, reference, err)
			continue
		}

		for _, entry := range entries {
			entry.FoundBy = upstreamMatcher
			entry.Source = []file.Reference{reference}

			packages = append(packages, entry)
		}
	}

	return packages, nil
}
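For orientation (not part of this change): GenericCataloger expects a two-phase handshake from its driver, where SelectFiles registers the file references and matching parsers of interest, and Catalog later runs those parsers against whatever contents the driver fetched. A minimal, hypothetical sketch of that handshake follows; resolveContents is a placeholder for the content lookup, which in this pull request the controller performs via s.Image.MultipleFileContentsByRef.

package example

import (
	"github.com/anchore/imgbom/imgbom/cataloger/common"
	"github.com/anchore/imgbom/imgbom/pkg"
	"github.com/anchore/stereoscope/pkg/file"
	"github.com/anchore/stereoscope/pkg/tree"
)

// catalogWith is a hypothetical driver; it only illustrates the
// SelectFiles/Catalog call order that GenericCataloger assumes.
func catalogWith(g *common.GenericCataloger, trees []tree.FileTreeReader,
	resolveContents func([]file.Reference) map[file.Reference]string) ([]pkg.Package, error) {
	// phase 1: the cataloger registers the files (and matching parsers) it wants to see
	selected := g.SelectFiles(trees)
	// the driver resolves contents for exactly those references
	contents := resolveContents(selected)
	// phase 2: the registered parsers run against the fetched contents
	return g.Catalog(contents, "example-cataloger")
}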
imgbom/cataloger/common/parser.go (new file, 10 lines)
@@ -0,0 +1,10 @@
package common

import (
	"io"

	"github.com/anchore/imgbom/imgbom/pkg"
)

// ParserFn standardizes a function signature for parser functions that accept file contents and return any discovered packages from that file
type ParserFn func(io.Reader) ([]pkg.Package, error)
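A parser that satisfies ParserFn only has to turn a single file's contents into packages; FoundBy and Source are filled in afterwards by the GenericCataloger above. Below is a minimal, hypothetical example that is not part of this pull request; it assumes pkg.Package exposes Name and Version fields, which the presenter changes later in this diff suggest but do not show directly.

package example

import (
	"bufio"
	"io"
	"strings"

	"github.com/anchore/imgbom/imgbom/pkg"
)

// parseNameVersionPairs is a stand-in parser: it reads "name version" pairs,
// one per line, and emits one pkg.Package per pair, solely to show the
// ParserFn contract.
func parseNameVersionPairs(reader io.Reader) ([]pkg.Package, error) {
	packages := make([]pkg.Package, 0)
	scanner := bufio.NewScanner(reader)
	for scanner.Scan() {
		fields := strings.Fields(scanner.Text())
		if len(fields) != 2 {
			continue // ignore anything that is not a "name version" pair
		}
		packages = append(packages, pkg.Package{
			Name:    fields[0], // assumed field, consistent with the rest of this diff
			Version: fields[1], // assumed field, implied by the JSON presenter output
		})
	}
	return packages, scanner.Err()
}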
@@ -1,9 +1,9 @@
-package analyzer
+package cataloger

 import (
-	"github.com/anchore/imgbom/imgbom/analyzer/bundler"
-	"github.com/anchore/imgbom/imgbom/analyzer/dpkg"
-	"github.com/anchore/imgbom/imgbom/analyzer/python"
+	"github.com/anchore/imgbom/imgbom/cataloger/bundler"
+	"github.com/anchore/imgbom/imgbom/cataloger/dpkg"
+	"github.com/anchore/imgbom/imgbom/cataloger/python"
 	"github.com/anchore/imgbom/imgbom/pkg"
 	"github.com/anchore/imgbom/imgbom/scope"
 	"github.com/anchore/imgbom/internal/log"
@@ -17,40 +17,40 @@ func init() {
 	controllerInstance = newController()
 }

-func Analyze(s scope.Scope) (*pkg.Catalog, error) {
-	return controllerInstance.analyze(s)
+func Catalog(s scope.Scope) (*pkg.Catalog, error) {
+	return controllerInstance.catalog(s)
 }

 type controller struct {
-	analyzers []Analyzer
+	catalogers []Cataloger
 }

 func newController() controller {
 	ctrlr := controller{
-		analyzers: make([]Analyzer, 0),
+		catalogers: make([]Cataloger, 0),
 	}
-	ctrlr.add(dpkg.NewAnalyzer())
-	ctrlr.add(bundler.NewAnalyzer())
-	ctrlr.add(python.NewAnalyzer())
+	ctrlr.add(dpkg.NewCataloger())
+	ctrlr.add(bundler.NewCataloger())
+	ctrlr.add(python.NewCataloger())
 	return ctrlr
 }

-func (c *controller) add(a Analyzer) {
-	log.Debugf("adding analyzer: %s", a.Name())
-	c.analyzers = append(c.analyzers, a)
+func (c *controller) add(a Cataloger) {
+	log.Debugf("adding cataloger: %s", a.Name())
+	c.catalogers = append(c.catalogers, a)
 }

-func (c *controller) analyze(s scope.Scope) (*pkg.Catalog, error) {
+func (c *controller) catalog(s scope.Scope) (*pkg.Catalog, error) {
 	catalog := pkg.NewCatalog()
 	fileSelection := make([]file.Reference, 0)

-	// ask analyzers for files to extract from the image tar
-	for _, a := range c.analyzers {
+	// ask catalogers for files to extract from the image tar
+	for _, a := range c.catalogers {
 		fileSelection = append(fileSelection, a.SelectFiles(s.Trees)...)
-		log.Debugf("analyzer '%s' selected '%d' files", a.Name(), len(fileSelection))
+		log.Debugf("cataloger '%s' selected '%d' files", a.Name(), len(fileSelection))
 	}

-	// fetch contents for requested selection by analyzers
+	// fetch contents for requested selection by catalogers
 	contents, err := s.Image.MultipleFileContentsByRef(fileSelection...)
 	if err != nil {
 		return nil, err
@@ -58,15 +58,15 @@ func (c *controller) analyze(s scope.Scope) (*pkg.Catalog, error) {

 	// perform analysis, accumulating errors for each failed analysis
 	var errs error
-	for _, a := range c.analyzers {
+	for _, a := range c.catalogers {
 		// TODO: check for multiple rounds of analyses by Iterate error
-		packages, err := a.Analyze(contents)
+		packages, err := a.Catalog(contents)
 		if err != nil {
 			errs = multierror.Append(errs, err)
 			continue
 		}

-		log.Debugf("analyzer '%s' discovered '%d' packages", a.Name(), len(packages))
+		log.Debugf("cataloger '%s' discovered '%d' packages", a.Name(), len(packages))

 		for _, p := range packages {
 			catalog.Add(p)
imgbom/cataloger/dpkg/cataloger.go (new file, 34 lines)
@@ -0,0 +1,34 @@
package dpkg

import (
	"github.com/anchore/imgbom/imgbom/cataloger/common"
	"github.com/anchore/imgbom/imgbom/pkg"
	"github.com/anchore/stereoscope/pkg/file"
	"github.com/anchore/stereoscope/pkg/tree"
)

type Cataloger struct {
	cataloger common.GenericCataloger
}

func NewCataloger() *Cataloger {
	pathParsers := map[string]common.ParserFn{
		"/var/lib/dpkg/status": parseDpkgStatus,
	}

	return &Cataloger{
		cataloger: common.NewGenericCataloger(pathParsers, nil),
	}
}

func (a *Cataloger) Name() string {
	return "dpkg-cataloger"
}

func (a *Cataloger) SelectFiles(trees []tree.FileTreeReader) []file.Reference {
	return a.cataloger.SelectFiles(trees)
}

func (a *Cataloger) Catalog(contents map[file.Reference]string) ([]pkg.Package, error) {
	return a.cataloger.Catalog(contents, a.Name())
}
imgbom/cataloger/python/cataloger.go (new file, 35 lines)
@@ -0,0 +1,35 @@
package python

import (
	"github.com/anchore/imgbom/imgbom/cataloger/common"
	"github.com/anchore/imgbom/imgbom/pkg"
	"github.com/anchore/stereoscope/pkg/file"
	"github.com/anchore/stereoscope/pkg/tree"
)

type Cataloger struct {
	cataloger common.GenericCataloger
}

func NewCataloger() *Cataloger {
	globParsers := map[string]common.ParserFn{
		"*egg-info/PKG-INFO":  parseEggMetadata,
		"*dist-info/METADATA": parseWheelMetadata,
	}

	return &Cataloger{
		cataloger: common.NewGenericCataloger(nil, globParsers),
	}
}

func (a *Cataloger) Name() string {
	return "python-cataloger"
}

func (a *Cataloger) SelectFiles(trees []tree.FileTreeReader) []file.Reference {
	return a.cataloger.SelectFiles(trees)
}

func (a *Cataloger) Catalog(contents map[file.Reference]string) ([]pkg.Package, error) {
	return a.cataloger.Catalog(contents, a.Name())
}
@@ -69,7 +69,6 @@ func parseWheelOrEggMetadata(reader io.Reader) ([]pkg.Package, error) {
 				return nil, fmt.Errorf("cannot parse field from line: '%s'", line)
 			}
 		}
-
 	}

 	if err := scanner.Err(); err != nil {
@@ -25,7 +25,6 @@ func Identify(img *image.Image) *Distro {
 	}

 	for path, fn := range identityFiles {
-
 		contents, err := img.FileContentsFromSquash(path)

 		if err != nil {
@@ -44,7 +43,6 @@ func Identify(img *image.Image) *Distro {
 		}

 		return distro
-
 	}
 	// TODO: is it useful to know partially detected distros? where the ID is known but not the version (and viceversa?)
 	return nil
@@ -72,7 +70,6 @@ func assembleDistro(name, version string) *Distro {
 func parseOsRelease(contents string) *Distro {
 	id, vers := "", ""
 	for _, line := range strings.Split(contents, "\n") {
-
 		parts := strings.Split(line, "=")
 		prefix := parts[0]
 		value := strings.ReplaceAll(parts[len(parts)-1], `"`, "")
@@ -1,7 +1,7 @@
 package imgbom

 import (
-	"github.com/anchore/imgbom/imgbom/analyzer"
+	"github.com/anchore/imgbom/imgbom/cataloger"
 	"github.com/anchore/imgbom/imgbom/distro"
 	"github.com/anchore/imgbom/imgbom/logger"
 	"github.com/anchore/imgbom/imgbom/pkg"
@@ -20,7 +20,7 @@ func CatalogImage(img *image.Image, o scope.Option) (*pkg.Catalog, error) {
 		return nil, err
 	}

-	return analyzer.Analyze(s)
+	return cataloger.Catalog(s)
 }

 func SetLogger(logger logger.Logger) {
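From a library consumer's point of view the public entrypoint is unchanged by this rename; only the package it delegates to differs. A hedged call-site sketch follows, using the CatalogImage signature visible in this hunk; the stereoscope image import path and the assumption that scope.AllLayersScope is a scope.Option are inferred from the rest of this diff, not stated in it.

package example

import (
	"fmt"

	"github.com/anchore/imgbom/imgbom"
	"github.com/anchore/imgbom/imgbom/scope"
	"github.com/anchore/stereoscope/pkg/image"
)

// printCatalog is illustrative only: CatalogImage now routes to
// cataloger.Catalog(s) internally, but callers invoke it exactly as before.
func printCatalog(img *image.Image) error {
	catalog, err := imgbom.CatalogImage(img, scope.AllLayersScope)
	if err != nil {
		return err
	}
	fmt.Printf("%+v\n", catalog)
	return nil
}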
@@ -8,7 +8,7 @@ import (

 type ID int64

-// TODO: add field to trace which analyzer detected this
+// TODO: add field to trace which cataloger detected this
 type Package struct {
 	id   ID
 	Name string
@@ -41,12 +41,12 @@ type source struct {
 }

 type artifact struct {
-	Name     string      `json:"name"`
-	Version  string      `json:"version"`
-	Type     string      `json:"type"`
-	Analyzer string      `json:"analyzer"`
-	Sources  []source    `json:"sources"`
-	Metadata interface{} `json:"metadata"`
+	Name      string      `json:"name"`
+	Version   string      `json:"version"`
+	Type      string      `json:"type"`
+	Cataloger string      `json:"cataloger"`
+	Sources   []source    `json:"sources"`
+	Metadata  interface{} `json:"metadata"`
 }

 func (pres *Presenter) Present(output io.Writer, img *stereoscopeImg.Image, catalog *pkg.Catalog) error {
@@ -1 +1 @@
-{"image":{"layers":[{"mediaType":"application/vnd.docker.image.rootfs.diff.tar.gzip","digest":"sha256:056c0789fa9ad629ceae6d09713fb035f84115af3c4a88a43aa60f13bc683053","size":22},{"mediaType":"application/vnd.docker.image.rootfs.diff.tar.gzip","digest":"sha256:b461c48116592c570a66fed71d5b09662a8172e168b7938cf317af47872cdc9b","size":16},{"mediaType":"application/vnd.docker.image.rootfs.diff.tar.gzip","digest":"sha256:00b80053e05c01da485015610d288ce3185fac00d251e2ada02b45a7a7c5f589","size":27}],"size":65,"digest":"sha256:3c53d2d891940f8d8e95acb77b58752f54dc5de9d91d19dd90ced2db76256cea","mediaType":"application/vnd.docker.distribution.manifest.v2+json","tags":["anchore-fixture-image-simple:04e16e44161c8888a1a963720fd0443cbf7eef8101434c431de8725cd98cc9f7"]},"artifacts":[{"name":"package-1","version":"1.0.1","type":"deb","analyzer":"","sources":[{"foundBy":"","layer":0,"effects":[]}],"metadata":null},{"name":"package-2","version":"2.0.1","type":"deb","analyzer":"","sources":[{"foundBy":"","layer":1,"effects":[]}],"metadata":null}]}
+{"image":{"layers":[{"mediaType":"application/vnd.docker.image.rootfs.diff.tar.gzip","digest":"sha256:056c0789fa9ad629ceae6d09713fb035f84115af3c4a88a43aa60f13bc683053","size":22},{"mediaType":"application/vnd.docker.image.rootfs.diff.tar.gzip","digest":"sha256:b461c48116592c570a66fed71d5b09662a8172e168b7938cf317af47872cdc9b","size":16},{"mediaType":"application/vnd.docker.image.rootfs.diff.tar.gzip","digest":"sha256:00b80053e05c01da485015610d288ce3185fac00d251e2ada02b45a7a7c5f589","size":27}],"size":65,"digest":"sha256:3c53d2d891940f8d8e95acb77b58752f54dc5de9d91d19dd90ced2db76256cea","mediaType":"application/vnd.docker.distribution.manifest.v2+json","tags":["anchore-fixture-image-simple:04e16e44161c8888a1a963720fd0443cbf7eef8101434c431de8725cd98cc9f7"]},"artifacts":[{"name":"package-1","version":"1.0.1","type":"deb","cataloger":"","sources":[{"foundBy":"","layer":0,"effects":[]}],"metadata":null},{"name":"package-2","version":"2.0.1","type":"deb","cataloger":"","sources":[{"foundBy":"","layer":1,"effects":[]}],"metadata":null}]}