Relax error conditions for catalogers (#1492)

* binary cataloger should continue on errors

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>

* test: add redirect for cmd stderr stdout

Signed-off-by: Christopher Phillips <christopher.phillips@anchore.com>

* test: image update for test failure

Signed-off-by: Christopher Phillips <christopher.phillips@anchore.com>

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>
Signed-off-by: Christopher Phillips <christopher.phillips@anchore.com>
Co-authored-by: Christopher Phillips <christopher.phillips@anchore.com>
commit 0f75f975c8 (parent 7427445fe9)
Alex Goodman, 2023-01-19 19:28:42 -05:00, committed by GitHub
4 changed files with 91 additions and 29 deletions


@@ -1,6 +1,7 @@
 package binary

 import (
+	"github.com/anchore/syft/internal/log"
 	"github.com/anchore/syft/syft/artifact"
 	"github.com/anchore/syft/syft/pkg"
 	"github.com/anchore/syft/syft/source"
@@ -30,26 +31,39 @@ func (c Cataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []artif
 	var packages []pkg.Package
 	var relationships []artifact.Relationship

-	for _, classifier := range defaultClassifiers {
-		locations, err := resolver.FilesByGlob(classifier.FileGlob)
+	for _, cls := range defaultClassifiers {
+		pkgs, err := catalog(resolver, cls)
 		if err != nil {
-			return nil, nil, err
+			log.WithFields("error", err, "classifier", cls.Class).Warn("unable to catalog binary package: %w", err)
+			continue
+		}
+		packages = append(packages, pkgs...)
+	}
+
+	return packages, relationships, nil
+}
+
+func catalog(resolver source.FileResolver, cls classifier) ([]pkg.Package, error) {
+	var pkgs []pkg.Package
+	locations, err := resolver.FilesByGlob(cls.FileGlob)
+	if err != nil {
+		return nil, err
 	}
 	for _, location := range locations {
 		reader, err := resolver.FileContentsByLocation(location)
 		if err != nil {
-			return nil, nil, err
+			return nil, err
 		}
 		locationReader := source.NewLocationReadCloser(location, reader)
-		newPkgs, err := classifier.EvidenceMatcher(classifier, locationReader)
+		newPkgs, err := cls.EvidenceMatcher(cls, locationReader)
 		if err != nil {
-			return nil, nil, err
+			return nil, err
 		}
 	newPackages:
 		for i := range newPkgs {
 			newPkg := &newPkgs[i]
-			for j := range packages {
-				p := &packages[j]
+			for j := range pkgs {
+				p := &pkgs[j]
 				// consolidate identical packages found in different locations,
 				// but continue to track each location
 				if packagesMatch(p, newPkg) {
@@ -57,12 +71,10 @@ func (c Cataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []artif
 				continue newPackages
 			}
 		}
-			packages = append(packages, *newPkg)
+			pkgs = append(pkgs, *newPkg)
 		}
 	}
-	}
-	return packages, relationships, nil
+	return pkgs, nil
 }

 // packagesMatch returns true if the binary packages "match" based on basic criteria
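
The net effect of the hunk above: a classifier that fails to glob, read, or match is now logged and skipped rather than failing the whole Catalog call, so one bad classifier no longer hides results from the rest. A minimal, self-contained sketch of that log-and-continue shape, with hypothetical names standing in for the real classifier machinery (not the syft API):

package main

import (
	"errors"
	"fmt"
	"log"
)

// classifier and match are hypothetical stand-ins for the binary classifier
// machinery; the point is the error-handling shape, not the API.
type classifier struct {
	name  string
	match func() ([]string, error)
}

// catalogAll logs and skips a failing classifier instead of returning its
// error to the caller, so the remaining classifiers still run.
func catalogAll(classifiers []classifier) []string {
	var packages []string
	for _, cls := range classifiers {
		pkgs, err := cls.match()
		if err != nil {
			log.Printf("unable to catalog with classifier %q: %v", cls.name, err)
			continue // keep going with the remaining classifiers
		}
		packages = append(packages, pkgs...)
	}
	return packages
}

func main() {
	classifiers := []classifier{
		{name: "broken", match: func() ([]string, error) { return nil, errors.New("boom") }},
		{name: "healthy", match: func() ([]string, error) { return []string{"example-pkg@1.0"}, nil }},
	}
	// the broken classifier is logged and skipped; the healthy one still contributes
	fmt.Println(catalogAll(classifiers))
}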


@@ -1,6 +1,8 @@
 package binary

 import (
+	"errors"
+	"io"
 	"testing"

 	"github.com/stretchr/testify/assert"
@@ -433,3 +435,49 @@ func assertPackagesAreEqual(t *testing.T, expected pkg.Package, p pkg.Package) {
 		assert.Failf(t, "packages not equal", "%v != %v", expected, p)
 	}
 }
+
+type panicyResolver struct {
+	globCalled bool
+}
+
+func (p panicyResolver) FileContentsByLocation(location source.Location) (io.ReadCloser, error) {
+	return nil, errors.New("not implemented")
+}
+
+func (p panicyResolver) HasPath(s string) bool {
+	return true
+}
+
+func (p panicyResolver) FilesByPath(paths ...string) ([]source.Location, error) {
+	return nil, errors.New("not implemented")
+}
+
+func (p *panicyResolver) FilesByGlob(patterns ...string) ([]source.Location, error) {
+	p.globCalled = true
+	return nil, errors.New("not implemented")
+}
+
+func (p panicyResolver) FilesByMIMEType(types ...string) ([]source.Location, error) {
+	return nil, errors.New("not implemented")
+}
+
+func (p panicyResolver) RelativeFileByPath(_ source.Location, path string) *source.Location {
+	return nil
+}
+
+func (p panicyResolver) AllLocations() <-chan source.Location {
+	return nil
+}
+
+func (p panicyResolver) FileMetadataByLocation(location source.Location) (source.FileMetadata, error) {
+	return source.FileMetadata{}, errors.New("not implemented")
+}
+
+func Test_Cataloger_ResilientToErrors(t *testing.T) {
+	c := NewCataloger()
+
+	resolver := &panicyResolver{}
+	_, _, err := c.Catalog(resolver)
+	assert.NoError(t, err)
+	assert.True(t, resolver.globCalled)
+}
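
The new test leans on Catalog degrading resolver errors to warnings: the stub's lookup methods all fail, yet no error is returned, and globCalled shows the classifier loop still consulted the resolver (FilesByGlob is on a pointer receiver, which is why the test passes &panicyResolver{}). One small companion that is not part of this commit, but a common way to keep such a stub honest in the same test file, is a compile-time interface assertion:

// not part of the commit: fails to compile if *panicyResolver ever stops
// satisfying source.FileResolver, the interface Catalog expects
var _ source.FileResolver = (*panicyResolver)(nil)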


@@ -36,6 +36,8 @@ func TestSpdxValidationTooling(t *testing.T) {
 			fixturesPath := filepath.Join(cwd, "test-fixtures", "image-java-spdx-tools")
 			buildCmd := exec.Command("make", "build")
 			buildCmd.Dir = fixturesPath
+			buildCmd.Stdout = os.Stdout
+			buildCmd.Stderr = os.Stderr
 			err = buildCmd.Run()
 			require.NoError(t, err)
 		},
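
Wiring buildCmd.Stdout and buildCmd.Stderr to the test process's own streams, as the hunk above does, makes the fixture's make build output visible live in test and CI logs. A sketch of an alternative design (hypothetical helper, not in the commit; assumes os/exec, testing, and testify's require) that captures the output and only prints it when the build fails:

// hypothetical helper, not in the commit: capture fixture build output and
// surface it only on failure instead of streaming it unconditionally
func buildFixture(t *testing.T, fixturesPath string) {
	t.Helper()
	buildCmd := exec.Command("make", "build")
	buildCmd.Dir = fixturesPath
	out, err := buildCmd.CombinedOutput() // stdout and stderr interleaved
	if err != nil {
		t.Logf("fixture build output:\n%s", out)
	}
	require.NoError(t, err)
}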


@@ -1,4 +1,4 @@
-FROM cgr.dev/chainguard/jdk
+FROM openjdk:11

 RUN wget https://github.com/spdx/tools-java/releases/download/v1.1.3/tools-java-1.1.3.zip && \
     unzip tools-java-1.1.3.zip && \