Mirror of https://github.com/anchore/syft.git (synced 2025-11-17 08:23:15 +01:00)
chore: use ruleguard to test for missing defer statements (#2837)
* chore: ruleguard to enforce defer use
* fix go.mod location
* chore: defer close in linux release identifier
* chore: better lint suggestion
* chore: refactor binary classifier to defer close
* chore: defer close readers in gentoo cataloger
* chore: make go license parsing defer close readers
* chore: defer closing readers in alpine apm parser
* chore: defer close readers in graalvm parser
* chore: defer close readers in debian package parser
* chore: defer close readers in alpm parser
* chore: defer close readers in executable file cataloger
* chore: defer close readers in javascript license parser
* chore: defer close readers in go mod parser

Signed-off-by: Will Murphy <will.murphy@anchore.com>
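The fix applied across the catalogers follows one pattern: open a reader through the file resolver, check the error, then immediately `defer internal.CloseAndLogError(...)` so every return path closes the reader. Below is a minimal, self-contained sketch of that shape; `os.Open` and the local `closeAndLogError` are stand-ins for syft's resolver and `internal.CloseAndLogError` helper, not code from this commit.

```go
package main

import (
	"fmt"
	"io"
	"log"
	"os"
)

// closeAndLogError mirrors the role of syft's internal.CloseAndLogError:
// close the reader and log (rather than return) any error from Close.
func closeAndLogError(c io.Closer, path string) {
	if err := c.Close(); err != nil {
		log.Printf("unable to close %q: %v", path, err)
	}
}

// readFile shows the shape enforced throughout this commit: the defer sits
// immediately after the error check on the call that produced the closer,
// so every return path releases the resource.
func readFile(path string) ([]byte, error) {
	f, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer closeAndLogError(f, path)

	return io.ReadAll(f)
}

func main() {
	contents, err := readFile("go.mod")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("read %d bytes\n", len(contents))
}
```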
parent 430c55a5b0
commit 3713d97b7b
@@ -52,6 +52,10 @@ linters-settings:
  gocritic:
    enabled-checks:
      - deferInLoop
      - ruleguard
    settings:
      ruleguard:
        rules: "test/rules/rules.go"
output:
  uniq-by-line: false
run:
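This enables two gocritic checks: `deferInLoop`, which flags a `defer` placed inside a loop body (those deferred calls pile up until the enclosing function returns), and `ruleguard`, which loads the custom rule added in `test/rules/rules.go` at the end of this diff. The deferInLoop behavior is presumably why the refactors below extract loop bodies into helper functions, so each iteration's deferred Close runs promptly. A short sketch of the difference, with hypothetical names not taken from the diff:

```go
package example

import "os"

// processAll is the shape deferInLoop flags: every deferred Close is held
// open until processAll returns, not released per iteration.
func processAll(paths []string) error {
	for _, p := range paths {
		f, err := os.Open(p)
		if err != nil {
			return err
		}
		defer f.Close() // flagged: defer inside a loop
		// ... read f ...
	}
	return nil
}

// Extracting the loop body into a helper (the refactor used throughout this
// commit) lets the defer run at the end of each iteration.
func processOne(p string) error {
	f, err := os.Open(p)
	if err != nil {
		return err
	}
	defer f.Close()
	// ... read f ...
	return nil
}
```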
go.mod (1 change)
@@ -59,6 +59,7 @@ require (
	github.com/olekukonko/tablewriter v0.0.5
	github.com/opencontainers/go-digest v1.0.0
	github.com/pelletier/go-toml v1.9.5
	github.com/quasilyte/go-ruleguard/dsl v0.3.22
	github.com/saferwall/pe v1.5.2
	github.com/saintfish/chardet v0.0.0-20230101081208-5e3ef4b5456d
	github.com/sanity-io/litter v1.5.5
go.sum (2 changes)
@@ -658,6 +658,8 @@ github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+Gx
github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA=
github.com/prometheus/procfs v0.8.0 h1:ODq8ZFEaYeCaZOJlZZdJA2AbQR98dSHSM1KW/You5mo=
github.com/prometheus/procfs v0.8.0/go.mod h1:z7EfXMXOkbkqb9IINtpCn86r/to3BnA0uaxHdg830/4=
github.com/quasilyte/go-ruleguard/dsl v0.3.22 h1:wd8zkOhSNr+I+8Qeciml08ivDt1pSXe60+5DqOpCjPE=
github.com/quasilyte/go-ruleguard/dsl v0.3.22/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
@@ -11,6 +11,7 @@ import (
	"github.com/bmatcuk/doublestar/v4"
	"github.com/dustin/go-humanize"

	"github.com/anchore/syft/internal"
	"github.com/anchore/syft/internal/bus"
	"github.com/anchore/syft/internal/log"
	"github.com/anchore/syft/internal/mimetype"
@@ -60,24 +61,8 @@ func (i *Cataloger) Catalog(resolver file.Resolver) (map[file.Coordinates]file.E
	for _, loc := range locs {
		prog.AtomicStage.Set(loc.Path())

		reader, err := resolver.FileContentsByLocation(loc)
		if err != nil {
			// TODO: known-unknowns
			log.WithFields("error", err).Warnf("unable to get file contents for %q", loc.RealPath)
			continue
		}
		exec := processExecutableLocation(loc, resolver)

		uReader, err := unionreader.GetUnionReader(reader)
		if err != nil {
			// TODO: known-unknowns
			log.WithFields("error", err).Warnf("unable to get union reader for %q", loc.RealPath)
			continue
		}

		exec, err := processExecutable(loc, uReader)
		if err != nil {
			log.WithFields("error", err).Warnf("unable to process executable %q", loc.RealPath)
		}
		if exec != nil {
			prog.Increment()
			results[loc.Coordinates] = *exec
@@ -92,6 +77,29 @@ func (i *Cataloger) Catalog(resolver file.Resolver) (map[file.Coordinates]file.E
	return results, nil
}

func processExecutableLocation(loc file.Location, resolver file.Resolver) *file.Executable {
	reader, err := resolver.FileContentsByLocation(loc)
	if err != nil {
		// TODO: known-unknowns
		log.WithFields("error", err).Warnf("unable to get file contents for %q", loc.RealPath)
		return nil
	}
	defer internal.CloseAndLogError(reader, loc.RealPath)

	uReader, err := unionreader.GetUnionReader(reader)
	if err != nil {
		// TODO: known-unknowns
		log.WithFields("error", err).Warnf("unable to get union reader for %q", loc.RealPath)
		return nil
	}

	exec, err := processExecutable(loc, uReader)
	if err != nil {
		log.WithFields("error", err).Warnf("unable to process executable %q", loc.RealPath)
	}
	return exec
}

func catalogingProgress(locations int64) *monitor.CatalogerTaskProgress {
	info := monitor.GenericTask{
		Title: monitor.Title{
@@ -9,6 +9,7 @@ import (
	"github.com/acobaugh/osrelease"
	"github.com/google/go-cmp/cmp"

	"github.com/anchore/go-logger"
	"github.com/anchore/syft/internal"
	"github.com/anchore/syft/internal/log"
	"github.com/anchore/syft/syft/file"
@@ -64,25 +65,7 @@ func IdentifyRelease(resolver file.Resolver) *Release {
	}

	for _, location := range locations {
		contentReader, err := resolver.FileContentsByLocation(location)
		if err != nil {
			logger.WithFields("error", err, "path", location.RealPath).Trace("unable to get contents")
			continue
		}

		content, err := io.ReadAll(contentReader)
		internal.CloseAndLogError(contentReader, location.AccessPath)
		if err != nil {
			logger.WithFields("error", err, "path", location.RealPath).Trace("unable to read contents")
			continue
		}

		release, err := entry.fn(string(content))
		if err != nil {
			logger.WithFields("error", err, "path", location.RealPath).Trace("unable to parse contents")
			continue
		}

		release := tryParseReleaseInfo(resolver, location, logger, entry)
		if release != nil {
			return release
		}
@@ -92,6 +75,29 @@ func IdentifyRelease(resolver file.Resolver) *Release {
	return nil
}

func tryParseReleaseInfo(resolver file.Resolver, location file.Location, logger logger.Logger, entry parseEntry) *Release {
	contentReader, err := resolver.FileContentsByLocation(location)
	if err != nil {
		logger.WithFields("error", err, "path", location.RealPath).Trace("unable to get contents")
		return nil
	}
	defer internal.CloseAndLogError(contentReader, location.AccessPath)

	content, err := io.ReadAll(contentReader)
	if err != nil {
		logger.WithFields("error", err, "path", location.RealPath).Trace("unable to read contents")
		return nil
	}

	release, err := entry.fn(string(content))
	if err != nil {
		logger.WithFields("error", err, "path", location.RealPath).Trace("unable to parse contents")
		return nil
	}

	return release
}

func parseOsRelease(contents string) (*Release, error) {
	values, err := osrelease.ReadString(contents)
	if err != nil {
@@ -156,6 +156,7 @@ func findReleases(resolver file.Resolver, dbPath string) []linux.Release {
		log.Tracef("unable to fetch contents for APK repositories file %q: %+v", reposLocation, err)
		return nil
	}
	defer internal.CloseAndLogError(reposReader, location.RealPath)

	return parseReleasesFromAPKRepository(file.LocationReadCloser{
		Location: location,
@@ -14,6 +14,7 @@ import (
	"github.com/mitchellh/mapstructure"
	"github.com/vbatts/go-mtree"

	"github.com/anchore/syft/internal"
	"github.com/anchore/syft/syft/artifact"
	"github.com/anchore/syft/syft/file"
	"github.com/anchore/syft/syft/pkg"
@@ -132,6 +133,7 @@ func getFileReader(path string, resolver file.Resolver) (io.Reader, error) {
	if err != nil {
		return nil, err
	}
	defer internal.CloseAndLogError(dbContentReader, locs[0].RealPath)
	return dbContentReader, nil
}

@@ -17,7 +17,6 @@ import (
	"github.com/anchore/syft/internal/log"
	"github.com/anchore/syft/syft/cpe"
	"github.com/anchore/syft/syft/file"
	"github.com/anchore/syft/syft/internal/unionreader"
	"github.com/anchore/syft/syft/pkg"
)

@@ -231,14 +230,10 @@ func getContents(resolver file.Resolver, location file.Location) ([]byte, error)
	if err != nil {
		return nil, err
	}

	unionReader, err := unionreader.GetUnionReader(reader)
	if err != nil {
		return nil, fmt.Errorf("unable to get union reader for file: %w", err)
	}
	defer internal.CloseAndLogError(reader, location.AccessPath)

	// TODO: there may be room for improvement here, as this may use an excessive amount of memory. Alternate approach is to leverage a RuneReader.
	contents, err := io.ReadAll(unionReader)
	contents, err := io.ReadAll(reader)
	if err != nil {
		return nil, fmt.Errorf("unable to get contents for file: %w", err)
	}
@@ -265,8 +265,9 @@ func fetchCopyrightContents(resolver file.Resolver, dbLocation file.Location, m
	reader, err := resolver.FileContentsByLocation(*location)
	if err != nil {
		log.Warnf("failed to fetch deb copyright contents (package=%s): %w", m.Package, err)
		log.Warnf("failed to fetch deb copyright contents (package=%s): %s", m.Package, err)
	}
	defer internal.CloseAndLogError(reader, location.RealPath)

	l := location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.SupportingEvidenceAnnotation)
@@ -12,6 +12,7 @@ import (

	"github.com/scylladb/go-set/strset"

	"github.com/anchore/syft/internal"
	"github.com/anchore/syft/internal/log"
	"github.com/anchore/syft/syft/artifact"
	"github.com/anchore/syft/syft/file"
@@ -65,6 +66,7 @@ func addFiles(resolver file.Resolver, dbLocation file.Location, p *pkg.Package)
		log.WithFields("path", dbLocation.RealPath).Warnf("failed to fetch portage contents (package=%s): %+v", p.Name, err)
		return
	}
	defer internal.CloseAndLogError(contentsReader, dbLocation.RealPath)

	entry, ok := p.Metadata.(pkg.PortageEntry)
	if !ok {
@@ -106,6 +108,7 @@ func addLicenses(resolver file.Resolver, dbLocation file.Location, p *pkg.Packag
		log.WithFields("path", dbLocation.RealPath).Warnf("failed to fetch portage LICENSE: %+v", err)
		return
	}
	defer internal.CloseAndLogError(licenseReader, location.RealPath)

	findings := strset.New()
	scanner := bufio.NewScanner(licenseReader)
@@ -141,6 +144,7 @@ func addSize(resolver file.Resolver, dbLocation file.Location, p *pkg.Package) {
		log.WithFields("name", p.Name).Warnf("failed to fetch portage SIZE: %+v", err)
		return
	}
	defer internal.CloseAndLogError(sizeReader, location.RealPath)

	scanner := bufio.NewScanner(sizeReader)
	for scanner.Scan() {
@@ -20,6 +20,7 @@ import (
	"github.com/go-git/go-git/v5/storage/memory"
	"github.com/scylladb/go-set/strset"

	"github.com/anchore/syft/internal"
	"github.com/anchore/syft/internal/licenses"
	"github.com/anchore/syft/internal/log"
	"github.com/anchore/syft/syft/file"
@@ -160,12 +161,25 @@ func (c *goLicenses) findLicenses(resolver file.Resolver, globMatch string) (out
	}

	for _, l := range locations {
		parsed, err := c.parseLicenseFromLocation(l, resolver)
		if err != nil {
			return nil, err
		}
		out = append(out, parsed...)
	}

	return
}

func (c *goLicenses) parseLicenseFromLocation(l file.Location, resolver file.Resolver) ([]pkg.License, error) {
	var out []pkg.License
	fileName := path.Base(l.RealPath)
	if c.lowerLicenseFileNames.Has(strings.ToLower(fileName)) {
		contents, err := resolver.FileContentsByLocation(l)
		if err != nil {
			return nil, err
		}
		defer internal.CloseAndLogError(contents, l.RealPath)
		parsed, err := licenses.Parse(contents, l)
		if err != nil {
			return nil, err
@@ -173,9 +187,7 @@ func (c *goLicenses) findLicenses(resolver file.Resolver, globMatch string) (out

		out = append(out, parsed...)
	}
	}

	return
	return out, nil
}

func moduleDir(moduleName, moduleVersion string) string {
@@ -10,6 +10,7 @@ import (

	"golang.org/x/mod/modfile"

	"github.com/anchore/syft/internal"
	"github.com/anchore/syft/internal/log"
	"github.com/anchore/syft/syft/artifact"
	"github.com/anchore/syft/syft/file"
@@ -119,6 +120,7 @@ func parseGoSumFile(resolver file.Resolver, reader file.LocationReadCloser) (map
	if err != nil {
		return nil, err
	}
	defer internal.CloseAndLogError(contents, goSumLocation.AccessPath)

	// go.sum has the format like:
	// github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
@@ -580,20 +580,27 @@ func (c *nativeImageCataloger) Catalog(_ context.Context, resolver file.Resolver
	}

	for _, location := range fileMatches {
		readerCloser, err := resolver.FileContentsByLocation(location)
		if err != nil {
			log.Debugf("error opening file: %v", err)
			continue
		}

		reader, err := unionreader.GetUnionReader(readerCloser)
		newPkgs, err := processLocation(location, resolver)
		if err != nil {
			return nil, nil, err
		}
		newPkgs := fetchPkgs(reader, location.RealPath)
		pkgs = append(pkgs, newPkgs...)
		internal.CloseAndLogError(readerCloser, location.RealPath)
	}

	return pkgs, nil, nil
}

func processLocation(location file.Location, resolver file.Resolver) ([]pkg.Package, error) {
	readerCloser, err := resolver.FileContentsByLocation(location)
	if err != nil {
		log.Debugf("error opening file: %v", err)
		return nil, nil
	}
	defer internal.CloseAndLogError(readerCloser, location.RealPath)

	reader, err := unionreader.GetUnionReader(readerCloser)
	if err != nil {
		return nil, err
	}
	return fetchPkgs(reader, location.RealPath), nil
}
@@ -11,6 +11,7 @@ import (
	"time"

	"github.com/anchore/packageurl-go"
	"github.com/anchore/syft/internal"
	"github.com/anchore/syft/internal/log"
	"github.com/anchore/syft/syft/file"
	"github.com/anchore/syft/syft/pkg"
@@ -253,35 +254,43 @@ func addLicenses(name string, resolver file.Resolver, location file.Location) (a
	}

	for _, l := range locations {
		licenses, err := parseLicensesFromLocation(l, resolver, pkgFile)
		if err != nil {
			return allLicenses
		}
		allLicenses = append(allLicenses, licenses...)
	}

	return allLicenses
}

func parseLicensesFromLocation(l file.Location, resolver file.Resolver, pkgFile string) ([]string, error) {
	contentReader, err := resolver.FileContentsByLocation(l)
	if err != nil {
		log.Debugf("error getting file content reader for %s: %v", pkgFile, err)
		return allLicenses
		return nil, err
	}
	defer internal.CloseAndLogError(contentReader, l.RealPath)

	contents, err := io.ReadAll(contentReader)
	if err != nil {
		log.Debugf("error reading file contents for %s: %v", pkgFile, err)
		return allLicenses
		return nil, err
	}

	var pkgJSON packageJSON
	err = json.Unmarshal(contents, &pkgJSON)
	if err != nil {
		log.Debugf("error parsing %s: %v", pkgFile, err)
		return allLicenses
		return nil, err
	}

	licenses, err := pkgJSON.licensesFromJSON()
	if err != nil {
		log.Debugf("error getting licenses from %s: %v", pkgFile, err)
		return allLicenses
		return nil, err
	}

	allLicenses = append(allLicenses, licenses...)
	}

	return allLicenses
	return licenses, nil
}

// packageURL returns the PURL for the specific NPM package (see https://github.com/package-url/purl-spec)
test/rules/rules.go (new file, 15 lines)
@@ -0,0 +1,15 @@
//go:build gorules

package rules

import "github.com/quasilyte/go-ruleguard/dsl"

// nolint:unused
func resourceCleanup(m dsl.Matcher) {
	m.Match(`$res, $err := $resolver.FileContentsByLocation($loc); if $*_ { $*_ }; $next`).
		Where(m["res"].Type.Implements(`io.Closer`) &&
			m["err"].Type.Implements(`error`) &&
			m["res"].Type.Implements(`io.Closer`) &&
			!m["next"].Text.Matches(`defer internal.CloseAndLogError`)).
		Report(`please call "defer internal.CloseAndLogError($res, $loc.RealPath)" right after checking the error returned from $resolver.FileContentsByLocation.`)
}
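For reference, here is a compact sketch of what this matcher accepts and reports: it matches the assignment from `FileContentsByLocation` followed by an error check, then requires the next statement to be the defer. The `resolver`, `location`, and `closeAndLogError` names below are hypothetical stand-ins (the real rule looks specifically for `internal.CloseAndLogError`), kept just large enough for the snippet to compile.

```go
package main

import (
	"io"
	"strings"
)

// location and resolver are stand-ins for syft's file.Location and file.Resolver.
type location struct{ RealPath string }

type resolver struct{}

func (resolver) FileContentsByLocation(loc location) (io.ReadCloser, error) {
	return io.NopCloser(strings.NewReader("example contents")), nil
}

// closeAndLogError is a stand-in for internal.CloseAndLogError.
func closeAndLogError(c io.Closer, path string) { _ = c.Close() }

// flagged: the statement after the error check is a read, not the defer,
// so the ruleguard rule above would report it.
func flagged(r resolver, loc location) ([]byte, error) {
	contents, err := r.FileContentsByLocation(loc)
	if err != nil {
		return nil, err
	}
	return io.ReadAll(contents) // reader is never closed
}

// accepted: the defer immediately follows the error check, which is the
// shape the rule's Report message asks for.
func accepted(r resolver, loc location) ([]byte, error) {
	contents, err := r.FileContentsByLocation(loc)
	if err != nil {
		return nil, err
	}
	defer closeAndLogError(contents, loc.RealPath)
	return io.ReadAll(contents)
}

func main() {
	_, _ = flagged(resolver{}, location{RealPath: "a"})
	_, _ = accepted(resolver{}, location{RealPath: "b"})
}
```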