plumb context through catalogers (#2528)

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>
Alex Goodman 2024-01-22 15:54:51 -05:00 committed by GitHub
parent c5d15d1d6c
commit cdad5e767a
78 changed files with 226 additions and 144 deletions

View File

@ -30,7 +30,7 @@ func NewFileDigestCatalogerTask(selection file.Selection, hashers ...crypto.Hash
return nil
}
result, err := digestsCataloger.Catalog(resolver, coordinates...)
result, err := digestsCataloger.Catalog(ctx, resolver, coordinates...)
if err != nil {
return fmt.Errorf("unable to catalog file digests: %w", err)
}
@ -60,7 +60,7 @@ func NewFileMetadataCatalogerTask(selection file.Selection) Task {
return nil
}
result, err := metadataCataloger.Catalog(resolver, coordinates...)
result, err := metadataCataloger.Catalog(ctx, resolver, coordinates...)
if err != nil {
return err
}
@ -85,7 +85,7 @@ func NewFileContentCatalogerTask(cfg filecontent.Config) Task {
fn := func(ctx context.Context, resolver file.Resolver, builder sbomsync.Builder) error {
accessor := builder.(sbomsync.Accessor)
result, err := cat.Catalog(resolver)
result, err := cat.Catalog(ctx, resolver)
if err != nil {
return err
}
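
The task closures above already receive a context.Context from the task runner; the change here only forwards it into each file cataloger. A condensed sketch of the resulting shape (the coordinate selection and the SBOM hand-off via the sbomsync accessor are elided):

fn := func(ctx context.Context, resolver file.Resolver, builder sbomsync.Builder) error {
	// forward the task's context so cancellation and deadlines reach the
	// hashing/metadata work inside the cataloger
	result, err := digestsCataloger.Catalog(ctx, resolver, coordinates...)
	if err != nil {
		return fmt.Errorf("unable to catalog file digests: %w", err)
	}
	_ = result // written into the SBOM through the accessor (omitted here)
	return nil
}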

View File

@ -100,7 +100,7 @@ func NewPackageTask(cfg CatalogingFactoryConfig, c pkg.Cataloger, tags ...string
t := bus.StartCatalogerTask(info, -1, "")
pkgs, relationships, err := c.Catalog(resolver)
pkgs, relationships, err := c.Catalog(ctx, resolver)
if err != nil {
return fmt.Errorf("unable to catalog packages with %q: %w", c.Name(), err)
}

View File

@ -1,6 +1,7 @@
package syft
import (
"context"
"sort"
"testing"
@ -34,7 +35,7 @@ func (d dummyCataloger) Name() string {
return d.name
}
func (d dummyCataloger) Catalog(_ file.Resolver) ([]pkg.Package, []artifact.Relationship, error) {
func (d dummyCataloger) Catalog(_ context.Context, _ file.Resolver) ([]pkg.Package, []artifact.Relationship, error) {
return nil, nil, nil
}

View File

@ -2,6 +2,7 @@ package filecontent
import (
"bytes"
"context"
"encoding/base64"
"fmt"
"io"
@ -42,7 +43,7 @@ func NewCataloger(cfg Config) *Cataloger {
}
}
func (i *Cataloger) Catalog(resolver file.Resolver) (map[file.Coordinates]string, error) {
func (i *Cataloger) Catalog(_ context.Context, resolver file.Resolver) (map[file.Coordinates]string, error) {
results := make(map[file.Coordinates]string)
var locations []file.Location

View File

@ -1,6 +1,7 @@
package filecontent
import (
"context"
"testing"
"github.com/stretchr/testify/assert"
@ -73,7 +74,7 @@ func TestContentsCataloger(t *testing.T) {
})
resolver := file.NewMockResolverForPaths(test.files...)
actual, err := c.Catalog(resolver)
actual, err := c.Catalog(context.Background(), resolver)
assert.NoError(t, err)
assert.Equal(t, test.expected, actual, "mismatched contents")
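
Test-side the change is mechanical: callers with no meaningful context pass context.Background(). A hypothetical test in this package showing the pattern (the zero-value Config and the empty mock resolver are assumptions, not taken from the real test table):

func TestCatalog_acceptsContext(t *testing.T) {
	c := NewCataloger(Config{}) // zero-value config; real tests populate globs/limits
	resolver := file.NewMockResolverForPaths()

	// context.Background() is the conventional choice when nothing needs cancelling
	contents, err := c.Catalog(context.Background(), resolver)
	assert.NoError(t, err)
	assert.Empty(t, contents)
}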

View File

@ -1,6 +1,7 @@
package filedigest
import (
"context"
"crypto"
"errors"
"fmt"
@ -29,12 +30,12 @@ func NewCataloger(hashes []crypto.Hash) *Cataloger {
}
}
func (i *Cataloger) Catalog(resolver file.Resolver, coordinates ...file.Coordinates) (map[file.Coordinates][]file.Digest, error) {
func (i *Cataloger) Catalog(ctx context.Context, resolver file.Resolver, coordinates ...file.Coordinates) (map[file.Coordinates][]file.Digest, error) {
results := make(map[file.Coordinates][]file.Digest)
var locations []file.Location
if len(coordinates) == 0 {
locations = intCataloger.AllRegularFiles(resolver)
locations = intCataloger.AllRegularFiles(ctx, resolver)
} else {
for _, c := range coordinates {
locs, err := resolver.FilesByPath(c.RealPath)
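
Because the digest cataloger now accepts the caller's context, callers can bound how long hashing may run. A sketch of direct usage (the import path follows syft's package layout; the timeout value is arbitrary):

import (
	"context"
	"crypto"
	_ "crypto/sha256" // register the hash implementation for crypto.SHA256
	"time"

	"github.com/anchore/syft/syft/file"
	"github.com/anchore/syft/syft/file/cataloger/filedigest"
)

func digestAll(resolver file.Resolver) (map[file.Coordinates][]file.Digest, error) {
	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel() // releases the location iteration if we return early

	return filedigest.NewCataloger([]crypto.Hash{crypto.SHA256}).Catalog(ctx, resolver)
}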

View File

@ -1,6 +1,7 @@
package filedigest
import (
"context"
"crypto"
"fmt"
"io"
@ -82,7 +83,7 @@ func TestDigestsCataloger(t *testing.T) {
resolver, err := src.FileResolver(source.SquashedScope)
require.NoError(t, err)
actual, err := c.Catalog(resolver)
actual, err := c.Catalog(context.Background(), resolver)
require.NoError(t, err)
assert.Equal(t, test.expected, actual, "mismatched digests")
@ -138,7 +139,7 @@ func TestDigestsCataloger_MixFileTypes(t *testing.T) {
t.Run(test.path, func(t *testing.T) {
c := NewCataloger([]crypto.Hash{crypto.MD5})
actual, err := c.Catalog(resolver)
actual, err := c.Catalog(context.Background(), resolver)
if err != nil {
t.Fatalf("could not catalog: %+v", err)
}
@ -195,7 +196,7 @@ func TestFileDigestCataloger_GivenCoordinates(t *testing.T) {
// note: an important difference between this test and the previous is that this test is using a list
// of specific coordinates to catalog
actual, err := c.Catalog(resolver, l.Coordinates)
actual, err := c.Catalog(context.Background(), resolver, l.Coordinates)
require.NoError(t, err)
require.Len(t, actual, 1)

View File

@ -19,10 +19,10 @@ func NewCataloger() *Cataloger {
return &Cataloger{}
}
func (i *Cataloger) Catalog(resolver file.Resolver, coordinates ...file.Coordinates) (map[file.Coordinates]file.Metadata, error) {
func (i *Cataloger) Catalog(ctx context.Context, resolver file.Resolver, coordinates ...file.Coordinates) (map[file.Coordinates]file.Metadata, error) {
results := make(map[file.Coordinates]file.Metadata)
var locations <-chan file.Location
ctx, cancel := context.WithCancel(context.TODO())
ctx, cancel := context.WithCancel(ctx)
defer cancel()
if len(coordinates) == 0 {
locations = resolver.AllLocations(ctx)
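
Deriving the cancellable child from the caller's ctx (instead of context.TODO()) is what lets an aborted cataloging run actually stop the AllLocations producer. A self-contained sketch of the pattern, with allLocations standing in for the resolver:

func allLocations(ctx context.Context) <-chan string {
	out := make(chan string)
	go func() {
		defer close(out)
		for i := 0; ; i++ {
			select {
			case <-ctx.Done(): // caller cancelled or deadline hit: stop producing
				return
			case out <- fmt.Sprintf("/path/%d", i):
			}
		}
	}()
	return out
}

func walk(ctx context.Context, limit int) {
	ctx, cancel := context.WithCancel(ctx) // derive from the caller, not context.TODO()
	defer cancel()                         // always release the producer on return
	seen := 0
	for p := range allLocations(ctx) {
		_ = p
		seen++
		if seen == limit {
			return // the deferred cancel() terminates the producer goroutine
		}
	}
}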

View File

@ -1,6 +1,7 @@
package filemetadata
import (
"context"
"os"
"testing"
@ -26,7 +27,7 @@ func TestFileMetadataCataloger(t *testing.T) {
resolver, err := src.FileResolver(source.SquashedScope)
require.NoError(t, err)
actual, err := c.Catalog(resolver)
actual, err := c.Catalog(context.Background(), resolver)
require.NoError(t, err)
tests := []struct {
@ -196,7 +197,7 @@ func TestFileMetadataCataloger_GivenCoordinates(t *testing.T) {
// note: an important difference between this test and the previous is that this test is using a list
// of specific coordinates to catalog
actual, err := c.Catalog(resolver, l.Coordinates)
actual, err := c.Catalog(context.Background(), resolver, l.Coordinates)
require.NoError(t, err)
require.Len(t, actual, 1)

View File

@ -8,8 +8,8 @@ import (
"github.com/anchore/syft/syft/file"
)
func AllRegularFiles(resolver file.Resolver) (locations []file.Location) {
ctx, cancel := context.WithCancel(context.Background())
func AllRegularFiles(ctx context.Context, resolver file.Resolver) (locations []file.Location) {
ctx, cancel := context.WithCancel(ctx)
defer cancel()
for location := range resolver.AllLocations(ctx) {
resolvedLocations, err := resolver.FilesByPath(location.RealPath)

View File

@ -1,6 +1,7 @@
package internal
import (
"context"
"testing"
"github.com/google/go-cmp/cmp"
@ -54,7 +55,7 @@ func Test_allRegularFiles(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
resolver := tt.setup()
locations := AllRegularFiles(resolver)
locations := AllRegularFiles(context.Background(), resolver)
realLocations := strset.New()
virtualLocations := strset.New()
for _, l := range locations {

View File

@ -1,6 +1,8 @@
package pkg
import (
"context"
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/file"
)
@ -12,5 +14,5 @@ type Cataloger interface {
// Name returns a string that uniquely describes a cataloger
Name() string
// Catalog is given an object to resolve file references and content, this function returns any discovered Packages after analyzing the catalog source.
Catalog(resolver file.Resolver) ([]Package, []artifact.Relationship, error)
Catalog(context.Context, file.Resolver) ([]Package, []artifact.Relationship, error)
}
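
Any implementation simply grows a leading context parameter; catalogers with nothing to cancel ignore it (as the binary and several parser-backed catalogers in this commit do). A minimal, hypothetical implementation of the updated interface:

package example

import (
	"context"

	"github.com/anchore/syft/syft/artifact"
	"github.com/anchore/syft/syft/file"
	"github.com/anchore/syft/syft/pkg"
)

type exampleCataloger struct{}

func (c exampleCataloger) Name() string { return "example-cataloger" }

func (c exampleCataloger) Catalog(ctx context.Context, resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) {
	// bail out early if the caller has already cancelled the run
	if err := ctx.Err(); err != nil {
		return nil, nil, err
	}
	// ... inspect files via the resolver and build packages here ...
	return nil, nil, nil
}

var _ pkg.Cataloger = (*exampleCataloger)(nil)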

View File

@ -2,6 +2,7 @@ package alpine
import (
"bufio"
"context"
"fmt"
"io"
"path"
@ -34,7 +35,7 @@ type parsedData struct {
// information on specific fields, see https://wiki.alpinelinux.org/wiki/Apk_spec.
//
//nolint:funlen,gocognit
func parseApkDB(resolver file.Resolver, env *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parseApkDB(_ context.Context, resolver file.Resolver, env *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
scanner := bufio.NewScanner(reader)
var apks []parsedData

View File

@ -1,6 +1,7 @@
package alpine
import (
"context"
"io"
"os"
"path/filepath"
@ -64,7 +65,7 @@ func TestExtraFileAttributes(t *testing.T) {
fixturePath := "test-fixtures/extra-file-attributes"
lrc := newLocationReadCloser(t, fixturePath)
pkgs, _, err := parseApkDB(nil, new(generic.Environment), lrc)
pkgs, _, err := parseApkDB(context.Background(), nil, new(generic.Environment), lrc)
assert.NoError(t, err)
require.Len(t, pkgs, 1)
metadata := pkgs[0].Metadata.(pkg.ApkDBEntry)
@ -1055,7 +1056,7 @@ func TestPackageDbDependenciesByParse(t *testing.T) {
require.NoError(t, err)
t.Cleanup(func() { require.NoError(t, f.Close()) })
pkgs, relationships, err := parseApkDB(nil, nil, file.LocationReadCloser{
pkgs, relationships, err := parseApkDB(context.Background(), nil, nil, file.LocationReadCloser{
Location: file.NewLocation(test.fixture),
ReadCloser: f,
})
@ -1146,7 +1147,7 @@ func Test_parseApkDB_expectedPkgNames(t *testing.T) {
fixturePath := filepath.Join("test-fixtures", test.fixture)
lrc := newLocationReadCloser(t, fixturePath)
pkgs, _, err := parseApkDB(nil, new(generic.Environment), lrc)
pkgs, _, err := parseApkDB(context.Background(), nil, new(generic.Environment), lrc)
test.wantErr(t, err)
names := toPackageNames(pkgs)

View File

@ -3,6 +3,7 @@ package arch
import (
"bufio"
"compress/gzip"
"context"
"fmt"
"io"
"path/filepath"
@ -36,7 +37,7 @@ type parsedData struct {
}
// parseAlpmDB parses the arch linux pacman database flat-files and returns the packages and relationships found within.
func parseAlpmDB(resolver file.Resolver, env *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parseAlpmDB(_ context.Context, resolver file.Resolver, env *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
data, err := parseAlpmDBEntry(reader)
if err != nil {
return nil, nil, err

View File

@ -4,6 +4,7 @@ Package binary provides a concrete Cataloger implementations for surfacing possi
package binary
import (
"context"
"encoding/json"
"github.com/anchore/syft/internal/log"
@ -55,7 +56,7 @@ func (c Cataloger) Name() string {
// Catalog is given an object to resolve file references and content, this function returns any discovered Packages
// after analyzing the catalog source.
func (c Cataloger) Catalog(resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) {
func (c Cataloger) Catalog(_ context.Context, resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) {
var packages []pkg.Package
var relationships []artifact.Relationship

View File

@ -902,7 +902,7 @@ func Test_Cataloger_PositiveCases(t *testing.T) {
resolver, err := src.FileResolver(source.SquashedScope)
require.NoError(t, err)
packages, _, err := c.Catalog(resolver)
packages, _, err := c.Catalog(context.Background(), resolver)
require.NoError(t, err)
require.Len(t, packages, 1, "mismatched package count")
@ -942,7 +942,7 @@ func Test_Cataloger_DefaultClassifiers_PositiveCases_Image(t *testing.T) {
resolver, err := src.FileResolver(source.SquashedScope)
require.NoError(t, err)
packages, _, err := c.Catalog(resolver)
packages, _, err := c.Catalog(context.Background(), resolver)
require.NoError(t, err)
for _, p := range packages {
@ -972,7 +972,7 @@ func TestClassifierCataloger_DefaultClassifiers_NegativeCases(t *testing.T) {
resolver, err := src.FileResolver(source.SquashedScope)
assert.NoError(t, err)
actualResults, _, err := c.Catalog(resolver)
actualResults, _, err := c.Catalog(context.Background(), resolver)
assert.NoError(t, err)
assert.Equal(t, 0, len(actualResults))
}
@ -1086,7 +1086,7 @@ func Test_Cataloger_CustomClassifiers(t *testing.T) {
resolver, err := src.FileResolver(source.SquashedScope)
require.NoError(t, err)
packages, _, err := c.Catalog(resolver)
packages, _, err := c.Catalog(context.Background(), resolver)
require.NoError(t, err)
if test.expected == nil {
@ -1252,7 +1252,7 @@ func Test_Cataloger_ResilientToErrors(t *testing.T) {
c := NewCataloger(DefaultCatalogerConfig())
resolver := &panicyResolver{}
_, _, err := c.Catalog(resolver)
_, _, err := c.Catalog(context.Background(), resolver)
assert.NoError(t, err)
assert.True(t, resolver.searchCalled)
}

View File

@ -2,6 +2,7 @@ package cpp
import (
"bufio"
"context"
"errors"
"fmt"
"io"
@ -20,7 +21,7 @@ type Conanfile struct {
}
// parseConanfile is a parser function for conanfile.txt contents, returning all packages discovered.
func parseConanfile(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parseConanfile(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
r := bufio.NewReader(reader)
inRequirements := false
var pkgs []pkg.Package

View File

@ -2,6 +2,7 @@ package cpp
import (
"bufio"
"context"
"errors"
"fmt"
"io"
@ -78,7 +79,7 @@ func parseFullRequiresLine(line string, reader file.LocationReadCloser, pkgs *[]
// The conaninfo.txt gives:
// - package requires (full_requires)
// - recipe revision (recipe_hash)
func parseConaninfo(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parseConaninfo(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
// First set the base package info by checking the relative path
fullFilePath := string(reader.Location.LocationData.Reference().RealPath)
if len(fullFilePath) == 0 {

View File

@ -1,6 +1,7 @@
package cpp
import (
"context"
"encoding/json"
"strings"
@ -30,7 +31,7 @@ type conanLock struct {
}
// parseConanlock is a parser function for conan.lock contents, returning all packages discovered.
func parseConanlock(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parseConanlock(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
var pkgs []pkg.Package
var cl conanLock
if err := json.NewDecoder(reader).Decode(&cl); err != nil {

View File

@ -1,6 +1,7 @@
package dart
import (
"context"
"fmt"
"net/url"
"sort"
@ -55,7 +56,7 @@ func (p *pubspecLockDescription) UnmarshalYAML(value *yaml.Node) error {
return nil
}
func parsePubspecLock(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parsePubspecLock(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
var pkgs []pkg.Package
dec := yaml.NewDecoder(reader)

View File

@ -2,6 +2,7 @@ package debian
import (
"bufio"
"context"
"errors"
"fmt"
"io"
@ -25,7 +26,7 @@ var (
)
// parseDpkgDB reads a dpkg database "status" file (and surrounding data files) and returns the packages and relationships found.
func parseDpkgDB(resolver file.Resolver, env *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parseDpkgDB(_ context.Context, resolver file.Resolver, env *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
metadata, err := parseDpkgStatus(reader)
if err != nil {
return nil, nil, fmt.Errorf("unable to catalog dpkg DB=%q: %w", reader.RealPath, err)

View File

@ -2,6 +2,7 @@ package debian
import (
"bufio"
"context"
"errors"
"fmt"
"os"
@ -519,7 +520,7 @@ func Test_associateRelationships(t *testing.T) {
reader := file.NewLocationReadCloser(file.NewLocation(tt.fixture), f)
pkgs, relationships, err := parseDpkgDB(nil, &generic.Environment{}, reader)
pkgs, relationships, err := parseDpkgDB(context.Background(), nil, &generic.Environment{}, reader)
require.NotEmpty(t, pkgs)
require.NotEmpty(t, relationships)
require.NoError(t, err)

View File

@ -1,6 +1,7 @@
package dotnet
import (
"context"
"encoding/json"
"fmt"
"sort"
@ -38,7 +39,7 @@ type dotnetDepsLibrary struct {
}
//nolint:funlen
func parseDotnetDeps(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parseDotnetDeps(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
var pkgs []pkg.Package
var pkgMap = make(map[string]pkg.Package)
var relationships []artifact.Relationship

View File

@ -1,6 +1,7 @@
package dotnet
import (
"context"
"fmt"
"io"
"regexp"
@ -18,7 +19,7 @@ import (
var _ generic.Parser = parseDotnetPortableExecutable
func parseDotnetPortableExecutable(_ file.Resolver, _ *generic.Environment, f file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parseDotnetPortableExecutable(_ context.Context, _ file.Resolver, _ *generic.Environment, f file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
by, err := io.ReadAll(f)
if err != nil {
return nil, nil, fmt.Errorf("unable to read file: %w", err)

View File

@ -2,6 +2,7 @@ package elixir
import (
"bufio"
"context"
"errors"
"fmt"
"io"
@ -20,7 +21,7 @@ var _ generic.Parser = parseMixLock
var mixLockDelimiter = regexp.MustCompile(`[%{}\n" ,:]+`)
// parseMixLock parses a mix.lock and returns the discovered Elixir packages.
func parseMixLock(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parseMixLock(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
r := bufio.NewReader(reader)
var packages []pkg.Package

View File

@ -1,6 +1,8 @@
package erlang
import (
"context"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/file"
@ -11,7 +13,7 @@ import (
// parseRebarLock parses a rebar.lock and returns the discovered Elixir packages.
//
//nolint:funlen
func parseRebarLock(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parseRebarLock(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
doc, err := parseErlang(reader)
if err != nil {
return nil, nil, err

View File

@ -1,6 +1,8 @@
package generic
import (
"context"
"github.com/anchore/syft/internal"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/artifact"
@ -104,7 +106,7 @@ func (c *Cataloger) Name() string {
}
// Catalog is given an object to resolve file references and content, this function returns any discovered Packages after analyzing the catalog source.
func (c *Cataloger) Catalog(resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) {
func (c *Cataloger) Catalog(ctx context.Context, resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) {
var packages []pkg.Package
var relationships []artifact.Relationship
@ -126,7 +128,7 @@ func (c *Cataloger) Catalog(resolver file.Resolver) ([]pkg.Package, []artifact.R
continue
}
discoveredPackages, discoveredRelationships, err := parser(resolver, &env, file.NewLocationReadCloser(location, contentReader))
discoveredPackages, discoveredRelationships, err := parser(ctx, resolver, &env, file.NewLocationReadCloser(location, contentReader))
internal.CloseAndLogError(contentReader, location.AccessPath)
if err != nil {
logger.WithFields("location", location.RealPath, "error", err).Warnf("cataloger failed")

View File

@ -1,6 +1,7 @@
package generic
import (
"context"
"fmt"
"io"
"testing"
@ -15,7 +16,7 @@ import (
func Test_Cataloger(t *testing.T) {
allParsedPaths := make(map[string]bool)
parser := func(resolver file.Resolver, env *Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
parser := func(_ context.Context, resolver file.Resolver, env *Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
allParsedPaths[reader.Path()] = true
contents, err := io.ReadAll(reader)
require.NoError(t, err)
@ -45,7 +46,7 @@ func Test_Cataloger(t *testing.T) {
WithParserByPath(parser, "test-fixtures/another-path.txt", "test-fixtures/last/path.txt").
WithParserByGlobs(parser, "**/a-path.txt", "**/empty.txt")
actualPkgs, relationships, err := cataloger.Catalog(resolver)
actualPkgs, relationships, err := cataloger.Catalog(context.Background(), resolver)
assert.NoError(t, err)
expectedPkgs := make(map[string]pkg.Package)

View File

@ -1,6 +1,8 @@
package generic
import (
"context"
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/linux"
@ -11,4 +13,4 @@ type Environment struct {
LinuxRelease *linux.Release
}
type Parser func(file.Resolver, *Environment, file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error)
type Parser func(context.Context, file.Resolver, *Environment, file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error)
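
The generic cataloger forwards its ctx to every parser it dispatches, so the Parser type gains the same leading parameter. A hypothetical parser and its (unchanged) registration:

func parseExampleLock(_ context.Context, _ file.Resolver, _ *Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
	// parsers that have nothing to cancel simply ignore the context argument
	// ... decode reader and return discovered packages ...
	return nil, nil, nil
}

// registration is untouched; only the parser signature changed
var _ = NewCataloger("example-cataloger").WithParserByGlobs(parseExampleLock, "**/example.lock")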

View File

@ -2,6 +2,7 @@ package gentoo
import (
"bufio"
"context"
"fmt"
"path"
"path/filepath"
@ -24,7 +25,7 @@ var (
)
// parses individual CONTENTS files from the portage flat-file store (e.g. /var/db/pkg/*/*/CONTENTS).
func parsePortageContents(resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parsePortageContents(_ context.Context, resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
cpvMatch := cpvRe.FindStringSubmatch(reader.Location.RealPath)
if cpvMatch == nil {
return nil, nil, fmt.Errorf("failed to match package and version in %s", reader.Location.RealPath)

View File

@ -1,6 +1,7 @@
package githubactions
import (
"context"
"fmt"
"io"
@ -22,7 +23,7 @@ type compositeActionRunsDef struct {
Steps []stepDef `yaml:"steps"`
}
func parseCompositeActionForActionUsage(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parseCompositeActionForActionUsage(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
contents, err := io.ReadAll(reader)
if err != nil {
return nil, nil, fmt.Errorf("unable to read yaml composite action file: %w", err)

View File

@ -1,6 +1,7 @@
package githubactions
import (
"context"
"fmt"
"io"
@ -35,7 +36,7 @@ type stepDef struct {
} `yaml:"with"`
}
func parseWorkflowForWorkflowUsage(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parseWorkflowForWorkflowUsage(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
contents, err := io.ReadAll(reader)
if err != nil {
return nil, nil, fmt.Errorf("unable to read yaml workflow file: %w", err)
@ -61,7 +62,7 @@ func parseWorkflowForWorkflowUsage(_ file.Resolver, _ *generic.Environment, read
return pkgs.Sorted(), nil, nil
}
func parseWorkflowForActionUsage(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parseWorkflowForActionUsage(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
contents, err := io.ReadAll(reader)
if err != nil {
return nil, nil, fmt.Errorf("unable to read yaml workflow file: %w", err)

View File

@ -4,6 +4,7 @@ Package golang provides a concrete Cataloger implementation relating to packages
package golang
import (
"context"
"fmt"
"regexp"
"strings"
@ -53,8 +54,8 @@ func (p *progressingCataloger) Name() string {
return p.cataloger.Name()
}
func (p *progressingCataloger) Catalog(resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) {
pkgs, relationships, err := p.cataloger.Catalog(resolver)
func (p *progressingCataloger) Catalog(ctx context.Context, resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) {
pkgs, relationships, err := p.cataloger.Catalog(ctx, resolver)
goCompilerPkgs := []pkg.Package{}
totalLocations := file.NewLocationSet()
for _, goPkg := range pkgs {
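
The progressingCataloger change above is the wrapper case: accept ctx and hand it to the wrapped cataloger untouched. A hypothetical decorator showing the same shape (timing output via fmt is illustrative only):

type timedCataloger struct {
	inner pkg.Cataloger
}

func (t timedCataloger) Name() string { return t.inner.Name() }

func (t timedCataloger) Catalog(ctx context.Context, resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) {
	start := time.Now()
	defer func() { fmt.Printf("%s finished in %s\n", t.inner.Name(), time.Since(start)) }()
	return t.inner.Catalog(ctx, resolver) // pass the caller's context straight through
}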

View File

@ -2,6 +2,7 @@ package golang
import (
"bytes"
"context"
"debug/elf"
"debug/macho"
"debug/pe"
@ -48,7 +49,7 @@ type goBinaryCataloger struct {
}
// parseGoBinary catalogs packages found in the "buildinfo" section of a binary built by the go compiler.
func (c *goBinaryCataloger) parseGoBinary(resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func (c *goBinaryCataloger) parseGoBinary(_ context.Context, resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
var pkgs []pkg.Package
unionReader, err := unionreader.GetUnionReader(reader.ReadCloser)

View File

@ -2,6 +2,7 @@ package golang
import (
"bufio"
"context"
"fmt"
"io"
"sort"
@ -23,7 +24,7 @@ type goModCataloger struct {
// parseGoModFile takes a go.mod and lists all packages discovered.
//
//nolint:funlen
func (c *goModCataloger) parseGoModFile(resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func (c *goModCataloger) parseGoModFile(_ context.Context, resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
packages := make(map[string]pkg.Package)
contents, err := io.ReadAll(reader)

View File

@ -2,6 +2,7 @@ package haskell
import (
"bufio"
"context"
"errors"
"fmt"
"io"
@ -16,7 +17,7 @@ import (
var _ generic.Parser = parseCabalFreeze
// parseCabalFreeze is a parser function for cabal.project.freeze contents, returning all packages discovered.
func parseCabalFreeze(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parseCabalFreeze(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
r := bufio.NewReader(reader)
var pkgs []pkg.Package
for {

View File

@ -1,6 +1,7 @@
package haskell
import (
"context"
"fmt"
"io"
"strings"
@ -39,7 +40,7 @@ type completedSnapshot struct {
}
// parseStackLock is a parser function for stack.yaml.lock contents, returning all packages discovered.
func parseStackLock(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parseStackLock(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
bytes, err := io.ReadAll(reader)
if err != nil {
return nil, nil, fmt.Errorf("failed to load stack.yaml.lock file: %w", err)

View File

@ -1,6 +1,7 @@
package haskell
import (
"context"
"fmt"
"io"
@ -20,7 +21,7 @@ type stackYaml struct {
}
// parseStackYaml is a parser function for stack.yaml contents, returning all packages discovered.
func parseStackYaml(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parseStackYaml(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
bytes, err := io.ReadAll(reader)
if err != nil {
return nil, nil, fmt.Errorf("failed to load stack.yaml file: %w", err)

View File

@ -1,6 +1,7 @@
package pkgtest
import (
"context"
"fmt"
"io"
"os"
@ -221,7 +222,7 @@ func (p *CatalogTester) IgnoreUnfulfilledPathResponses(paths ...string) *Catalog
func (p *CatalogTester) TestParser(t *testing.T, parser generic.Parser) {
t.Helper()
pkgs, relationships, err := parser(p.resolver, p.env, p.reader)
pkgs, relationships, err := parser(context.Background(), p.resolver, p.env, p.reader)
p.wantErr(t, err)
p.assertPkgs(t, pkgs, relationships)
}
@ -231,7 +232,7 @@ func (p *CatalogTester) TestCataloger(t *testing.T, cataloger pkg.Cataloger) {
resolver := NewObservingResolver(p.resolver)
pkgs, relationships, err := cataloger.Catalog(resolver)
pkgs, relationships, err := cataloger.Catalog(context.Background(), resolver)
// this is a minimum set, the resolver may return more that just this list
for _, path := range p.expectedPathResponses {

View File

@ -1,6 +1,7 @@
package java
import (
"context"
"crypto"
"fmt"
"os"
@ -59,14 +60,14 @@ func newGenericArchiveParserAdapter(cfg ArchiveCatalogerConfig) genericArchivePa
}
// parseJavaArchive is a parser function for java archive contents, returning all Java libraries and nested archives.
func (gap genericArchiveParserAdapter) parseJavaArchive(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func (gap genericArchiveParserAdapter) parseJavaArchive(ctx context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
parser, cleanupFn, err := newJavaArchiveParser(reader, true, gap.cfg)
// note: even on error, we should always run cleanup functions
defer cleanupFn()
if err != nil {
return nil, nil, err
}
return parser.parse()
return parser.parse(ctx)
}
// uniquePkgKey creates a unique string to identify the given package.
@ -106,19 +107,19 @@ func newJavaArchiveParser(reader file.LocationReadCloser, detectNested bool, cfg
}
// parse the loaded archive and return all packages found.
func (j *archiveParser) parse() ([]pkg.Package, []artifact.Relationship, error) {
func (j *archiveParser) parse(ctx context.Context) ([]pkg.Package, []artifact.Relationship, error) {
var pkgs []pkg.Package
var relationships []artifact.Relationship
// find the parent package from the java manifest
parentPkg, err := j.discoverMainPackage()
parentPkg, err := j.discoverMainPackage(ctx)
if err != nil {
return nil, nil, fmt.Errorf("could not generate package from %s: %w", j.location, err)
}
// find aux packages from pom.properties/pom.xml and potentially modify the existing parentPkg
// NOTE: we cannot generate sha1 digests from packages discovered via pom.properties/pom.xml
auxPkgs, err := j.discoverPkgsFromAllMavenFiles(parentPkg)
auxPkgs, err := j.discoverPkgsFromAllMavenFiles(ctx, parentPkg)
if err != nil {
return nil, nil, err
}
@ -126,7 +127,7 @@ func (j *archiveParser) parse() ([]pkg.Package, []artifact.Relationship, error)
if j.detectNested {
// find nested java archive packages
nestedPkgs, nestedRelationships, err := j.discoverPkgsFromNestedArchives(parentPkg)
nestedPkgs, nestedRelationships, err := j.discoverPkgsFromNestedArchives(ctx, parentPkg)
if err != nil {
return nil, nil, err
}
@ -156,7 +157,7 @@ func (j *archiveParser) parse() ([]pkg.Package, []artifact.Relationship, error)
}
// discoverMainPackage parses the root Java manifest used as the parent package to all discovered nested packages.
func (j *archiveParser) discoverMainPackage() (*pkg.Package, error) {
func (j *archiveParser) discoverMainPackage(ctx context.Context) (*pkg.Package, error) {
// search and parse java manifest files
manifestMatches := j.fileManifest.GlobMatch(false, manifestGlob)
if len(manifestMatches) > 1 {
@ -186,7 +187,7 @@ func (j *archiveParser) discoverMainPackage() (*pkg.Package, error) {
return nil, err
}
licenses, name, version, err := j.parseLicenses(manifest)
licenses, name, version, err := j.parseLicenses(ctx, manifest)
if err != nil {
return nil, err
}
@ -209,7 +210,7 @@ func (j *archiveParser) discoverMainPackage() (*pkg.Package, error) {
}, nil
}
func (j *archiveParser) parseLicenses(manifest *pkg.JavaManifest) ([]pkg.License, string, string, error) {
func (j *archiveParser) parseLicenses(ctx context.Context, manifest *pkg.JavaManifest) ([]pkg.License, string, string, error) {
// we use j.location because we want to associate the license declaration with where we discovered the contents in the manifest
// TODO: when we support locations of paths within archives we should start passing the specific manifest location object instead of the top jar
licenses := pkg.NewLicensesFromLocation(j.location, selectLicenses(manifest)...)
@ -220,7 +221,7 @@ func (j *archiveParser) parseLicenses(manifest *pkg.JavaManifest) ([]pkg.License
3. manifest
4. filename
*/
name, version, pomLicenses := j.guessMainPackageNameAndVersionFromPomInfo()
name, version, pomLicenses := j.guessMainPackageNameAndVersionFromPomInfo(ctx)
if name == "" {
name = selectName(manifest, j.fileInfo)
}
@ -246,24 +247,24 @@ func (j *archiveParser) parseLicenses(manifest *pkg.JavaManifest) ([]pkg.License
// If we didn't find any licenses in the archive so far, we'll try again in Maven Central using groupIDFromJavaMetadata
if len(licenses) == 0 && j.cfg.UseNetwork {
licenses = findLicenseFromJavaMetadata(name, manifest, version, j, licenses)
licenses = findLicenseFromJavaMetadata(ctx, name, manifest, version, j, licenses)
}
return licenses, name, version, nil
}
func findLicenseFromJavaMetadata(name string, manifest *pkg.JavaManifest, version string, j *archiveParser, licenses []pkg.License) []pkg.License {
func findLicenseFromJavaMetadata(ctx context.Context, name string, manifest *pkg.JavaManifest, version string, j *archiveParser, licenses []pkg.License) []pkg.License {
var groupID = name
if gID := groupIDFromJavaMetadata(name, pkg.JavaArchive{Manifest: manifest}); gID != "" {
groupID = gID
}
pomLicenses := recursivelyFindLicensesFromParentPom(groupID, name, version, j.cfg)
pomLicenses := recursivelyFindLicensesFromParentPom(ctx, groupID, name, version, j.cfg)
if len(pomLicenses) == 0 {
// Try removing the last part of the groupId, as sometimes it duplicates the artifactId
packages := strings.Split(groupID, ".")
groupID = strings.Join(packages[:len(packages)-1], ".")
pomLicenses = recursivelyFindLicensesFromParentPom(groupID, name, version, j.cfg)
pomLicenses = recursivelyFindLicensesFromParentPom(ctx, groupID, name, version, j.cfg)
}
if len(pomLicenses) > 0 {
@ -280,7 +281,7 @@ type parsedPomProject struct {
Licenses []pkg.License
}
func (j *archiveParser) guessMainPackageNameAndVersionFromPomInfo() (name, version string, licenses []pkg.License) {
func (j *archiveParser) guessMainPackageNameAndVersionFromPomInfo(ctx context.Context) (name, version string, licenses []pkg.License) {
pomPropertyMatches := j.fileManifest.GlobMatch(false, pomPropertiesGlob)
pomMatches := j.fileManifest.GlobMatch(false, pomXMLGlob)
var pomPropertiesObject pkg.JavaPomProperties
@ -311,14 +312,14 @@ func (j *archiveParser) guessMainPackageNameAndVersionFromPomInfo() (name, versi
if j.cfg.UseNetwork {
if pomProjectObject == nil {
// If we have no pom.xml, check maven central using pom.properties
parentLicenses := recursivelyFindLicensesFromParentPom(pomPropertiesObject.GroupID, pomPropertiesObject.ArtifactID, pomPropertiesObject.Version, j.cfg)
parentLicenses := recursivelyFindLicensesFromParentPom(ctx, pomPropertiesObject.GroupID, pomPropertiesObject.ArtifactID, pomPropertiesObject.Version, j.cfg)
if len(parentLicenses) > 0 {
for _, licenseName := range parentLicenses {
licenses = append(licenses, pkg.NewLicenseFromFields(licenseName, "", nil))
}
}
} else {
findPomLicenses(pomProjectObject, j.cfg)
findPomLicenses(ctx, pomProjectObject, j.cfg)
}
}
@ -336,10 +337,11 @@ func artifactIDMatchesFilename(artifactID, fileName string) bool {
return strings.HasPrefix(artifactID, fileName) || strings.HasSuffix(fileName, artifactID)
}
func findPomLicenses(pomProjectObject *parsedPomProject, cfg ArchiveCatalogerConfig) {
func findPomLicenses(ctx context.Context, pomProjectObject *parsedPomProject, cfg ArchiveCatalogerConfig) {
// If we don't have any licenses until now, and if we have a parent Pom, then we'll check the parent pom in maven central for licenses.
if pomProjectObject != nil && pomProjectObject.Parent != nil && len(pomProjectObject.Licenses) == 0 {
parentLicenses := recursivelyFindLicensesFromParentPom(
ctx,
pomProjectObject.Parent.GroupID,
pomProjectObject.Parent.ArtifactID,
pomProjectObject.Parent.Version,
@ -357,7 +359,7 @@ func findPomLicenses(pomProjectObject *parsedPomProject, cfg ArchiveCatalogerCon
// parent package, returning all listed Java packages found for each pom
// properties discovered and potentially updating the given parentPkg with new
// data.
func (j *archiveParser) discoverPkgsFromAllMavenFiles(parentPkg *pkg.Package) ([]pkg.Package, error) {
func (j *archiveParser) discoverPkgsFromAllMavenFiles(ctx context.Context, parentPkg *pkg.Package) ([]pkg.Package, error) {
if parentPkg == nil {
return nil, nil
}
@ -382,7 +384,7 @@ func (j *archiveParser) discoverPkgsFromAllMavenFiles(parentPkg *pkg.Package) ([
pomProject = proj
}
pkgFromPom := newPackageFromMavenData(propertiesObj, pomProject, parentPkg, j.location, j.cfg)
pkgFromPom := newPackageFromMavenData(ctx, propertiesObj, pomProject, parentPkg, j.location, j.cfg)
if pkgFromPom != nil {
pkgs = append(pkgs, *pkgFromPom)
}
@ -439,30 +441,30 @@ func (j *archiveParser) getLicenseFromFileInArchive() ([]pkg.License, error) {
return fileLicenses, nil
}
func (j *archiveParser) discoverPkgsFromNestedArchives(parentPkg *pkg.Package) ([]pkg.Package, []artifact.Relationship, error) {
func (j *archiveParser) discoverPkgsFromNestedArchives(ctx context.Context, parentPkg *pkg.Package) ([]pkg.Package, []artifact.Relationship, error) {
// we know that all java archives are zip formatted files, so we can use the shared zip helper
return discoverPkgsFromZip(j.location, j.archivePath, j.contentPath, j.fileManifest, parentPkg, j.cfg)
return discoverPkgsFromZip(ctx, j.location, j.archivePath, j.contentPath, j.fileManifest, parentPkg, j.cfg)
}
// discoverPkgsFromZip finds Java archives within Java archives, returning all listed Java packages found and
// associating each discovered package to the given parent package.
func discoverPkgsFromZip(location file.Location, archivePath, contentPath string, fileManifest intFile.ZipFileManifest, parentPkg *pkg.Package, cfg ArchiveCatalogerConfig) ([]pkg.Package, []artifact.Relationship, error) {
func discoverPkgsFromZip(ctx context.Context, location file.Location, archivePath, contentPath string, fileManifest intFile.ZipFileManifest, parentPkg *pkg.Package, cfg ArchiveCatalogerConfig) ([]pkg.Package, []artifact.Relationship, error) {
// search and parse pom.properties files & fetch the contents
openers, err := intFile.ExtractFromZipToUniqueTempFile(archivePath, contentPath, fileManifest.GlobMatch(false, archiveFormatGlobs...)...)
if err != nil {
return nil, nil, fmt.Errorf("unable to extract files from zip: %w", err)
}
return discoverPkgsFromOpeners(location, openers, parentPkg, cfg)
return discoverPkgsFromOpeners(ctx, location, openers, parentPkg, cfg)
}
// discoverPkgsFromOpeners finds Java archives within the given files and associates them with the given parent package.
func discoverPkgsFromOpeners(location file.Location, openers map[string]intFile.Opener, parentPkg *pkg.Package, cfg ArchiveCatalogerConfig) ([]pkg.Package, []artifact.Relationship, error) {
func discoverPkgsFromOpeners(ctx context.Context, location file.Location, openers map[string]intFile.Opener, parentPkg *pkg.Package, cfg ArchiveCatalogerConfig) ([]pkg.Package, []artifact.Relationship, error) {
var pkgs []pkg.Package
var relationships []artifact.Relationship
for pathWithinArchive, archiveOpener := range openers {
nestedPkgs, nestedRelationships, err := discoverPkgsFromOpener(location, pathWithinArchive, archiveOpener, cfg)
nestedPkgs, nestedRelationships, err := discoverPkgsFromOpener(ctx, location, pathWithinArchive, archiveOpener, cfg)
if err != nil {
log.WithFields("location", location.Path()).Warnf("unable to discover java packages from opener: %+v", err)
continue
@ -486,7 +488,7 @@ func discoverPkgsFromOpeners(location file.Location, openers map[string]intFile.
}
// discoverPkgsFromOpener finds Java archives within the given file.
func discoverPkgsFromOpener(location file.Location, pathWithinArchive string, archiveOpener intFile.Opener, cfg ArchiveCatalogerConfig) ([]pkg.Package, []artifact.Relationship, error) {
func discoverPkgsFromOpener(ctx context.Context, location file.Location, pathWithinArchive string, archiveOpener intFile.Opener, cfg ArchiveCatalogerConfig) ([]pkg.Package, []artifact.Relationship, error) {
archiveReadCloser, err := archiveOpener.Open()
if err != nil {
return nil, nil, fmt.Errorf("unable to open archived file from tempdir: %w", err)
@ -501,7 +503,7 @@ func discoverPkgsFromOpener(location file.Location, pathWithinArchive string, ar
nestedLocation := file.NewLocationFromCoordinates(location.Coordinates)
nestedLocation.AccessPath = nestedPath
gap := newGenericArchiveParserAdapter(cfg)
nestedPkgs, nestedRelationships, err := gap.parseJavaArchive(nil, nil, file.LocationReadCloser{
nestedPkgs, nestedRelationships, err := gap.parseJavaArchive(ctx, nil, nil, file.LocationReadCloser{
Location: nestedLocation,
ReadCloser: archiveReadCloser,
})
@ -573,7 +575,7 @@ func pomProjectByParentPath(archivePath string, location file.Location, extractP
// newPackageFromMavenData processes a single Maven POM properties for a given parent package, returning all listed Java packages found and
// associating each discovered package to the given parent package. Note the pom.xml is optional, the pom.properties is not.
func newPackageFromMavenData(pomProperties pkg.JavaPomProperties, parsedPomProject *parsedPomProject, parentPkg *pkg.Package, location file.Location, cfg ArchiveCatalogerConfig) *pkg.Package {
func newPackageFromMavenData(ctx context.Context, pomProperties pkg.JavaPomProperties, parsedPomProject *parsedPomProject, parentPkg *pkg.Package, location file.Location, cfg ArchiveCatalogerConfig) *pkg.Package {
// keep the artifact name within the virtual path if this package does not match the parent package
vPathSuffix := ""
groupID := ""
@ -601,14 +603,14 @@ func newPackageFromMavenData(pomProperties pkg.JavaPomProperties, parsedPomProje
if cfg.UseNetwork {
if parsedPomProject == nil {
// If we have no pom.xml, check maven central using pom.properties
parentLicenses := recursivelyFindLicensesFromParentPom(pomProperties.GroupID, pomProperties.ArtifactID, pomProperties.Version, cfg)
parentLicenses := recursivelyFindLicensesFromParentPom(ctx, pomProperties.GroupID, pomProperties.ArtifactID, pomProperties.Version, cfg)
if len(parentLicenses) > 0 {
for _, licenseName := range parentLicenses {
licenses = append(licenses, pkg.NewLicenseFromFields(licenseName, "", nil))
}
}
} else {
findPomLicenses(parsedPomProject, cfg)
findPomLicenses(ctx, parsedPomProject, cfg)
}
}
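
Most of this file's diff is the same move repeated: every private helper between parse and the Maven Central lookup takes ctx as its first parameter, because a context can only reach the leaf call if each intermediate function passes it along. An illustrative chain (names are hypothetical stand-ins for the real helpers):

func parseArchive(ctx context.Context, name string) error        { return discoverMainPackage(ctx, name) }
func discoverMainPackage(ctx context.Context, name string) error { return resolveLicenses(ctx, name) }

func resolveLicenses(ctx context.Context, name string) error {
	// only the leaf actually consults the context (e.g. for a network lookup),
	// but it can do so only because every caller above threaded it through
	if err := ctx.Err(); err != nil {
		return err
	}
	// ... fetch and inspect the parent pom here ...
	return nil
}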

View File

@ -2,6 +2,7 @@ package java
import (
"bufio"
"context"
"fmt"
"io"
"net/http"
@ -136,7 +137,7 @@ func TestSearchMavenForLicenses(t *testing.T) {
defer cleanupFn()
// assert licenses are discovered from upstream
_, _, licenses := ap.guessMainPackageNameAndVersionFromPomInfo()
_, _, licenses := ap.guessMainPackageNameAndVersionFromPomInfo(context.Background())
assert.Equal(t, tc.expectedLicenses, licenses)
})
}
@ -405,7 +406,7 @@ func TestParseJar(t *testing.T) {
defer cleanupFn()
require.NoError(t, err)
actual, _, err := parser.parse()
actual, _, err := parser.parse(context.Background())
require.NoError(t, err)
if len(actual) != len(test.expected) {
@ -669,7 +670,7 @@ func TestParseNestedJar(t *testing.T) {
require.NoError(t, err)
gap := newGenericArchiveParserAdapter(ArchiveCatalogerConfig{})
actual, _, err := gap.parseJavaArchive(nil, nil, file.LocationReadCloser{
actual, _, err := gap.parseJavaArchive(context.Background(), nil, nil, file.LocationReadCloser{
Location: file.NewLocation(fixture.Name()),
ReadCloser: fixture,
})
@ -1089,7 +1090,7 @@ func Test_newPackageFromMavenData(t *testing.T) {
}
test.expectedParent.Locations = locations
actualPackage := newPackageFromMavenData(test.props, test.project, test.parent, file.NewLocation(virtualPath), DefaultArchiveCatalogerConfig())
actualPackage := newPackageFromMavenData(context.Background(), test.props, test.project, test.parent, file.NewLocation(virtualPath), DefaultArchiveCatalogerConfig())
if test.expectedPackage == nil {
require.Nil(t, actualPackage)
} else {

View File

@ -3,6 +3,7 @@ package java
import (
"bytes"
"compress/gzip"
"context"
"debug/elf"
"debug/macho"
"debug/pe"
@ -570,7 +571,7 @@ func fetchPkgs(reader unionreader.UnionReader, filename string) []pkg.Package {
}
// Catalog attempts to find any native image executables reachable from a resolver.
func (c *NativeImageCataloger) Catalog(resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) {
func (c *NativeImageCataloger) Catalog(_ context.Context, resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) {
var pkgs []pkg.Package
fileMatches, err := resolver.FilesByMIMEType(internal.ExecutableMIMETypeSet.List()...)
if err != nil {

View File

@ -1,6 +1,7 @@
package java
import (
"context"
"fmt"
"io"
"net/http"
@ -28,10 +29,10 @@ func formatMavenPomURL(groupID, artifactID, version, mavenBaseURL string) (reque
}
// An artifact can have its version defined in a parent's DependencyManagement section
func recursivelyFindVersionFromParentPom(groupID, artifactID, parentGroupID, parentArtifactID, parentVersion string, cfg ArchiveCatalogerConfig) string {
func recursivelyFindVersionFromParentPom(ctx context.Context, groupID, artifactID, parentGroupID, parentArtifactID, parentVersion string, cfg ArchiveCatalogerConfig) string {
// As there can be nested parent poms, we'll recursively check for the version until we reach the max depth
for i := 0; i < cfg.MaxParentRecursiveDepth; i++ {
parentPom, err := getPomFromMavenRepo(parentGroupID, parentArtifactID, parentVersion, cfg.MavenBaseURL)
parentPom, err := getPomFromMavenRepo(ctx, parentGroupID, parentArtifactID, parentVersion, cfg.MavenBaseURL)
if err != nil {
// We don't want to abort here as the parent pom might not exist in Maven Central, we'll just log the error
log.Tracef("unable to get parent pom from Maven central: %v", err)
@ -54,11 +55,11 @@ func recursivelyFindVersionFromParentPom(groupID, artifactID, parentGroupID, par
return ""
}
func recursivelyFindLicensesFromParentPom(groupID, artifactID, version string, cfg ArchiveCatalogerConfig) []string {
func recursivelyFindLicensesFromParentPom(ctx context.Context, groupID, artifactID, version string, cfg ArchiveCatalogerConfig) []string {
var licenses []string
// As there can be nested parent poms, we'll recursively check for licenses until we reach the max depth
for i := 0; i < cfg.MaxParentRecursiveDepth; i++ {
parentPom, err := getPomFromMavenRepo(groupID, artifactID, version, cfg.MavenBaseURL)
parentPom, err := getPomFromMavenRepo(ctx, groupID, artifactID, version, cfg.MavenBaseURL)
if err != nil {
// We don't want to abort here as the parent pom might not exist in Maven Central, we'll just log the error
log.Tracef("unable to get parent pom from Maven central: %v", err)
@ -78,7 +79,7 @@ func recursivelyFindLicensesFromParentPom(groupID, artifactID, version string, c
return licenses
}
func getPomFromMavenRepo(groupID, artifactID, version, mavenBaseURL string) (*gopom.Project, error) {
func getPomFromMavenRepo(ctx context.Context, groupID, artifactID, version, mavenBaseURL string) (*gopom.Project, error) {
requestURL, err := formatMavenPomURL(groupID, artifactID, version, mavenBaseURL)
if err != nil {
return nil, err
@ -94,6 +95,8 @@ func getPomFromMavenRepo(groupID, artifactID, version, mavenBaseURL string) (*go
Timeout: time.Second * 10,
}
mavenRequest = mavenRequest.WithContext(ctx)
resp, err := httpClient.Do(mavenRequest)
if err != nil {
return nil, fmt.Errorf("unable to get pom from Maven central: %w", err)

View File

@ -2,6 +2,7 @@ package java
import (
"bufio"
"context"
"strings"
"github.com/anchore/syft/syft/artifact"
@ -12,14 +13,14 @@ import (
const gradleLockfileGlob = "**/gradle.lockfile*"
// Dependency represents a single dependency in the gradle.lockfile file
// LockfileDependency represents a single dependency in the gradle.lockfile file
type LockfileDependency struct {
Group string
Name string
Version string
}
func parseGradleLockfile(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parseGradleLockfile(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
var pkgs []pkg.Package
// Create a new scanner to read the file

View File

@ -2,6 +2,7 @@ package java
import (
"bytes"
"context"
"encoding/xml"
"fmt"
"io"
@ -24,7 +25,7 @@ const pomXMLGlob = "*pom.xml"
var propertyMatcher = regexp.MustCompile("[$][{][^}]+[}]")
func (gap genericArchiveParserAdapter) parserPomXML(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func (gap genericArchiveParserAdapter) parserPomXML(ctx context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
pom, err := decodePomXML(reader)
if err != nil {
return nil, nil, err
@ -34,6 +35,7 @@ func (gap genericArchiveParserAdapter) parserPomXML(_ file.Resolver, _ *generic.
if pom.Dependencies != nil {
for _, dep := range *pom.Dependencies {
p := newPackageFromPom(
ctx,
pom,
dep,
gap.cfg,
@ -98,7 +100,7 @@ func newPomProject(path string, p gopom.Project, location file.Location) *parsed
}
}
func newPackageFromPom(pom gopom.Project, dep gopom.Dependency, cfg ArchiveCatalogerConfig, locations ...file.Location) pkg.Package {
func newPackageFromPom(ctx context.Context, pom gopom.Project, dep gopom.Dependency, cfg ArchiveCatalogerConfig, locations ...file.Location) pkg.Package {
m := pkg.JavaArchive{
PomProperties: &pkg.JavaPomProperties{
GroupID: resolveProperty(pom, dep.GroupID, "groupId"),
@ -114,10 +116,11 @@ func newPackageFromPom(pom gopom.Project, dep gopom.Dependency, cfg ArchiveCatal
if cfg.UseNetwork {
if version == "" {
// If we have no version then let's try to get it from a parent pom DependencyManagement section
version = recursivelyFindVersionFromParentPom(*dep.GroupID, *dep.ArtifactID, *pom.Parent.GroupID, *pom.Parent.ArtifactID, *pom.Parent.Version, cfg)
version = recursivelyFindVersionFromParentPom(ctx, *dep.GroupID, *dep.ArtifactID, *pom.Parent.GroupID, *pom.Parent.ArtifactID, *pom.Parent.Version, cfg)
}
if version != "" {
parentLicenses := recursivelyFindLicensesFromParentPom(
ctx,
m.PomProperties.GroupID,
m.PomProperties.ArtifactID,
version,

View File

@ -1,6 +1,7 @@
package java
import (
"context"
"fmt"
intFile "github.com/anchore/syft/internal/file"
@ -56,7 +57,7 @@ func newGenericTarWrappedJavaArchiveParser(cfg ArchiveCatalogerConfig) genericTa
}
}
func (gtp genericTarWrappedJavaArchiveParser) parseTarWrappedJavaArchive(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func (gtp genericTarWrappedJavaArchiveParser) parseTarWrappedJavaArchive(ctx context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
contentPath, archivePath, cleanupFn, err := saveArchiveToTmp(reader.Path(), reader)
// note: even on error, we should always run cleanup functions
defer cleanupFn()
@ -65,14 +66,14 @@ func (gtp genericTarWrappedJavaArchiveParser) parseTarWrappedJavaArchive(_ file.
}
// look for java archives within the tar archive
return discoverPkgsFromTar(reader.Location, archivePath, contentPath, gtp.cfg)
return discoverPkgsFromTar(ctx, reader.Location, archivePath, contentPath, gtp.cfg)
}
func discoverPkgsFromTar(location file.Location, archivePath, contentPath string, cfg ArchiveCatalogerConfig) ([]pkg.Package, []artifact.Relationship, error) {
func discoverPkgsFromTar(ctx context.Context, location file.Location, archivePath, contentPath string, cfg ArchiveCatalogerConfig) ([]pkg.Package, []artifact.Relationship, error) {
openers, err := intFile.ExtractGlobsFromTarToUniqueTempFile(archivePath, contentPath, archiveFormatGlobs...)
if err != nil {
return nil, nil, fmt.Errorf("unable to extract files from tar: %w", err)
}
return discoverPkgsFromOpeners(location, openers, nil, cfg)
return discoverPkgsFromOpeners(ctx, location, openers, nil, cfg)
}

View File

@ -1,6 +1,7 @@
package java
import (
"context"
"os"
"path"
"testing"
@ -41,7 +42,7 @@ func Test_parseTarWrappedJavaArchive(t *testing.T) {
}
gtp := newGenericTarWrappedJavaArchiveParser(ArchiveCatalogerConfig{})
actualPkgs, _, err := gtp.parseTarWrappedJavaArchive(nil, nil, file.LocationReadCloser{
actualPkgs, _, err := gtp.parseTarWrappedJavaArchive(context.Background(), nil, nil, file.LocationReadCloser{
Location: file.NewLocation(test.fixture),
ReadCloser: fixture,
})

View File

@ -1,6 +1,7 @@
package java
import (
"context"
"fmt"
intFile "github.com/anchore/syft/internal/file"
@ -28,7 +29,7 @@ func newGenericZipWrappedJavaArchiveParser(cfg ArchiveCatalogerConfig) genericZi
}
}
func (gzp genericZipWrappedJavaArchiveParser) parseZipWrappedJavaArchive(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func (gzp genericZipWrappedJavaArchiveParser) parseZipWrappedJavaArchive(ctx context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
contentPath, archivePath, cleanupFn, err := saveArchiveToTmp(reader.Path(), reader)
// note: even on error, we should always run cleanup functions
defer cleanupFn()
@ -46,5 +47,5 @@ func (gzp genericZipWrappedJavaArchiveParser) parseZipWrappedJavaArchive(_ file.
}
// look for java archives within the zip archive
return discoverPkgsFromZip(reader.Location, archivePath, contentPath, fileManifest, nil, gzp.cfg)
return discoverPkgsFromZip(ctx, reader.Location, archivePath, contentPath, fileManifest, nil, gzp.cfg)
}

View File

@ -1,6 +1,7 @@
package java
import (
"context"
"os"
"path"
"testing"
@ -35,7 +36,7 @@ func Test_parseZipWrappedJavaArchive(t *testing.T) {
gzp := newGenericZipWrappedJavaArchiveParser(ArchiveCatalogerConfig{})
actualPkgs, _, err := gzp.parseZipWrappedJavaArchive(nil, nil, file.LocationReadCloser{
actualPkgs, _, err := gzp.parseZipWrappedJavaArchive(context.Background(), nil, nil, file.LocationReadCloser{
Location: file.NewLocation(test.fixture),
ReadCloser: fixture,
})

View File

@ -1,6 +1,7 @@
package javascript
import (
"context"
"encoding/json"
"errors"
"fmt"
@ -51,7 +52,7 @@ type repository struct {
var authorPattern = regexp.MustCompile(`^\s*(?P<name>[^<(]*)(\s+<(?P<email>.*)>)?(\s\((?P<url>.*)\))?\s*$`)
// parsePackageJSON parses a package.json and returns the discovered JavaScript packages.
func parsePackageJSON(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parsePackageJSON(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
var pkgs []pkg.Package
dec := json.NewDecoder(reader)

View File

@ -1,6 +1,7 @@
package javascript
import (
"context"
"encoding/json"
"errors"
"fmt"
@ -44,7 +45,7 @@ type lockPackage struct {
type packageLockLicense []string
// parsePackageLock parses a package-lock.json and returns the discovered JavaScript packages.
func parsePackageLock(resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parsePackageLock(_ context.Context, resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
// in the case we find package-lock.json files in the node_modules directories, skip those
// as the whole purpose of the lock file is for the specific dependencies of the root project
if pathContainsNodeModulesDirectory(reader.Path()) {

View File

@ -1,6 +1,7 @@
package javascript
import (
"context"
"fmt"
"io"
"regexp"
@ -25,7 +26,7 @@ type pnpmLockYaml struct {
Packages map[string]interface{} `json:"packages" yaml:"packages"`
}
func parsePnpmLock(resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parsePnpmLock(_ context.Context, resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
bytes, err := io.ReadAll(reader)
if err != nil {
return nil, nil, fmt.Errorf("failed to load pnpm-lock.yaml file: %w", err)

View File

@ -2,6 +2,7 @@ package javascript
import (
"bufio"
"context"
"fmt"
"regexp"
@ -50,7 +51,7 @@ func newGenericYarnLockAdapter(cfg CatalogerConfig) genericYarnLockAdapter {
}
}
func (a genericYarnLockAdapter) parseYarnLock(resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func (a genericYarnLockAdapter) parseYarnLock(_ context.Context, resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
// in the case we find yarn.lock files in the node_modules directories, skip those
// as the whole purpose of the lock file is for the specific dependencies of the project
if pathContainsNodeModulesDirectory(reader.Path()) {

View File

@ -4,6 +4,8 @@ Package kernel provides a concrete Cataloger implementation for linux kernel and
package kernel
import (
"context"
"github.com/hashicorp/go-multierror"
"github.com/anchore/syft/internal/log"
@ -53,12 +55,12 @@ func (l LinuxKernelCataloger) Name() string {
return "linux-kernel-cataloger"
}
func (l LinuxKernelCataloger) Catalog(resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) {
func (l LinuxKernelCataloger) Catalog(ctx context.Context, resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) {
var allPackages []pkg.Package
var allRelationships []artifact.Relationship
var errs error
kernelPackages, kernelRelationships, err := generic.NewCataloger(l.Name()).WithParserByGlobs(parseLinuxKernelFile, kernelArchiveGlobs...).Catalog(resolver)
kernelPackages, kernelRelationships, err := generic.NewCataloger(l.Name()).WithParserByGlobs(parseLinuxKernelFile, kernelArchiveGlobs...).Catalog(ctx, resolver)
if err != nil {
errs = multierror.Append(errs, err)
}
@ -67,7 +69,7 @@ func (l LinuxKernelCataloger) Catalog(resolver file.Resolver) ([]pkg.Package, []
allPackages = append(allPackages, kernelPackages...)
if l.cfg.CatalogModules {
modulePackages, moduleRelationships, err := generic.NewCataloger(l.Name()).WithParserByGlobs(parseLinuxKernelModuleFile, kernelModuleGlobs...).Catalog(resolver)
modulePackages, moduleRelationships, err := generic.NewCataloger(l.Name()).WithParserByGlobs(parseLinuxKernelModuleFile, kernelModuleGlobs...).Catalog(ctx, resolver)
if err != nil {
errs = multierror.Append(errs, err)
}
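For composite catalogers like this one, the pattern is to hand the incoming ctx to every sub-cataloger instead of letting any of them mint a fresh context. Assuming pkg.Cataloger now carries the context in its Catalog method, as the call sites here suggest, the same idea looks roughly like this sketch of a hypothetical two-phase cataloger that also checks for cancellation between phases:

package example // hypothetical wrapper, for illustration only

import (
    "context"

    "github.com/anchore/syft/syft/artifact"
    "github.com/anchore/syft/syft/file"
    "github.com/anchore/syft/syft/pkg"
)

type compositeCataloger struct {
    name          string
    first, second pkg.Cataloger // any two catalogers to run in sequence
}

func (c compositeCataloger) Name() string { return c.name }

func (c compositeCataloger) Catalog(ctx context.Context, resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) {
    pkgs, rels, err := c.first.Catalog(ctx, resolver) // forward the caller's ctx, don't recreate it
    if err != nil {
        return nil, nil, err
    }
    if err := ctx.Err(); err != nil { // stop between phases if the caller has given up
        return nil, nil, err
    }
    morePkgs, moreRels, err := c.second.Catalog(ctx, resolver)
    if err != nil {
        return nil, nil, err
    }
    return append(pkgs, morePkgs...), append(rels, moreRels...), nil
}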

View File

@ -1,6 +1,7 @@
package kernel
import (
"context"
"fmt"
"strconv"
"strings"
@ -17,7 +18,7 @@ import (
const linuxKernelMagicName = "Linux kernel"
func parseLinuxKernelFile(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parseLinuxKernelFile(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
unionReader, err := unionreader.GetUnionReader(reader)
if err != nil {
return nil, nil, fmt.Errorf("unable to get union reader for file: %w", err)

View File

@ -1,6 +1,7 @@
package kernel
import (
"context"
"debug/elf"
"fmt"
"strings"
@ -14,7 +15,7 @@ import (
const modinfoName = ".modinfo"
func parseLinuxKernelModuleFile(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parseLinuxKernelModuleFile(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
unionReader, err := unionreader.GetUnionReader(reader)
if err != nil {
return nil, nil, fmt.Errorf("unable to get union reader for file: %w", err)

View File

@ -28,11 +28,11 @@ func (c *StoreCataloger) Name() string {
return catalogerName
}
func (c *StoreCataloger) Catalog(resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) {
func (c *StoreCataloger) Catalog(ctx context.Context, resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) {
// we want to search for only directories, which isn't possible via the stereoscope API, so we need to apply the glob manually on all returned paths
var pkgs []pkg.Package
var filesByPath = make(map[string]*file.LocationSet)
ctx, cancel := context.WithCancel(context.Background())
ctx, cancel := context.WithCancel(ctx)
defer cancel()
for location := range resolver.AllLocations(ctx) {
matchesStorePath, err := doublestar.Match("**/nix/store/*", location.RealPath)
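This is the one place in the commit where a context was previously created from scratch; deriving the cancellable context from the caller's ctx means upstream cancellation now actually reaches resolver.AllLocations. The same derive-and-cancel pattern can also stop a location stream early, as in this sketch (the helper and its limit parameter are hypothetical):

package example // hypothetical helper, for illustration only

import (
    "context"

    "github.com/anchore/syft/syft/file"
)

// collectStorePaths drains locations from the resolver until the limit is reached or the
// caller's context is cancelled, whichever comes first.
func collectStorePaths(ctx context.Context, resolver file.Resolver, limit int) []string {
    ctx, cancel := context.WithCancel(ctx) // inherit the caller's cancellation and deadline
    defer cancel()                         // always release the derived context

    var paths []string
    for location := range resolver.AllLocations(ctx) {
        paths = append(paths, location.RealPath)
        if len(paths) >= limit {
            cancel() // ask the resolver to stop emitting further locations
            break
        }
    }
    return paths
}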

View File

@ -1,6 +1,7 @@
package php
import (
"context"
"encoding/json"
"errors"
"fmt"
@ -25,7 +26,7 @@ type composerLock struct {
}
// parseComposerLock is a parser function for Composer.lock contents, returning "Default" php packages discovered.
func parseComposerLock(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parseComposerLock(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
pkgs := make([]pkg.Package, 0)
dec := json.NewDecoder(reader)

View File

@ -1,6 +1,7 @@
package php
import (
"context"
"encoding/json"
"errors"
"fmt"
@ -46,7 +47,7 @@ func (w *installedJSONComposerV2) UnmarshalJSON(data []byte) error {
}
// parseInstalledJSON is a parser function for Composer installed.json contents, returning "Default" php packages discovered.
func parseInstalledJSON(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parseInstalledJSON(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
var pkgs []pkg.Package
dec := json.NewDecoder(reader)

View File

@ -1,6 +1,7 @@
package python
import (
"context"
"testing"
"github.com/stretchr/testify/require"
@ -296,7 +297,7 @@ func Test_PackageCataloger_IgnorePackage(t *testing.T) {
t.Run(test.MetadataFixture, func(t *testing.T) {
resolver := file.NewMockResolverForPaths(test.MetadataFixture)
actual, _, err := NewInstalledPackageCataloger().Catalog(resolver)
actual, _, err := NewInstalledPackageCataloger().Catalog(context.Background(), resolver)
require.NoError(t, err)
if len(actual) != 0 {

View File

@ -1,6 +1,7 @@
package python
import (
"context"
"encoding/json"
"errors"
"fmt"
@ -41,7 +42,7 @@ type Dependency struct {
var _ generic.Parser = parsePipfileLock
// parsePipfileLock is a parser function for Pipfile.lock contents, returning "Default" python packages discovered.
func parsePipfileLock(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parsePipfileLock(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
pkgs := make([]pkg.Package, 0)
dec := json.NewDecoder(reader)

View File

@ -1,6 +1,7 @@
package python
import (
"context"
"fmt"
"github.com/pelletier/go-toml"
@ -25,7 +26,7 @@ type poetryMetadata struct {
}
// parsePoetryLock is a parser function for poetry.lock contents, returning all python packages discovered.
func parsePoetryLock(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parsePoetryLock(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
tree, err := toml.LoadReader(reader)
if err != nil {
return nil, nil, fmt.Errorf("unable to load poetry.lock for parsing: %w", err)

View File

@ -2,6 +2,7 @@ package python
import (
"bufio"
"context"
"fmt"
"regexp"
"strings"
@ -92,7 +93,7 @@ func newRequirementsParser(cfg CatalogerConfig) requirementsParser {
// parseRequirementsTxt takes a Python requirements.txt file, returning all Python packages that are locked to a
// specific version.
func (rp requirementsParser) parseRequirementsTxt(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func (rp requirementsParser) parseRequirementsTxt(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
var packages []pkg.Package
scanner := bufio.NewScanner(reader)
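Most parsers touched by this commit discard the new parameter with _ context.Context, and this one does too; the benefit of threading it through is that a line-oriented parser could honor cancellation inside its scan loop if that were ever wanted. A sketch of that option, not something this change does:

package example // hypothetical helper, for illustration only

import (
    "bufio"
    "context"
    "io"
    "strings"
)

// scanLines reads trimmed lines from r, stopping promptly if the caller cancels ctx.
func scanLines(ctx context.Context, r io.Reader) ([]string, error) {
    var lines []string
    scanner := bufio.NewScanner(r)
    for scanner.Scan() {
        if err := ctx.Err(); err != nil { // cancelled or past its deadline
            return nil, err
        }
        lines = append(lines, strings.TrimSpace(scanner.Text()))
    }
    return lines, scanner.Err()
}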

View File

@ -2,6 +2,7 @@ package python
import (
"bufio"
"context"
"regexp"
"strings"
@ -22,7 +23,7 @@ var _ generic.Parser = parseSetup
// " mypy2 == v0.770", ' mypy3== v0.770', --> match(name=mypy2 version=v0.770), match(name=mypy3, version=v0.770)
var pinnedDependency = regexp.MustCompile(`['"]\W?(\w+\W?==\W?[\w.]*)`)
func parseSetup(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parseSetup(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
var packages []pkg.Package
scanner := bufio.NewScanner(reader)

View File

@ -2,6 +2,7 @@ package python
import (
"bufio"
"context"
"encoding/json"
"fmt"
"io"
@ -17,7 +18,7 @@ import (
// parseWheelOrEgg takes the primary metadata file reference and returns the python package it represents. Contained
// fields are governed by the PyPA core metadata specification (https://packaging.python.org/en/latest/specifications/core-metadata/).
func parseWheelOrEgg(resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parseWheelOrEgg(_ context.Context, resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
pd, sources, err := assembleEggOrWheelMetadata(resolver, reader.Location)
if err != nil {
return nil, nil, err

View File

@ -2,6 +2,7 @@ package r
import (
"bufio"
"context"
"io"
"regexp"
"strings"
@ -28,7 +29,7 @@ License: Part of R 4.3.0
License: Unlimited
*/
func parseDescriptionFile(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parseDescriptionFile(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
values := extractFieldsFromDescriptionFile(reader)
m := parseDataFromDescriptionMap(values)
p := newPackage(m, []file.Location{reader.Location}...)

View File

@ -1,6 +1,7 @@
package r
import (
"context"
"os"
"path/filepath"
"testing"
@ -57,7 +58,7 @@ func Test_parseDescriptionFile(t *testing.T) {
Location: file.NewLocation(tt.fixture),
ReadCloser: f,
}
got, _, err := parseDescriptionFile(nil, nil, input)
got, _, err := parseDescriptionFile(context.Background(), nil, nil, input)
assert.NoError(t, err)
for _, assertion := range tt.assertions {
assertion(t, got)

View File

@ -1,6 +1,7 @@
package redhat
import (
"context"
"fmt"
"strconv"
@ -14,7 +15,7 @@ import (
)
// parseRpmArchive parses a single RPM
func parseRpmArchive(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parseRpmArchive(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
rpm, err := rpmutils.ReadRpm(reader)
if err != nil {
return nil, nil, fmt.Errorf("RPM file found but unable to read: %s (%w)", reader.Location.RealPath, err)

View File

@ -1,6 +1,7 @@
package redhat
import (
"context"
"fmt"
"io"
"os"
@ -16,7 +17,7 @@ import (
)
// parseRpmDB parses a "Packages" RPM DB and returns the Packages listed within it.
func parseRpmDB(resolver file.Resolver, env *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parseRpmDB(_ context.Context, resolver file.Resolver, env *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
f, err := os.CreateTemp("", "rpmdb")
if err != nil {
return nil, nil, fmt.Errorf("failed to create temp rpmdb file: %w", err)

View File

@ -2,6 +2,7 @@ package redhat
import (
"bufio"
"context"
"errors"
"io"
"strings"
@ -14,7 +15,7 @@ import (
)
// parseRpmManifest parses an RPM manifest file, as used in Mariner distroless containers, and returns the Packages listed within it.
func parseRpmManifest(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parseRpmManifest(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
r := bufio.NewReader(reader)
allPkgs := make([]pkg.Package, 0)

View File

@ -2,6 +2,7 @@ package ruby
import (
"bufio"
"context"
"strings"
"github.com/scylladb/go-set/strset"
@ -17,7 +18,7 @@ var _ generic.Parser = parseGemFileLockEntries
var sectionsOfInterest = strset.New("GEM", "GIT", "PATH", "PLUGIN SOURCE")
// parseGemFileLockEntries is a parser function for Gemfile.lock contents, returning all Gems discovered.
func parseGemFileLockEntries(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parseGemFileLockEntries(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
var pkgs []pkg.Package
scanner := bufio.NewScanner(reader)

View File

@ -2,6 +2,7 @@ package ruby
import (
"bufio"
"context"
"encoding/json"
"fmt"
"regexp"
@ -65,7 +66,7 @@ func processList(s string) []string {
}
// parseGemSpecEntries parses the gemspec file and returns the packages and relationships found.
func parseGemSpecEntries(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parseGemSpecEntries(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
var pkgs []pkg.Package
var fields = make(map[string]interface{})
scanner := bufio.NewScanner(reader)

View File

@ -1,6 +1,7 @@
package rust
import (
"context"
"errors"
rustaudit "github.com/microsoft/go-rustaudit"
@ -14,7 +15,7 @@ import (
)
// parseAuditBinary identifies executables, then attempts to read Rust dependency information from them.
func parseAuditBinary(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parseAuditBinary(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
var pkgs []pkg.Package
unionReader, err := unionreader.GetUnionReader(reader.ReadCloser)

View File

@ -1,6 +1,7 @@
package rust
import (
"context"
"fmt"
"github.com/pelletier/go-toml"
@ -18,7 +19,7 @@ type cargoLockFile struct {
}
// parseCargoLock is a parser function for Cargo.lock contents, returning all rust cargo crates discovered.
func parseCargoLock(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parseCargoLock(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
tree, err := toml.LoadReader(reader)
if err != nil {
return nil, nil, fmt.Errorf("unable to load Cargo.lock for parsing: %w", err)

View File

@ -5,6 +5,7 @@ package sbom
import (
"bytes"
"context"
"fmt"
"io"
@ -36,7 +37,7 @@ func NewCataloger() pkg.Cataloger {
)
}
func parseSBOM(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parseSBOM(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
readSeeker, err := adaptToReadSeeker(reader)
if err != nil {
return nil, nil, fmt.Errorf("unable to read SBOM file %q: %w", reader.Location.RealPath, err)

View File

@ -1,6 +1,7 @@
package swift
import (
"context"
"encoding/json"
"errors"
"fmt"
@ -57,7 +58,7 @@ type packageState struct {
}
// parsePackageResolved is a parser for the contents of a Package.resolved file, which is generated by Xcode after it has resolved Swift Package Manager packages.
func parsePackageResolved(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parsePackageResolved(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
dec := json.NewDecoder(reader)
var packageResolvedData map[string]interface{}
for {

View File

@ -1,6 +1,7 @@
package swift
import (
"context"
"fmt"
"io"
"strings"
@ -25,7 +26,7 @@ type podfileLock struct {
}
// parsePodfileLock is a parser function for Podfile.lock contents, returning all cocoapods pods discovered.
func parsePodfileLock(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parsePodfileLock(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
bytes, err := io.ReadAll(reader)
if err != nil {
return nil, nil, fmt.Errorf("unable to read file: %w", err)