Mirror of https://github.com/anchore/syft.git (synced 2025-11-17 16:33:21 +01:00)
Normalize cataloger configuration patterns (#2365)
* normalize cataloger patterns

  Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

* remove central reference for maven configurable

  Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

---------

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

This commit is contained in:
parent 4d0da703bf
commit 1cfc4c7387
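The hunks below rename the per-ecosystem option types (for example GoCatalogerOpts and java.Config) to *CatalogerConfig types, each with a Default*Config() constructor and chainable With* setters, and move shared archive-search options into a new cataloging.ArchiveSearchConfig. As orientation, here is a caller-side sketch of the normalized pattern using only the constructors and setters visible in this diff; the literal values are illustrative, not syft defaults.

```go
package main

import (
	"fmt"

	"github.com/anchore/syft/syft/cataloging"
	golangCataloger "github.com/anchore/syft/syft/pkg/cataloger/golang"
	javaCataloger "github.com/anchore/syft/syft/pkg/cataloger/java"
)

func main() {
	// Go cataloger: start from defaults, then layer overrides with the With* setters.
	goCfg := golangCataloger.DefaultCatalogerConfig().
		WithSearchRemoteLicenses(true).
		WithProxy("https://proxy.golang.org,direct"). // comma-separated; "off" is normalized to direct-only
		WithNoProxy("corp.example.com")               // illustrative private host

	// Java archive cataloger: archive traversal now comes from the shared
	// cataloging.ArchiveSearchConfig rather than Java-specific fields.
	javaCfg := javaCataloger.DefaultArchiveCatalogerConfig().
		WithUseNetwork(false).
		WithArchiveTraversal(
			cataloging.ArchiveSearchConfig{
				IncludeIndexedArchives:   true,
				IncludeUnindexedArchives: false,
			},
			5, // max parent pom recursion depth (illustrative)
		)

	fmt.Printf("%+v\n%+v\n", goCfg, javaCfg)
}
```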
@@ -12,6 +12,7 @@ import (
 	"github.com/anchore/clio"
 	"github.com/anchore/fangs"
 	"github.com/anchore/syft/internal/log"
+	"github.com/anchore/syft/syft/cataloging"
 	"github.com/anchore/syft/syft/pkg/cataloger"
 	golangCataloger "github.com/anchore/syft/syft/pkg/cataloger/golang"
 	javaCataloger "github.com/anchore/syft/syft/pkg/cataloger/java"
@@ -126,19 +127,24 @@ func (cfg Catalog) ToCatalogerConfig() cataloger.Config {
 		},
 		Catalogers:  cfg.Catalogers,
 		Parallelism: cfg.Parallelism,
-		Golang: golangCataloger.NewGoCatalogerOpts().
+		Golang: golangCataloger.DefaultCatalogerConfig().
 			WithSearchLocalModCacheLicenses(cfg.Golang.SearchLocalModCacheLicenses).
 			WithLocalModCacheDir(cfg.Golang.LocalModCacheDir).
 			WithSearchRemoteLicenses(cfg.Golang.SearchRemoteLicenses).
 			WithProxy(cfg.Golang.Proxy).
 			WithNoProxy(cfg.Golang.NoProxy),
-		LinuxKernel: kernel.LinuxCatalogerConfig{
+		LinuxKernel: kernel.LinuxKernelCatalogerConfig{
 			CatalogModules: cfg.LinuxKernel.CatalogModules,
 		},
-		Java: javaCataloger.DefaultCatalogerOpts().
+		Java: javaCataloger.DefaultArchiveCatalogerConfig().
 			WithUseNetwork(cfg.Java.UseNetwork).
-			WithMavenURL(cfg.Java.MavenURL).
-			WithMaxParentRecursiveDepth(cfg.Java.MaxParentRecursiveDepth),
+			WithMavenBaseURL(cfg.Java.MavenURL).
+			WithArchiveTraversal(
+				cataloging.ArchiveSearchConfig{
+					IncludeIndexedArchives:   cfg.Package.SearchIndexedArchives,
+					IncludeUnindexedArchives: cfg.Package.SearchUnindexedArchives,
+				},
+				cfg.Java.MaxParentRecursiveDepth),
 		Python: pythonCataloger.CatalogerConfig{
 			GuessUnpinnedRequirements: cfg.Python.GuessUnpinnedRequirements,
 		},
syft/cataloging/config.go (new file, 6 lines added)
@@ -0,0 +1,6 @@
+package cataloging
+
+type ArchiveSearchConfig struct {
+	IncludeIndexedArchives   bool `yaml:"include-indexed-archives" json:"include-indexed-archives" mapstructure:"include-indexed-archives"`
+	IncludeUnindexedArchives bool `yaml:"include-unindexed-archives" json:"include-unindexed-archives" mapstructure:"include-unindexed-archives"`
+}
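Since the struct above carries yaml, json, and mapstructure tags, the same kebab-case keys appear wherever this config is marshaled or bound from application configuration. A minimal sketch of the resulting JSON key names (values are illustrative):

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/anchore/syft/syft/cataloging"
)

func main() {
	cfg := cataloging.ArchiveSearchConfig{
		IncludeIndexedArchives:   true,
		IncludeUnindexedArchives: false,
	}
	out, err := json.Marshal(cfg)
	if err != nil {
		panic(err)
	}
	// prints: {"include-indexed-archives":true,"include-unindexed-archives":false}
	fmt.Println(string(out))
}
```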
@@ -9,7 +9,7 @@ import (
 )
 
 // NewDBCataloger returns a new cataloger object initialized for Alpine package DB flat-file stores.
-func NewDBCataloger() *generic.Cataloger {
+func NewDBCataloger() pkg.Cataloger {
 	return generic.NewCataloger("apk-db-cataloger").
 		WithParserByGlobs(parseApkDB, pkg.ApkDBGlob)
 }
@@ -9,7 +9,7 @@ import (
 )
 
 // NewDBCataloger returns a new cataloger object initialized for arch linux pacman database flat-file stores.
-func NewDBCataloger() *generic.Cataloger {
+func NewDBCataloger() pkg.Cataloger {
 	return generic.NewCataloger("alpm-db-cataloger").
 		WithParserByGlobs(parseAlpmDB, pkg.AlpmDBGlob)
 }
@@ -12,7 +12,7 @@ import (
 
 const catalogerName = "binary-cataloger"
 
-func NewCataloger() *Cataloger {
+func NewCataloger() pkg.Cataloger {
 	return &Cataloger{}
 }
 
@@ -1,6 +1,7 @@
 package cataloger
 
 import (
+	"github.com/anchore/syft/syft/cataloging"
 	"github.com/anchore/syft/syft/pkg/cataloger/golang"
 	"github.com/anchore/syft/syft/pkg/cataloger/java"
 	"github.com/anchore/syft/syft/pkg/cataloger/kernel"
@@ -10,10 +11,10 @@ import (
 // TODO: these field naming vs helper function naming schemes are inconsistent.
 type Config struct {
 	Search      SearchConfig
-	Golang      golang.GoCatalogerOpts
-	LinuxKernel kernel.LinuxCatalogerConfig
+	Golang      golang.CatalogerConfig
+	LinuxKernel kernel.LinuxKernelCatalogerConfig
 	Python      python.CatalogerConfig
-	Java        java.CatalogerOpts
+	Java        java.ArchiveCatalogerConfig
 	Catalogers  []string
 	Parallelism int
 	ExcludeBinaryOverlapByOwnership bool
@@ -25,7 +26,7 @@ func DefaultConfig() Config {
 		Parallelism: 1,
 		LinuxKernel: kernel.DefaultLinuxCatalogerConfig(),
 		Python:      python.DefaultCatalogerConfig(),
-		Java:        java.DefaultCatalogerOpts(),
+		Java:        java.DefaultArchiveCatalogerConfig(),
 		ExcludeBinaryOverlapByOwnership: true,
 	}
 }
@@ -33,12 +34,14 @@ func DefaultConfig() Config {
 // JavaConfig merges relevant config values from Config to return a java.Config struct.
 // Values like IncludeUnindexedArchives and IncludeIndexedArchives are used across catalogers
 // and are not specific to Java requiring this merge.
-func (c Config) JavaConfig() java.Config {
-	return java.Config{
-		SearchUnindexedArchives: c.Search.IncludeUnindexedArchives,
-		SearchIndexedArchives:   c.Search.IncludeIndexedArchives,
+func (c Config) JavaConfig() java.ArchiveCatalogerConfig {
+	return java.ArchiveCatalogerConfig{
+		ArchiveSearchConfig: cataloging.ArchiveSearchConfig{
+			IncludeUnindexedArchives: c.Search.IncludeUnindexedArchives,
+			IncludeIndexedArchives:   c.Search.IncludeIndexedArchives,
+		},
 		UseNetwork:              c.Java.UseNetwork,
-		MavenBaseURL:            c.Java.MavenURL,
+		MavenBaseURL:            c.Java.MavenBaseURL,
 		MaxParentRecursiveDepth: c.Java.MaxParentRecursiveDepth,
 	}
 }
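After this change, JavaConfig() no longer copies the archive-search flags into Java-specific fields; it embeds the shared cataloging.ArchiveSearchConfig. A sketch of what a caller sees after the merge, assuming only the types and fields shown in this diff:

```go
package main

import (
	"fmt"

	"github.com/anchore/syft/syft/pkg/cataloger"
)

func main() {
	cfg := cataloger.DefaultConfig()
	cfg.Search.IncludeIndexedArchives = true
	cfg.Search.IncludeUnindexedArchives = false

	// The archive-search flags are carried by the embedded cataloging.ArchiveSearchConfig,
	// so they are accessible as promoted fields on the Java config.
	javaCfg := cfg.JavaConfig()
	fmt.Println(javaCfg.IncludeIndexedArchives, javaCfg.IncludeUnindexedArchives)
}
```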
@@ -4,18 +4,19 @@ Package cpp provides a concrete Cataloger implementations for the C/C++ language
 package cpp
 
 import (
+	"github.com/anchore/syft/syft/pkg"
 	"github.com/anchore/syft/syft/pkg/cataloger/generic"
 )
 
 // NewConanCataloger returns a new C/C++ conanfile.txt and conan.lock cataloger object.
-func NewConanCataloger() *generic.Cataloger {
+func NewConanCataloger() pkg.Cataloger {
 	return generic.NewCataloger("conan-cataloger").
 		WithParserByGlobs(parseConanfile, "**/conanfile.txt").
 		WithParserByGlobs(parseConanlock, "**/conan.lock")
 }
 
 // NewConanInfoCataloger returns a new C/C++ conaninfo.txt cataloger object.
-func NewConanInfoCataloger() *generic.Cataloger {
+func NewConanInfoCataloger() pkg.Cataloger {
 	return generic.NewCataloger("conan-info-cataloger").
 		WithParserByGlobs(parseConaninfo, "**/conaninfo.txt")
 }
@@ -4,11 +4,12 @@ Package dart provides a concrete Cataloger implementations for the Dart language
 package dart
 
 import (
+	"github.com/anchore/syft/syft/pkg"
 	"github.com/anchore/syft/syft/pkg/cataloger/generic"
 )
 
 // NewPubspecLockCataloger returns a new Dartlang cataloger object base on pubspec lock files.
-func NewPubspecLockCataloger() *generic.Cataloger {
+func NewPubspecLockCataloger() pkg.Cataloger {
 	return generic.NewCataloger("dart-pubspec-lock-cataloger").
 		WithParserByGlobs(parsePubspecLock, "**/pubspec.lock")
 }
@@ -4,11 +4,12 @@ Package debian provides a concrete Cataloger implementation relating to packages
 package debian
 
 import (
+	"github.com/anchore/syft/syft/pkg"
 	"github.com/anchore/syft/syft/pkg/cataloger/generic"
 )
 
 // NewDBCataloger returns a new Deb package cataloger capable of parsing DPKG status DB flat-file stores.
-func NewDBCataloger() *generic.Cataloger {
+func NewDBCataloger() pkg.Cataloger {
 	return generic.NewCataloger("dpkg-db-cataloger").
 		// note: these globs have been intentionally split up in order to improve search performance,
 		// please do NOT combine into: "**/var/lib/dpkg/{status,status.d/*}"
@@ -4,17 +4,18 @@ Package dotnet provides a concrete Cataloger implementation relating to packages
 package dotnet
 
 import (
+	"github.com/anchore/syft/syft/pkg"
 	"github.com/anchore/syft/syft/pkg/cataloger/generic"
 )
 
 // NewDotnetDepsCataloger returns a new Dotnet cataloger object base on deps json files.
-func NewDotnetDepsCataloger() *generic.Cataloger {
+func NewDotnetDepsCataloger() pkg.Cataloger {
 	return generic.NewCataloger("dotnet-deps-cataloger").
 		WithParserByGlobs(parseDotnetDeps, "**/*.deps.json")
 }
 
 // NewDotnetPortableExecutableCataloger returns a new Dotnet cataloger object base on portable executable files.
-func NewDotnetPortableExecutableCataloger() *generic.Cataloger {
+func NewDotnetPortableExecutableCataloger() pkg.Cataloger {
 	return generic.NewCataloger("dotnet-portable-executable-cataloger").
 		WithParserByGlobs(parseDotnetPortableExecutable, "**/*.dll", "**/*.exe")
 }
@@ -3,7 +3,7 @@ package dotnet
 import (
 	"testing"
 
-	"github.com/anchore/syft/syft/pkg/cataloger/generic"
+	"github.com/anchore/syft/syft/pkg"
 	"github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest"
 )
 
@@ -11,7 +11,7 @@ func TestCataloger_Globs(t *testing.T) {
 	tests := []struct {
 		name      string
 		fixture   string
-		cataloger *generic.Cataloger
+		cataloger pkg.Cataloger
 		expected  []string
 	}{
 		{
@@ -4,11 +4,12 @@ Package elixir provides a concrete Cataloger implementation relating to packages
 package elixir
 
 import (
+	"github.com/anchore/syft/syft/pkg"
 	"github.com/anchore/syft/syft/pkg/cataloger/generic"
 )
 
 // NewMixLockCataloger returns a cataloger object for Elixir mix.lock files.
-func NewMixLockCataloger() *generic.Cataloger {
+func NewMixLockCataloger() pkg.Cataloger {
 	return generic.NewCataloger("elixir-mix-lock-cataloger").
 		WithParserByGlobs(parseMixLock, "**/mix.lock")
 }
@@ -4,11 +4,12 @@ Package erlang provides a concrete Cataloger implementation relating to packages
 package erlang
 
 import (
+	"github.com/anchore/syft/syft/pkg"
 	"github.com/anchore/syft/syft/pkg/cataloger/generic"
 )
 
 // NewRebarLockCataloger returns a new cataloger instance for Erlang rebar.lock files.
-func NewRebarLockCataloger() *generic.Cataloger {
+func NewRebarLockCataloger() pkg.Cataloger {
 	return generic.NewCataloger("erlang-rebar-lock-cataloger").
 		WithParserByGlobs(parseRebarLock, "**/rebar.lock")
 }
@@ -4,11 +4,12 @@ Package gentoo provides a concrete Cataloger implementation related to packages
 package gentoo
 
 import (
+	"github.com/anchore/syft/syft/pkg"
 	"github.com/anchore/syft/syft/pkg/cataloger/generic"
 )
 
 // NewPortageCataloger returns a new cataloger object initialized for Gentoo Portage package manager files (a flat-file store).
-func NewPortageCataloger() *generic.Cataloger {
+func NewPortageCataloger() pkg.Cataloger {
 	return generic.NewCataloger("portage-cataloger").
 		WithParserByGlobs(parsePortageContents, "**/var/db/pkg/*/*/CONTENTS")
 }
@@ -3,17 +3,20 @@ Package githubactions provides a concrete Cataloger implementation for GitHub Ac
 */
 package githubactions
 
-import "github.com/anchore/syft/syft/pkg/cataloger/generic"
+import (
+	"github.com/anchore/syft/syft/pkg"
+	"github.com/anchore/syft/syft/pkg/cataloger/generic"
+)
 
 // NewActionUsageCataloger returns GitHub Actions used within workflows and composite actions.
-func NewActionUsageCataloger() *generic.Cataloger {
+func NewActionUsageCataloger() pkg.Cataloger {
 	return generic.NewCataloger("github-actions-usage-cataloger").
 		WithParserByGlobs(parseWorkflowForActionUsage, "**/.github/workflows/*.yaml", "**/.github/workflows/*.yml").
 		WithParserByGlobs(parseCompositeActionForActionUsage, "**/.github/actions/*/action.yml", "**/.github/actions/*/action.yaml")
 }
 
 // NewWorkflowUsageCataloger returns shared workflows used within workflows.
-func NewWorkflowUsageCataloger() *generic.Cataloger {
+func NewWorkflowUsageCataloger() pkg.Cataloger {
 	return generic.NewCataloger("github-action-workflow-usage-cataloger").
 		WithParserByGlobs(parseWorkflowForWorkflowUsage, "**/.github/workflows/*.yaml", "**/.github/workflows/*.yml")
 }
@@ -3,7 +3,7 @@ package githubactions
 import (
 	"testing"
 
-	"github.com/anchore/syft/syft/pkg/cataloger/generic"
+	"github.com/anchore/syft/syft/pkg"
 	"github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest"
 )
 
@@ -11,7 +11,7 @@ func TestCataloger_Globs(t *testing.T) {
 	tests := []struct {
 		name      string
 		fixture   string
-		cataloger *generic.Cataloger
+		cataloger pkg.Cataloger
 		expected  []string
 	}{
 		{
@@ -11,7 +11,6 @@ import (
 	"github.com/anchore/syft/internal"
 	"github.com/anchore/syft/syft/artifact"
 	"github.com/anchore/syft/syft/cpe"
-	"github.com/anchore/syft/syft/event/monitor"
 	"github.com/anchore/syft/syft/file"
 	"github.com/anchore/syft/syft/pkg"
 	"github.com/anchore/syft/syft/pkg/cataloger/generic"
@@ -20,31 +19,28 @@ import (
 var versionCandidateGroups = regexp.MustCompile(`(?P<version>\d+(\.\d+)?(\.\d+)?)(?P<candidate>\w*)`)
 
 // NewGoModuleFileCataloger returns a new cataloger object that searches within go.mod files.
-func NewGoModuleFileCataloger(opts GoCatalogerOpts) pkg.Cataloger {
+func NewGoModuleFileCataloger(opts CatalogerConfig) pkg.Cataloger {
 	c := goModCataloger{
 		licenses: newGoLicenses(opts),
 	}
 	return &progressingCataloger{
-		progress: c.licenses.progress,
 		cataloger: generic.NewCataloger("go-module-file-cataloger").
 			WithParserByGlobs(c.parseGoModFile, "**/go.mod"),
 	}
 }
 
 // NewGoModuleBinaryCataloger returns a new cataloger object that searches within binaries built by the go compiler.
-func NewGoModuleBinaryCataloger(opts GoCatalogerOpts) pkg.Cataloger {
+func NewGoModuleBinaryCataloger(opts CatalogerConfig) pkg.Cataloger {
 	c := goBinaryCataloger{
 		licenses: newGoLicenses(opts),
 	}
 	return &progressingCataloger{
-		progress: c.licenses.progress,
 		cataloger: generic.NewCataloger("go-module-binary-cataloger").
 			WithParserByMimeTypes(c.parseGoBinary, internal.ExecutableMIMETypeSet.List()...),
 	}
 }
 
 type progressingCataloger struct {
-	progress  *monitor.CatalogerTask
 	cataloger *generic.Cataloger
 }
 
@@ -53,7 +49,6 @@ func (p *progressingCataloger) Name() string {
 }
 
 func (p *progressingCataloger) Catalog(resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) {
-	defer p.progress.SetCompleted()
 	pkgs, relationships, err := p.cataloger.Catalog(resolver)
 	goCompilerPkgs := []pkg.Package{}
 	totalLocations := file.NewLocationSet()
@@ -76,6 +71,7 @@ func (p *progressingCataloger) Catalog(resolver file.Resolver) ([]pkg.Package, [
 	pkgs = append(pkgs, goCompilerPkgs...)
 	return pkgs, relationships, err
 }
 
 func newGoStdLib(version string, location file.LocationSet) *pkg.Package {
 	stdlibCpe, err := generateStdlibCpe(version)
 	if err != nil {
@@ -30,7 +30,7 @@ func Test_Mod_Cataloger_Globs(t *testing.T) {
 				FromDirectory(t, test.fixture).
 				ExpectsResolverContentQueries(test.expected).
 				IgnoreUnfulfilledPathResponses("src/go.sum").
-				TestCataloger(t, NewGoModuleFileCataloger(GoCatalogerOpts{}))
+				TestCataloger(t, NewGoModuleFileCataloger(CatalogerConfig{}))
 		})
 	}
 }
@@ -55,7 +55,7 @@ func Test_Binary_Cataloger_Globs(t *testing.T) {
 			pkgtest.NewCatalogTester().
 				FromDirectory(t, test.fixture).
 				ExpectsResolverContentQueries(test.expected).
-				TestCataloger(t, NewGoModuleBinaryCataloger(GoCatalogerOpts{}))
+				TestCataloger(t, NewGoModuleBinaryCataloger(CatalogerConfig{}))
 		})
 	}
 }
syft/pkg/cataloger/golang/config.go (new file, 114 lines added)
@@ -0,0 +1,114 @@
+package golang
+
+import (
+	"os"
+	"path"
+	"strings"
+
+	"github.com/mitchellh/go-homedir"
+
+	"github.com/anchore/syft/internal/log"
+)
+
+const (
+	defaultProxies  = "https://proxy.golang.org,direct"
+	directProxyOnly = "direct"
+)
+
+var (
+	directProxiesOnly = []string{directProxyOnly}
+)
+
+type CatalogerConfig struct {
+	SearchLocalModCacheLicenses bool     `yaml:"search-local-mod-cache-licenses" json:"search-local-mod-cache-licenses" mapstructure:"search-local-mod-cache-licenses"`
+	LocalModCacheDir            string   `yaml:"local-mod-cache-dir" json:"local-mod-cache-dir" mapstructure:"local-mod-cache-dir"`
+	SearchRemoteLicenses        bool     `yaml:"search-remote-licenses" json:"search-remote-licenses" mapstructure:"search-remote-licenses"`
+	Proxies                     []string `yaml:"proxies,omitempty" json:"proxies,omitempty" mapstructure:"proxies"`
+	NoProxy                     []string `yaml:"no-proxy,omitempty" json:"no-proxy,omitempty" mapstructure:"no-proxy"`
+}
+
+// DefaultCatalogerConfig create a CatalogerConfig with default options, which includes:
+// - setting the default remote proxy if none is provided
+// - setting the default no proxy if none is provided
+// - setting the default local module cache dir if none is provided
+func DefaultCatalogerConfig() CatalogerConfig {
+	g := CatalogerConfig{}
+
+	// first process the proxy settings
+	if len(g.Proxies) == 0 {
+		goProxy := os.Getenv("GOPROXY")
+		if goProxy == "" {
+			goProxy = defaultProxies
+		}
+		g = g.WithProxy(goProxy)
+	}
+
+	// next process the gonoproxy settings
+	if len(g.NoProxy) == 0 {
+		goPrivate := os.Getenv("GOPRIVATE")
+		goNoProxy := os.Getenv("GONOPROXY")
+		// we only use the env var if it was not set explicitly
+		if goPrivate != "" {
+			g.NoProxy = append(g.NoProxy, strings.Split(goPrivate, ",")...)
+		}
+
+		// next process the goprivate settings; we always add those
+		if goNoProxy != "" {
+			g.NoProxy = append(g.NoProxy, strings.Split(goNoProxy, ",")...)
+		}
+	}
+
+	if g.LocalModCacheDir == "" {
+		goPath := os.Getenv("GOPATH")
+
+		if goPath == "" {
+			homeDir, err := homedir.Dir()
+			if err != nil {
+				log.Debug("unable to determine user home dir: %v", err)
+			} else {
+				goPath = path.Join(homeDir, "go")
+			}
+		}
+		if goPath != "" {
+			g.LocalModCacheDir = path.Join(goPath, "pkg", "mod")
+		}
+	}
+	return g
+}
+
+func (g CatalogerConfig) WithSearchLocalModCacheLicenses(input bool) CatalogerConfig {
+	g.SearchLocalModCacheLicenses = input
+	return g
+}
+
+func (g CatalogerConfig) WithLocalModCacheDir(input string) CatalogerConfig {
+	if input == "" {
+		return g
+	}
+	g.LocalModCacheDir = input
+	return g
+}
+
+func (g CatalogerConfig) WithSearchRemoteLicenses(input bool) CatalogerConfig {
+	g.SearchRemoteLicenses = input
+	return g
+}
+
+func (g CatalogerConfig) WithProxy(input string) CatalogerConfig {
+	if input == "" {
+		return g
+	}
+	if input == "off" {
+		input = directProxyOnly
+	}
+	g.Proxies = strings.Split(input, ",")
+	return g
+}
+
+func (g CatalogerConfig) WithNoProxy(input string) CatalogerConfig {
+	if input == "" {
+		return g
+	}
+	g.NoProxy = strings.Split(input, ",")
+	return g
+}
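DefaultCatalogerConfig() above derives its defaults from the standard Go environment variables (GOPROXY, GOPRIVATE, GONOPROXY, GOPATH). A quick sketch of how those defaults resolve; the environment values here are illustrative and would normally be inherited from the host:

```go
package main

import (
	"fmt"
	"os"

	"github.com/anchore/syft/syft/pkg/cataloger/golang"
)

func main() {
	// Illustrative environment values.
	os.Setenv("GOPROXY", "https://proxy.example.org,direct")
	os.Setenv("GOPRIVATE", "corp.example.com")

	cfg := golang.DefaultCatalogerConfig()
	fmt.Println(cfg.Proxies)          // [https://proxy.example.org direct]
	fmt.Println(cfg.NoProxy)          // [corp.example.com]
	fmt.Println(cfg.LocalModCacheDir) // $GOPATH/pkg/mod, or ~/go/pkg/mod when GOPATH is unset

	// "off" is normalized to the direct-only proxy list.
	cfg = cfg.WithProxy("off")
	fmt.Println(cfg.Proxies) // [direct]
}
```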
@@ -34,7 +34,7 @@ func Test_Options(t *testing.T) {
 		name     string
 		env      map[string]string
 		opts     opts
-		expected GoCatalogerOpts
+		expected CatalogerConfig
 	}{
 		{
 			name: "set via env defaults",
@@ -45,12 +45,12 @@ func Test_Options(t *testing.T) {
 				"GONOPROXY": "no.proxy",
 			},
 			opts: opts{},
-			expected: GoCatalogerOpts{
-				searchLocalModCacheLicenses: false,
-				localModCacheDir:            "/go/pkg/mod",
-				searchRemoteLicenses:        false,
-				proxies:                     []string{"https://my.proxy"},
-				noProxy:                     []string{"my.private", "no.proxy"},
+			expected: CatalogerConfig{
+				SearchLocalModCacheLicenses: false,
+				LocalModCacheDir:            "/go/pkg/mod",
+				SearchRemoteLicenses:        false,
+				Proxies:                     []string{"https://my.proxy"},
+				NoProxy:                     []string{"my.private", "no.proxy"},
 			},
 		},
 		{
@@ -68,12 +68,12 @@ func Test_Options(t *testing.T) {
 				proxy:   "https://alt.proxy,direct",
 				noProxy: "alt.no.proxy",
 			},
-			expected: GoCatalogerOpts{
-				searchLocalModCacheLicenses: true,
-				localModCacheDir:            "/go-cache",
-				searchRemoteLicenses:        true,
-				proxies:                     []string{"https://alt.proxy", "direct"},
-				noProxy:                     []string{"alt.no.proxy"},
+			expected: CatalogerConfig{
+				SearchLocalModCacheLicenses: true,
+				LocalModCacheDir:            "/go-cache",
+				SearchRemoteLicenses:        true,
+				Proxies:                     []string{"https://alt.proxy", "direct"},
+				NoProxy:                     []string{"alt.no.proxy"},
 			},
 		},
 	}
@@ -86,7 +86,7 @@ func Test_Options(t *testing.T) {
 			for k, v := range test.env {
 				t.Setenv(k, v)
 			}
-			got := NewGoCatalogerOpts().
+			got := DefaultCatalogerConfig().
 				WithSearchLocalModCacheLicenses(test.opts.local).
 				WithLocalModCacheDir(test.opts.cacheDir).
 				WithSearchRemoteLicenses(test.opts.remote).
@@ -29,16 +29,16 @@ import (
 )
 
 type goLicenses struct {
-	opts                  GoCatalogerOpts
+	opts                  CatalogerConfig
 	localModCacheResolver file.WritableResolver
 	progress              *monitor.CatalogerTask
 	lowerLicenseFileNames *strset.Set
 }
 
-func newGoLicenses(opts GoCatalogerOpts) goLicenses {
+func newGoLicenses(opts CatalogerConfig) goLicenses {
 	return goLicenses{
 		opts:                  opts,
-		localModCacheResolver: modCacheResolver(opts.localModCacheDir),
+		localModCacheResolver: modCacheResolver(opts.LocalModCacheDir),
 		progress: &monitor.CatalogerTask{
 			SubStatus:          true,
 			RemoveOnCompletion: true,
@@ -107,7 +107,7 @@ func (c *goLicenses) getLicenses(resolver file.Resolver, moduleName, moduleVersi
 }
 
 func (c *goLicenses) getLicensesFromLocal(moduleName, moduleVersion string) ([]pkg.License, error) {
-	if !c.opts.searchLocalModCacheLicenses {
+	if !c.opts.SearchLocalModCacheLicenses {
 		return nil, nil
 	}
 
@@ -117,11 +117,11 @@ func (c *goLicenses) getLicensesFromLocal(moduleName, moduleVersion string) ([]p
 }
 
 func (c *goLicenses) getLicensesFromRemote(moduleName, moduleVersion string) ([]pkg.License, error) {
-	if !c.opts.searchRemoteLicenses {
+	if !c.opts.SearchRemoteLicenses {
 		return nil, nil
 	}
 
-	proxies := remotesForModule(c.opts.proxies, c.opts.noProxy, moduleName)
+	proxies := remotesForModule(c.opts.Proxies, c.opts.NoProxy, moduleName)
 
 	fsys, err := getModule(c.progress, proxies, moduleName, moduleVersion)
 	if err != nil {
@@ -231,15 +231,18 @@ func getModule(progress *monitor.CatalogerTask, proxies []string, moduleName, mo
 func getModuleProxy(progress *monitor.CatalogerTask, proxy string, moduleName string, moduleVersion string) (out fs.FS, _ error) {
 	u := fmt.Sprintf("%s/%s/@v/%s.zip", proxy, moduleName, moduleVersion)
 	progress.SetValue(u)
 
 	// get the module zip
 	resp, err := http.Get(u) //nolint:gosec
 	if err != nil {
 		return nil, err
 	}
 	defer func() { _ = resp.Body.Close() }()
 
 	if resp.StatusCode != http.StatusOK {
 		u = fmt.Sprintf("%s/%s/@v/%s.zip", proxy, strings.ToLower(moduleName), moduleVersion)
 		progress.SetValue(u)
 
 		// try lowercasing it; some packages have mixed casing that really messes up the proxy
 		resp, err = http.Get(u) //nolint:gosec
 		if err != nil {
@@ -250,19 +253,23 @@ func getModuleProxy(progress *monitor.CatalogerTask, proxy string, moduleName st
 			return nil, fmt.Errorf("failed to get module zip: %s", resp.Status)
 		}
 	}
 
 	// read the zip
 	b, err := io.ReadAll(resp.Body)
 	if err != nil {
 		return nil, err
 	}
 
 	out, err = zip.NewReader(bytes.NewReader(b), resp.ContentLength)
 	versionPath := findVersionPath(out, ".")
 	out = getSubFS(out, versionPath)
 
 	return out, err
 }
 
 func findVersionPath(f fs.FS, dir string) string {
 	list, _ := fs.ReadDir(f, dir)
 
 	for _, entry := range list {
 		name := entry.Name()
 		if strings.Contains(name, "@") {
@@ -273,6 +280,7 @@ func findVersionPath(f fs.FS, dir string) string {
 			return path.Join(name, found)
 		}
 	}
 
 	return ""
 }
 
@@ -282,7 +290,9 @@ func getModuleRepository(progress *monitor.CatalogerTask, moduleName string, mod
 	if len(parts) > 2 {
 		repoName = fmt.Sprintf("%s/%s/%s", parts[0], parts[1], parts[2])
 	}
 
 	progress.SetValue(fmt.Sprintf("git: %s", repoName))
 
 	f := memfs.New()
 	buf := &bytes.Buffer{}
 	_, err := git.Clone(memory.NewStorage(), f, &git.CloneOptions{
@@ -292,6 +302,7 @@ func getModuleRepository(progress *monitor.CatalogerTask, moduleName string, mod
 		Depth:    1,
 		Progress: buf,
 	})
 
 	if err != nil {
 		return nil, fmt.Errorf("%w -- %s", err, buf.String())
 	}
@@ -67,9 +67,9 @@ func Test_LocalLicenseSearch(t *testing.T) {
 	for _, test := range tests {
 		t.Run(test.name, func(t *testing.T) {
 			l := newGoLicenses(
-				GoCatalogerOpts{
-					searchLocalModCacheLicenses: true,
-					localModCacheDir:            path.Join(wd, "test-fixtures", "licenses", "pkg", "mod"),
+				CatalogerConfig{
+					SearchLocalModCacheLicenses: true,
+					LocalModCacheDir:            path.Join(wd, "test-fixtures", "licenses", "pkg", "mod"),
 				},
 			)
 			licenses, err := l.getLicenses(fileresolver.Empty{}, test.name, test.version)
@@ -154,10 +154,10 @@ func Test_RemoteProxyLicenseSearch(t *testing.T) {
 
 	for _, test := range tests {
 		t.Run(test.name, func(t *testing.T) {
-			l := newGoLicenses(GoCatalogerOpts{
-				searchRemoteLicenses: true,
-				proxies:              []string{server.URL},
-				localModCacheDir:     modDir,
+			l := newGoLicenses(CatalogerConfig{
+				SearchRemoteLicenses: true,
+				Proxies:              []string{server.URL},
+				LocalModCacheDir:     modDir,
 			})
 
 			licenses, err := l.getLicenses(fileresolver.Empty{}, test.name, test.version)
@@ -1,114 +0,0 @@
-package golang
-
-import (
-	"os"
-	"path"
-	"strings"
-
-	"github.com/mitchellh/go-homedir"
-
-	"github.com/anchore/syft/internal/log"
-)
-
-const (
-	defaultProxies  = "https://proxy.golang.org,direct"
-	directProxyOnly = "direct"
-)
-
-var (
-	directProxiesOnly = []string{directProxyOnly}
-)
-
-type GoCatalogerOpts struct {
-	searchLocalModCacheLicenses bool
-	localModCacheDir            string
-	searchRemoteLicenses        bool
-	proxies                     []string
-	noProxy                     []string
-}
-
-func (g GoCatalogerOpts) WithSearchLocalModCacheLicenses(input bool) GoCatalogerOpts {
-	g.searchLocalModCacheLicenses = input
-	return g
-}
-
-func (g GoCatalogerOpts) WithLocalModCacheDir(input string) GoCatalogerOpts {
-	if input == "" {
-		return g
-	}
-	g.localModCacheDir = input
-	return g
-}
-
-func (g GoCatalogerOpts) WithSearchRemoteLicenses(input bool) GoCatalogerOpts {
-	g.searchRemoteLicenses = input
-	return g
-}
-
-func (g GoCatalogerOpts) WithProxy(input string) GoCatalogerOpts {
-	if input == "" {
-		return g
-	}
-	if input == "off" {
-		input = directProxyOnly
-	}
-	g.proxies = strings.Split(input, ",")
-	return g
-}
-
-func (g GoCatalogerOpts) WithNoProxy(input string) GoCatalogerOpts {
-	if input == "" {
-		return g
-	}
-	g.noProxy = strings.Split(input, ",")
-	return g
-}
-
-// NewGoCatalogerOpts create a GoCatalogerOpts with default options, which includes:
-// - setting the default remote proxy if none is provided
-// - setting the default no proxy if none is provided
-// - setting the default local module cache dir if none is provided
-func NewGoCatalogerOpts() GoCatalogerOpts {
-	g := GoCatalogerOpts{}
-
-	// first process the proxy settings
-	if len(g.proxies) == 0 {
-		goProxy := os.Getenv("GOPROXY")
-		if goProxy == "" {
-			goProxy = defaultProxies
-		}
-		g = g.WithProxy(goProxy)
-	}
-
-	// next process the gonoproxy settings
-	if len(g.noProxy) == 0 {
-		goPrivate := os.Getenv("GOPRIVATE")
-		goNoProxy := os.Getenv("GONOPROXY")
-		// we only use the env var if it was not set explicitly
-		if goPrivate != "" {
-			g.noProxy = append(g.noProxy, strings.Split(goPrivate, ",")...)
-		}
-
-		// next process the goprivate settings; we always add those
-		if goNoProxy != "" {
-			g.noProxy = append(g.noProxy, strings.Split(goNoProxy, ",")...)
-		}
-	}
-
-	if g.localModCacheDir == "" {
-		goPath := os.Getenv("GOPATH")
-
-		if goPath == "" {
-			homeDir, err := homedir.Dir()
-			if err != nil {
-				log.Debug("unable to determine user home dir: %v", err)
-			} else {
-				goPath = path.Join(homeDir, "go")
-			}
-		}
-		if goPath != "" {
-			g.localModCacheDir = path.Join(goPath, "pkg", "mod")
-		}
-	}
-	return g
-}
@@ -142,7 +142,7 @@ func Test_GoSumHashes(t *testing.T) {
 			pkgtest.NewCatalogTester().
 				FromDirectory(t, test.fixture).
 				Expects(test.expected, nil).
-				TestCataloger(t, NewGoModuleFileCataloger(GoCatalogerOpts{}))
+				TestCataloger(t, NewGoModuleFileCataloger(CatalogerConfig{}))
 		})
 	}
 }
@@ -4,6 +4,7 @@ Package haskell provides a concrete Cataloger implementation relating to package
 package haskell
 
 import (
+	"github.com/anchore/syft/syft/pkg"
 	"github.com/anchore/syft/syft/pkg/cataloger/generic"
 )
 
@@ -12,7 +13,7 @@ import (
 // This hints at splitting these into multiple catalogers, but for now we'll keep them together.
 
 // NewHackageCataloger returns a new Haskell cataloger object.
-func NewHackageCataloger() *generic.Cataloger {
+func NewHackageCataloger() pkg.Cataloger {
 	return generic.NewCataloger("haskell-cataloger").
 		WithParserByGlobs(parseStackYaml, "**/stack.yaml").
 		WithParserByGlobs(parseStackLock, "**/stack.yaml.lock").
@@ -53,14 +53,14 @@ type archiveParser struct {
 	contentPath  string
 	fileInfo     archiveFilename
 	detectNested bool
-	cfg          Config
+	cfg          ArchiveCatalogerConfig
 }
 
 type genericArchiveParserAdapter struct {
-	cfg Config
+	cfg ArchiveCatalogerConfig
 }
 
-func newGenericArchiveParserAdapter(cfg Config) genericArchiveParserAdapter {
+func newGenericArchiveParserAdapter(cfg ArchiveCatalogerConfig) genericArchiveParserAdapter {
 	return genericArchiveParserAdapter{cfg: cfg}
 }
 
@@ -85,7 +85,7 @@ func uniquePkgKey(groupID string, p *pkg.Package) string {
 
 // newJavaArchiveParser returns a new java archive parser object for the given archive. Can be configured to discover
 // and parse nested archives or ignore them.
-func newJavaArchiveParser(reader file.LocationReadCloser, detectNested bool, cfg Config) (*archiveParser, func(), error) {
+func newJavaArchiveParser(reader file.LocationReadCloser, detectNested bool, cfg ArchiveCatalogerConfig) (*archiveParser, func(), error) {
 	// fetch the last element of the virtual path
 	virtualElements := strings.Split(reader.Path(), ":")
 	currentFilepath := virtualElements[len(virtualElements)-1]
@@ -338,7 +338,7 @@ func artifactIDMatchesFilename(artifactID, fileName string) bool {
 	return strings.HasPrefix(artifactID, fileName) || strings.HasSuffix(fileName, artifactID)
 }
 
-func findPomLicenses(pomProjectObject *parsedPomProject, cfg Config) {
+func findPomLicenses(pomProjectObject *parsedPomProject, cfg ArchiveCatalogerConfig) {
 	// If we don't have any licenses until now, and if we have a parent Pom, then we'll check the parent pom in maven central for licenses.
 	if pomProjectObject != nil && pomProjectObject.Parent != nil && len(pomProjectObject.Licenses) == 0 {
 		parentLicenses, err := recursivelyFindLicensesFromParentPom(
@@ -373,11 +373,11 @@ func formatMavenPomURL(groupID, artifactID, version, mavenBaseURL string) (reque
 	return requestURL, err
 }
 
-func recursivelyFindLicensesFromParentPom(groupID, artifactID, version string, cfg Config) ([]string, error) {
+func recursivelyFindLicensesFromParentPom(groupID, artifactID, version string, cfg ArchiveCatalogerConfig) ([]string, error) {
 	var licenses []string
 	// As there can be nested parent poms, we'll recursively check for licenses until we reach the max depth
 	for i := 0; i < cfg.MaxParentRecursiveDepth; i++ {
-		parentPom, err := getPomFromMavenCentral(groupID, artifactID, version, cfg.MavenBaseURL)
+		parentPom, err := getPomFromMavenRepo(groupID, artifactID, version, cfg.MavenBaseURL)
 		if err != nil {
 			return nil, err
 		}
@@ -395,7 +395,7 @@ func recursivelyFindLicensesFromParentPom(groupID, artifactID, version string, c
 	return licenses, nil
 }
 
-func getPomFromMavenCentral(groupID, artifactID, version, mavenBaseURL string) (*gopom.Project, error) {
+func getPomFromMavenRepo(groupID, artifactID, version, mavenBaseURL string) (*gopom.Project, error) {
 	requestURL, err := formatMavenPomURL(groupID, artifactID, version, mavenBaseURL)
 	if err != nil {
 		return nil, err
@@ -542,7 +542,7 @@ func (j *archiveParser) discoverPkgsFromNestedArchives(parentPkg *pkg.Package) (
 
 // discoverPkgsFromZip finds Java archives within Java archives, returning all listed Java packages found and
 // associating each discovered package to the given parent package.
-func discoverPkgsFromZip(location file.Location, archivePath, contentPath string, fileManifest intFile.ZipFileManifest, parentPkg *pkg.Package, cfg Config) ([]pkg.Package, []artifact.Relationship, error) {
+func discoverPkgsFromZip(location file.Location, archivePath, contentPath string, fileManifest intFile.ZipFileManifest, parentPkg *pkg.Package, cfg ArchiveCatalogerConfig) ([]pkg.Package, []artifact.Relationship, error) {
 	// search and parse pom.properties files & fetch the contents
 	openers, err := intFile.ExtractFromZipToUniqueTempFile(archivePath, contentPath, fileManifest.GlobMatch(false, archiveFormatGlobs...)...)
 	if err != nil {
@@ -553,7 +553,7 @@ func discoverPkgsFromZip(location file.Location, archivePath, contentPath string
 }
 
 // discoverPkgsFromOpeners finds Java archives within the given files and associates them with the given parent package.
-func discoverPkgsFromOpeners(location file.Location, openers map[string]intFile.Opener, parentPkg *pkg.Package, cfg Config) ([]pkg.Package, []artifact.Relationship, error) {
+func discoverPkgsFromOpeners(location file.Location, openers map[string]intFile.Opener, parentPkg *pkg.Package, cfg ArchiveCatalogerConfig) ([]pkg.Package, []artifact.Relationship, error) {
 	var pkgs []pkg.Package
 	var relationships []artifact.Relationship
 
@@ -582,7 +582,7 @@ func discoverPkgsFromOpeners(location file.Location, openers map[string]intFile.
 }
 
 // discoverPkgsFromOpener finds Java archives within the given file.
-func discoverPkgsFromOpener(location file.Location, pathWithinArchive string, archiveOpener intFile.Opener, cfg Config) ([]pkg.Package, []artifact.Relationship, error) {
+func discoverPkgsFromOpener(location file.Location, pathWithinArchive string, archiveOpener intFile.Opener, cfg ArchiveCatalogerConfig) ([]pkg.Package, []artifact.Relationship, error) {
 	archiveReadCloser, err := archiveOpener.Open()
 	if err != nil {
 		return nil, nil, fmt.Errorf("unable to open archived file from tempdir: %w", err)
@@ -669,7 +669,7 @@ func pomProjectByParentPath(archivePath string, location file.Location, extractP
 
 // newPackageFromMavenData processes a single Maven POM properties for a given parent package, returning all listed Java packages found and
 // associating each discovered package to the given parent package. Note the pom.xml is optional, the pom.properties is not.
-func newPackageFromMavenData(pomProperties pkg.JavaPomProperties, parsedPomProject *parsedPomProject, parentPkg *pkg.Package, location file.Location, cfg Config) *pkg.Package {
+func newPackageFromMavenData(pomProperties pkg.JavaPomProperties, parsedPomProject *parsedPomProject, parentPkg *pkg.Package, location file.Location, cfg ArchiveCatalogerConfig) *pkg.Package {
 	// keep the artifact name within the virtual path if this package does not match the parent package
 	vPathSuffix := ""
 	groupID := ""
@ -78,7 +78,7 @@ func TestSearchMavenForLicenses(t *testing.T) {
|
|||||||
name string
|
name string
|
||||||
fixture string
|
fixture string
|
||||||
detectNested bool
|
detectNested bool
|
||||||
config Config
|
config ArchiveCatalogerConfig
|
||||||
requestPath string
|
requestPath string
|
||||||
requestHandlers []handlerPath
|
requestHandlers []handlerPath
|
||||||
expectedLicenses []pkg.License
|
expectedLicenses []pkg.License
|
||||||
@ -87,7 +87,7 @@ func TestSearchMavenForLicenses(t *testing.T) {
|
|||||||
name: "searchMavenForLicenses returns the expected licenses when search is set to true",
|
name: "searchMavenForLicenses returns the expected licenses when search is set to true",
|
||||||
fixture: "opensaml-core-3.4.6",
|
fixture: "opensaml-core-3.4.6",
|
||||||
detectNested: false,
|
detectNested: false,
|
||||||
config: Config{
|
config: ArchiveCatalogerConfig{
|
||||||
UseNetwork: true,
|
UseNetwork: true,
|
||||||
MavenBaseURL: url,
|
MavenBaseURL: url,
|
||||||
MaxParentRecursiveDepth: 2,
|
MaxParentRecursiveDepth: 2,
|
||||||
@ -161,7 +161,7 @@ func TestFormatMavenURL(t *testing.T) {
|
|||||||
|
|
||||||
for _, tc := range tests {
|
for _, tc := range tests {
|
||||||
t.Run(tc.name, func(t *testing.T) {
|
t.Run(tc.name, func(t *testing.T) {
|
||||||
requestURL, err := formatMavenPomURL(tc.groupID, tc.artifactID, tc.version, MavenBaseURL)
|
requestURL, err := formatMavenPomURL(tc.groupID, tc.artifactID, tc.version, mavenBaseURL)
|
||||||
assert.NoError(t, err, "expected no err; got %w", err)
|
assert.NoError(t, err, "expected no err; got %w", err)
|
||||||
assert.Equal(t, tc.expected, requestURL)
|
assert.Equal(t, tc.expected, requestURL)
|
||||||
})
|
})
|
||||||
@@ -401,7 +401,7 @@ func TestParseJar(t *testing.T) {
 			parser, cleanupFn, err := newJavaArchiveParser(file.LocationReadCloser{
 				Location:   file.NewLocation(fixture.Name()),
 				ReadCloser: fixture,
-			}, false, Config{UseNetwork: false})
+			}, false, ArchiveCatalogerConfig{UseNetwork: false})
 			defer cleanupFn()
 			require.NoError(t, err)

@@ -667,7 +667,7 @@ func TestParseNestedJar(t *testing.T) {

 			fixture, err := os.Open(test.fixture)
 			require.NoError(t, err)
-			gap := newGenericArchiveParserAdapter(Config{})
+			gap := newGenericArchiveParserAdapter(ArchiveCatalogerConfig{})

 			actual, _, err := gap.parseJavaArchive(nil, nil, file.LocationReadCloser{
 				Location: file.NewLocation(fixture.Name()),
@@ -1089,7 +1089,7 @@ func Test_newPackageFromMavenData(t *testing.T) {
 			}
 			test.expectedParent.Locations = locations

-			actualPackage := newPackageFromMavenData(test.props, test.project, test.parent, file.NewLocation(virtualPath), Config{})
+			actualPackage := newPackageFromMavenData(test.props, test.project, test.parent, file.NewLocation(virtualPath), DefaultArchiveCatalogerConfig())
 			if test.expectedPackage == nil {
 				require.Nil(t, actualPackage)
 			} else {
@@ -1309,7 +1309,7 @@ func Test_parseJavaArchive_regressions(t *testing.T) {
 	}
 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {
-			gap := newGenericArchiveParserAdapter(Config{})
+			gap := newGenericArchiveParserAdapter(ArchiveCatalogerConfig{})
 			if tt.assignParent {
 				assignParent(&tt.expectedPkgs[0], tt.expectedPkgs[1:]...)
 			}
@@ -4,23 +4,24 @@ Package java provides a concrete Cataloger implementation for packages relating
 package java

 import (
+	"github.com/anchore/syft/syft/pkg"
 	"github.com/anchore/syft/syft/pkg/cataloger/generic"
 )

 // NewArchiveCataloger returns a new Java archive cataloger object for detecting packages with archives (jar, war, ear, par, sar, jpi, hpi, and native-image formats)
-func NewArchiveCataloger(cfg Config) *generic.Cataloger {
+func NewArchiveCataloger(cfg ArchiveCatalogerConfig) *generic.Cataloger {
 	gap := newGenericArchiveParserAdapter(cfg)

 	c := generic.NewCataloger("java-archive-cataloger").
 		WithParserByGlobs(gap.parseJavaArchive, archiveFormatGlobs...)

-	if cfg.SearchIndexedArchives {
+	if cfg.IncludeIndexedArchives {
 		// java archives wrapped within zip files
 		gzp := newGenericZipWrappedJavaArchiveParser(cfg)
 		c.WithParserByGlobs(gzp.parseZipWrappedJavaArchive, genericZipGlobs...)
 	}

-	if cfg.SearchUnindexedArchives {
+	if cfg.IncludeUnindexedArchives {
 		// java archives wrapped within tar files
 		gtp := newGenericTarWrappedJavaArchiveParser(cfg)
 		c.WithParserByGlobs(gtp.parseTarWrappedJavaArchive, genericTarGlobs...)
@@ -30,14 +31,14 @@ func NewArchiveCataloger(cfg Config) *generic.Cataloger {

 // NewPomCataloger returns a cataloger capable of parsing dependencies from a pom.xml file.
 // Pom files list dependencies that maybe not be locally installed yet.
-func NewPomCataloger() *generic.Cataloger {
+func NewPomCataloger() pkg.Cataloger {
 	return generic.NewCataloger("java-pom-cataloger").
 		WithParserByGlobs(parserPomXML, "**/pom.xml")
 }

 // NewGradleLockfileCataloger returns a cataloger capable of parsing dependencies from a gradle.lockfile file.
 // Note: Older versions of lockfiles aren't supported yet
-func NewGradleLockfileCataloger() *generic.Cataloger {
+func NewGradleLockfileCataloger() pkg.Cataloger {
 	return generic.NewCataloger("java-gradle-lockfile-cataloger").
 		WithParserByGlobs(parseGradleLockfile, gradleLockfileGlob)
 }
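For context: the option-less constructors above now return the pkg.Cataloger interface rather than *generic.Cataloger, and the same return-type change repeats for the javascript, nix, php, python, r, rpm, ruby, rust, sbom, and swift constructors in the hunks below. A minimal sketch (not from this commit) of what that buys a caller; the java and javascript import paths are assumptions inferred from the package names in this diff, while github.com/anchore/syft/syft/pkg appears in the hunks themselves:

package main

import (
	"fmt"

	"github.com/anchore/syft/syft/pkg"
	"github.com/anchore/syft/syft/pkg/cataloger/java"       // assumed import path
	"github.com/anchore/syft/syft/pkg/cataloger/javascript" // assumed import path
)

func main() {
	// every constructor satisfies the same interface, so callers can collect
	// and iterate catalogers uniformly without referencing generic.Cataloger
	catalogers := []pkg.Cataloger{
		java.NewPomCataloger(),
		java.NewGradleLockfileCataloger(),
		javascript.NewPackageCataloger(),
		javascript.NewLockCataloger(),
	}
	for _, c := range catalogers {
		fmt.Printf("%T\n", c) // print the concrete type behind each interface value
	}
}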
@@ -3,6 +3,7 @@ package java
 import (
 	"testing"

+	"github.com/anchore/syft/syft/cataloging"
 	"github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest"
 )

@@ -54,10 +55,16 @@ func Test_ArchiveCataloger_Globs(t *testing.T) {
 			pkgtest.NewCatalogTester().
 				FromDirectory(t, test.fixture).
 				ExpectsResolverContentQueries(test.expected).
-				TestCataloger(t, NewArchiveCataloger(Config{
-					SearchUnindexedArchives: true,
-					SearchIndexedArchives:   true,
-				}))
+				TestCataloger(t,
+					NewArchiveCataloger(
+						ArchiveCatalogerConfig{
+							ArchiveSearchConfig: cataloging.ArchiveSearchConfig{
+								IncludeIndexedArchives:   true,
+								IncludeUnindexedArchives: true,
+							},
+						},
+					),
+				)
 		})
 	}
 }
@@ -1,9 +1,44 @@
 package java

-type Config struct {
-	SearchUnindexedArchives bool
-	SearchIndexedArchives   bool
-	UseNetwork              bool
-	MavenBaseURL            string
-	MaxParentRecursiveDepth int
+import "github.com/anchore/syft/syft/cataloging"
+
+const mavenBaseURL = "https://repo1.maven.org/maven2"
+
+type ArchiveCatalogerConfig struct {
+	cataloging.ArchiveSearchConfig `yaml:",inline" json:"" mapstructure:",squash"`
+	UseNetwork                     bool   `yaml:"use-network" json:"use-network" mapstructure:"use-network"`
+	MavenBaseURL                   string `yaml:"maven-base-url" json:"maven-base-url" mapstructure:"maven-base-url"`
+	MaxParentRecursiveDepth        int    `yaml:"max-parent-recursive-depth" json:"max-parent-recursive-depth" mapstructure:"max-parent-recursive-depth"`
+}
+
+func DefaultArchiveCatalogerConfig() ArchiveCatalogerConfig {
+	return ArchiveCatalogerConfig{
+		ArchiveSearchConfig: cataloging.ArchiveSearchConfig{
+			IncludeIndexedArchives:   true,
+			IncludeUnindexedArchives: false,
+		},
+		UseNetwork:              false,
+		MavenBaseURL:            mavenBaseURL,
+		MaxParentRecursiveDepth: 5,
+	}
+}
+
+func (j ArchiveCatalogerConfig) WithUseNetwork(input bool) ArchiveCatalogerConfig {
+	j.UseNetwork = input
+	return j
+}
+
+func (j ArchiveCatalogerConfig) WithMavenBaseURL(input string) ArchiveCatalogerConfig {
+	if input != "" {
+		j.MavenBaseURL = input
+	}
+	return j
+}
+
+func (j ArchiveCatalogerConfig) WithArchiveTraversal(search cataloging.ArchiveSearchConfig, maxDepth int) ArchiveCatalogerConfig {
+	if maxDepth > 0 {
+		j.MaxParentRecursiveDepth = maxDepth
+	}
+	j.ArchiveSearchConfig = search
+	return j
 }
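For context: the hunk above replaces the java package's Config struct with ArchiveCatalogerConfig and its fluent With* helpers. A minimal sketch (not from this commit) of composing the new config and handing it to NewArchiveCataloger; the github.com/anchore/syft/syft/pkg/cataloger/java import path is an assumption inferred from the package name, and the URL and depth values are placeholders:

package main

import (
	"github.com/anchore/syft/syft/cataloging"
	"github.com/anchore/syft/syft/pkg/cataloger/generic"
	"github.com/anchore/syft/syft/pkg/cataloger/java" // assumed import path
)

func newJavaArchiveCataloger() *generic.Cataloger {
	cfg := java.DefaultArchiveCatalogerConfig().
		WithUseNetwork(true).
		WithMavenBaseURL("https://repo1.maven.org/maven2"). // an empty string keeps the default
		WithArchiveTraversal(
			cataloging.ArchiveSearchConfig{
				IncludeIndexedArchives:   true,
				IncludeUnindexedArchives: true,
			},
			3, // values <= 0 keep the default MaxParentRecursiveDepth of 5
		)
	return java.NewArchiveCataloger(cfg)
}

func main() {
	_ = newJavaArchiveCataloger()
}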
@@ -102,7 +102,7 @@ const nativeImageInvalidIndexError = "parsing the executable file generated an i
 const nativeImageMissingExportedDataDirectoryError = "exported data directory is missing"

 // newNativeImageCataloger returns a new Native Image cataloger object.
-func NewNativeImageCataloger() *NativeImageCataloger {
+func NewNativeImageCataloger() pkg.Cataloger {
 	return &NativeImageCataloger{}
 }

@@ -1,36 +0,0 @@
-package java
-
-const MavenBaseURL = "https://repo1.maven.org/maven2"
-
-type CatalogerOpts struct {
-	UseNetwork              bool
-	MavenURL                string
-	MaxParentRecursiveDepth int
-}
-
-func (j CatalogerOpts) WithUseNetwork(input bool) CatalogerOpts {
-	j.UseNetwork = input
-	return j
-}
-
-func (j CatalogerOpts) WithMavenURL(input string) CatalogerOpts {
-	if input != "" {
-		j.MavenURL = input
-	}
-	return j
-}
-
-func (j CatalogerOpts) WithMaxParentRecursiveDepth(input int) CatalogerOpts {
-	if input > 0 {
-		j.MaxParentRecursiveDepth = input
-	}
-	return j
-}
-
-func DefaultCatalogerOpts() CatalogerOpts {
-	return CatalogerOpts{
-		UseNetwork:              false,
-		MavenURL:                MavenBaseURL,
-		MaxParentRecursiveDepth: 5,
-	}
-}
@@ -47,10 +47,10 @@ var genericTarGlobs = []string{
 // a file listing within the archive you must decompress the entire archive and seek through all of the entries.

 type genericTarWrappedJavaArchiveParser struct {
-	cfg Config
+	cfg ArchiveCatalogerConfig
 }

-func newGenericTarWrappedJavaArchiveParser(cfg Config) genericTarWrappedJavaArchiveParser {
+func newGenericTarWrappedJavaArchiveParser(cfg ArchiveCatalogerConfig) genericTarWrappedJavaArchiveParser {
 	return genericTarWrappedJavaArchiveParser{
 		cfg: cfg,
 	}
@@ -68,7 +68,7 @@ func (gtp genericTarWrappedJavaArchiveParser) parseTarWrappedJavaArchive(_ file.
 	return discoverPkgsFromTar(reader.Location, archivePath, contentPath, gtp.cfg)
 }

-func discoverPkgsFromTar(location file.Location, archivePath, contentPath string, cfg Config) ([]pkg.Package, []artifact.Relationship, error) {
+func discoverPkgsFromTar(location file.Location, archivePath, contentPath string, cfg ArchiveCatalogerConfig) ([]pkg.Package, []artifact.Relationship, error) {
 	openers, err := intFile.ExtractGlobsFromTarToUniqueTempFile(archivePath, contentPath, archiveFormatGlobs...)
 	if err != nil {
 		return nil, nil, fmt.Errorf("unable to extract files from tar: %w", err)
@@ -40,7 +40,7 @@ func Test_parseTarWrappedJavaArchive(t *testing.T) {
 				t.Fatalf("failed to open fixture: %+v", err)
 			}

-			gtp := newGenericTarWrappedJavaArchiveParser(Config{})
+			gtp := newGenericTarWrappedJavaArchiveParser(ArchiveCatalogerConfig{})
 			actualPkgs, _, err := gtp.parseTarWrappedJavaArchive(nil, nil, file.LocationReadCloser{
 				Location:   file.NewLocation(test.fixture),
 				ReadCloser: fixture,
@@ -19,10 +19,10 @@ var genericZipGlobs = []string{
 // parseZipWrappedJavaArchive is a parser function for java archive contents contained within arbitrary zip files.

 type genericZipWrappedJavaArchiveParser struct {
-	cfg Config
+	cfg ArchiveCatalogerConfig
 }

-func newGenericZipWrappedJavaArchiveParser(cfg Config) genericZipWrappedJavaArchiveParser {
+func newGenericZipWrappedJavaArchiveParser(cfg ArchiveCatalogerConfig) genericZipWrappedJavaArchiveParser {
 	return genericZipWrappedJavaArchiveParser{
 		cfg: cfg,
 	}
@@ -33,7 +33,7 @@ func Test_parseZipWrappedJavaArchive(t *testing.T) {
 				t.Fatalf("failed to open fixture: %+v", err)
 			}

-			gzp := newGenericZipWrappedJavaArchiveParser(Config{})
+			gzp := newGenericZipWrappedJavaArchiveParser(ArchiveCatalogerConfig{})

 			actualPkgs, _, err := gzp.parseZipWrappedJavaArchive(nil, nil, file.LocationReadCloser{
 				Location:   file.NewLocation(test.fixture),
@@ -4,17 +4,18 @@ Package javascript provides a concrete Cataloger implementation for packages rel
 package javascript

 import (
+	"github.com/anchore/syft/syft/pkg"
 	"github.com/anchore/syft/syft/pkg/cataloger/generic"
 )

 // NewPackageCataloger returns a new cataloger object for NPM.
-func NewPackageCataloger() *generic.Cataloger {
+func NewPackageCataloger() pkg.Cataloger {
 	return generic.NewCataloger("javascript-package-cataloger").
 		WithParserByGlobs(parsePackageJSON, "**/package.json")
 }

 // NewLockCataloger returns a new cataloger object for NPM (and NPM-adjacent, such as yarn) lock files.
-func NewLockCataloger() *generic.Cataloger {
+func NewLockCataloger() pkg.Cataloger {
 	return generic.NewCataloger("javascript-lock-cataloger").
 		WithParserByGlobs(parsePackageLock, "**/package-lock.json").
 		WithParserByGlobs(parseYarnLock, "**/yarn.lock").
@@ -15,16 +15,16 @@ import (

 var _ pkg.Cataloger = (*LinuxKernelCataloger)(nil)

-type LinuxCatalogerConfig struct {
-	CatalogModules bool
+type LinuxKernelCatalogerConfig struct {
+	CatalogModules bool `yaml:"catalog-modules" json:"catalog-modules" mapstructure:"catalog-modules"`
 }

 type LinuxKernelCataloger struct {
-	cfg LinuxCatalogerConfig
+	cfg LinuxKernelCatalogerConfig
 }

-func DefaultLinuxCatalogerConfig() LinuxCatalogerConfig {
-	return LinuxCatalogerConfig{
+func DefaultLinuxCatalogerConfig() LinuxKernelCatalogerConfig {
+	return LinuxKernelCatalogerConfig{
 		CatalogModules: true,
 	}
 }
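For context: the yaml/json/mapstructure tags added here (and on the python CatalogerConfig below) let these config structs be populated directly from user-facing configuration. A minimal sketch of that round trip, not part of this commit; it assumes gopkg.in/yaml.v3 as the decoder and assumes this package is named kernel at github.com/anchore/syft/syft/pkg/cataloger/kernel:

package main

import (
	"fmt"

	"gopkg.in/yaml.v3" // assumed dependency, used only for illustration

	"github.com/anchore/syft/syft/pkg/cataloger/kernel" // assumed import path
)

func main() {
	// start from the package default (CatalogModules: true) ...
	cfg := kernel.DefaultLinuxCatalogerConfig()

	// ... then let a config file override it via the `catalog-modules` yaml tag
	raw := []byte("catalog-modules: false\n")
	if err := yaml.Unmarshal(raw, &cfg); err != nil {
		panic(err)
	}

	fmt.Println(cfg.CatalogModules) // false
	_ = kernel.NewLinuxKernelCataloger(cfg)
}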
@@ -43,7 +43,7 @@ var kernelModuleGlobs = []string{
 }

 // NewLinuxKernelCataloger returns a new kernel files cataloger object.
-func NewLinuxKernelCataloger(cfg LinuxCatalogerConfig) *LinuxKernelCataloger {
+func NewLinuxKernelCataloger(cfg LinuxKernelCatalogerConfig) *LinuxKernelCataloger {
 	return &LinuxKernelCataloger{
 		cfg: cfg,
 	}
@@ -87,7 +87,7 @@ func Test_KernelCataloger(t *testing.T) {
 			Expects(expectedPkgs, expectedRelationships).
 			TestCataloger(t,
 				NewLinuxKernelCataloger(
-					LinuxCatalogerConfig{
+					LinuxKernelCatalogerConfig{
 						CatalogModules: true,
 					},
 				),
@@ -19,7 +19,7 @@ const catalogerName = "nix-store-cataloger"
 // StoreCataloger finds package outputs installed in the Nix store location (/nix/store/*).
 type StoreCataloger struct{}

-func NewStoreCataloger() *StoreCataloger {
+func NewStoreCataloger() pkg.Cataloger {
 	return &StoreCataloger{}
 }

@@ -4,6 +4,7 @@ Package php provides a concrete Cataloger implementation relating to packages wi
 package php

 import (
+	"github.com/anchore/syft/syft/pkg"
 	"github.com/anchore/syft/syft/pkg/cataloger/generic"
 )

@@ -11,13 +12,13 @@ import (
 // semantic meanings. The lock file represents what should be installed, whereas the installed file represents what is installed.

 // NewComposerInstalledCataloger returns a new cataloger for PHP installed.json files.
-func NewComposerInstalledCataloger() *generic.Cataloger {
+func NewComposerInstalledCataloger() pkg.Cataloger {
 	return generic.NewCataloger("php-composer-installed-cataloger").
 		WithParserByGlobs(parseInstalledJSON, "**/installed.json")
 }

 // NewComposerLockCataloger returns a new cataloger for PHP composer.lock files.
-func NewComposerLockCataloger() *generic.Cataloger {
+func NewComposerLockCataloger() pkg.Cataloger {
 	return generic.NewCataloger("php-composer-lock-cataloger").
 		WithParserByGlobs(parseComposerLock, "**/composer.lock")
 }
@@ -4,13 +4,14 @@ Package python provides a concrete Cataloger implementation relating to packages
 package python

 import (
+	"github.com/anchore/syft/syft/pkg"
 	"github.com/anchore/syft/syft/pkg/cataloger/generic"
 )

 const eggInfoGlob = "**/*.egg-info"

 type CatalogerConfig struct {
-	GuessUnpinnedRequirements bool
+	GuessUnpinnedRequirements bool `yaml:"guess-unpinned-requirements" json:"guess-unpinned-requirements" mapstructure:"guess-unpinned-requirements"`
 }

 func DefaultCatalogerConfig() CatalogerConfig {
@@ -30,7 +31,7 @@ func NewPackageCataloger(cfg CatalogerConfig) *generic.Cataloger {
 }

 // NewInstalledPackageCataloger returns a new cataloger for python packages within egg or wheel installation directories.
-func NewInstalledPackageCataloger() *generic.Cataloger {
+func NewInstalledPackageCataloger() pkg.Cataloger {
 	return generic.NewCataloger("python-installed-package-cataloger").
 		WithParserByGlobs(
 			parseWheelOrEgg,
@@ -4,11 +4,12 @@ Package r provides a concrete Cataloger implementation relating to packages with
 package r

 import (
+	"github.com/anchore/syft/syft/pkg"
 	"github.com/anchore/syft/syft/pkg/cataloger/generic"
 )

 // NewPackageCataloger returns a new R cataloger object based on detection of R package DESCRIPTION files.
-func NewPackageCataloger() *generic.Cataloger {
+func NewPackageCataloger() pkg.Cataloger {
 	return generic.NewCataloger("r-package-cataloger").
 		WithParserByGlobs(parseDescriptionFile, "**/DESCRIPTION")
 }
@@ -12,7 +12,7 @@ import (
 )

 // NewDBCataloger returns a new RPM DB cataloger object.
-func NewDBCataloger() *generic.Cataloger {
+func NewDBCataloger() pkg.Cataloger {
 	// check if a sqlite driver is available
 	if !isSqliteDriverAvailable() {
 		log.Warnf("sqlite driver is not available, newer RPM databases might not be cataloged")
@@ -24,7 +24,7 @@ func NewDBCataloger() *generic.Cataloger {
 }

 // NewArchiveCataloger returns a new RPM file cataloger object.
-func NewArchiveCataloger() *generic.Cataloger {
+func NewArchiveCataloger() pkg.Cataloger {
 	return generic.NewCataloger("rpm-archive-cataloger").
 		WithParserByGlobs(parseRpmArchive, "**/*.rpm")
 }
@@ -4,23 +4,24 @@ Package ruby provides a concrete Cataloger implementation relating to packages w
 package ruby

 import (
+	"github.com/anchore/syft/syft/pkg"
 	"github.com/anchore/syft/syft/pkg/cataloger/generic"
 )

 // NewGemFileLockCataloger returns a new Bundler cataloger object tailored for parsing index-oriented files (e.g. Gemfile.lock).
-func NewGemFileLockCataloger() *generic.Cataloger {
+func NewGemFileLockCataloger() pkg.Cataloger {
 	return generic.NewCataloger("ruby-gemfile-cataloger").
 		WithParserByGlobs(parseGemFileLockEntries, "**/Gemfile.lock")
 }

 // NewInstalledGemSpecCataloger returns a new Bundler cataloger object tailored for detecting installations of gems (e.g. Gemspec).
-func NewInstalledGemSpecCataloger() *generic.Cataloger {
+func NewInstalledGemSpecCataloger() pkg.Cataloger {
 	return generic.NewCataloger("ruby-installed-gemspec-cataloger").
 		WithParserByGlobs(parseGemSpecEntries, "**/specifications/**/*.gemspec")
 }

 // NewGemSpecCataloger looks for gems without the additional requirement of the gem being installed.
-func NewGemSpecCataloger() *generic.Cataloger {
+func NewGemSpecCataloger() pkg.Cataloger {
 	return generic.NewCataloger("ruby-gemspec-cataloger").
 		WithParserByGlobs(parseGemSpecEntries, "**/*.gemspec")
 }
@@ -5,18 +5,19 @@ package rust

 import (
 	"github.com/anchore/syft/internal"
+	"github.com/anchore/syft/syft/pkg"
 	"github.com/anchore/syft/syft/pkg/cataloger/generic"
 )

 // NewCargoLockCataloger returns a new Rust Cargo lock file cataloger object.
-func NewCargoLockCataloger() *generic.Cataloger {
+func NewCargoLockCataloger() pkg.Cataloger {
 	return generic.NewCataloger("rust-cargo-lock-cataloger").
 		WithParserByGlobs(parseCargoLock, "**/Cargo.lock")
 }

 // NewAuditBinaryCataloger returns a new Rust auditable binary cataloger object that can detect dependencies
 // in binaries produced with https://github.com/Shnatsel/rust-audit
-func NewAuditBinaryCataloger() *generic.Cataloger {
+func NewAuditBinaryCataloger() pkg.Cataloger {
 	return generic.NewCataloger("cargo-auditable-binary-cataloger").
 		WithParserByMimeTypes(parseAuditBinary, internal.ExecutableMIMETypeSet.List()...)
 }
@@ -19,7 +19,7 @@ import (
 const catalogerName = "sbom-cataloger"

 // NewCataloger returns a new SBOM cataloger object loaded from saved SBOM JSON.
-func NewCataloger() *generic.Cataloger {
+func NewCataloger() pkg.Cataloger {
 	return generic.NewCataloger(catalogerName).
 		WithParserByGlobs(parseSBOM,
 			"**/*.syft.json",
@@ -4,16 +4,17 @@ Package swift provides a concrete Cataloger implementation relating to packages
 package swift

 import (
+	"github.com/anchore/syft/syft/pkg"
 	"github.com/anchore/syft/syft/pkg/cataloger/generic"
 )

-func NewSwiftPackageManagerCataloger() *generic.Cataloger {
+func NewSwiftPackageManagerCataloger() pkg.Cataloger {
 	return generic.NewCataloger("swift-package-manager-cataloger").
 		WithParserByGlobs(parsePackageResolved, "**/Package.resolved", "**/.package.resolved")
 }

 // NewCocoapodsCataloger returns a new Swift Cocoapods lock file cataloger object.
-func NewCocoapodsCataloger() *generic.Cataloger {
+func NewCocoapodsCataloger() pkg.Cataloger {
 	return generic.NewCataloger("cocoapods-cataloger").
 		WithParserByGlobs(parsePodfileLock, "**/Podfile.lock")
 }