mirror of
https://github.com/anchore/syft.git
synced 2025-11-17 16:33:21 +01:00
Merge remote-tracking branch 'origin/main' into ast-parse-cataloger-capabilities
Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>
This commit is contained in:
commit
4a2d94b4b9
@ -42,7 +42,7 @@ tools:
|
||||
# used for signing the checksums file at release
|
||||
- name: cosign
|
||||
version:
|
||||
want: v3.0.1
|
||||
want: v3.0.2
|
||||
method: github-release
|
||||
with:
|
||||
repo: sigstore/cosign
|
||||
@ -58,7 +58,7 @@ tools:
|
||||
# used to release all artifacts
|
||||
- name: goreleaser
|
||||
version:
|
||||
want: v2.12.5
|
||||
want: v2.12.7
|
||||
method: github-release
|
||||
with:
|
||||
repo: goreleaser/goreleaser
|
||||
@ -98,7 +98,7 @@ tools:
|
||||
# used for triggering a release
|
||||
- name: gh
|
||||
version:
|
||||
want: v2.81.0
|
||||
want: v2.82.1
|
||||
method: github-release
|
||||
with:
|
||||
repo: cli/cli
|
||||
@ -114,7 +114,7 @@ tools:
|
||||
# used to upload test fixture cache
|
||||
- name: yq
|
||||
version:
|
||||
want: v4.47.2
|
||||
want: v4.48.1
|
||||
method: github-release
|
||||
with:
|
||||
repo: mikefarah/yq
|
||||
|
||||
@ -9,6 +9,9 @@ permit:
|
||||
- Unlicense
|
||||
|
||||
ignore-packages:
|
||||
# https://github.com/sorairolake/lzip-go/blob/34a2615d2abf740175c6b0a835baa08364e09430/go.sum.license#L3
|
||||
# has `SPDX-License-Identifier: Apache-2.0 OR MIT`, both of which are acceptable
|
||||
- github.com/sorairolake/lzip-go
|
||||
# packageurl-go is released under the MIT license located in the root of the repo at /mit.LICENSE
|
||||
- github.com/anchore/packageurl-go
|
||||
|
||||
|
||||
6
.github/workflows/codeql-analysis.yml
vendored
6
.github/workflows/codeql-analysis.yml
vendored
@ -47,7 +47,7 @@ jobs:
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@e296a935590eb16afc0c0108289f68c87e2a89a5 #v3.29.5
|
||||
uses: github/codeql-action/init@4e94bd11f71e507f7f87df81788dff88d1dacbfb #v3.29.5
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
@ -58,7 +58,7 @@ jobs:
|
||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||
# If this step fails, then you should remove it and run the build manually (see below)
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@e296a935590eb16afc0c0108289f68c87e2a89a5 #v3.29.5
|
||||
uses: github/codeql-action/autobuild@4e94bd11f71e507f7f87df81788dff88d1dacbfb #v3.29.5
|
||||
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
# 📚 https://git.io/JvXDl
|
||||
@ -72,4 +72,4 @@ jobs:
|
||||
# make release
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@e296a935590eb16afc0c0108289f68c87e2a89a5 #v3.29.5
|
||||
uses: github/codeql-action/analyze@4e94bd11f71e507f7f87df81788dff88d1dacbfb #v3.29.5
|
||||
|
||||
12
.github/workflows/release.yaml
vendored
12
.github/workflows/release.yaml
vendored
@ -19,6 +19,16 @@ jobs:
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Bootstrap environment
|
||||
uses: ./.github/actions/bootstrap
|
||||
|
||||
- name: Validate Apple notarization credentials
|
||||
run: .tool/quill submission list
|
||||
env:
|
||||
QUILL_NOTARY_ISSUER: ${{ secrets.APPLE_NOTARY_ISSUER }}
|
||||
QUILL_NOTARY_KEY_ID: ${{ secrets.APPLE_NOTARY_KEY_ID }}
|
||||
QUILL_NOTARY_KEY: ${{ secrets.APPLE_NOTARY_KEY }}
|
||||
|
||||
- name: Check if running on main
|
||||
if: github.ref != 'refs/heads/main'
|
||||
# we are using the following flag when running `cosign blob-verify` for checksum signature verification:
|
||||
@ -161,7 +171,7 @@ jobs:
|
||||
# for updating brew formula in anchore/homebrew-syft
|
||||
GITHUB_BREW_TOKEN: ${{ secrets.ANCHOREOPS_GITHUB_OSS_WRITE_TOKEN }}
|
||||
|
||||
- uses: anchore/sbom-action@f8bdd1d8ac5e901a77a92f111440fdb1b593736b #v0.20.6
|
||||
- uses: anchore/sbom-action@8e94d75ddd33f69f691467e42275782e4bfefe84 #v0.20.9
|
||||
continue-on-error: true
|
||||
with:
|
||||
file: go.mod
|
||||
|
||||
2
.github/workflows/validations.yaml
vendored
2
.github/workflows/validations.yaml
vendored
@ -210,7 +210,7 @@ jobs:
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- name: Install Cosign
|
||||
uses: sigstore/cosign-installer@d7543c93d881b35a8faa02e8e3605f69b7a1ce62 # v3.10.0
|
||||
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
|
||||
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
|
||||
with:
|
||||
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@ -2,6 +2,7 @@
|
||||
go.work
|
||||
go.work.sum
|
||||
.tool-versions
|
||||
.python-version
|
||||
|
||||
# app configuration
|
||||
/.syft.yaml
|
||||
|
||||
@ -116,7 +116,7 @@ Where the `formats` available are:
|
||||
- `spdx-json@2.2`: A JSON report conforming to the [SPDX 2.2 JSON Schema](https://github.com/spdx/spdx-spec/blob/v2.2/schemas/spdx-schema.json).
|
||||
- `github-json`: A JSON report conforming to GitHub's dependency snapshot format.
|
||||
- `syft-table`: A columnar summary (default).
|
||||
- `template`: Lets the user specify the output format. See ["Using templates"](#using-templates) below.
|
||||
- `template`: Lets the user specify the output format. See ["Using templates"](https://github.com/anchore/syft/wiki/using-templates) below.
|
||||
|
||||
Note that flags using the @<version> can be used for earlier versions of each specification as well.
|
||||
|
||||
@ -135,7 +135,7 @@ Note that flags using the @<version> can be used for earlier versions of each sp
|
||||
- Go (go.mod, Go binaries)
|
||||
- GitHub (workflows, actions)
|
||||
- Haskell (cabal, stack)
|
||||
- Java (jar, ear, war, par, sar, nar, native-image)
|
||||
- Java (jar, ear, war, par, sar, nar, rar, native-image)
|
||||
- JavaScript (npm, yarn)
|
||||
- Jenkins Plugins (jpi, hpi)
|
||||
- Linux kernel archives (vmlinz)
|
||||
|
||||
@ -252,7 +252,6 @@ func generateSBOMForAttestation(ctx context.Context, id clio.Identification, opt
|
||||
}
|
||||
|
||||
src, err := getSource(ctx, opts, userInput, stereoscope.RegistryTag)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
@ -185,7 +185,6 @@ func runScan(ctx context.Context, id clio.Identification, opts *scanOptions, use
|
||||
}
|
||||
|
||||
src, err := getSource(ctx, &opts.Catalog, userInput, sources...)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@ -25,7 +25,6 @@ func BenchmarkImagePackageCatalogers(b *testing.B) {
|
||||
// get the source object for the image
|
||||
theSource, err := syft.GetSource(context.Background(), tarPath, syft.DefaultGetSourceConfig().WithSources("docker-archive"))
|
||||
require.NoError(b, err)
|
||||
|
||||
b.Cleanup(func() {
|
||||
require.NoError(b, theSource.Close())
|
||||
})
|
||||
|
||||
@ -38,11 +38,11 @@ func catalogFixtureImageWithConfig(t *testing.T, fixtureImageName string, cfg *s
|
||||
// get the source to build an SBOM against
|
||||
theSource, err := syft.GetSource(context.Background(), tarPath, syft.DefaultGetSourceConfig().WithSources("docker-archive"))
|
||||
require.NoError(t, err)
|
||||
|
||||
t.Cleanup(func() {
|
||||
require.NoError(t, theSource.Close())
|
||||
})
|
||||
|
||||
// build the SBOM
|
||||
s, err := syft.CreateSBOM(context.Background(), theSource, cfg)
|
||||
|
||||
require.NoError(t, err)
|
||||
@ -66,7 +66,7 @@ func catalogDirectory(t *testing.T, dir string, catalogerSelection ...string) (s
|
||||
func catalogDirectoryWithConfig(t *testing.T, dir string, cfg *syft.CreateSBOMConfig) (sbom.SBOM, source.Source) {
|
||||
cfg.CatalogerSelection = cfg.CatalogerSelection.WithDefaults(pkgcataloging.DirectoryTag)
|
||||
|
||||
// get the source to build an sbom against
|
||||
// get the source to build an SBOM against
|
||||
theSource, err := syft.GetSource(context.Background(), dir, syft.DefaultGetSourceConfig().WithSources("dir"))
|
||||
require.NoError(t, err)
|
||||
t.Cleanup(func() {
|
||||
|
||||
@ -23,6 +23,7 @@ const defaultImage = "alpine:3.19"
|
||||
func main() {
|
||||
// automagically get a source.Source for arbitrary string input
|
||||
src := getSource(imageReference())
|
||||
defer src.Close()
|
||||
|
||||
// will catalog the given source and return a SBOM keeping in mind several configurable options
|
||||
sbom := getSBOM(src)
|
||||
@ -46,7 +47,6 @@ func getSource(input string) source.Source {
|
||||
fmt.Println("detecting source type for input:", input, "...")
|
||||
|
||||
src, err := syft.GetSource(context.Background(), input, nil)
|
||||
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
@ -19,6 +19,7 @@ const defaultImage = "alpine:3.19"
|
||||
func main() {
|
||||
// automagically get a source.Source for arbitrary string input
|
||||
src := getSource(imageReference())
|
||||
defer src.Close()
|
||||
|
||||
// catalog the given source and return a SBOM
|
||||
sbom := getSBOM(src)
|
||||
@ -40,7 +41,6 @@ func imageReference() string {
|
||||
|
||||
func getSource(input string) source.Source {
|
||||
src, err := syft.GetSource(context.Background(), input, nil)
|
||||
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
@ -19,6 +19,7 @@ const defaultImage = "alpine:3.19"
|
||||
func main() {
|
||||
// automagically get a source.Source for arbitrary string input
|
||||
src := getSource(imageReference())
|
||||
defer src.Close()
|
||||
|
||||
// catalog the given source and return a SBOM
|
||||
// let's explicitly use catalogers that are:
|
||||
@ -44,7 +45,6 @@ func imageReference() string {
|
||||
|
||||
func getSource(input string) source.Source {
|
||||
src, err := syft.GetSource(context.Background(), input, nil)
|
||||
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
@ -15,6 +15,7 @@ func main() {
|
||||
image := "alpine:3.19"
|
||||
|
||||
src, _ := syft.GetSource(context.Background(), image, syft.DefaultGetSourceConfig().WithSources("registry"))
|
||||
defer src.Close()
|
||||
|
||||
sbom, _ := syft.CreateSBOM(context.Background(), src, syft.DefaultCreateSBOMConfig())
|
||||
|
||||
|
||||
34
go.mod
34
go.mod
@ -15,7 +15,7 @@ require (
|
||||
github.com/anchore/bubbly v0.0.0-20231115134915-def0aba654a9
|
||||
github.com/anchore/clio v0.0.0-20250319180342-2cfe4b0cb716
|
||||
github.com/anchore/fangs v0.0.0-20250319222917-446a1e748ec2
|
||||
github.com/anchore/go-collections v0.0.0-20240216171411-9321230ce537
|
||||
github.com/anchore/go-collections v0.0.0-20251016125210-a3c352120e8c
|
||||
github.com/anchore/go-homedir v0.0.0-20250319154043-c29668562e4d
|
||||
github.com/anchore/go-logger v0.0.0-20250318195838-07ae343dd722
|
||||
github.com/anchore/go-macholibre v0.0.0-20220308212642-53e6d0aaf6fb
|
||||
@ -24,7 +24,7 @@ require (
|
||||
github.com/anchore/go-testutils v0.0.0-20200925183923-d5f45b0d3c04
|
||||
github.com/anchore/go-version v1.2.2-0.20200701162849-18adb9c92b9b
|
||||
github.com/anchore/packageurl-go v0.1.1-0.20250220190351-d62adb6e1115
|
||||
github.com/anchore/stereoscope v0.1.10
|
||||
github.com/anchore/stereoscope v0.1.11
|
||||
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be
|
||||
github.com/aquasecurity/go-pep440-version v0.0.1
|
||||
github.com/bitnami/go-version v0.0.0-20250131085805-b1f57a8634ef
|
||||
@ -40,7 +40,7 @@ require (
|
||||
github.com/dustin/go-humanize v1.0.1
|
||||
github.com/elliotchance/phpserialize v1.4.0
|
||||
github.com/facebookincubator/nvdtools v0.1.5
|
||||
github.com/github/go-spdx/v2 v2.3.3
|
||||
github.com/github/go-spdx/v2 v2.3.4
|
||||
github.com/gkampitakis/go-snaps v0.5.15
|
||||
github.com/go-git/go-billy/v5 v5.6.2
|
||||
github.com/go-git/go-git/v5 v5.16.3
|
||||
@ -58,11 +58,11 @@ require (
|
||||
github.com/hashicorp/hcl/v2 v2.24.0
|
||||
github.com/iancoleman/strcase v0.3.0
|
||||
github.com/invopop/jsonschema v0.7.0
|
||||
github.com/jedib0t/go-pretty/v6 v6.6.8
|
||||
github.com/jedib0t/go-pretty/v6 v6.6.9
|
||||
github.com/jinzhu/copier v0.4.0
|
||||
github.com/kastenhq/goversion v0.0.0-20230811215019-93b2f8823953
|
||||
github.com/magiconair/properties v1.8.10
|
||||
github.com/mholt/archives v0.1.3
|
||||
github.com/mholt/archives v0.1.5
|
||||
github.com/moby/sys/mountinfo v0.7.2
|
||||
github.com/nix-community/go-nix v0.0.0-20250101154619-4bdde671e0a1
|
||||
github.com/olekukonko/tablewriter v1.1.0
|
||||
@ -90,9 +90,9 @@ require (
|
||||
go.uber.org/goleak v1.3.0
|
||||
go.yaml.in/yaml/v3 v3.0.4
|
||||
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b
|
||||
golang.org/x/mod v0.28.0
|
||||
golang.org/x/mod v0.29.0
|
||||
golang.org/x/net v0.46.0
|
||||
modernc.org/sqlite v1.39.0
|
||||
modernc.org/sqlite v1.39.1
|
||||
)
|
||||
|
||||
require (
|
||||
@ -110,11 +110,11 @@ require (
|
||||
github.com/Microsoft/go-winio v0.6.2 // indirect
|
||||
github.com/Microsoft/hcsshim v0.11.7 // indirect
|
||||
github.com/ProtonMail/go-crypto v1.3.0 // indirect
|
||||
github.com/STARRY-S/zip v0.2.1 // indirect
|
||||
github.com/STARRY-S/zip v0.2.3 // indirect
|
||||
github.com/agext/levenshtein v1.2.1 // indirect; indirectt
|
||||
github.com/anchore/go-lzo v0.1.0 // indirect
|
||||
github.com/anchore/go-struct-converter v0.0.0-20221118182256-c68fdcfa2092 // indirect
|
||||
github.com/andybalholm/brotli v1.1.2-0.20250424173009-453214e765f3 // indirect
|
||||
github.com/andybalholm/brotli v1.2.0 // indirect
|
||||
github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect
|
||||
github.com/aquasecurity/go-version v0.0.1 // indirect
|
||||
github.com/atotto/clipboard v0.1.4 // indirect
|
||||
@ -122,7 +122,7 @@ require (
|
||||
github.com/becheran/wildmatch-go v1.0.0 // indirect
|
||||
github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d // indirect
|
||||
github.com/bodgit/plumbing v1.3.0 // indirect
|
||||
github.com/bodgit/sevenzip v1.6.0 // indirect
|
||||
github.com/bodgit/sevenzip v1.6.1 // indirect
|
||||
github.com/bodgit/windows v1.0.1 // indirect
|
||||
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc // indirect
|
||||
github.com/charmbracelet/harmonica v0.2.0 // indirect
|
||||
@ -144,9 +144,9 @@ require (
|
||||
github.com/containerd/typeurl/v2 v2.2.0 // indirect
|
||||
github.com/cyphar/filepath-securejoin v0.4.1 // indirect
|
||||
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
|
||||
github.com/docker/cli v28.4.0+incompatible // indirect
|
||||
github.com/docker/cli v28.5.1+incompatible // indirect
|
||||
github.com/docker/distribution v2.8.3+incompatible // indirect
|
||||
github.com/docker/docker v28.4.0+incompatible // indirect
|
||||
github.com/docker/docker v28.5.1+incompatible // indirect
|
||||
github.com/docker/docker-credential-helpers v0.9.3 // indirect
|
||||
github.com/docker/go-connections v0.6.0 // indirect
|
||||
github.com/docker/go-events v0.0.0-20190806004212-e31b211e4f1c // indirect
|
||||
@ -194,7 +194,7 @@ require (
|
||||
github.com/mattn/go-runewidth v0.0.16 // indirect
|
||||
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect
|
||||
github.com/mikelolasagasti/xz v1.0.1 // indirect
|
||||
github.com/minio/minlz v1.0.0 // indirect
|
||||
github.com/minio/minlz v1.0.1 // indirect
|
||||
github.com/mitchellh/copystructure v1.2.0 // indirect
|
||||
github.com/mitchellh/go-homedir v1.1.0 // indirect
|
||||
github.com/mitchellh/go-wordwrap v1.0.1 // indirect
|
||||
@ -210,7 +210,7 @@ require (
|
||||
github.com/muesli/termenv v0.16.0 // indirect
|
||||
github.com/ncruces/go-strftime v0.1.9 // indirect
|
||||
github.com/nwaples/rardecode v1.1.3 // indirect
|
||||
github.com/nwaples/rardecode/v2 v2.1.0 // indirect
|
||||
github.com/nwaples/rardecode/v2 v2.2.0 // indirect
|
||||
github.com/olekukonko/errors v1.1.0 // indirect
|
||||
github.com/olekukonko/ll v0.0.9 // indirect
|
||||
github.com/opencontainers/image-spec v1.1.1 // indirect
|
||||
@ -232,7 +232,7 @@ require (
|
||||
github.com/shopspring/decimal v1.4.0 // indirect
|
||||
github.com/sirupsen/logrus v1.9.4-0.20230606125235-dd1b4c2e81af // indirect
|
||||
github.com/skeema/knownhosts v1.3.1 // indirect
|
||||
github.com/sorairolake/lzip-go v0.3.5 // indirect
|
||||
github.com/sorairolake/lzip-go v0.3.8 // indirect
|
||||
github.com/sourcegraph/conc v0.3.0 // indirect
|
||||
github.com/spf13/cast v1.7.1 // indirect
|
||||
github.com/spf13/pflag v1.0.9 // indirect
|
||||
@ -271,7 +271,7 @@ require (
|
||||
golang.org/x/term v0.36.0 // indirect
|
||||
golang.org/x/text v0.30.0 // indirect
|
||||
golang.org/x/time v0.12.0 // indirect
|
||||
golang.org/x/tools v0.37.0
|
||||
golang.org/x/tools v0.38.0
|
||||
golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 // indirect
|
||||
google.golang.org/api v0.203.0 // indirect
|
||||
google.golang.org/genproto v0.0.0-20241118233622-e639e219e697 // indirect
|
||||
@ -281,7 +281,7 @@ require (
|
||||
google.golang.org/protobuf v1.36.6 // indirect
|
||||
gopkg.in/warnings.v0 v0.1.2 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.1
|
||||
modernc.org/libc v1.66.3 // indirect
|
||||
modernc.org/libc v1.66.10 // indirect
|
||||
modernc.org/mathutil v1.7.1 // indirect
|
||||
modernc.org/memory v1.11.0 // indirect
|
||||
)
|
||||
|
||||
80
go.sum
80
go.sum
@ -94,8 +94,8 @@ github.com/OneOfOne/xxhash v1.2.8 h1:31czK/TI9sNkxIKfaUfGlU47BAxQ0ztGgd9vPyqimf8
|
||||
github.com/OneOfOne/xxhash v1.2.8/go.mod h1:eZbhyaAYD41SGSSsnmcpxVoRiQ/MPUTjUdIIOT9Um7Q=
|
||||
github.com/ProtonMail/go-crypto v1.3.0 h1:ILq8+Sf5If5DCpHQp4PbZdS1J7HDFRXz/+xKBiRGFrw=
|
||||
github.com/ProtonMail/go-crypto v1.3.0/go.mod h1:9whxjD8Rbs29b4XWbB8irEcE8KHMqaR2e7GWU1R+/PE=
|
||||
github.com/STARRY-S/zip v0.2.1 h1:pWBd4tuSGm3wtpoqRZZ2EAwOmcHK6XFf7bU9qcJXyFg=
|
||||
github.com/STARRY-S/zip v0.2.1/go.mod h1:xNvshLODWtC4EJ702g7cTYn13G53o1+X9BWnPFpcWV4=
|
||||
github.com/STARRY-S/zip v0.2.3 h1:luE4dMvRPDOWQdeDdUxUoZkzUIpTccdKdhHHsQJ1fm4=
|
||||
github.com/STARRY-S/zip v0.2.3/go.mod h1:lqJ9JdeRipyOQJrYSOtpNAiaesFO6zVDsE8GIGFaoSk=
|
||||
github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpHMqeKTCYkitsPqHNxTmd4SNR5r94FGM8=
|
||||
github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d/go.mod h1:asat636LX7Bqt5lYEZ27JNDcqxfjdBQuJ/MM4CN/Lzo=
|
||||
github.com/acobaugh/osrelease v0.1.0 h1:Yb59HQDGGNhCj4suHaFQQfBps5wyoKLSSX/J/+UifRE=
|
||||
@ -116,8 +116,8 @@ github.com/anchore/clio v0.0.0-20250319180342-2cfe4b0cb716 h1:2sIdYJlQESEnyk3Y0W
|
||||
github.com/anchore/clio v0.0.0-20250319180342-2cfe4b0cb716/go.mod h1:Utb9i4kwiCWvqAIxZaJeMIXFO9uOgQXlvH2BfbfO/zI=
|
||||
github.com/anchore/fangs v0.0.0-20250319222917-446a1e748ec2 h1:GC2QaO0YsmjpsZ4rtVKv9DnproIxqqn+qkskpc+i8MA=
|
||||
github.com/anchore/fangs v0.0.0-20250319222917-446a1e748ec2/go.mod h1:XUbUECwVKuD3qYRUj+QZIOHjyyXua2gFmVjKA40iHXA=
|
||||
github.com/anchore/go-collections v0.0.0-20240216171411-9321230ce537 h1:GjNGuwK5jWjJMyVppBjYS54eOiiSNv4Ba869k4wh72Q=
|
||||
github.com/anchore/go-collections v0.0.0-20240216171411-9321230ce537/go.mod h1:1aiktV46ATCkuVg0O573ZrH56BUawTECPETbZyBcqT8=
|
||||
github.com/anchore/go-collections v0.0.0-20251016125210-a3c352120e8c h1:eoJXyC0n7DZ4YvySG/ETdYkTar2Due7eH+UmLK6FbrA=
|
||||
github.com/anchore/go-collections v0.0.0-20251016125210-a3c352120e8c/go.mod h1:1aiktV46ATCkuVg0O573ZrH56BUawTECPETbZyBcqT8=
|
||||
github.com/anchore/go-homedir v0.0.0-20250319154043-c29668562e4d h1:gT69osH9AsdpOfqxbRwtxcNnSZ1zg4aKy2BevO3ZBdc=
|
||||
github.com/anchore/go-homedir v0.0.0-20250319154043-c29668562e4d/go.mod h1:PhSnuFYknwPZkOWKB1jXBNToChBA+l0FjwOxtViIc50=
|
||||
github.com/anchore/go-logger v0.0.0-20250318195838-07ae343dd722 h1:2SqmFgE7h+Ql4VyBzhjLkRF/3gDrcpUBj8LjvvO6OOM=
|
||||
@ -138,11 +138,11 @@ github.com/anchore/go-version v1.2.2-0.20200701162849-18adb9c92b9b h1:e1bmaoJfZV
|
||||
github.com/anchore/go-version v1.2.2-0.20200701162849-18adb9c92b9b/go.mod h1:Bkc+JYWjMCF8OyZ340IMSIi2Ebf3uwByOk6ho4wne1E=
|
||||
github.com/anchore/packageurl-go v0.1.1-0.20250220190351-d62adb6e1115 h1:ZyRCmiEjnoGJZ1+Ah0ZZ/mKKqNhGcUZBl0s7PTTDzvY=
|
||||
github.com/anchore/packageurl-go v0.1.1-0.20250220190351-d62adb6e1115/go.mod h1:KoYIv7tdP5+CC9VGkeZV4/vGCKsY55VvoG+5dadg4YI=
|
||||
github.com/anchore/stereoscope v0.1.10 h1:BogafIMaW/L1lOUoVS96Hu1jTSP2JktxIayVqcxvcBI=
|
||||
github.com/anchore/stereoscope v0.1.10/go.mod h1:RWFAkQE8tp8yyaf4V83Kq1bO6hX3bzi8gpLCcKgZLIk=
|
||||
github.com/anchore/stereoscope v0.1.11 h1:YP/XUNcJyMbOOPAWPkeZNCVlKKTRO2cnBTEeUW6I40Y=
|
||||
github.com/anchore/stereoscope v0.1.11/go.mod h1:G3PZlzPbxFhylj9pQwtqfVPaahuWmy/UCtv5FTIIMvg=
|
||||
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8=
|
||||
github.com/andybalholm/brotli v1.1.2-0.20250424173009-453214e765f3 h1:8PmGpDEZl9yDpcdEr6Odf23feCxK3LNUNMxjXg41pZQ=
|
||||
github.com/andybalholm/brotli v1.1.2-0.20250424173009-453214e765f3/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA=
|
||||
github.com/andybalholm/brotli v1.2.0 h1:ukwgCxwYrmACq68yiUqwIWnGY0cTPox/M94sVwToPjQ=
|
||||
github.com/andybalholm/brotli v1.2.0/go.mod h1:rzTDkvFWvIrjDXZHkuS16NPggd91W3kUSvPlQ1pLaKY=
|
||||
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8=
|
||||
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4=
|
||||
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
|
||||
@ -217,8 +217,8 @@ github.com/bmatcuk/doublestar/v4 v4.9.1 h1:X8jg9rRZmJd4yRy7ZeNDRnM+T3ZfHv15JiBJ/
|
||||
github.com/bmatcuk/doublestar/v4 v4.9.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc=
|
||||
github.com/bodgit/plumbing v1.3.0 h1:pf9Itz1JOQgn7vEOE7v7nlEfBykYqvUYioC61TwWCFU=
|
||||
github.com/bodgit/plumbing v1.3.0/go.mod h1:JOTb4XiRu5xfnmdnDJo6GmSbSbtSyufrsyZFByMtKEs=
|
||||
github.com/bodgit/sevenzip v1.6.0 h1:a4R0Wu6/P1o1pP/3VV++aEOcyeBxeO/xE2Y9NSTrr6A=
|
||||
github.com/bodgit/sevenzip v1.6.0/go.mod h1:zOBh9nJUof7tcrlqJFv1koWRrhz3LbDbUNngkuZxLMc=
|
||||
github.com/bodgit/sevenzip v1.6.1 h1:kikg2pUMYC9ljU7W9SaqHXhym5HyKm8/M/jd31fYan4=
|
||||
github.com/bodgit/sevenzip v1.6.1/go.mod h1:GVoYQbEVbOGT8n2pfqCIMRUaRjQ8F9oSqoBEqZh5fQ8=
|
||||
github.com/bodgit/windows v1.0.1 h1:tF7K6KOluPYygXa3Z2594zxlkbKPAOvqr97etrGNIz4=
|
||||
github.com/bodgit/windows v1.0.1/go.mod h1:a6JLwrB4KrTR5hBpp8FI9/9W9jJfeQ2h4XDXU74ZCdM=
|
||||
github.com/bradleyjkemp/cupaloy/v2 v2.8.0 h1:any4BmKE+jGIaMpnU8YgH/I2LPiLBufr6oMMlVBbn9M=
|
||||
@ -322,12 +322,12 @@ github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5Qvfr
|
||||
github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E=
|
||||
github.com/djherbis/times v1.6.0 h1:w2ctJ92J8fBvWPxugmXIv7Nz7Q3iDMKNx9v5ocVH20c=
|
||||
github.com/djherbis/times v1.6.0/go.mod h1:gOHeRAz2h+VJNZ5Gmc/o7iD9k4wW7NMVqieYCY99oc0=
|
||||
github.com/docker/cli v28.4.0+incompatible h1:RBcf3Kjw2pMtwui5V0DIMdyeab8glEw5QY0UUU4C9kY=
|
||||
github.com/docker/cli v28.4.0+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
|
||||
github.com/docker/cli v28.5.1+incompatible h1:ESutzBALAD6qyCLqbQSEf1a/U8Ybms5agw59yGVc+yY=
|
||||
github.com/docker/cli v28.5.1+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
|
||||
github.com/docker/distribution v2.8.3+incompatible h1:AtKxIZ36LoNK51+Z6RpzLpddBirtxJnzDrHLEKxTAYk=
|
||||
github.com/docker/distribution v2.8.3+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
|
||||
github.com/docker/docker v28.4.0+incompatible h1:KVC7bz5zJY/4AZe/78BIvCnPsLaC9T/zh72xnlrTTOk=
|
||||
github.com/docker/docker v28.4.0+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
|
||||
github.com/docker/docker v28.5.1+incompatible h1:Bm8DchhSD2J6PsFzxC35TZo4TLGR2PdW/E69rU45NhM=
|
||||
github.com/docker/docker v28.5.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
|
||||
github.com/docker/docker-credential-helpers v0.9.3 h1:gAm/VtF9wgqJMoxzT3Gj5p4AqIjCBS4wrsOh9yRqcz8=
|
||||
github.com/docker/docker-credential-helpers v0.9.3/go.mod h1:x+4Gbw9aGmChi3qTLZj8Dfn0TD20M/fuWy0E5+WDeCo=
|
||||
github.com/docker/go-connections v0.6.0 h1:LlMG9azAe1TqfR7sO+NJttz1gy6KO7VJBh+pMmjSD94=
|
||||
@ -385,8 +385,8 @@ github.com/fsnotify/fsnotify v1.8.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8
|
||||
github.com/gabriel-vasile/mimetype v1.4.9 h1:5k+WDwEsD9eTLL8Tz3L0VnmVh9QxGjRmjBvAG7U/oYY=
|
||||
github.com/gabriel-vasile/mimetype v1.4.9/go.mod h1:WnSQhFKJuBlRyLiKohA/2DtIlPFAbguNaG7QCHcyGok=
|
||||
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
|
||||
github.com/github/go-spdx/v2 v2.3.3 h1:QI7evnHWEfWkT54eJwkoV/f3a0xD3gLlnVmT5wQG6LE=
|
||||
github.com/github/go-spdx/v2 v2.3.3/go.mod h1:2ZxKsOhvBp+OYBDlsGnUMcchLeo2mrpEBn2L1C+U3IQ=
|
||||
github.com/github/go-spdx/v2 v2.3.4 h1:6VNAsYWvQge+SOeoubTlH81MY21d5uekXNIRGfXMNXo=
|
||||
github.com/github/go-spdx/v2 v2.3.4/go.mod h1:7LYNCshU2Gj17qZ0heJ5CQUKWWmpd98K7o93K8fJSMk=
|
||||
github.com/gkampitakis/ciinfo v0.3.2 h1:JcuOPk8ZU7nZQjdUhctuhQofk7BGHuIy0c9Ez8BNhXs=
|
||||
github.com/gkampitakis/ciinfo v0.3.2/go.mod h1:1NIwaOcFChN4fa/B0hEBdAb6npDlFL8Bwx4dfRLRqAo=
|
||||
github.com/gkampitakis/go-diff v1.3.2 h1:Qyn0J9XJSDTgnsgHRdz9Zp24RaJeKMUHg2+PDZZdC4M=
|
||||
@ -609,8 +609,8 @@ github.com/invopop/jsonschema v0.7.0 h1:2vgQcBz1n256N+FpX3Jq7Y17AjYt46Ig3zIWyy77
|
||||
github.com/invopop/jsonschema v0.7.0/go.mod h1:O9uiLokuu0+MGFlyiaqtWxwqJm41/+8Nj0lD7A36YH0=
|
||||
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A=
|
||||
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo=
|
||||
github.com/jedib0t/go-pretty/v6 v6.6.8 h1:JnnzQeRz2bACBobIaa/r+nqjvws4yEhcmaZ4n1QzsEc=
|
||||
github.com/jedib0t/go-pretty/v6 v6.6.8/go.mod h1:YwC5CE4fJ1HFUDeivSV1r//AmANFHyqczZk+U6BDALU=
|
||||
github.com/jedib0t/go-pretty/v6 v6.6.9 h1:PQecJLK3L8ODuVyMe2223b61oRJjrKnmXAncbWTv9MY=
|
||||
github.com/jedib0t/go-pretty/v6 v6.6.9/go.mod h1:YwC5CE4fJ1HFUDeivSV1r//AmANFHyqczZk+U6BDALU=
|
||||
github.com/jinzhu/copier v0.4.0 h1:w3ciUoD19shMCRargcpm0cm91ytaBhDvuRpz1ODO/U8=
|
||||
github.com/jinzhu/copier v0.4.0/go.mod h1:DfbEm0FYsaqBcKcFuvmOZb218JkPGtvSHsKg8S8hyyg=
|
||||
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
|
||||
@ -681,15 +681,15 @@ github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh
|
||||
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
|
||||
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d h1:5PJl274Y63IEHC+7izoQE9x6ikvDFZS2mDVS3drnohI=
|
||||
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
|
||||
github.com/mholt/archives v0.1.3 h1:aEAaOtNra78G+TvV5ohmXrJOAzf++dIlYeDW3N9q458=
|
||||
github.com/mholt/archives v0.1.3/go.mod h1:LUCGp++/IbV/I0Xq4SzcIR6uwgeh2yjnQWamjRQfLTU=
|
||||
github.com/mholt/archives v0.1.5 h1:Fh2hl1j7VEhc6DZs2DLMgiBNChUux154a1G+2esNvzQ=
|
||||
github.com/mholt/archives v0.1.5/go.mod h1:3TPMmBLPsgszL+1As5zECTuKwKvIfj6YcwWPpeTAXF4=
|
||||
github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
|
||||
github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso=
|
||||
github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI=
|
||||
github.com/mikelolasagasti/xz v1.0.1 h1:Q2F2jX0RYJUG3+WsM+FJknv+6eVjsjXNDV0KJXZzkD0=
|
||||
github.com/mikelolasagasti/xz v1.0.1/go.mod h1:muAirjiOUxPRXwm9HdDtB3uoRPrGnL85XHtokL9Hcgc=
|
||||
github.com/minio/minlz v1.0.0 h1:Kj7aJZ1//LlTP1DM8Jm7lNKvvJS2m74gyyXXn3+uJWQ=
|
||||
github.com/minio/minlz v1.0.0/go.mod h1:qT0aEB35q79LLornSzeDH75LBf3aH1MV+jB5w9Wasec=
|
||||
github.com/minio/minlz v1.0.1 h1:OUZUzXcib8diiX+JYxyRLIdomyZYzHct6EShOKtQY2A=
|
||||
github.com/minio/minlz v1.0.1/go.mod h1:qT0aEB35q79LLornSzeDH75LBf3aH1MV+jB5w9Wasec=
|
||||
github.com/mitchellh/cli v1.1.0/go.mod h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI=
|
||||
github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw=
|
||||
github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s=
|
||||
@ -743,8 +743,8 @@ github.com/nsf/jsondiff v0.0.0-20210926074059-1e845ec5d249 h1:NHrXEjTNQY7P0Zfx1a
|
||||
github.com/nsf/jsondiff v0.0.0-20210926074059-1e845ec5d249/go.mod h1:mpRZBD8SJ55OIICQ3iWH0Yz3cjzA61JdqMLoWXeB2+8=
|
||||
github.com/nwaples/rardecode v1.1.3 h1:cWCaZwfM5H7nAD6PyEdcVnczzV8i/JtotnyW/dD9lEc=
|
||||
github.com/nwaples/rardecode v1.1.3/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0=
|
||||
github.com/nwaples/rardecode/v2 v2.1.0 h1:JQl9ZoBPDy+nIZGb1mx8+anfHp/LV3NE2MjMiv0ct/U=
|
||||
github.com/nwaples/rardecode/v2 v2.1.0/go.mod h1:7uz379lSxPe6j9nvzxUZ+n7mnJNgjsRNb6IbvGVHRmw=
|
||||
github.com/nwaples/rardecode/v2 v2.2.0 h1:4ufPGHiNe1rYJxYfehALLjup4Ls3ck42CWwjKiOqu0A=
|
||||
github.com/nwaples/rardecode/v2 v2.2.0/go.mod h1:7uz379lSxPe6j9nvzxUZ+n7mnJNgjsRNb6IbvGVHRmw=
|
||||
github.com/olekukonko/errors v1.1.0 h1:RNuGIh15QdDenh+hNvKrJkmxxjV4hcS50Db478Ou5sM=
|
||||
github.com/olekukonko/errors v1.1.0/go.mod h1:ppzxA5jBKcO1vIpCXQ9ZqgDh8iwODz6OXIGKU8r5m4Y=
|
||||
github.com/olekukonko/ll v0.0.9 h1:Y+1YqDfVkqMWuEQMclsF9HUR5+a82+dxJuL1HHSRpxI=
|
||||
@ -851,8 +851,8 @@ github.com/sirupsen/logrus v1.9.4-0.20230606125235-dd1b4c2e81af h1:Sp5TG9f7K39yf
|
||||
github.com/sirupsen/logrus v1.9.4-0.20230606125235-dd1b4c2e81af/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
|
||||
github.com/skeema/knownhosts v1.3.1 h1:X2osQ+RAjK76shCbvhHHHVl3ZlgDm8apHEHFqRjnBY8=
|
||||
github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQNxPwTcfiY=
|
||||
github.com/sorairolake/lzip-go v0.3.5 h1:ms5Xri9o1JBIWvOFAorYtUNik6HI3HgBTkISiqu0Cwg=
|
||||
github.com/sorairolake/lzip-go v0.3.5/go.mod h1:N0KYq5iWrMXI0ZEXKXaS9hCyOjZUQdBDEIbXfoUwbdk=
|
||||
github.com/sorairolake/lzip-go v0.3.8 h1:j5Q2313INdTA80ureWYRhX+1K78mUXfMoPZCw/ivWik=
|
||||
github.com/sorairolake/lzip-go v0.3.8/go.mod h1:JcBqGMV0frlxwrsE9sMWXDjqn3EeVf0/54YPsw66qkU=
|
||||
github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo=
|
||||
github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0=
|
||||
github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
|
||||
@ -1061,8 +1061,8 @@ golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/mod v0.28.0 h1:gQBtGhjxykdjY9YhZpSlZIsbnaE2+PgjfLWUQTnoZ1U=
|
||||
golang.org/x/mod v0.28.0/go.mod h1:yfB/L0NOf/kmEbXjzCPOx1iK1fRutOydrCMsqRhEBxI=
|
||||
golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA=
|
||||
golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w=
|
||||
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
@ -1296,8 +1296,8 @@ golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||
golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||
golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||
golang.org/x/tools v0.37.0 h1:DVSRzp7FwePZW356yEAChSdNcQo6Nsp+fex1SUW09lE=
|
||||
golang.org/x/tools v0.37.0/go.mod h1:MBN5QPQtLMHVdvsbtarmTNukZDdgwdwlO5qGacAzF0w=
|
||||
golang.org/x/tools v0.38.0 h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ=
|
||||
golang.org/x/tools v0.38.0/go.mod h1:yEsQ/d/YK8cjh0L6rZlY8tgtlKiBNTL14pGDJPJpYQs=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
@ -1489,18 +1489,18 @@ honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWh
|
||||
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
|
||||
honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
|
||||
honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
|
||||
modernc.org/cc/v4 v4.26.2 h1:991HMkLjJzYBIfha6ECZdjrIYz2/1ayr+FL8GN+CNzM=
|
||||
modernc.org/cc/v4 v4.26.2/go.mod h1:uVtb5OGqUKpoLWhqwNQo/8LwvoiEBLvZXIQ/SmO6mL0=
|
||||
modernc.org/ccgo/v4 v4.28.0 h1:rjznn6WWehKq7dG4JtLRKxb52Ecv8OUGah8+Z/SfpNU=
|
||||
modernc.org/ccgo/v4 v4.28.0/go.mod h1:JygV3+9AV6SmPhDasu4JgquwU81XAKLd3OKTUDNOiKE=
|
||||
modernc.org/fileutil v1.3.8 h1:qtzNm7ED75pd1C7WgAGcK4edm4fvhtBsEiI/0NQ54YM=
|
||||
modernc.org/fileutil v1.3.8/go.mod h1:HxmghZSZVAz/LXcMNwZPA/DRrQZEVP9VX0V4LQGQFOc=
|
||||
modernc.org/cc/v4 v4.26.5 h1:xM3bX7Mve6G8K8b+T11ReenJOT+BmVqQj0FY5T4+5Y4=
|
||||
modernc.org/cc/v4 v4.26.5/go.mod h1:uVtb5OGqUKpoLWhqwNQo/8LwvoiEBLvZXIQ/SmO6mL0=
|
||||
modernc.org/ccgo/v4 v4.28.1 h1:wPKYn5EC/mYTqBO373jKjvX2n+3+aK7+sICCv4Fjy1A=
|
||||
modernc.org/ccgo/v4 v4.28.1/go.mod h1:uD+4RnfrVgE6ec9NGguUNdhqzNIeeomeXf6CL0GTE5Q=
|
||||
modernc.org/fileutil v1.3.40 h1:ZGMswMNc9JOCrcrakF1HrvmergNLAmxOPjizirpfqBA=
|
||||
modernc.org/fileutil v1.3.40/go.mod h1:HxmghZSZVAz/LXcMNwZPA/DRrQZEVP9VX0V4LQGQFOc=
|
||||
modernc.org/gc/v2 v2.6.5 h1:nyqdV8q46KvTpZlsw66kWqwXRHdjIlJOhG6kxiV/9xI=
|
||||
modernc.org/gc/v2 v2.6.5/go.mod h1:YgIahr1ypgfe7chRuJi2gD7DBQiKSLMPgBQe9oIiito=
|
||||
modernc.org/goabi0 v0.2.0 h1:HvEowk7LxcPd0eq6mVOAEMai46V+i7Jrj13t4AzuNks=
|
||||
modernc.org/goabi0 v0.2.0/go.mod h1:CEFRnnJhKvWT1c1JTI3Avm+tgOWbkOu5oPA8eH8LnMI=
|
||||
modernc.org/libc v1.66.3 h1:cfCbjTUcdsKyyZZfEUKfoHcP3S0Wkvz3jgSzByEWVCQ=
|
||||
modernc.org/libc v1.66.3/go.mod h1:XD9zO8kt59cANKvHPXpx7yS2ELPheAey0vjIuZOhOU8=
|
||||
modernc.org/libc v1.66.10 h1:yZkb3YeLx4oynyR+iUsXsybsX4Ubx7MQlSYEw4yj59A=
|
||||
modernc.org/libc v1.66.10/go.mod h1:8vGSEwvoUoltr4dlywvHqjtAqHBaw0j1jI7iFBTAr2I=
|
||||
modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU=
|
||||
modernc.org/mathutil v1.7.1/go.mod h1:4p5IwJITfppl0G4sUEDtCr4DthTaT47/N3aT6MhfgJg=
|
||||
modernc.org/memory v1.11.0 h1:o4QC8aMQzmcwCK3t3Ux/ZHmwFPzE6hf2Y5LbkRs+hbI=
|
||||
@ -1509,8 +1509,8 @@ modernc.org/opt v0.1.4 h1:2kNGMRiUjrp4LcaPuLY2PzUfqM/w9N23quVwhKt5Qm8=
|
||||
modernc.org/opt v0.1.4/go.mod h1:03fq9lsNfvkYSfxrfUhZCWPk1lm4cq4N+Bh//bEtgns=
|
||||
modernc.org/sortutil v1.2.1 h1:+xyoGf15mM3NMlPDnFqrteY07klSFxLElE2PVuWIJ7w=
|
||||
modernc.org/sortutil v1.2.1/go.mod h1:7ZI3a3REbai7gzCLcotuw9AC4VZVpYMjDzETGsSMqJE=
|
||||
modernc.org/sqlite v1.39.0 h1:6bwu9Ooim0yVYA7IZn9demiQk/Ejp0BtTjBWFLymSeY=
|
||||
modernc.org/sqlite v1.39.0/go.mod h1:cPTJYSlgg3Sfg046yBShXENNtPrWrDX8bsbAQBzgQ5E=
|
||||
modernc.org/sqlite v1.39.1 h1:H+/wGFzuSCIEVCvXYVHX5RQglwhMOvtHSv+VtidL2r4=
|
||||
modernc.org/sqlite v1.39.1/go.mod h1:9fjQZ0mB1LLP0GYrp39oOJXx/I2sxEnZtzCmEQIKvGE=
|
||||
modernc.org/strutil v1.2.1 h1:UneZBkQA+DX2Rp35KcM69cSsNES9ly8mQWD71HKlOA0=
|
||||
modernc.org/strutil v1.2.1/go.mod h1:EHkiggD70koQxjVdSBM3JKM7k6L0FbGE5eymy9i3B9A=
|
||||
modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y=
|
||||
|
||||
@ -3,5 +3,5 @@ package internal
|
||||
const (
|
||||
// JSONSchemaVersion is the current schema version output by the JSON encoder
|
||||
// This is roughly following the "SchemaVer" guidelines for versioning the JSON schema. Please see schema/json/README.md for details on how to increment.
|
||||
JSONSchemaVersion = "16.0.40"
|
||||
JSONSchemaVersion = "16.0.41"
|
||||
)
|
||||
|
||||
@ -81,6 +81,10 @@ func Test_EnvironmentTask(t *testing.T) {
|
||||
// get the source
|
||||
theSource, err := syft.GetSource(context.Background(), tarPath, syft.DefaultGetSourceConfig().WithSources("docker-archive"))
|
||||
require.NoError(t, err)
|
||||
t.Cleanup(func() {
|
||||
require.NoError(t, theSource.Close())
|
||||
})
|
||||
|
||||
resolver, err := theSource.FileResolver(source.SquashedScope)
|
||||
require.NoError(t, err)
|
||||
|
||||
|
||||
@ -51,6 +51,7 @@ func AllTypes() []any {
|
||||
pkg.PhpPeclEntry{},
|
||||
pkg.PortageEntry{},
|
||||
pkg.PythonPackage{},
|
||||
pkg.PythonPdmLockEntry{},
|
||||
pkg.PythonPipfileLockEntry{},
|
||||
pkg.PythonPoetryLockEntry{},
|
||||
pkg.PythonRequirementsEntry{},
|
||||
|
||||
@ -102,6 +102,7 @@ var jsonTypes = makeJSONTypes(
|
||||
jsonNames(pkg.PhpPearEntry{}, "php-pear-entry"),
|
||||
jsonNames(pkg.PortageEntry{}, "portage-db-entry", "PortageMetadata"),
|
||||
jsonNames(pkg.PythonPackage{}, "python-package", "PythonPackageMetadata"),
|
||||
jsonNames(pkg.PythonPdmLockEntry{}, "python-pdm-lock-entry"),
|
||||
jsonNames(pkg.PythonPipfileLockEntry{}, "python-pipfile-lock-entry", "PythonPipfileLockMetadata"),
|
||||
jsonNames(pkg.PythonPoetryLockEntry{}, "python-poetry-lock-entry", "PythonPoetryLockMetadata"),
|
||||
jsonNames(pkg.PythonRequirementsEntry{}, "python-pip-requirements-entry", "PythonRequirementsMetadata"),
|
||||
|
||||
4011
schema/json/schema-16.0.41.json
Normal file
4011
schema/json/schema-16.0.41.json
Normal file
File diff suppressed because it is too large
Load Diff
@ -1,6 +1,6 @@
|
||||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "anchore.io/schema/syft/json/16.0.40/document",
|
||||
"$id": "anchore.io/schema/syft/json/16.0.41/document",
|
||||
"$ref": "#/$defs/Document",
|
||||
"$defs": {
|
||||
"AlpmDbEntry": {
|
||||
@ -2549,6 +2549,9 @@
|
||||
{
|
||||
"$ref": "#/$defs/PythonPackage"
|
||||
},
|
||||
{
|
||||
"$ref": "#/$defs/PythonPdmLockEntry"
|
||||
},
|
||||
{
|
||||
"$ref": "#/$defs/PythonPipRequirementsEntry"
|
||||
},
|
||||
@ -3131,6 +3134,35 @@
|
||||
],
|
||||
"description": "PythonPackage represents all captured data for a python egg or wheel package (specifically as outlined in the PyPA core metadata specification https://packaging.python.org/en/latest/specifications/core-metadata/)."
|
||||
},
|
||||
"PythonPdmLockEntry": {
|
||||
"properties": {
|
||||
"summary": {
|
||||
"type": "string",
|
||||
"description": "Summary provides a description of the package"
|
||||
},
|
||||
"files": {
|
||||
"items": {
|
||||
"$ref": "#/$defs/PythonFileRecord"
|
||||
},
|
||||
"type": "array",
|
||||
"description": "Files are the package files with their paths and hash digests"
|
||||
},
|
||||
"dependencies": {
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "array",
|
||||
"description": "Dependencies are the dependency specifications, without environment qualifiers"
|
||||
}
|
||||
},
|
||||
"type": "object",
|
||||
"required": [
|
||||
"summary",
|
||||
"files",
|
||||
"dependencies"
|
||||
],
|
||||
"description": "PythonPdmLockEntry represents a single package entry within a pdm.lock file."
|
||||
},
|
||||
"PythonPipRequirementsEntry": {
|
||||
"properties": {
|
||||
"name": {
|
||||
|
||||
@ -3,6 +3,7 @@ package executable
|
||||
import (
|
||||
"debug/macho"
|
||||
|
||||
"github.com/anchore/syft/internal"
|
||||
"github.com/anchore/syft/syft/file"
|
||||
"github.com/anchore/syft/syft/internal/unionreader"
|
||||
)
|
||||
@ -19,20 +20,38 @@ const (
|
||||
func findMachoFeatures(data *file.Executable, reader unionreader.UnionReader) error {
|
||||
// TODO: support security features
|
||||
|
||||
// TODO: support multi-architecture binaries
|
||||
f, err := macho.NewFile(reader)
|
||||
// a universal binary may have multiple architectures, so we need to check each one
|
||||
readers, err := unionreader.GetReaders(reader)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
libs, err := f.ImportedLibraries()
|
||||
var libs []string
|
||||
for _, r := range readers {
|
||||
f, err := macho.NewFile(r)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
data.ImportedLibraries = libs
|
||||
rLibs, err := f.ImportedLibraries()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
libs = append(libs, rLibs...)
|
||||
|
||||
// TODO handle only some having entrypoints/exports? If that is even practical
|
||||
// only check for entrypoint if we don't already have one
|
||||
if !data.HasEntrypoint {
|
||||
data.HasEntrypoint = machoHasEntrypoint(f)
|
||||
}
|
||||
// only check for exports if we don't already have them
|
||||
if !data.HasExports {
|
||||
data.HasExports = machoHasExports(f)
|
||||
}
|
||||
}
|
||||
|
||||
// de-duplicate libraries
|
||||
data.ImportedLibraries = internal.NewSet(libs...).ToSlice()
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
@ -9,6 +9,7 @@ import (
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/anchore/syft/syft/file"
|
||||
"github.com/anchore/syft/syft/internal/unionreader"
|
||||
)
|
||||
|
||||
@ -83,3 +84,39 @@ func Test_machoHasExports(t *testing.T) {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func Test_machoUniversal(t *testing.T) {
|
||||
readerForFixture := func(t *testing.T, fixture string) unionreader.UnionReader {
|
||||
t.Helper()
|
||||
f, err := os.Open(filepath.Join("test-fixtures/shared-info", fixture))
|
||||
require.NoError(t, err)
|
||||
return f
|
||||
}
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
fixture string
|
||||
want file.Executable
|
||||
}{
|
||||
{
|
||||
name: "universal lib",
|
||||
fixture: "bin/libhello_universal.dylib",
|
||||
want: file.Executable{HasExports: true, HasEntrypoint: false},
|
||||
},
|
||||
{
|
||||
name: "universal application",
|
||||
fixture: "bin/hello_mac_universal",
|
||||
want: file.Executable{HasExports: false, HasEntrypoint: true},
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
var data file.Executable
|
||||
err := findMachoFeatures(&data, readerForFixture(t, tt.fixture))
|
||||
require.NoError(t, err)
|
||||
|
||||
assert.Equal(t, tt.want.HasEntrypoint, data.HasEntrypoint)
|
||||
assert.Equal(t, tt.want.HasExports, data.HasExports)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@ -2,13 +2,13 @@
|
||||
|
||||
BIN=../../bin
|
||||
|
||||
all: $(BIN)/hello_linux $(BIN)/hello.exe $(BIN)/hello_mac
|
||||
all: $(BIN)/hello_linux $(BIN)/hello.exe $(BIN)/hello_mac $(BIN)/hello_mac_universal
|
||||
|
||||
linux: $(BIN)/libhello.so
|
||||
|
||||
windows: $(BIN)/libhello.dll
|
||||
|
||||
mac: $(BIN)/libhello.dylib
|
||||
mac: $(BIN)/libhello.dylib $(BIN)/hello_mac_universal
|
||||
|
||||
$(BIN)/hello_linux:
|
||||
gcc hello.c -o $(BIN)/hello_linux
|
||||
@ -19,5 +19,8 @@ $(BIN)/hello.exe:
|
||||
$(BIN)/hello_mac:
|
||||
o64-clang hello.c -o $(BIN)/hello_mac
|
||||
|
||||
$(BIN)/hello_mac_universal:
|
||||
o64-clang -arch arm64 -arch x86_64 hello.c -o $(BIN)/hello_mac_universal
|
||||
|
||||
clean:
|
||||
rm -f $(BIN)/hello_linux $(BIN)/hello.exe $(BIN)/hello_mac
|
||||
rm -f $(BIN)/hello_linux $(BIN)/hello.exe $(BIN)/hello_mac $(BIN)/hello_mac_universal
|
||||
|
||||
@ -2,13 +2,13 @@
|
||||
|
||||
BIN=../../bin
|
||||
|
||||
all: $(BIN)/libhello.so $(BIN)/libhello.dll $(BIN)/libhello.dylib
|
||||
all: $(BIN)/libhello.so $(BIN)/libhello.dll $(BIN)/libhello.dylib $(BIN)/libhello_universal.dylib
|
||||
|
||||
linux: $(BIN)/libhello.so
|
||||
|
||||
windows: $(BIN)/libhello.dll
|
||||
|
||||
mac: $(BIN)/libhello.dylib
|
||||
mac: $(BIN)/libhello.dylib $(BIN)/libhello_universal.dylib
|
||||
|
||||
$(BIN)/libhello.so:
|
||||
gcc -shared -fPIC -o $(BIN)/libhello.so hello.c
|
||||
@ -19,5 +19,8 @@ $(BIN)/libhello.dll:
|
||||
$(BIN)/libhello.dylib:
|
||||
o64-clang -dynamiclib -o $(BIN)/libhello.dylib hello.c
|
||||
|
||||
$(BIN)/libhello_universal.dylib:
|
||||
o64-clang -dynamiclib -arch arm64 -arch x86_64 hello.c -o $(BIN)/libhello_universal.dylib
|
||||
|
||||
clean:
|
||||
rm -f $(BIN)/libhello.so $(BIN)/hello.dll $(BIN)/libhello.dylib $(BIN)/libhello.a
|
||||
rm -f $(BIN)/libhello.so $(BIN)/hello.dll $(BIN)/libhello.dylib $(BIN)/libhello.a $(BIN)/libhello_universal.dylib
|
||||
|
||||
@ -87,6 +87,9 @@ func toGithubManifests(s *sbom.SBOM) Manifests {
|
||||
}
|
||||
|
||||
name := dependencyName(p)
|
||||
if name == "" || p.PURL == "" {
|
||||
continue
|
||||
}
|
||||
manifest.Resolved[name] = DependencyNode{
|
||||
PackageURL: p.PURL,
|
||||
Metadata: toDependencyMetadata(p),
|
||||
|
||||
@ -16,11 +16,6 @@
|
||||
"source_location": "redacted/some/path/some/path/pkg1"
|
||||
},
|
||||
"resolved": {
|
||||
"": {
|
||||
"package_url": "a-purl-2",
|
||||
"relationship": "direct",
|
||||
"scope": "runtime"
|
||||
},
|
||||
"pkg:deb/debian/package-2@2.0.1": {
|
||||
"package_url": "pkg:deb/debian/package-2@2.0.1",
|
||||
"relationship": "direct",
|
||||
|
||||
@ -17,13 +17,6 @@
|
||||
},
|
||||
"metadata": {
|
||||
"syft:filesystem":"redacted"
|
||||
},
|
||||
"resolved": {
|
||||
"": {
|
||||
"package_url": "a-purl-1",
|
||||
"relationship": "direct",
|
||||
"scope": "runtime"
|
||||
}
|
||||
}
|
||||
},
|
||||
"user-image-input:/somefile-2.txt": {
|
||||
|
||||
@ -29,6 +29,7 @@ func Backfill(p *pkg.Package) {
|
||||
|
||||
var cpes []cpe.CPE
|
||||
epoch := ""
|
||||
rpmmod := ""
|
||||
|
||||
for _, qualifier := range purl.Qualifiers {
|
||||
switch qualifier.Key {
|
||||
@ -44,6 +45,8 @@ func Backfill(p *pkg.Package) {
|
||||
}
|
||||
case pkg.PURLQualifierEpoch:
|
||||
epoch = qualifier.Value
|
||||
case pkg.PURLQualifierRpmModularity:
|
||||
rpmmod = qualifier.Value
|
||||
}
|
||||
}
|
||||
|
||||
@ -63,6 +66,10 @@ func Backfill(p *pkg.Package) {
|
||||
setJavaMetadataFromPurl(p, purl)
|
||||
}
|
||||
|
||||
if p.Type == pkg.RpmPkg {
|
||||
setRpmMetadataFromPurl(p, rpmmod)
|
||||
}
|
||||
|
||||
for _, c := range cpes {
|
||||
if slices.Contains(p.CPEs, c) {
|
||||
continue
|
||||
@ -82,6 +89,35 @@ func setJavaMetadataFromPurl(p *pkg.Package, _ packageurl.PackageURL) {
|
||||
}
|
||||
}
|
||||
|
||||
func setRpmMetadataFromPurl(p *pkg.Package, rpmmod string) {
|
||||
if p.Type != pkg.RpmPkg {
|
||||
return
|
||||
}
|
||||
if rpmmod == "" {
|
||||
return
|
||||
}
|
||||
|
||||
if p.Metadata == nil {
|
||||
p.Metadata = pkg.RpmDBEntry{
|
||||
ModularityLabel: &rpmmod,
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
switch m := p.Metadata.(type) {
|
||||
case pkg.RpmDBEntry:
|
||||
if m.ModularityLabel == nil {
|
||||
m.ModularityLabel = &rpmmod
|
||||
p.Metadata = m
|
||||
}
|
||||
case pkg.RpmArchive:
|
||||
if m.ModularityLabel == nil {
|
||||
m.ModularityLabel = &rpmmod
|
||||
p.Metadata = m
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func setVersionFromPurl(p *pkg.Package, purl packageurl.PackageURL, epoch string) {
|
||||
if p.Version == "" {
|
||||
p.Version = purl.Version
|
||||
|
||||
@ -53,6 +53,21 @@ func Test_Backfill(t *testing.T) {
|
||||
Version: "1:1.12.8-26.el8",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "rpm with rpmmod",
|
||||
in: pkg.Package{
|
||||
PURL: "pkg:rpm/redhat/httpd@2.4.37-51?arch=x86_64&distro=rhel-8.7&rpmmod=httpd:2.4",
|
||||
},
|
||||
expected: pkg.Package{
|
||||
PURL: "pkg:rpm/redhat/httpd@2.4.37-51?arch=x86_64&distro=rhel-8.7&rpmmod=httpd:2.4",
|
||||
Type: pkg.RpmPkg,
|
||||
Name: "httpd",
|
||||
Version: "2.4.37-51",
|
||||
Metadata: pkg.RpmDBEntry{
|
||||
ModularityLabel: strRef("httpd:2.4"),
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad cpe",
|
||||
in: pkg.Package{
|
||||
@ -171,3 +186,7 @@ func Test_nameFromPurl(t *testing.T) {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func strRef(s string) *string {
|
||||
return &s
|
||||
}
|
||||
|
||||
@ -42,6 +42,7 @@ func Test_OriginatorSupplier(t *testing.T) {
|
||||
pkg.PhpPeclEntry{},
|
||||
pkg.PortageEntry{},
|
||||
pkg.PythonPipfileLockEntry{},
|
||||
pkg.PythonPdmLockEntry{},
|
||||
pkg.PythonRequirementsEntry{},
|
||||
pkg.PythonPoetryLockEntry{},
|
||||
pkg.PythonUvLockEntry{},
|
||||
@ -342,6 +343,25 @@ func Test_OriginatorSupplier(t *testing.T) {
|
||||
originator: "Person: auth (auth@auth.gov)",
|
||||
supplier: "Person: auth (auth@auth.gov)",
|
||||
},
|
||||
{
|
||||
name: "from python PDM lock",
|
||||
input: pkg.Package{
|
||||
Metadata: pkg.PythonPdmLockEntry{
|
||||
Files: []pkg.PythonFileRecord{
|
||||
{
|
||||
Path: "",
|
||||
Digest: &pkg.PythonFileDigest{
|
||||
Algorithm: "sha256",
|
||||
Value: "3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651",
|
||||
},
|
||||
},
|
||||
},
|
||||
Summary: "A test package",
|
||||
},
|
||||
},
|
||||
originator: "",
|
||||
supplier: "",
|
||||
},
|
||||
{
|
||||
name: "from r -- maintainer > author",
|
||||
input: pkg.Package{
|
||||
|
||||
37
syft/get_source_config_test.go
Normal file
37
syft/get_source_config_test.go
Normal file
@ -0,0 +1,37 @@
|
||||
package syft
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/anchore/stereoscope"
|
||||
"github.com/anchore/syft/syft/source/sourceproviders"
|
||||
)
|
||||
|
||||
func TestGetProviders_DefaultImagePullSource(t *testing.T) {
|
||||
userInput := ""
|
||||
cfg := &GetSourceConfig{DefaultImagePullSource: stereoscope.RegistryTag}
|
||||
allSourceProviders := sourceproviders.All(userInput, cfg.SourceProviderConfig)
|
||||
|
||||
providers, err := cfg.getProviders(userInput)
|
||||
if err != nil {
|
||||
t.Errorf("Expected no error for DefaultImagePullSource parameter, got: %v", err)
|
||||
}
|
||||
|
||||
if len(providers) != len(allSourceProviders) {
|
||||
t.Errorf("Expected %d providers, got %d", len(allSourceProviders), len(providers))
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetProviders_Sources(t *testing.T) {
|
||||
userInput := ""
|
||||
cfg := &GetSourceConfig{Sources: []string{stereoscope.RegistryTag}}
|
||||
|
||||
providers, err := cfg.getProviders(userInput)
|
||||
if err != nil {
|
||||
t.Errorf("Expected no error for Sources parameter, got: %v", err)
|
||||
}
|
||||
|
||||
if len(providers) != 1 {
|
||||
t.Errorf("Expected 1 providers, got %d", len(providers))
|
||||
}
|
||||
}
|
||||
@ -19,16 +19,16 @@ type Directory struct {
|
||||
indexer *directoryIndexer
|
||||
}
|
||||
|
||||
func NewFromDirectory(root string, base string, pathFilters ...PathIndexVisitor) (*Directory, error) {
|
||||
r, err := newFromDirectoryWithoutIndex(root, base, pathFilters...)
|
||||
func NewFromDirectory(root, base string, pathFilters ...PathIndexVisitor) (*Directory, error) {
|
||||
resolver, err := newFromDirectoryWithoutIndex(root, base, pathFilters...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return r, r.buildIndex()
|
||||
return resolver, resolver.buildIndex()
|
||||
}
|
||||
|
||||
func newFromDirectoryWithoutIndex(root string, base string, pathFilters ...PathIndexVisitor) (*Directory, error) {
|
||||
func newFromDirectoryWithoutIndex(root, base string, pathFilters ...PathIndexVisitor) (*Directory, error) {
|
||||
chroot, err := NewChrootContextFromCWD(root, base)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to interpret chroot context: %w", err)
|
||||
@ -66,6 +66,6 @@ func (r *Directory) buildIndex() error {
|
||||
}
|
||||
|
||||
// Stringer to represent a directory path data source
|
||||
func (r Directory) String() string {
|
||||
func (r *Directory) String() string {
|
||||
return fmt.Sprintf("dir:%s", r.path)
|
||||
}
|
||||
|
||||
@ -322,7 +322,7 @@ func (r directoryIndexer) addDirectoryToIndex(p string, info os.FileInfo) error
|
||||
return err
|
||||
}
|
||||
|
||||
metadata := file.NewMetadataFromPath(p, info)
|
||||
metadata := NewMetadataFromPath(p, info)
|
||||
r.index.Add(*ref, metadata)
|
||||
|
||||
return nil
|
||||
@ -334,7 +334,7 @@ func (r directoryIndexer) addFileToIndex(p string, info os.FileInfo) error {
|
||||
return err
|
||||
}
|
||||
|
||||
metadata := file.NewMetadataFromPath(p, info)
|
||||
metadata := NewMetadataFromPath(p, info)
|
||||
r.index.Add(*ref, metadata)
|
||||
|
||||
return nil
|
||||
@ -416,7 +416,7 @@ func (r directoryIndexer) addSymlinkToIndex(p string, info os.FileInfo) (string,
|
||||
targetAbsPath = filepath.Clean(filepath.Join(path.Dir(p), linkTarget))
|
||||
}
|
||||
|
||||
metadata := file.NewMetadataFromPath(p, info)
|
||||
metadata := NewMetadataFromPath(p, info)
|
||||
metadata.LinkDestination = linkTarget
|
||||
r.index.Add(*ref, metadata)
|
||||
|
||||
|
||||
@ -17,17 +17,31 @@ type File struct {
|
||||
indexer *fileIndexer
|
||||
}
|
||||
|
||||
// parent should be the symlink free absolute path to the parent directory
|
||||
// NewFromFile single file analyser
|
||||
// path is the filepath of the file we're creating content access for
|
||||
func NewFromFile(parent, path string, pathFilters ...PathIndexVisitor) (*File, error) {
|
||||
chroot, err := NewChrootContextFromCWD(parent, parent)
|
||||
func NewFromFile(path string, pathFilters ...PathIndexVisitor) (*File, error) {
|
||||
resolver, err := newFromFileWithoutIndex(path, pathFilters...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return resolver, resolver.buildIndex()
|
||||
}
|
||||
|
||||
func newFromFileWithoutIndex(path string, pathFilters ...PathIndexVisitor) (*File, error) {
|
||||
absParentDir, err := absoluteSymlinkFreePathToParent(path)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
chroot, err := NewChrootContextFromCWD(absParentDir, absParentDir)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to interpret chroot context: %w", err)
|
||||
}
|
||||
|
||||
cleanBase := chroot.Base()
|
||||
|
||||
file := &File{
|
||||
return &File{
|
||||
path: path,
|
||||
FiletreeResolver: FiletreeResolver{
|
||||
Chroot: *chroot,
|
||||
@ -36,9 +50,7 @@ func NewFromFile(parent, path string, pathFilters ...PathIndexVisitor) (*File, e
|
||||
Opener: nativeOSFileOpener,
|
||||
},
|
||||
indexer: newFileIndexer(path, cleanBase, pathFilters...),
|
||||
}
|
||||
|
||||
return file, file.buildIndex()
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (r *File) buildIndex() error {
|
||||
@ -58,6 +70,6 @@ func (r *File) buildIndex() error {
|
||||
}
|
||||
|
||||
// Stringer to represent a file path data source
|
||||
func (r File) String() string {
|
||||
func (r *File) String() string {
|
||||
return fmt.Sprintf("file:%s", r.path)
|
||||
}
|
||||
|
||||
@ -173,7 +173,7 @@ func (r *fileIndexer) addDirectoryToIndex(path string, info os.FileInfo) error {
|
||||
return err
|
||||
}
|
||||
|
||||
metadata := file.NewMetadataFromPath(path, info)
|
||||
metadata := NewMetadataFromPath(path, info)
|
||||
r.index.Add(*ref, metadata)
|
||||
|
||||
return nil
|
||||
@ -185,7 +185,7 @@ func (r *fileIndexer) addFileToIndex(path string, info os.FileInfo) error {
|
||||
return err
|
||||
}
|
||||
|
||||
metadata := file.NewMetadataFromPath(path, info)
|
||||
metadata := NewMetadataFromPath(path, info)
|
||||
r.index.Add(*ref, metadata)
|
||||
|
||||
return nil
|
||||
|
||||
@ -1384,9 +1384,10 @@ func TestFileResolver_FilesByPath(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, parentPath)
|
||||
|
||||
resolver, err := NewFromFile(parentPath, tt.filePath)
|
||||
resolver, err := NewFromFile(tt.filePath)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, resolver)
|
||||
assert.Equal(t, resolver.Chroot.Base(), parentPath)
|
||||
|
||||
refs, err := resolver.FilesByPath(tt.fileByPathInput)
|
||||
require.NoError(t, err)
|
||||
@ -1431,8 +1432,11 @@ func TestFileResolver_MultipleFilesByPath(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, parentPath)
|
||||
|
||||
resolver, err := NewFromFile(parentPath, filePath)
|
||||
resolver, err := NewFromFile(filePath)
|
||||
assert.NoError(t, err)
|
||||
require.NotNil(t, resolver)
|
||||
assert.Equal(t, resolver.Chroot.Base(), parentPath)
|
||||
|
||||
refs, err := resolver.FilesByPath(tt.input...)
|
||||
assert.NoError(t, err)
|
||||
|
||||
@ -1449,8 +1453,11 @@ func TestFileResolver_FilesByGlob(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, parentPath)
|
||||
|
||||
resolver, err := NewFromFile(parentPath, filePath)
|
||||
resolver, err := NewFromFile(filePath)
|
||||
assert.NoError(t, err)
|
||||
require.NotNil(t, resolver)
|
||||
assert.Equal(t, resolver.Chroot.Base(), parentPath)
|
||||
|
||||
refs, err := resolver.FilesByGlob("**/*.txt")
|
||||
assert.NoError(t, err)
|
||||
|
||||
@ -1476,8 +1483,11 @@ func Test_fileResolver_FilesByMIMEType(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, parentPath)
|
||||
|
||||
resolver, err := NewFromFile(parentPath, filePath)
|
||||
resolver, err := NewFromFile(filePath)
|
||||
assert.NoError(t, err)
|
||||
require.NotNil(t, resolver)
|
||||
assert.Equal(t, resolver.Chroot.Base(), parentPath)
|
||||
|
||||
locations, err := resolver.FilesByMIMEType(test.mimeType)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, test.expectedPaths.Size(), len(locations))
|
||||
@ -1497,10 +1507,12 @@ func Test_fileResolver_FileContentsByLocation(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, parentPath)
|
||||
|
||||
r, err := NewFromFile(parentPath, filePath)
|
||||
resolver, err := NewFromFile(filePath)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, resolver)
|
||||
assert.Equal(t, resolver.Chroot.Base(), parentPath)
|
||||
|
||||
exists, existingPath, err := r.Tree.File(stereoscopeFile.Path(filepath.Join(cwd, "test-fixtures/image-simple/file-1.txt")))
|
||||
exists, existingPath, err := resolver.Tree.File(stereoscopeFile.Path(filepath.Join(cwd, "test-fixtures/image-simple/file-1.txt")))
|
||||
require.True(t, exists)
|
||||
require.NoError(t, err)
|
||||
require.True(t, existingPath.HasReference())
|
||||
@ -1525,7 +1537,7 @@ func Test_fileResolver_FileContentsByLocation(t *testing.T) {
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
|
||||
actual, err := r.FileContentsByLocation(test.location)
|
||||
actual, err := resolver.FileContentsByLocation(test.location)
|
||||
if test.err {
|
||||
require.Error(t, err)
|
||||
return
|
||||
@ -1546,8 +1558,11 @@ func TestFileResolver_AllLocations_errorOnDirRequest(t *testing.T) {
|
||||
parentPath, err := absoluteSymlinkFreePathToParent(filePath)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, parentPath)
|
||||
resolver, err := NewFromFile(parentPath, filePath)
|
||||
|
||||
resolver, err := NewFromFile(filePath)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, resolver)
|
||||
assert.Equal(t, resolver.Chroot.Base(), parentPath)
|
||||
|
||||
var dirLoc *file.Location
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
@ -1575,8 +1590,11 @@ func TestFileResolver_AllLocations(t *testing.T) {
|
||||
parentPath, err := absoluteSymlinkFreePathToParent(filePath)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, parentPath)
|
||||
resolver, err := NewFromFile(parentPath, filePath)
|
||||
|
||||
resolver, err := NewFromFile(filePath)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, resolver)
|
||||
assert.Equal(t, resolver.Chroot.Base(), parentPath)
|
||||
|
||||
paths := strset.New()
|
||||
for loc := range resolver.AllLocations(context.Background()) {
|
||||
@ -1600,8 +1618,11 @@ func Test_FileResolver_AllLocationsDoesNotLeakGoRoutine(t *testing.T) {
|
||||
parentPath, err := absoluteSymlinkFreePathToParent(filePath)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, parentPath)
|
||||
resolver, err := NewFromFile(parentPath, filePath)
|
||||
|
||||
resolver, err := NewFromFile(filePath)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, resolver)
|
||||
assert.Equal(t, resolver.Chroot.Base(), parentPath)
|
||||
|
||||
require.NoError(t, err)
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
|
||||
20
syft/internal/fileresolver/get_xid.go
Normal file
20
syft/internal/fileresolver/get_xid.go
Normal file
@ -0,0 +1,20 @@
|
||||
//go:build !windows
|
||||
|
||||
package fileresolver
|
||||
|
||||
import (
|
||||
"os"
|
||||
"syscall"
|
||||
)
|
||||
|
||||
// getXid is the UID GID system info for unix
|
||||
func getXid(info os.FileInfo) (uid, gid int) {
|
||||
uid = -1
|
||||
gid = -1
|
||||
if stat, ok := info.Sys().(*syscall.Stat_t); ok {
|
||||
uid = int(stat.Uid)
|
||||
gid = int(stat.Gid)
|
||||
}
|
||||
|
||||
return uid, gid
|
||||
}
|
||||
12
syft/internal/fileresolver/get_xid_win.go
Normal file
12
syft/internal/fileresolver/get_xid_win.go
Normal file
@ -0,0 +1,12 @@
|
||||
//go:build windows
|
||||
|
||||
package fileresolver
|
||||
|
||||
import (
|
||||
"os"
|
||||
)
|
||||
|
||||
// getXid is a placeholder for windows file information
|
||||
func getXid(info os.FileInfo) (uid, gid int) {
|
||||
return -1, -1
|
||||
}
|
||||
44
syft/internal/fileresolver/metadata.go
Normal file
44
syft/internal/fileresolver/metadata.go
Normal file
@ -0,0 +1,44 @@
|
||||
package fileresolver
|
||||
|
||||
import (
|
||||
"os"
|
||||
|
||||
"github.com/anchore/stereoscope/pkg/file"
|
||||
"github.com/anchore/syft/internal"
|
||||
"github.com/anchore/syft/syft/internal/windows"
|
||||
)
|
||||
|
||||
func NewMetadataFromPath(path string, info os.FileInfo) file.Metadata {
|
||||
var mimeType string
|
||||
uid, gid := getXid(info)
|
||||
|
||||
ty := file.TypeFromMode(info.Mode())
|
||||
|
||||
if ty == file.TypeRegular {
|
||||
usablePath := path
|
||||
// denormalize the path back to windows so we can open the file
|
||||
if windows.HostRunningOnWindows() {
|
||||
usablePath = windows.FromPosix(usablePath)
|
||||
}
|
||||
|
||||
f, err := os.Open(usablePath)
|
||||
if err != nil {
|
||||
// TODO: it may be that the file is inaccessible, however, this is not an error or a warning. In the future we need to track these as known-unknowns
|
||||
f = nil
|
||||
} else {
|
||||
defer internal.CloseAndLogError(f, usablePath)
|
||||
}
|
||||
|
||||
mimeType = file.MIMEType(f)
|
||||
}
|
||||
|
||||
return file.Metadata{
|
||||
FileInfo: info,
|
||||
Path: path,
|
||||
Type: ty,
|
||||
// unsupported across platforms
|
||||
UserID: uid,
|
||||
GroupID: gid,
|
||||
MIMEType: mimeType,
|
||||
}
|
||||
}
|
||||
51
syft/internal/fileresolver/metadata_test.go
Normal file
51
syft/internal/fileresolver/metadata_test.go
Normal file
@ -0,0 +1,51 @@
|
||||
package fileresolver
|
||||
|
||||
import (
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/anchore/stereoscope/pkg/file"
|
||||
)
|
||||
|
||||
func TestFileMetadataFromPath(t *testing.T) {
|
||||
|
||||
tests := []struct {
|
||||
path string
|
||||
expectedType file.Type
|
||||
expectedMIMEType string
|
||||
}{
|
||||
{
|
||||
path: "test-fixtures/symlinks-simple/readme",
|
||||
expectedType: file.TypeRegular,
|
||||
expectedMIMEType: "text/plain",
|
||||
},
|
||||
{
|
||||
path: "test-fixtures/symlinks-simple/link_to_new_readme",
|
||||
expectedType: file.TypeSymLink,
|
||||
expectedMIMEType: "",
|
||||
},
|
||||
{
|
||||
path: "test-fixtures/symlinks-simple/link_to_link_to_new_readme",
|
||||
expectedType: file.TypeSymLink,
|
||||
expectedMIMEType: "",
|
||||
},
|
||||
{
|
||||
path: "test-fixtures/symlinks-simple",
|
||||
expectedType: file.TypeDirectory,
|
||||
expectedMIMEType: "",
|
||||
},
|
||||
}
|
||||
for _, test := range tests {
|
||||
t.Run(test.path, func(t *testing.T) {
|
||||
info, err := os.Lstat(test.path)
|
||||
require.NoError(t, err)
|
||||
|
||||
actual := NewMetadataFromPath(test.path, info)
|
||||
assert.Equal(t, test.expectedMIMEType, actual.MIMEType, "unexpected MIME type for %s", test.path)
|
||||
assert.Equal(t, test.expectedType, actual.Type, "unexpected type for %s", test.path)
|
||||
})
|
||||
}
|
||||
}
|
||||
@ -24,6 +24,10 @@ import (
|
||||
"github.com/anchore/syft/syft/pkg/cataloger/generic"
|
||||
)
|
||||
|
||||
const (
|
||||
deinstallStatus string = "deinstall"
|
||||
)
|
||||
|
||||
var (
|
||||
errEndOfPackages = fmt.Errorf("no more packages to read")
|
||||
sourceRegexp = regexp.MustCompile(`(?P<name>\S+)( \((?P<version>.*)\))?`)
|
||||
@ -112,6 +116,7 @@ type dpkgExtractedMetadata struct {
|
||||
Provides string `mapstructure:"Provides"`
|
||||
Depends string `mapstructure:"Depends"`
|
||||
PreDepends string `mapstructure:"PreDepends"` // note: original doc is Pre-Depends
|
||||
Status string `mapstructure:"Status"`
|
||||
}
|
||||
|
||||
// parseDpkgStatusEntry returns an individual Dpkg entry, or returns errEndOfPackages if there are no more packages to parse from the reader.
|
||||
@ -134,6 +139,11 @@ func parseDpkgStatusEntry(reader *bufio.Reader) (*pkg.DpkgDBEntry, error) {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Skip entries which have been removed but not purged, e.g. "rc" status in dpkg -l
|
||||
if strings.Contains(raw.Status, deinstallStatus) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
sourceName, sourceVersion := extractSourceVersion(raw.Source)
|
||||
if sourceVersion != "" {
|
||||
raw.SourceVersion = sourceVersion
|
||||
|
||||
@ -237,6 +237,37 @@ func Test_parseDpkgStatus(t *testing.T) {
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "deinstall status packages are ignored",
|
||||
fixturePath: "test-fixtures/var/lib/dpkg/status.d/deinstall",
|
||||
expected: []pkg.DpkgDBEntry{
|
||||
{
|
||||
Package: "linux-image-6.14.0-1012-aws",
|
||||
Source: "linux-signed-aws-6.14",
|
||||
Version: "6.14.0-1012.12~24.04.1",
|
||||
Architecture: "amd64",
|
||||
InstalledSize: 15221,
|
||||
Maintainer: "Canonical Kernel Team <kernel-team@lists.ubuntu.com>",
|
||||
Description: `Signed kernel image aws
|
||||
A kernel image for aws. This version of it is signed with
|
||||
Canonical's signing key.`,
|
||||
Provides: []string{"fuse-module",
|
||||
"linux-image",
|
||||
"spl-dkms",
|
||||
"spl-modules",
|
||||
"v4l2loopback-dkms",
|
||||
"v4l2loopback-modules",
|
||||
"zfs-dkms",
|
||||
"zfs-modules"},
|
||||
Depends: []string{
|
||||
"kmod",
|
||||
"linux-base (>= 4.5ubuntu1~16.04.1)",
|
||||
"linux-modules-6.14.0-1012-aws",
|
||||
},
|
||||
Files: []pkg.DpkgFileRecord{},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
|
||||
@ -0,0 +1,38 @@
|
||||
Package: linux-image-6.14.0-1012-aws
|
||||
Status: install ok installed
|
||||
Priority: optional
|
||||
Section: kernel
|
||||
Installed-Size: 15221
|
||||
Maintainer: Canonical Kernel Team <kernel-team@lists.ubuntu.com>
|
||||
Architecture: amd64
|
||||
Source: linux-signed-aws-6.14
|
||||
Version: 6.14.0-1012.12~24.04.1
|
||||
Provides: fuse-module, linux-image, spl-dkms, spl-modules, v4l2loopback-dkms, v4l2loopback-modules, zfs-dkms, zfs-modules
|
||||
Depends: kmod, linux-base (>= 4.5ubuntu1~16.04.1), linux-modules-6.14.0-1012-aws
|
||||
Recommends: grub-pc | grub-efi-amd64 | grub-efi-ia32 | grub | lilo, initramfs-tools | linux-initramfs-tool
|
||||
Suggests: bpftool, linux-perf, linux-aws-6.14-doc-6.14.0 | linux-aws-6.14-source-6.14.0, linux-aws-6.14-tools, linux-headers-6.14.0-1012-aws
|
||||
Conflicts: linux-image-unsigned-6.14.0-1012-aws
|
||||
Description: Signed kernel image aws
|
||||
A kernel image for aws. This version of it is signed with
|
||||
Canonical's signing key.
|
||||
Built-Using: linux-aws-6.14 (= 6.14.0-1012.12~24.04.1)
|
||||
|
||||
Package: linux-image-6.8.0-1029-aws
|
||||
Status: deinstall ok config-files
|
||||
Priority: optional
|
||||
Section: kernel
|
||||
Installed-Size: 14591
|
||||
Maintainer: Canonical Kernel Team <kernel-team@lists.ubuntu.com>
|
||||
Architecture: amd64
|
||||
Source: linux-signed-aws
|
||||
Version: 6.8.0-1029.31
|
||||
Config-Version: 6.8.0-1029.31
|
||||
Provides: fuse-module, linux-image, spl-dkms, spl-modules, v4l2loopback-dkms, v4l2loopback-modules, zfs-dkms, zfs-modules
|
||||
Depends: kmod, linux-base (>= 4.5ubuntu1~16.04.1), linux-modules-6.8.0-1029-aws
|
||||
Recommends: grub-pc | grub-efi-amd64 | grub-efi-ia32 | grub | lilo, initramfs-tools | linux-initramfs-tool
|
||||
Suggests: fdutils, linux-aws-doc-6.8.0 | linux-aws-source-6.8.0, linux-aws-tools, linux-headers-6.8.0-1029-aws
|
||||
Conflicts: linux-image-unsigned-6.8.0-1029-aws
|
||||
Description: Signed kernel image aws
|
||||
A kernel image for aws. This version of it is signed with
|
||||
Canonical's signing key.
|
||||
Built-Using: linux-aws (= 6.8.0-1029.31)
|
||||
@ -10,7 +10,14 @@ import (
|
||||
)
|
||||
|
||||
func (c *goBinaryCataloger) newGoBinaryPackage(dep *debug.Module, m pkg.GolangBinaryBuildinfoEntry, licenses []pkg.License, locations ...file.Location) pkg.Package {
|
||||
// Similar to syft/pkg/cataloger/golang/parse_go_mod.go logic - use original path for relative replacements
|
||||
finalPath := dep.Path
|
||||
if dep.Replace != nil {
|
||||
if strings.HasPrefix(dep.Replace.Path, ".") || strings.HasPrefix(dep.Replace.Path, "/") {
|
||||
finalPath = dep.Path
|
||||
} else {
|
||||
finalPath = dep.Replace.Path
|
||||
}
|
||||
dep = dep.Replace
|
||||
}
|
||||
|
||||
@ -23,10 +30,10 @@ func (c *goBinaryCataloger) newGoBinaryPackage(dep *debug.Module, m pkg.GolangBi
|
||||
}
|
||||
|
||||
p := pkg.Package{
|
||||
Name: dep.Path,
|
||||
Name: finalPath,
|
||||
Version: version,
|
||||
Licenses: pkg.NewLicenseSet(licenses...),
|
||||
PURL: packageURL(dep.Path, version),
|
||||
PURL: packageURL(finalPath, version),
|
||||
Language: pkg.Go,
|
||||
Type: pkg.GoModulePkg,
|
||||
Locations: file.NewLocationSet(locations...),
|
||||
|
||||
@ -1,6 +1,7 @@
|
||||
package golang
|
||||
|
||||
import (
|
||||
"runtime/debug"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
@ -54,3 +55,67 @@ func Test_packageURL(t *testing.T) {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func Test_newGoBinaryPackage_relativeReplace(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
dep *debug.Module
|
||||
expectedName string
|
||||
}{
|
||||
{
|
||||
name: "relative replace with ../",
|
||||
dep: &debug.Module{
|
||||
Path: "github.com/aws/aws-sdk-go-v2",
|
||||
Version: "(devel)",
|
||||
Replace: &debug.Module{
|
||||
Path: "../../",
|
||||
Version: "(devel)",
|
||||
},
|
||||
},
|
||||
expectedName: "github.com/aws/aws-sdk-go-v2", // should use original path, not relative
|
||||
},
|
||||
{
|
||||
name: "relative replace with ./",
|
||||
dep: &debug.Module{
|
||||
Path: "github.com/example/module",
|
||||
Version: "v1.0.0",
|
||||
Replace: &debug.Module{
|
||||
Path: "./local",
|
||||
Version: "v0.0.0",
|
||||
},
|
||||
},
|
||||
expectedName: "github.com/example/module", // should use original path
|
||||
},
|
||||
{
|
||||
name: "absolute replace",
|
||||
dep: &debug.Module{
|
||||
Path: "github.com/old/module",
|
||||
Version: "v1.0.0",
|
||||
Replace: &debug.Module{
|
||||
Path: "github.com/new/module",
|
||||
Version: "v2.0.0",
|
||||
},
|
||||
},
|
||||
expectedName: "github.com/new/module", // should use replacement path
|
||||
},
|
||||
{
|
||||
name: "no replace",
|
||||
dep: &debug.Module{
|
||||
Path: "github.com/normal/module",
|
||||
Version: "v1.0.0",
|
||||
},
|
||||
expectedName: "github.com/normal/module", // should use original path
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
cataloger := &goBinaryCataloger{}
|
||||
result := cataloger.newGoBinaryPackage(test.dep, pkg.GolangBinaryBuildinfoEntry{}, nil)
|
||||
|
||||
assert.Equal(t, test.expectedName, result.Name)
|
||||
assert.Equal(t, pkg.Go, result.Language)
|
||||
assert.Equal(t, pkg.GoModulePkg, result.Type)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@ -352,6 +352,9 @@ func (p *CatalogTester) assertPkgs(t *testing.T, pkgs []pkg.Package, relationshi
|
||||
opts = append(opts, p.compareOptions...)
|
||||
opts = append(opts, cmp.Reporter(&r))
|
||||
|
||||
// ignore the "FoundBy" field on relationships as it is set in the generic cataloger before it's presence on the relationship
|
||||
opts = append(opts, cmpopts.IgnoreFields(pkg.Package{}, "FoundBy"))
|
||||
|
||||
// order should not matter
|
||||
relationship.Sort(p.expectedRelationships)
|
||||
relationship.Sort(relationships)
|
||||
|
||||
@ -108,7 +108,7 @@ func (a archiveFilename) extension() string {
|
||||
|
||||
func (a archiveFilename) pkgType() pkg.Type {
|
||||
switch strings.ToLower(a.extension()) {
|
||||
case "jar", "war", "ear", "lpkg", "par", "sar", "nar", "kar":
|
||||
case "jar", "war", "ear", "lpkg", "par", "sar", "nar", "kar", "rar":
|
||||
return pkg.JavaPkg
|
||||
case "jpi", "hpi":
|
||||
return pkg.JenkinsPluginPkg
|
||||
|
||||
@ -187,6 +187,13 @@ func TestExtractInfoFromJavaArchiveFilename(t *testing.T) {
|
||||
name: "gradle-build-cache",
|
||||
ty: pkg.JavaPkg,
|
||||
},
|
||||
{
|
||||
filename: "pkg-extra-field-maven-4.3.2-rc1.rar",
|
||||
version: "4.3.2-rc1",
|
||||
extension: "rar",
|
||||
name: "pkg-extra-field-maven",
|
||||
ty: pkg.JavaPkg,
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
|
||||
@ -37,6 +37,7 @@ var archiveFormatGlobs = []string{
|
||||
"**/*.jpi",
|
||||
"**/*.hpi",
|
||||
"**/*.kar",
|
||||
"**/*.far",
|
||||
"**/*.lpkg", // Zip-compressed package used to deploy applications
|
||||
// (aka plugins) to Liferay Portal server. Those files contains .JAR(s) and a .PROPERTIES file, the latter
|
||||
// has information about the application and installation requirements.
|
||||
@ -45,6 +46,7 @@ var archiveFormatGlobs = []string{
|
||||
// LifeRay makes it pretty cumbersome to make a such plugins; their docs are
|
||||
// out of date, and they charge for their IDE. If you find an example
|
||||
// project that we can build in CI feel free to include it
|
||||
"**/*.rar", // Java Resource Adapter Archive
|
||||
}
|
||||
|
||||
// javaArchiveHashes are all the current hash algorithms used to calculate archive digests
|
||||
@ -257,10 +259,14 @@ func (j *archiveParser) discoverMainPackage(ctx context.Context) (*pkg.Package,
|
||||
return nil, err
|
||||
}
|
||||
|
||||
name, version, lics, err := j.discoverNameVersionLicense(ctx, manifest)
|
||||
name, version, lics, parsedPom, err := j.discoverNameVersionLicense(ctx, manifest)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var pkgPomProject *pkg.JavaPomProject
|
||||
if parsedPom != nil {
|
||||
pkgPomProject = newPomProject(ctx, j.maven, parsedPom.path, parsedPom.project)
|
||||
}
|
||||
|
||||
return &pkg.Package{
|
||||
// TODO: maybe select name should just have a pom properties in it?
|
||||
@ -275,12 +281,13 @@ func (j *archiveParser) discoverMainPackage(ctx context.Context) (*pkg.Package,
|
||||
Metadata: pkg.JavaArchive{
|
||||
VirtualPath: j.location.Path(),
|
||||
Manifest: manifest,
|
||||
PomProject: pkgPomProject,
|
||||
ArchiveDigests: digests,
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (j *archiveParser) discoverNameVersionLicense(ctx context.Context, manifest *pkg.JavaManifest) (string, string, []pkg.License, error) {
|
||||
func (j *archiveParser) discoverNameVersionLicense(ctx context.Context, manifest *pkg.JavaManifest) (string, string, []pkg.License, *parsedPomProject, error) {
|
||||
// we use j.location because we want to associate the license declaration with where we discovered the contents in the manifest
|
||||
// TODO: when we support locations of paths within archives we should start passing the specific manifest location object instead of the top jar
|
||||
lics := pkg.NewLicensesFromLocationWithContext(ctx, j.location, selectLicenses(manifest)...)
|
||||
@ -302,7 +309,7 @@ func (j *archiveParser) discoverNameVersionLicense(ctx context.Context, manifest
|
||||
if len(lics) == 0 {
|
||||
fileLicenses, err := j.getLicenseFromFileInArchive(ctx)
|
||||
if err != nil {
|
||||
return "", "", nil, err
|
||||
return "", "", nil, parsedPom, err
|
||||
}
|
||||
if fileLicenses != nil {
|
||||
lics = append(lics, fileLicenses...)
|
||||
@ -317,7 +324,7 @@ func (j *archiveParser) discoverNameVersionLicense(ctx context.Context, manifest
|
||||
lics = j.findLicenseFromJavaMetadata(ctx, groupID, artifactID, version, parsedPom, manifest)
|
||||
}
|
||||
|
||||
return artifactID, version, lics, nil
|
||||
return artifactID, version, lics, parsedPom, nil
|
||||
}
|
||||
|
||||
// findLicenseFromJavaMetadata attempts to find license information from all available maven metadata properties and pom info
|
||||
@ -387,43 +394,93 @@ type parsedPomProject struct {
|
||||
|
||||
// discoverMainPackageFromPomInfo attempts to resolve maven groupId, artifactId, version and other info from found pom information
|
||||
func (j *archiveParser) discoverMainPackageFromPomInfo(ctx context.Context) (group, name, version string, parsedPom *parsedPomProject) {
|
||||
var pomProperties pkg.JavaPomProperties
|
||||
|
||||
// Find the pom.properties/pom.xml if the names seem like a plausible match
|
||||
properties, _ := pomPropertiesByParentPath(j.archivePath, j.location, j.fileManifest.GlobMatch(false, pomPropertiesGlob))
|
||||
projects, _ := pomProjectByParentPath(j.archivePath, j.location, j.fileManifest.GlobMatch(false, pomXMLGlob))
|
||||
|
||||
// map of all the artifacts in the pom properties, in order to chek exact match with the filename
|
||||
artifactsMap := j.buildArtifactsMap(properties)
|
||||
pomProperties, parsedPom := j.findBestPomMatch(properties, projects, artifactsMap)
|
||||
|
||||
parsedPom = j.handleSinglePomXML(properties, projects, parsedPom)
|
||||
|
||||
return j.resolveIdentity(ctx, pomProperties, parsedPom)
|
||||
}
|
||||
|
||||
func (j *archiveParser) buildArtifactsMap(properties map[string]pkg.JavaPomProperties) *strset.Set {
|
||||
artifactsMap := strset.New()
|
||||
for _, propertiesObj := range properties {
|
||||
artifactsMap.Add(propertiesObj.ArtifactID)
|
||||
}
|
||||
return artifactsMap
|
||||
}
|
||||
|
||||
func (j *archiveParser) findBestPomMatch(properties map[string]pkg.JavaPomProperties,
|
||||
projects map[string]*parsedPomProject, artifactsMap *strset.Set) (pkg.JavaPomProperties, *parsedPomProject) {
|
||||
var pomProperties pkg.JavaPomProperties
|
||||
var parsedPom *parsedPomProject
|
||||
|
||||
for parentPath, propertiesObj := range sortedIter(properties) {
|
||||
// the logic for selecting the best name is as follows:
|
||||
// if we find an artifact id AND group id which are both contained in the filename
|
||||
// OR if we have an artifact id that exactly matches the filename, prefer this
|
||||
// OTHERWISE track the first matching pom properties with a pom.xml
|
||||
// FINALLY return the first matching pom properties
|
||||
if artifactIDMatchesFilename(propertiesObj.ArtifactID, j.fileInfo.name, artifactsMap) {
|
||||
if pomProperties.ArtifactID == "" { // keep the first match, or overwrite if we find more specific entries
|
||||
pomProperties = propertiesObj
|
||||
if !artifactIDMatchesFilename(propertiesObj.ArtifactID, j.fileInfo.name, artifactsMap) {
|
||||
continue
|
||||
}
|
||||
if proj, exists := projects[parentPath]; exists {
|
||||
if parsedPom == nil { // keep the first matching artifact if we don't find an exact match or groupid + artfiact id match
|
||||
pomProperties = propertiesObj // set this, as it may not be the first entry found
|
||||
parsedPom = proj
|
||||
}
|
||||
// if artifact ID is the entire filename or BOTH artifactID and groupID are contained in the artifact, prefer this match
|
||||
if strings.Contains(j.fileInfo.name, propertiesObj.GroupID) || j.fileInfo.name == propertiesObj.ArtifactID {
|
||||
pomProperties = propertiesObj // this is an exact match, use it
|
||||
parsedPom = proj
|
||||
|
||||
pomProperties, parsedPom = j.updateMatchIfBetter(pomProperties, parsedPom, propertiesObj, parentPath, projects)
|
||||
|
||||
if j.isExactMatch(propertiesObj, parsedPom) {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return pomProperties, parsedPom
|
||||
}
|
||||
|
||||
func (j *archiveParser) updateMatchIfBetter(currentProps pkg.JavaPomProperties, currentPom *parsedPomProject,
|
||||
newProps pkg.JavaPomProperties, parentPath string, projects map[string]*parsedPomProject) (pkg.JavaPomProperties, *parsedPomProject) {
|
||||
// Keep the first match
|
||||
if currentProps.ArtifactID == "" {
|
||||
proj, hasProject := projects[parentPath]
|
||||
if hasProject {
|
||||
return newProps, proj
|
||||
}
|
||||
return newProps, currentPom
|
||||
}
|
||||
|
||||
proj, hasProject := projects[parentPath]
|
||||
if !hasProject {
|
||||
return currentProps, currentPom
|
||||
}
|
||||
|
||||
// Keep the first matching artifact with a pom.xml
|
||||
if currentPom == nil {
|
||||
return newProps, proj
|
||||
}
|
||||
|
||||
// Prefer exact matches
|
||||
if j.isExactMatch(newProps, proj) {
|
||||
return newProps, proj
|
||||
}
|
||||
|
||||
return currentProps, currentPom
|
||||
}
|
||||
|
||||
func (j *archiveParser) isExactMatch(props pkg.JavaPomProperties, pom *parsedPomProject) bool {
|
||||
if pom == nil {
|
||||
return false
|
||||
}
|
||||
return strings.Contains(j.fileInfo.name, props.GroupID) || j.fileInfo.name == props.ArtifactID
|
||||
}
|
||||
|
||||
func (j *archiveParser) handleSinglePomXML(properties map[string]pkg.JavaPomProperties,
|
||||
projects map[string]*parsedPomProject, currentPom *parsedPomProject) *parsedPomProject {
|
||||
if len(properties) == 0 && len(projects) == 1 {
|
||||
for _, projectsObj := range projects {
|
||||
return projectsObj
|
||||
}
|
||||
}
|
||||
return currentPom
|
||||
}
|
||||
|
||||
func (j *archiveParser) resolveIdentity(ctx context.Context, pomProperties pkg.JavaPomProperties,
|
||||
parsedPom *parsedPomProject) (group, name, version string, pom *parsedPomProject) {
|
||||
group = pomProperties.GroupID
|
||||
name = pomProperties.ArtifactID
|
||||
version = pomProperties.Version
|
||||
|
||||
@ -79,9 +79,12 @@ func TestSearchMavenForLicenses(t *testing.T) {
|
||||
ReadCloser: fixture,
|
||||
}, tc.detectNested, tc.config)
|
||||
defer cleanupFn()
|
||||
require.NoError(t, err)
|
||||
|
||||
// assert licenses are discovered from upstream
|
||||
_, _, _, parsedPom := ap.discoverMainPackageFromPomInfo(context.Background())
|
||||
require.NotNil(t, parsedPom, "expected to find pom information in the fixture")
|
||||
require.NotNil(t, parsedPom.project, "expected parsedPom to have a project")
|
||||
resolvedLicenses, _ := ap.maven.ResolveLicenses(context.Background(), parsedPom.project)
|
||||
assert.Equal(t, tc.expectedLicenses, toPkgLicenses(ctx, nil, resolvedLicenses))
|
||||
})
|
||||
@ -148,10 +151,23 @@ func TestParseJar(t *testing.T) {
|
||||
},
|
||||
PomProperties: &pkg.JavaPomProperties{
|
||||
Path: "META-INF/maven/io.jenkins.plugins/example-jenkins-plugin/pom.properties",
|
||||
Name: "",
|
||||
GroupID: "io.jenkins.plugins",
|
||||
ArtifactID: "example-jenkins-plugin",
|
||||
Version: "1.0-SNAPSHOT",
|
||||
},
|
||||
PomProject: &pkg.JavaPomProject{
|
||||
Path: "META-INF/maven/io.jenkins.plugins/example-jenkins-plugin/pom.xml",
|
||||
Name: "Example Jenkins Plugin",
|
||||
GroupID: "io.jenkins.plugins",
|
||||
ArtifactID: "example-jenkins-plugin",
|
||||
Version: "1.0-SNAPSHOT",
|
||||
Parent: &pkg.JavaPomParent{
|
||||
GroupID: "org.jenkins-ci.plugins",
|
||||
ArtifactID: "plugin",
|
||||
Version: "4.46",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
@ -189,6 +205,14 @@ func TestParseJar(t *testing.T) {
|
||||
},
|
||||
},
|
||||
},
|
||||
// PomProject: &pkg.JavaPomProject{
|
||||
// Path: "META-INF/maven/io.jenkins.plugins/example-jenkins-plugin/pom.xml",
|
||||
// Parent: &pkg.JavaPomParent{GroupID: "org.jenkins-ci.plugins", ArtifactID: "plugin", Version: "4.46"},
|
||||
// GroupID: "io.jenkins.plugins",
|
||||
// ArtifactID: "example-jenkins-plugin",
|
||||
// Version: "1.0-SNAPSHOT",
|
||||
// Name: "Example Jenkins Plugin",
|
||||
// },
|
||||
},
|
||||
},
|
||||
"joda-time": {
|
||||
@ -286,6 +310,12 @@ func TestParseJar(t *testing.T) {
|
||||
ArtifactID: "example-java-app-maven",
|
||||
Version: "0.1.0",
|
||||
},
|
||||
PomProject: &pkg.JavaPomProject{
|
||||
Path: "META-INF/maven/org.anchore/example-java-app-maven/pom.xml",
|
||||
GroupID: "org.anchore",
|
||||
ArtifactID: "example-java-app-maven",
|
||||
Version: "0.1.0",
|
||||
},
|
||||
},
|
||||
},
|
||||
"joda-time": {
|
||||
@ -1127,6 +1157,13 @@ func Test_parseJavaArchive_regressions(t *testing.T) {
|
||||
GroupID: "org.apache.directory.api",
|
||||
ArtifactID: "api-all",
|
||||
Version: "2.0.0",
|
||||
}, PomProject: &pkg.JavaPomProject{
|
||||
Path: "META-INF/maven/org.apache.directory.api/api-all/pom.xml",
|
||||
ArtifactID: "api-all",
|
||||
GroupID: "org.apache.directory.api",
|
||||
Version: "2.0.0",
|
||||
Name: "Apache Directory API All",
|
||||
Parent: &pkg.JavaPomParent{GroupID: "org.apache.directory.api", ArtifactID: "api-parent", Version: "2.0.0"},
|
||||
},
|
||||
},
|
||||
}
|
||||
@ -1163,6 +1200,46 @@ func Test_parseJavaArchive_regressions(t *testing.T) {
|
||||
},
|
||||
}
|
||||
|
||||
micronautAop := pkg.Package{
|
||||
Name: "micronaut-aop",
|
||||
Version: "4.9.11",
|
||||
PURL: "pkg:maven/io.micronaut/micronaut-aop@4.9.11",
|
||||
Locations: file.NewLocationSet(file.NewLocation("test-fixtures/jar-metadata/cache/micronaut-aop-4.9.11.jar")),
|
||||
Type: pkg.JavaPkg,
|
||||
Language: pkg.Java,
|
||||
Metadata: pkg.JavaArchive{
|
||||
VirtualPath: "test-fixtures/jar-metadata/cache/micronaut-aop-4.9.11.jar",
|
||||
Manifest: &pkg.JavaManifest{
|
||||
Main: []pkg.KeyValue{
|
||||
{
|
||||
Key: "Manifest-Version",
|
||||
Value: "1.0",
|
||||
},
|
||||
{
|
||||
Key: "Automatic-Module-Name",
|
||||
Value: "io.micronaut.micronaut_aop",
|
||||
},
|
||||
{
|
||||
Key: "Implementation-Version",
|
||||
Value: "4.9.11",
|
||||
},
|
||||
{
|
||||
Key: "Implementation-Title",
|
||||
Value: "Micronaut Core",
|
||||
},
|
||||
},
|
||||
}, PomProject: &pkg.JavaPomProject{
|
||||
Path: "META-INF/maven/io.micronaut/micronaut-aop/pom.xml",
|
||||
ArtifactID: "micronaut-aop",
|
||||
GroupID: "io.micronaut",
|
||||
Version: "4.9.11",
|
||||
Name: "Micronaut Core",
|
||||
Description: "Core components supporting the Micronaut Framework",
|
||||
URL: "https://micronaut.io",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
fixtureName string
|
||||
@ -1220,6 +1297,16 @@ func Test_parseJavaArchive_regressions(t *testing.T) {
|
||||
{Key: "Specification-Version", Value: "2.15.2"},
|
||||
},
|
||||
},
|
||||
PomProject: &pkg.JavaPomProject{
|
||||
Path: "META-INF/maven/com.fasterxml.jackson.core/jackson-core/pom.xml",
|
||||
ArtifactID: "jackson-core",
|
||||
GroupID: "com.fasterxml.jackson.core",
|
||||
Version: "2.15.2",
|
||||
Name: "Jackson-core",
|
||||
Description: "Core Jackson processing abstractions (aka Streaming API), implementation for JSON",
|
||||
URL: "https://github.com/FasterXML/jackson-core",
|
||||
Parent: &pkg.JavaPomParent{GroupID: "com.fasterxml.jackson", ArtifactID: "jackson-base", Version: "2.15.2"},
|
||||
},
|
||||
// not under test
|
||||
//ArchiveDigests: []file.Digest{{Algorithm: "sha1", Value: "d8bc1d9c428c96fe447e2c429fc4304d141024df"}},
|
||||
},
|
||||
@ -1275,6 +1362,16 @@ func Test_parseJavaArchive_regressions(t *testing.T) {
|
||||
{Key: "Specification-Version", Value: "2.15.2"},
|
||||
},
|
||||
},
|
||||
PomProject: &pkg.JavaPomProject{
|
||||
Path: "META-INF/maven/com.fasterxml.jackson.core/jackson-core/pom.xml",
|
||||
ArtifactID: "jackson-core",
|
||||
GroupID: "com.fasterxml.jackson.core",
|
||||
Version: "2.15.2",
|
||||
Name: "Jackson-core",
|
||||
Description: "Core Jackson processing abstractions (aka Streaming API), implementation for JSON",
|
||||
URL: "https://github.com/FasterXML/jackson-core",
|
||||
Parent: &pkg.JavaPomParent{GroupID: "com.fasterxml.jackson", ArtifactID: "jackson-base", Version: "2.15.2"},
|
||||
},
|
||||
// not under test
|
||||
//ArchiveDigests: []file.Digest{{Algorithm: "sha1", Value: "abd3e329270fc54a2acaceb45420fd5710ecefd5"}},
|
||||
},
|
||||
@ -1341,6 +1438,14 @@ func Test_parseJavaArchive_regressions(t *testing.T) {
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "micronaut-aop",
|
||||
fixtureName: "micronaut-aop-4.9.11",
|
||||
fileExtension: "jar",
|
||||
expectedPkgs: []pkg.Package{
|
||||
micronautAop,
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
@ -1527,3 +1632,26 @@ func Test_corruptJarArchive(t *testing.T) {
|
||||
WithError().
|
||||
TestParser(t, ap.parseJavaArchive)
|
||||
}
|
||||
|
||||
func Test_jarPomPropertyResolutionDoesNotPanic(t *testing.T) {
|
||||
jarName := generateJavaMetadataJarFixture(t, "commons-lang3-3.12.0", "jar")
|
||||
fixture, err := os.Open(jarName)
|
||||
require.NoError(t, err)
|
||||
|
||||
ctx := context.TODO()
|
||||
// setup parser
|
||||
ap, cleanupFn, err := newJavaArchiveParser(
|
||||
ctx,
|
||||
file.LocationReadCloser{
|
||||
Location: file.NewLocation(fixture.Name()),
|
||||
ReadCloser: fixture,
|
||||
}, false, ArchiveCatalogerConfig{
|
||||
UseMavenLocalRepository: true,
|
||||
MavenLocalRepositoryDir: "internal/maven/test-fixtures/maven-repo",
|
||||
})
|
||||
defer cleanupFn()
|
||||
require.NoError(t, err)
|
||||
|
||||
_, _, err = ap.parse(ctx, nil)
|
||||
require.NoError(t, err)
|
||||
}
|
||||
|
||||
@ -29,7 +29,9 @@ func Test_ArchiveCataloger_Globs(t *testing.T) {
|
||||
"java-archives/example.kar",
|
||||
"java-archives/example.jpi",
|
||||
"java-archives/example.hpi",
|
||||
"java-archives/example.far",
|
||||
"java-archives/example.lpkg",
|
||||
"java-archives/example.rar",
|
||||
"archives/example.zip",
|
||||
"archives/example.tar",
|
||||
"archives/example.tar.gz",
|
||||
|
||||
@ -0,0 +1 @@
|
||||
example archive
|
||||
@ -0,0 +1 @@
|
||||
example archive
|
||||
@ -13,7 +13,8 @@ API_ALL_SOURCES = api-all-2.0.0-sources
|
||||
SPRING_INSTRUMENTATION = spring-instrumentation-4.3.0-1.0
|
||||
MULTIPLE_MATCHING = multiple-matching-2.11.5
|
||||
ORG_MULTIPLE_THENAME = org.multiple-thename
|
||||
|
||||
MICRONAUT_AOP = micronaut-aop-4.9.11
|
||||
COMMONS_LANG3 = commons-lang3-3.12.0
|
||||
|
||||
.DEFAULT_GOAL := fixtures
|
||||
|
||||
@ -23,7 +24,7 @@ fixtures: $(CACHE_DIR)
|
||||
# requirement 2: 'fingerprint' goal to determine if the fixture input that indicates any existing cache should be busted
|
||||
fingerprint: $(FINGERPRINT_FILE)
|
||||
|
||||
$(CACHE_DIR): $(CACHE_DIR)/$(JACKSON_CORE).jar $(CACHE_DIR)/$(SBT_JACKSON_CORE).jar $(CACHE_DIR)/$(OPENSAML_CORE).jar $(CACHE_DIR)/$(API_ALL_SOURCES).jar $(CACHE_DIR)/$(SPRING_INSTRUMENTATION).jar $(CACHE_DIR)/$(MULTIPLE_MATCHING).jar
|
||||
$(CACHE_DIR): $(CACHE_DIR)/$(JACKSON_CORE).jar $(CACHE_DIR)/$(SBT_JACKSON_CORE).jar $(CACHE_DIR)/$(OPENSAML_CORE).jar $(CACHE_DIR)/$(API_ALL_SOURCES).jar $(CACHE_DIR)/$(SPRING_INSTRUMENTATION).jar $(CACHE_DIR)/$(MULTIPLE_MATCHING).jar $(CACHE_DIR)/$(MICRONAUT_AOP).jar $(CACHE_DIR)/$(COMMONS_LANG3).jar
|
||||
|
||||
$(CACHE_DIR)/$(JACKSON_CORE).jar:
|
||||
mkdir -p $(CACHE_DIR)
|
||||
@ -53,6 +54,14 @@ $(CACHE_DIR)/$(ORG_MULTIPLE_THENAME).jar:
|
||||
mkdir -p $(CACHE_DIR)
|
||||
cd $(ORG_MULTIPLE_THENAME) && zip -r $(CACHE_PATH)/$(ORG_MULTIPLE_THENAME).jar .
|
||||
|
||||
$(CACHE_DIR)/$(MICRONAUT_AOP).jar:
|
||||
mkdir -p $(CACHE_DIR)
|
||||
cd $(MICRONAUT_AOP) && zip -r $(CACHE_PATH)/$(MICRONAUT_AOP).jar .
|
||||
|
||||
$(CACHE_DIR)/$(COMMONS_LANG3).jar:
|
||||
mkdir -p $(CACHE_DIR)
|
||||
cd $(COMMONS_LANG3) && zip -r $(CACHE_PATH)/$(COMMONS_LANG3).jar .
|
||||
|
||||
# Jenkins plugins typically do not have the version included in the archive name,
|
||||
# so it is important to not include it in the generated test fixture
|
||||
$(CACHE_DIR)/gradle.hpi:
|
||||
|
||||
@ -0,0 +1 @@
|
||||
Manifest-Version: 1.0
|
||||
@ -0,0 +1,33 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<parent>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-parent</artifactId>
|
||||
<version>54</version>
|
||||
</parent>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-lang3</artifactId>
|
||||
<version>${commons.release.version}</version>
|
||||
<packaging>pom</packaging>
|
||||
<name>JUnit 5 (Bill of Materials)</name>
|
||||
<licenses>
|
||||
<license>
|
||||
<name>Eclipse Public License v2.0</name>
|
||||
<url>https://www.eclipse.org/legal/epl-v20.html</url>
|
||||
</license>
|
||||
</licenses>
|
||||
<scm>
|
||||
<connection>scm:git:git://github.com/junit-team/junit5.git</connection>
|
||||
<developerConnection>scm:git:git://github.com/junit-team/junit5.git</developerConnection>
|
||||
<url>https://github.com/junit-team/junit5</url>
|
||||
</scm>
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>org.junit.vintage</groupId>
|
||||
<artifactId>junit-vintage-engine</artifactId>
|
||||
<version>${commons.release.version}</version>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
</project>
|
||||
@ -0,0 +1,5 @@
|
||||
Manifest-Version: 1.0
|
||||
Automatic-Module-Name: io.micronaut.micronaut_aop
|
||||
Implementation-Version: 4.9.11
|
||||
Implementation-Title: Micronaut Core
|
||||
|
||||
@ -0,0 +1,15 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
|
||||
<!-- This module was also published with a richer model, Gradle metadata, -->
|
||||
<!-- which should be used instead. Do not delete the following line which -->
|
||||
<!-- is to indicate to Gradle or any Gradle module metadata file consumer -->
|
||||
<!-- that they should prefer consuming it instead. -->
|
||||
<!-- do_not_remove: published-with-gradle-metadata -->
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<groupId>io.micronaut</groupId>
|
||||
<artifactId>micronaut-aop</artifactId>
|
||||
<version>4.9.11</version>
|
||||
<name>Micronaut Core</name>
|
||||
<description>Core components supporting the Micronaut Framework</description>
|
||||
<url>https://micronaut.io</url>
|
||||
</project>
|
||||
@ -18,8 +18,9 @@ func NewPackageCataloger() pkg.Cataloger {
|
||||
func NewLockCataloger(cfg CatalogerConfig) pkg.Cataloger {
|
||||
yarnLockAdapter := newGenericYarnLockAdapter(cfg)
|
||||
packageLockAdapter := newGenericPackageLockAdapter(cfg)
|
||||
pnpmLockAdapter := newGenericPnpmLockAdapter(cfg)
|
||||
return generic.NewCataloger("javascript-lock-cataloger").
|
||||
WithParserByGlobs(packageLockAdapter.parsePackageLock, "**/package-lock.json").
|
||||
WithParserByGlobs(yarnLockAdapter.parseYarnLock, "**/yarn.lock").
|
||||
WithParserByGlobs(parsePnpmLock, "**/pnpm-lock.yaml")
|
||||
WithParserByGlobs(pnpmLockAdapter.parsePnpmLock, "**/pnpm-lock.yaml")
|
||||
}
|
||||
|
||||
@ -107,7 +107,7 @@ func newPackageLockV1Package(ctx context.Context, cfg CatalogerConfig, resolver
|
||||
licenseSet = pkg.NewLicenseSet(licenses...)
|
||||
}
|
||||
if err != nil {
|
||||
log.Debugf("unable to extract licenses from javascript yarn.lock for package %s:%s: %+v", name, version, err)
|
||||
log.Debugf("unable to extract licenses from javascript package-lock.json for package %s:%s: %+v", name, version, err)
|
||||
}
|
||||
}
|
||||
|
||||
@ -140,7 +140,7 @@ func newPackageLockV2Package(ctx context.Context, cfg CatalogerConfig, resolver
|
||||
licenseSet = pkg.NewLicenseSet(licenses...)
|
||||
}
|
||||
if err != nil {
|
||||
log.Debugf("unable to extract licenses from javascript yarn.lock for package %s:%s: %+v", name, u.Version, err)
|
||||
log.Debugf("unable to extract licenses from javascript package-lock.json for package %s:%s: %+v", name, u.Version, err)
|
||||
}
|
||||
}
|
||||
|
||||
@ -161,7 +161,19 @@ func newPackageLockV2Package(ctx context.Context, cfg CatalogerConfig, resolver
|
||||
)
|
||||
}
|
||||
|
||||
func newPnpmPackage(ctx context.Context, resolver file.Resolver, location file.Location, name, version string) pkg.Package {
|
||||
func newPnpmPackage(ctx context.Context, cfg CatalogerConfig, resolver file.Resolver, location file.Location, name, version string) pkg.Package {
|
||||
var licenseSet pkg.LicenseSet
|
||||
|
||||
if cfg.SearchRemoteLicenses {
|
||||
license, err := getLicenseFromNpmRegistry(cfg.NPMBaseURL, name, version)
|
||||
if err == nil && license != "" {
|
||||
licenses := pkg.NewLicensesFromValuesWithContext(ctx, license)
|
||||
licenseSet = pkg.NewLicenseSet(licenses...)
|
||||
}
|
||||
if err != nil {
|
||||
log.Debugf("unable to extract licenses from javascript pnpm-lock.yaml for package %s:%s: %+v", name, version, err)
|
||||
}
|
||||
}
|
||||
return finalizeLockPkg(
|
||||
ctx,
|
||||
resolver,
|
||||
@ -169,6 +181,7 @@ func newPnpmPackage(ctx context.Context, resolver file.Resolver, location file.L
|
||||
pkg.Package{
|
||||
Name: name,
|
||||
Version: version,
|
||||
Licenses: licenseSet,
|
||||
Locations: file.NewLocationSet(location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)),
|
||||
PURL: packageURL(name, version),
|
||||
Language: pkg.JavaScript,
|
||||
|
||||
@ -18,9 +18,6 @@ import (
|
||||
"github.com/anchore/syft/syft/pkg/cataloger/generic"
|
||||
)
|
||||
|
||||
// integrity check
|
||||
var _ generic.Parser = parsePnpmLock
|
||||
|
||||
// pnpmPackage holds the raw name and version extracted from the lockfile.
|
||||
type pnpmPackage struct {
|
||||
Name string
|
||||
@ -45,6 +42,16 @@ type pnpmV9LockYaml struct {
|
||||
Packages map[string]interface{} `yaml:"packages"`
|
||||
}
|
||||
|
||||
type genericPnpmLockAdapter struct {
|
||||
cfg CatalogerConfig
|
||||
}
|
||||
|
||||
func newGenericPnpmLockAdapter(cfg CatalogerConfig) genericPnpmLockAdapter {
|
||||
return genericPnpmLockAdapter{
|
||||
cfg: cfg,
|
||||
}
|
||||
}
|
||||
|
||||
// Parse implements the pnpmLockfileParser interface for v6-v8 lockfiles.
|
||||
func (p *pnpmV6LockYaml) Parse(version float64, data []byte) ([]pnpmPackage, error) {
|
||||
if err := yaml.Unmarshal(data, p); err != nil {
|
||||
@ -116,7 +123,7 @@ func newPnpmLockfileParser(version float64) pnpmLockfileParser {
|
||||
}
|
||||
|
||||
// parsePnpmLock is the main parser function for pnpm-lock.yaml files.
|
||||
func parsePnpmLock(ctx context.Context, resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
|
||||
func (a genericPnpmLockAdapter) parsePnpmLock(ctx context.Context, resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
|
||||
data, err := io.ReadAll(reader)
|
||||
if err != nil {
|
||||
return nil, nil, fmt.Errorf("failed to load pnpm-lock.yaml file: %w", err)
|
||||
@ -142,7 +149,7 @@ func parsePnpmLock(ctx context.Context, resolver file.Resolver, _ *generic.Envir
|
||||
|
||||
packages := make([]pkg.Package, len(pnpmPkgs))
|
||||
for i, p := range pnpmPkgs {
|
||||
packages[i] = newPnpmPackage(ctx, resolver, reader.Location, p.Name, p.Version)
|
||||
packages[i] = newPnpmPackage(ctx, a.cfg, resolver, reader.Location, p.Name, p.Version)
|
||||
}
|
||||
|
||||
return packages, nil, unknown.IfEmptyf(packages, "unable to determine packages")
|
||||
|
||||
@ -1,6 +1,11 @@
|
||||
package javascript
|
||||
|
||||
import (
|
||||
"context"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
"github.com/anchore/syft/syft/artifact"
|
||||
@ -50,7 +55,8 @@ func TestParsePnpmLock(t *testing.T) {
|
||||
},
|
||||
}
|
||||
|
||||
pkgtest.TestFileParser(t, fixture, parsePnpmLock, expectedPkgs, expectedRelationships)
|
||||
adapter := newGenericPnpmLockAdapter(CatalogerConfig{})
|
||||
pkgtest.TestFileParser(t, fixture, adapter.parsePnpmLock, expectedPkgs, expectedRelationships)
|
||||
}
|
||||
|
||||
func TestParsePnpmV6Lock(t *testing.T) {
|
||||
@ -142,7 +148,8 @@ func TestParsePnpmV6Lock(t *testing.T) {
|
||||
},
|
||||
}
|
||||
|
||||
pkgtest.TestFileParser(t, fixture, parsePnpmLock, expectedPkgs, expectedRelationships)
|
||||
adapter := newGenericPnpmLockAdapter(CatalogerConfig{})
|
||||
pkgtest.TestFileParser(t, fixture, adapter.parsePnpmLock, expectedPkgs, expectedRelationships)
|
||||
}
|
||||
|
||||
func TestParsePnpmLockV9(t *testing.T) {
|
||||
@ -184,14 +191,101 @@ func TestParsePnpmLockV9(t *testing.T) {
|
||||
Type: pkg.NpmPkg,
|
||||
},
|
||||
}
|
||||
|
||||
adapter := newGenericPnpmLockAdapter(CatalogerConfig{})
|
||||
// TODO: no relationships are under test
|
||||
pkgtest.TestFileParser(t, fixture, parsePnpmLock, expected, expectedRelationships)
|
||||
pkgtest.TestFileParser(t, fixture, adapter.parsePnpmLock, expected, expectedRelationships)
|
||||
}
|
||||
|
||||
// TestSearchPnpmForLicenses verifies that, when SearchRemoteLicenses is enabled,
// the pnpm lock parser fetches license information from the (mocked) npm
// registry and attaches it to the discovered packages.
func TestSearchPnpmForLicenses(t *testing.T) {
	ctx := context.TODO()
	fixture := "test-fixtures/pnpm-remote/pnpm-lock.yaml"
	locations := file.NewLocationSet(file.NewLocation(fixture))
	// a single mock registry server is shared across all table cases below
	mux, url, teardown := setupNpmRegistry()
	defer teardown()
	tests := []struct {
		name             string
		fixture          string
		config           CatalogerConfig
		requestHandlers  []handlerPath
		expectedPackages []pkg.Package
	}{
		{
			name:   "search remote licenses returns the expected licenses when search is set to true",
			config: CatalogerConfig{SearchRemoteLicenses: true},
			requestHandlers: []handlerPath{
				{
					// https://registry.npmjs.org/nanoid/3.3.4
					path:    "/nanoid/3.3.4",
					handler: generateMockNpmRegistryHandler("test-fixtures/pnpm-remote/registry_response.json"),
				},
			},
			expectedPackages: []pkg.Package{
				{
					Name:      "nanoid",
					Version:   "3.3.4",
					Locations: locations,
					PURL:      "pkg:npm/nanoid@3.3.4",
					// "MIT" comes from the mocked registry response, not the lockfile
					Licenses: pkg.NewLicenseSet(pkg.NewLicenseWithContext(ctx, "MIT")),
					Language: pkg.JavaScript,
					Type:     pkg.NpmPkg,
				},
			},
		},
	}

	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			// set up the mock server
			for _, handler := range tc.requestHandlers {
				mux.HandleFunc(handler.path, handler.handler)
			}
			// point the cataloger at the mock registry instead of registry.npmjs.org
			tc.config.NPMBaseURL = url
			adapter := newGenericPnpmLockAdapter(tc.config)
			pkgtest.TestFileParser(t, fixture, adapter.parsePnpmLock, tc.expectedPackages, nil)
		})
	}
}
|
||||
// Test_corruptPnpmLock ensures that a malformed pnpm-lock.yaml surfaces a
// parse error rather than silently returning no packages.
func Test_corruptPnpmLock(t *testing.T) {
	adapter := newGenericPnpmLockAdapter(CatalogerConfig{})
	pkgtest.NewCatalogTester().
		FromFile(t, "test-fixtures/corrupt/pnpm-lock.yaml").
		WithError().
		TestParser(t, adapter.parsePnpmLock)
}
|
||||
|
||||
func generateMockNpmRegistryHandler(responseFixture string) func(w http.ResponseWriter, r *http.Request) {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.WriteHeader(http.StatusOK)
|
||||
// Copy the file's content to the response writer
|
||||
file, err := os.Open(responseFixture)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
_, err = io.Copy(w, file)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// setup sets up a test HTTP server for mocking requests to a particular registry.
|
||||
// The returned url is injected into the Config so the client uses the test server.
|
||||
// Tests should register handlers on mux to simulate the expected request/response structure
|
||||
func setupNpmRegistry() (mux *http.ServeMux, serverURL string, teardown func()) {
|
||||
// mux is the HTTP request multiplexer used with the test server.
|
||||
mux = http.NewServeMux()
|
||||
|
||||
// We want to ensure that tests catch mistakes where the endpoint URL is
|
||||
// specified as absolute rather than relative. It only makes a difference
|
||||
// when there's a non-empty base URL path. So, use that. See issue #752.
|
||||
apiHandler := http.NewServeMux()
|
||||
apiHandler.Handle("/", mux)
|
||||
// server is a test HTTP server used to provide mock API responses.
|
||||
server := httptest.NewServer(apiHandler)
|
||||
|
||||
return mux, server.URL, server.Close
|
||||
}
|
||||
|
||||
@ -239,7 +239,7 @@ func TestSearchYarnForLicenses(t *testing.T) {
|
||||
ctx := context.TODO()
|
||||
fixture := "test-fixtures/yarn-remote/yarn.lock"
|
||||
locations := file.NewLocationSet(file.NewLocation(fixture))
|
||||
mux, url, teardown := setup()
|
||||
mux, url, teardown := setupYarnRegistry()
|
||||
defer teardown()
|
||||
tests := []struct {
|
||||
name string
|
||||
@ -255,7 +255,7 @@ func TestSearchYarnForLicenses(t *testing.T) {
|
||||
{
|
||||
// https://registry.yarnpkg.com/@babel/code-frame/7.10.4
|
||||
path: "/@babel/code-frame/7.10.4",
|
||||
handler: generateMockNPMHandler("test-fixtures/yarn-remote/registry_response.json"),
|
||||
handler: generateMockYarnRegistryHandler("test-fixtures/yarn-remote/registry_response.json"),
|
||||
},
|
||||
},
|
||||
expectedPackages: []pkg.Package{
|
||||
@ -449,7 +449,7 @@ func TestParseYarnFindPackageVersions(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func generateMockNPMHandler(responseFixture string) func(w http.ResponseWriter, r *http.Request) {
|
||||
func generateMockYarnRegistryHandler(responseFixture string) func(w http.ResponseWriter, r *http.Request) {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.WriteHeader(http.StatusOK)
|
||||
// Copy the file's content to the response writer
|
||||
@ -468,10 +468,10 @@ func generateMockNPMHandler(responseFixture string) func(w http.ResponseWriter,
|
||||
}
|
||||
}
|
||||
|
||||
// setup sets up a test HTTP server for mocking requests to maven central.
|
||||
// setup sets up a test HTTP server for mocking requests to a particular registry.
|
||||
// The returned url is injected into the Config so the client uses the test server.
|
||||
// Tests should register handlers on mux to simulate the expected request/response structure
|
||||
func setup() (mux *http.ServeMux, serverURL string, teardown func()) {
|
||||
func setupYarnRegistry() (mux *http.ServeMux, serverURL string, teardown func()) {
|
||||
// mux is the HTTP request multiplexer used with the test server.
|
||||
mux = http.NewServeMux()
|
||||
|
||||
|
||||
11
syft/pkg/cataloger/javascript/test-fixtures/pnpm-remote/pnpm-lock.yaml
generated
Normal file
11
syft/pkg/cataloger/javascript/test-fixtures/pnpm-remote/pnpm-lock.yaml
generated
Normal file
@ -0,0 +1,11 @@
|
||||
lockfileVersion: 5.4
|
||||
|
||||
specifiers:
|
||||
nanoid: ^3.3.4
|
||||
|
||||
dependencies:
|
||||
nanoid: 3.3.4
|
||||
|
||||
packages:
|
||||
/nanoid/3.3.4:
|
||||
resolution: {integrity: sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw==}
|
||||
@ -0,0 +1,106 @@
|
||||
{
|
||||
"name": "nanoid",
|
||||
"version": "3.3.4",
|
||||
"keywords": [
|
||||
"uuid",
|
||||
"random",
|
||||
"id",
|
||||
"url"
|
||||
],
|
||||
"author": {
|
||||
"name": "Andrey Sitnik",
|
||||
"email": "andrey@sitnik.ru"
|
||||
},
|
||||
"license": "MIT",
|
||||
"_id": "nanoid@3.3.4",
|
||||
"maintainers": [
|
||||
{
|
||||
"name": "ai",
|
||||
"email": "andrey@sitnik.ru"
|
||||
}
|
||||
],
|
||||
"homepage": "https://github.com/ai/nanoid#readme",
|
||||
"bugs": {
|
||||
"url": "https://github.com/ai/nanoid/issues"
|
||||
},
|
||||
"bin": {
|
||||
"nanoid": "bin/nanoid.cjs"
|
||||
},
|
||||
"dist": {
|
||||
"shasum": "730b67e3cd09e2deacf03c027c81c9d9dbc5e8ab",
|
||||
"tarball": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.4.tgz",
|
||||
"fileCount": 24,
|
||||
"integrity": "sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw==",
|
||||
"signatures": [
|
||||
{
|
||||
"sig": "MEQCIEXG2ta5bIaT6snvQFKV+m1KjuF4DaCpp186tcPo8vsRAiB2Eg9/6nKRi4lZOfwQC1fgq4EzrFjU8T+uqwGxWEQE8A==",
|
||||
"keyid": "SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA"
|
||||
}
|
||||
],
|
||||
"unpackedSize": 21583,
|
||||
"npm-signature": "-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v4.10.10\r\nComment: https://openpgpjs.org\r\n\r\nwsFzBAEBCAAGBQJicQqNACEJED1NWxICdlZqFiEECWMYAoorWMhJKdjhPU1b\r\nEgJ2Vmp6rw/+IRvv2zOtwi8goF3h1VctIQVWtTtYrobDIVC2W++jyxdbgZoP\r\n2CDj1YWjrr+eM6O6sI1Bj+bF+yoqQ+z8ojtfW3vtRPpjzUf/7Sgs4F2ANshp\r\ne3rqdaQLjpHPriHf6HmPJy3YNJ+7n5TPPGoTEGXAe4eCZdko3XidCMWZdHlf\r\nYQU9CVYiG6mjjORkWw1sYctt8exdcGFMh0QoQq7BEp04QWm04JwvHjUiAgvf\r\nmEQLrNrf9nwzjpnubAJD+1z6fKOc9vUE44MOj2PkPoOr6a+iBBBgwBf45cnj\r\ng8R2G5xzxsRRB0a8XZdp67y3WA8rIaYaUuBFtEWYp7QFoA/tp6AGmHEAhjLa\r\nQKTquG7ejBu21ZsQaxpGc/3WWLEm+7F78GF8CXpQdtg0Kg1eugRotSNnU0SO\r\nPLiyYV4Mw6kXnbVchS5Y+HmcDVEcSBMTve/f1KpmIhJueJ20RCg4MGYZWgI9\r\nNJ1KgH2h4djX4XuoXpcsKnX3oVfinHEMke8sLWXHsMAtOxDipEWgW9cE9hk0\r\n71Y6LAAPBu34pmaj73B0qZiIY7wXxoGWQOCl2STS/VyDG/K9w1T+WiYROu+8\r\nE9Gd+f4qXmdi7Jw6May86DDfauCwBP3gnrB5aeOktCjWsgrrdClN3Hv2pIAN\r\noJcjS3IURf6oeV4+Yw1B5GoJu1Y/6U75fOU=\r\n=IMnM\r\n-----END PGP SIGNATURE-----\r\n"
|
||||
},
|
||||
"main": "index.cjs",
|
||||
"type": "module",
|
||||
"types": "./index.d.ts",
|
||||
"module": "index.js",
|
||||
"browser": {
|
||||
"./index.js": "./index.browser.js",
|
||||
"./index.cjs": "./index.browser.cjs",
|
||||
"./async/index.js": "./async/index.browser.js",
|
||||
"./async/index.cjs": "./async/index.browser.cjs"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1"
|
||||
},
|
||||
"exports": {
|
||||
".": {
|
||||
"types": "./index.d.ts",
|
||||
"import": "./index.js",
|
||||
"browser": "./index.browser.js",
|
||||
"default": "./index.js",
|
||||
"require": "./index.cjs"
|
||||
},
|
||||
"./async": {
|
||||
"import": "./async/index.js",
|
||||
"browser": "./async/index.browser.js",
|
||||
"default": "./async/index.js",
|
||||
"require": "./async/index.cjs"
|
||||
},
|
||||
"./index.d.ts": "./index.d.ts",
|
||||
"./non-secure": {
|
||||
"import": "./non-secure/index.js",
|
||||
"default": "./non-secure/index.js",
|
||||
"require": "./non-secure/index.cjs"
|
||||
},
|
||||
"./package.json": "./package.json",
|
||||
"./url-alphabet": {
|
||||
"import": "./url-alphabet/index.js",
|
||||
"default": "./url-alphabet/index.js",
|
||||
"require": "./url-alphabet/index.cjs"
|
||||
},
|
||||
"./async/package.json": "./async/package.json",
|
||||
"./non-secure/package.json": "./non-secure/package.json",
|
||||
"./url-alphabet/package.json": "./url-alphabet/package.json"
|
||||
},
|
||||
"gitHead": "fc5bd0dbba830b1e6f3e572da8e2bc9ddc1b4b44",
|
||||
"_npmUser": {
|
||||
"name": "ai",
|
||||
"email": "andrey@sitnik.ru"
|
||||
},
|
||||
"repository": {
|
||||
"url": "git+https://github.com/ai/nanoid.git",
|
||||
"type": "git"
|
||||
},
|
||||
"_npmVersion": "8.6.0",
|
||||
"description": "A tiny (116 bytes), secure URL-friendly unique string ID generator",
|
||||
"directories": {},
|
||||
"sideEffects": false,
|
||||
"_nodeVersion": "18.0.0",
|
||||
"react-native": "index.js",
|
||||
"_hasShrinkwrap": false,
|
||||
"_npmOperationalInternal": {
|
||||
"tmp": "tmp/nanoid_3.3.4_1651575437375_0.2288595018362154",
|
||||
"host": "s3://npm-registry-packages"
|
||||
}
|
||||
}
|
||||
@ -30,7 +30,8 @@ func NewPackageCataloger(cfg CatalogerConfig) pkg.Cataloger {
|
||||
WithParserByGlobs(parsePoetryLock, "**/poetry.lock").
|
||||
WithParserByGlobs(parsePipfileLock, "**/Pipfile.lock").
|
||||
WithParserByGlobs(parseSetup, "**/setup.py").
|
||||
WithParserByGlobs(parseUvLock, "**/uv.lock")
|
||||
WithParserByGlobs(parseUvLock, "**/uv.lock").
|
||||
WithParserByGlobs(parsePdmLock, "**/pdm.lock")
|
||||
}
|
||||
|
||||
// NewInstalledPackageCataloger returns a new cataloger for python packages within egg or wheel installation directories.
|
||||
|
||||
@ -454,6 +454,7 @@ func Test_IndexCataloger_Globs(t *testing.T) {
|
||||
"src/poetry.lock",
|
||||
"src/Pipfile.lock",
|
||||
"src/uv.lock",
|
||||
"src/pdm.lock",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
140
syft/pkg/cataloger/python/parse_pdm_lock.go
Normal file
140
syft/pkg/cataloger/python/parse_pdm_lock.go
Normal file
@ -0,0 +1,140 @@
|
||||
package python
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/BurntSushi/toml"
|
||||
"github.com/scylladb/go-set/strset"
|
||||
|
||||
"github.com/anchore/syft/internal/unknown"
|
||||
"github.com/anchore/syft/syft/artifact"
|
||||
"github.com/anchore/syft/syft/file"
|
||||
"github.com/anchore/syft/syft/pkg"
|
||||
"github.com/anchore/syft/syft/pkg/cataloger/generic"
|
||||
)
|
||||
|
||||
// pdmLock mirrors the top-level structure of a pdm.lock TOML document.
type pdmLock struct {
	// Metadata captures lockfile-wide information recorded by pdm.
	Metadata struct {
		Groups      []string `toml:"groups"`
		Strategy    []string `toml:"strategy"`
		LockVersion string   `toml:"lock_version"`
		ContentHash string   `toml:"content_hash"`
	} `toml:"metadata"`
	// Package lists every locked [[package]] entry.
	Package []pdmLockPackage `toml:"package"`
}

// pdmLockPackage represents a single [[package]] entry within a pdm.lock file.
type pdmLockPackage struct {
	Name           string               `toml:"name"`
	Version        string               `toml:"version"`
	RequiresPython string               `toml:"requires_python"`
	Summary        string               `toml:"summary"`
	Dependencies   []string             `toml:"dependencies"` // PEP 508-style dependency strings (may include specifiers and markers)
	Files          []pdmLockPackageFile `toml:"files"`
}

// pdmLockPackageFile is a distribution artifact (e.g. wheel or sdist) recorded
// for a package; Hash is recorded in "<algorithm>:<value>" form.
type pdmLockPackageFile struct {
	File string `toml:"file"`
	Hash string `toml:"hash"`
}
|
||||
|
||||
var _ generic.Parser = parsePdmLock
|
||||
|
||||
// parsePdmLock is a parser function for pdm.lock contents, returning python packages discovered.
|
||||
func parsePdmLock(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
|
||||
var lock pdmLock
|
||||
_, err := toml.NewDecoder(reader).Decode(&lock)
|
||||
if err != nil {
|
||||
return nil, nil, fmt.Errorf("failed to parse pdm.lock file: %w", err)
|
||||
}
|
||||
|
||||
var pkgs []pkg.Package
|
||||
for _, p := range lock.Package {
|
||||
var files []pkg.PythonFileRecord
|
||||
for _, f := range p.Files {
|
||||
if colonIndex := strings.Index(f.Hash, ":"); colonIndex != -1 {
|
||||
algorithm := f.Hash[:colonIndex]
|
||||
value := f.Hash[colonIndex+1:]
|
||||
|
||||
files = append(files, pkg.PythonFileRecord{
|
||||
Path: f.File,
|
||||
Digest: &pkg.PythonFileDigest{
|
||||
Algorithm: algorithm,
|
||||
Value: value,
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// only store used part of the dependency information
|
||||
var deps []string
|
||||
for _, dep := range p.Dependencies {
|
||||
// remove environment markers (after semicolon)
|
||||
dep = strings.Split(dep, ";")[0]
|
||||
dep = strings.TrimSpace(dep)
|
||||
if dep != "" {
|
||||
deps = append(deps, dep)
|
||||
}
|
||||
}
|
||||
|
||||
pythonPkgMetadata := pkg.PythonPdmLockEntry{
|
||||
Files: files,
|
||||
Summary: p.Summary,
|
||||
Dependencies: deps,
|
||||
}
|
||||
|
||||
pkgs = append(pkgs, newPackageForIndexWithMetadata(
|
||||
p.Name,
|
||||
p.Version,
|
||||
pythonPkgMetadata,
|
||||
reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
|
||||
))
|
||||
}
|
||||
|
||||
relationships := buildPdmRelationships(pkgs)
|
||||
|
||||
return pkgs, relationships, unknown.IfEmptyf(pkgs, "unable to determine packages")
|
||||
}
|
||||
|
||||
func buildPdmRelationships(pkgs []pkg.Package) []artifact.Relationship {
|
||||
pkgMap := make(map[string]pkg.Package, len(pkgs))
|
||||
for _, p := range pkgs {
|
||||
pkgMap[p.Name] = p
|
||||
}
|
||||
|
||||
var relationships []artifact.Relationship
|
||||
for _, p := range pkgs {
|
||||
meta, ok := p.Metadata.(pkg.PythonPdmLockEntry)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
// collect unique dependencies
|
||||
added := strset.New()
|
||||
|
||||
for _, depName := range meta.Dependencies {
|
||||
// Handle version specifiers
|
||||
depName = strings.Split(depName, "<")[0]
|
||||
depName = strings.Split(depName, ">")[0]
|
||||
depName = strings.Split(depName, "=")[0]
|
||||
depName = strings.Split(depName, "~")[0]
|
||||
depName = strings.TrimSpace(depName)
|
||||
|
||||
if depName == "" || added.Has(depName) {
|
||||
continue
|
||||
}
|
||||
added.Add(depName)
|
||||
|
||||
if dep, exists := pkgMap[depName]; exists {
|
||||
relationships = append(relationships, artifact.Relationship{
|
||||
From: dep,
|
||||
To: p,
|
||||
Type: artifact.DependencyOfRelationship,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return relationships
|
||||
}
|
||||
363
syft/pkg/cataloger/python/parse_pdm_lock_test.go
Normal file
363
syft/pkg/cataloger/python/parse_pdm_lock_test.go
Normal file
@ -0,0 +1,363 @@
|
||||
package python
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/anchore/syft/syft/artifact"
|
||||
"github.com/anchore/syft/syft/file"
|
||||
"github.com/anchore/syft/syft/pkg"
|
||||
"github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest"
|
||||
)
|
||||
|
||||
func TestParsePdmLock(t *testing.T) {
|
||||
|
||||
fixture := "test-fixtures/pdm-lock/pdm.lock"
|
||||
locations := file.NewLocationSet(file.NewLocation(fixture))
|
||||
expectedPkgs := []pkg.Package{
|
||||
{
|
||||
Name: "certifi",
|
||||
Version: "2025.1.31",
|
||||
PURL: "pkg:pypi/certifi@2025.1.31",
|
||||
Locations: locations,
|
||||
Language: pkg.Python,
|
||||
Type: pkg.PythonPkg,
|
||||
Metadata: pkg.PythonPdmLockEntry{
|
||||
Summary: "Python package for providing Mozilla's CA Bundle.",
|
||||
Files: []pkg.PythonFileRecord{
|
||||
{
|
||||
Path: "",
|
||||
Digest: &pkg.PythonFileDigest{
|
||||
Algorithm: "sha256",
|
||||
Value: "3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651",
|
||||
},
|
||||
},
|
||||
{
|
||||
Path: "",
|
||||
Digest: &pkg.PythonFileDigest{
|
||||
Algorithm: "sha256",
|
||||
Value: "ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "chardet",
|
||||
Version: "3.0.4",
|
||||
PURL: "pkg:pypi/chardet@3.0.4",
|
||||
Locations: locations,
|
||||
Language: pkg.Python,
|
||||
Type: pkg.PythonPkg,
|
||||
Metadata: pkg.PythonPdmLockEntry{
|
||||
Summary: "Universal encoding detector for Python 2 and 3",
|
||||
Files: []pkg.PythonFileRecord{
|
||||
{
|
||||
Path: "",
|
||||
Digest: &pkg.PythonFileDigest{
|
||||
Algorithm: "sha256",
|
||||
Value: "fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691",
|
||||
},
|
||||
},
|
||||
{
|
||||
Path: "",
|
||||
Digest: &pkg.PythonFileDigest{
|
||||
Algorithm: "sha256",
|
||||
Value: "84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "charset-normalizer",
|
||||
Version: "2.0.12",
|
||||
PURL: "pkg:pypi/charset-normalizer@2.0.12",
|
||||
Locations: locations,
|
||||
Language: pkg.Python,
|
||||
Type: pkg.PythonPkg,
|
||||
Metadata: pkg.PythonPdmLockEntry{
|
||||
Summary: "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet.",
|
||||
Files: []pkg.PythonFileRecord{
|
||||
{
|
||||
Path: "",
|
||||
Digest: &pkg.PythonFileDigest{
|
||||
Algorithm: "sha256",
|
||||
Value: "6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df",
|
||||
},
|
||||
},
|
||||
{
|
||||
Path: "",
|
||||
Digest: &pkg.PythonFileDigest{
|
||||
Algorithm: "sha256",
|
||||
Value: "2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "colorama",
|
||||
Version: "0.3.9",
|
||||
PURL: "pkg:pypi/colorama@0.3.9",
|
||||
Locations: locations,
|
||||
Language: pkg.Python,
|
||||
Type: pkg.PythonPkg,
|
||||
Metadata: pkg.PythonPdmLockEntry{
|
||||
Summary: "Cross-platform colored terminal text.",
|
||||
Files: []pkg.PythonFileRecord{
|
||||
{
|
||||
Path: "",
|
||||
Digest: &pkg.PythonFileDigest{
|
||||
Algorithm: "sha256",
|
||||
Value: "463f8483208e921368c9f306094eb6f725c6ca42b0f97e313cb5d5512459feda",
|
||||
},
|
||||
},
|
||||
{
|
||||
Path: "",
|
||||
Digest: &pkg.PythonFileDigest{
|
||||
Algorithm: "sha256",
|
||||
Value: "48eb22f4f8461b1df5734a074b57042430fb06e1d61bd1e11b078c0fe6d7a1f1",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "idna",
|
||||
Version: "2.7",
|
||||
PURL: "pkg:pypi/idna@2.7",
|
||||
Locations: locations,
|
||||
Language: pkg.Python,
|
||||
Type: pkg.PythonPkg,
|
||||
Metadata: pkg.PythonPdmLockEntry{
|
||||
Summary: "Internationalized Domain Names in Applications (IDNA)",
|
||||
Files: []pkg.PythonFileRecord{
|
||||
{
|
||||
Path: "",
|
||||
Digest: &pkg.PythonFileDigest{
|
||||
Algorithm: "sha256",
|
||||
Value: "156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e",
|
||||
},
|
||||
},
|
||||
{
|
||||
Path: "",
|
||||
Digest: &pkg.PythonFileDigest{
|
||||
Algorithm: "sha256",
|
||||
Value: "684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "py",
|
||||
Version: "1.4.34",
|
||||
PURL: "pkg:pypi/py@1.4.34",
|
||||
Locations: locations,
|
||||
Language: pkg.Python,
|
||||
Type: pkg.PythonPkg,
|
||||
Metadata: pkg.PythonPdmLockEntry{
|
||||
Summary: "library with cross-python path, ini-parsing, io, code, log facilities",
|
||||
Files: []pkg.PythonFileRecord{
|
||||
{
|
||||
Path: "",
|
||||
Digest: &pkg.PythonFileDigest{
|
||||
Algorithm: "sha256",
|
||||
Value: "2ccb79b01769d99115aa600d7eed99f524bf752bba8f041dc1c184853514655a",
|
||||
},
|
||||
},
|
||||
{
|
||||
Path: "",
|
||||
Digest: &pkg.PythonFileDigest{
|
||||
Algorithm: "sha256",
|
||||
Value: "0f2d585d22050e90c7d293b6451c83db097df77871974d90efd5a30dc12fcde3",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "pytest",
|
||||
Version: "3.2.5",
|
||||
PURL: "pkg:pypi/pytest@3.2.5",
|
||||
Locations: locations,
|
||||
Language: pkg.Python,
|
||||
Type: pkg.PythonPkg,
|
||||
Metadata: pkg.PythonPdmLockEntry{
|
||||
Summary: "pytest: simple powerful testing with Python",
|
||||
Files: []pkg.PythonFileRecord{
|
||||
{
|
||||
Path: "",
|
||||
Digest: &pkg.PythonFileDigest{
|
||||
Algorithm: "sha256",
|
||||
Value: "6d5bd4f7113b444c55a3bbb5c738a3dd80d43563d063fc42dcb0aaefbdd78b81",
|
||||
},
|
||||
},
|
||||
{
|
||||
Path: "",
|
||||
Digest: &pkg.PythonFileDigest{
|
||||
Algorithm: "sha256",
|
||||
Value: "241d7e7798d79192a123ceaf64c602b4d233eacf6d6e42ae27caa97f498b7dc6",
|
||||
},
|
||||
},
|
||||
},
|
||||
Dependencies: []string{
|
||||
"argparse",
|
||||
"colorama",
|
||||
"ordereddict",
|
||||
"py>=1.4.33",
|
||||
"setuptools",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "requests",
|
||||
Version: "2.27.1",
|
||||
PURL: "pkg:pypi/requests@2.27.1",
|
||||
Locations: locations,
|
||||
Language: pkg.Python,
|
||||
Type: pkg.PythonPkg,
|
||||
Metadata: pkg.PythonPdmLockEntry{
|
||||
Summary: "Python HTTP for Humans.",
|
||||
Files: []pkg.PythonFileRecord{
|
||||
{
|
||||
Path: "",
|
||||
Digest: &pkg.PythonFileDigest{
|
||||
Algorithm: "sha256",
|
||||
Value: "f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d",
|
||||
},
|
||||
},
|
||||
{
|
||||
Path: "",
|
||||
Digest: &pkg.PythonFileDigest{
|
||||
Algorithm: "sha256",
|
||||
Value: "68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61",
|
||||
},
|
||||
},
|
||||
},
|
||||
Dependencies: []string{
|
||||
"certifi>=2017.4.17",
|
||||
"chardet<5,>=3.0.2",
|
||||
"charset-normalizer~=2.0.0",
|
||||
"idna<3,>=2.5",
|
||||
"idna<4,>=2.5",
|
||||
"urllib3<1.27,>=1.21.1",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "setuptools",
|
||||
Version: "39.2.0",
|
||||
PURL: "pkg:pypi/setuptools@39.2.0",
|
||||
Locations: locations,
|
||||
Language: pkg.Python,
|
||||
Type: pkg.PythonPkg,
|
||||
Metadata: pkg.PythonPdmLockEntry{
|
||||
Summary: "Easily download, build, install, upgrade, and uninstall Python packages",
|
||||
Files: []pkg.PythonFileRecord{
|
||||
{
|
||||
Path: "",
|
||||
Digest: &pkg.PythonFileDigest{
|
||||
Algorithm: "sha256",
|
||||
Value: "f7cddbb5f5c640311eb00eab6e849f7701fa70bf6a183fc8a2c33dd1d1672fb2",
|
||||
},
|
||||
},
|
||||
{
|
||||
Path: "",
|
||||
Digest: &pkg.PythonFileDigest{
|
||||
Algorithm: "sha256",
|
||||
Value: "8fca9275c89964f13da985c3656cb00ba029d7f3916b37990927ffdf264e7926",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "urllib3",
|
||||
Version: "1.26.20",
|
||||
PURL: "pkg:pypi/urllib3@1.26.20",
|
||||
Locations: locations,
|
||||
Language: pkg.Python,
|
||||
Type: pkg.PythonPkg,
|
||||
Metadata: pkg.PythonPdmLockEntry{
|
||||
Summary: "HTTP library with thread-safe connection pooling, file post, and more.",
|
||||
Files: []pkg.PythonFileRecord{
|
||||
{
|
||||
Path: "",
|
||||
Digest: &pkg.PythonFileDigest{
|
||||
Algorithm: "sha256",
|
||||
Value: "0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e",
|
||||
},
|
||||
},
|
||||
{
|
||||
Path: "",
|
||||
Digest: &pkg.PythonFileDigest{
|
||||
Algorithm: "sha256",
|
||||
Value: "40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
// Create a map for easy lookup of packages by name
|
||||
pkgMap := make(map[string]pkg.Package)
|
||||
for _, p := range expectedPkgs {
|
||||
pkgMap[p.Name] = p
|
||||
}
|
||||
|
||||
expectedRelationships := []artifact.Relationship{
|
||||
// pytest dependencies
|
||||
{
|
||||
From: pkgMap["colorama"],
|
||||
To: pkgMap["pytest"],
|
||||
Type: artifact.DependencyOfRelationship,
|
||||
},
|
||||
{
|
||||
From: pkgMap["py"],
|
||||
To: pkgMap["pytest"],
|
||||
Type: artifact.DependencyOfRelationship,
|
||||
},
|
||||
{
|
||||
From: pkgMap["setuptools"],
|
||||
To: pkgMap["pytest"],
|
||||
Type: artifact.DependencyOfRelationship,
|
||||
},
|
||||
// requests dependencies
|
||||
{
|
||||
From: pkgMap["certifi"],
|
||||
To: pkgMap["requests"],
|
||||
Type: artifact.DependencyOfRelationship,
|
||||
},
|
||||
{
|
||||
From: pkgMap["chardet"],
|
||||
To: pkgMap["requests"],
|
||||
Type: artifact.DependencyOfRelationship,
|
||||
},
|
||||
{
|
||||
From: pkgMap["charset-normalizer"],
|
||||
To: pkgMap["requests"],
|
||||
Type: artifact.DependencyOfRelationship,
|
||||
},
|
||||
{
|
||||
From: pkgMap["urllib3"],
|
||||
To: pkgMap["requests"],
|
||||
Type: artifact.DependencyOfRelationship,
|
||||
},
|
||||
{
|
||||
From: pkgMap["idna"],
|
||||
To: pkgMap["requests"],
|
||||
Type: artifact.DependencyOfRelationship,
|
||||
},
|
||||
}
|
||||
|
||||
pkgtest.TestFileParser(t, fixture, parsePdmLock, expectedPkgs, expectedRelationships)
|
||||
}
|
||||
|
||||
// Test_corruptPdmLock ensures that a non-TOML pdm.lock (the glob-paths
// placeholder fixture) surfaces a parse error rather than returning packages.
func Test_corruptPdmLock(t *testing.T) {
	pkgtest.NewCatalogTester().
		FromFile(t, "test-fixtures/glob-paths/src/pdm.lock").
		WithError().
		TestParser(t, parsePdmLock)
}
|
||||
@ -22,6 +22,7 @@ var _ generic.Parser = parseSetup
|
||||
// "mypy==v0.770", --> match(name=mypy version=v0.770)
|
||||
// " mypy2 == v0.770", ' mypy3== v0.770', --> match(name=mypy2 version=v0.770), match(name=mypy3, version=v0.770)
|
||||
var pinnedDependency = regexp.MustCompile(`['"]\W?(\w+\W?==\W?[\w.]*)`)
|
||||
var unquotedPinnedDependency = regexp.MustCompile(`^\s*(\w+)\s*==\s*([\w\.\-]+)`)
|
||||
|
||||
func parseSetup(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
|
||||
var packages []pkg.Package
|
||||
@ -32,42 +33,89 @@ func parseSetup(_ context.Context, _ file.Resolver, _ *generic.Environment, read
|
||||
line := scanner.Text()
|
||||
line = strings.TrimRight(line, "\n")
|
||||
|
||||
for _, match := range pinnedDependency.FindAllString(line, -1) {
|
||||
parts := strings.Split(match, "==")
|
||||
if len(parts) != 2 {
|
||||
continue
|
||||
}
|
||||
name := strings.Trim(parts[0], "'\"")
|
||||
name = strings.TrimSpace(name)
|
||||
name = strings.Trim(name, "'\"")
|
||||
|
||||
version := strings.TrimSpace(parts[len(parts)-1])
|
||||
version = strings.Trim(version, "'\"")
|
||||
|
||||
if hasTemplateDirective(name) || hasTemplateDirective(version) {
|
||||
// this can happen in more dynamic setup.py where there is templating
|
||||
continue
|
||||
}
|
||||
|
||||
if name == "" || version == "" {
|
||||
log.WithFields("path", reader.RealPath).Debugf("unable to parse package in setup.py line: %q", line)
|
||||
continue
|
||||
}
|
||||
|
||||
packages = append(
|
||||
packages,
|
||||
newPackageForIndex(
|
||||
name,
|
||||
version,
|
||||
reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
|
||||
),
|
||||
)
|
||||
}
|
||||
packages = processQuotedDependencies(line, reader, packages)
|
||||
packages = processUnquotedDependency(line, reader, packages)
|
||||
}
|
||||
|
||||
return packages, nil, nil
|
||||
}
|
||||
|
||||
func processQuotedDependencies(line string, reader file.LocationReadCloser, packages []pkg.Package) []pkg.Package {
|
||||
for _, match := range pinnedDependency.FindAllString(line, -1) {
|
||||
if p, ok := parseQuotedDependency(match, line, reader); ok {
|
||||
packages = append(packages, p)
|
||||
}
|
||||
}
|
||||
return packages
|
||||
}
|
||||
|
||||
func parseQuotedDependency(match, line string, reader file.LocationReadCloser) (pkg.Package, bool) {
|
||||
parts := strings.Split(match, "==")
|
||||
if len(parts) != 2 {
|
||||
return pkg.Package{}, false
|
||||
}
|
||||
|
||||
name := cleanDependencyString(parts[0])
|
||||
version := cleanDependencyString(parts[len(parts)-1])
|
||||
|
||||
return validateAndCreatePackage(name, version, line, reader)
|
||||
}
|
||||
|
||||
// processUnquotedDependency extracts and processes an unquoted dependency from a line
|
||||
func processUnquotedDependency(line string, reader file.LocationReadCloser, packages []pkg.Package) []pkg.Package {
|
||||
matches := unquotedPinnedDependency.FindStringSubmatch(line)
|
||||
if len(matches) != 3 {
|
||||
return packages
|
||||
}
|
||||
|
||||
name := strings.TrimSpace(matches[1])
|
||||
version := strings.TrimSpace(matches[2])
|
||||
|
||||
if p, ok := validateAndCreatePackage(name, version, line, reader); ok {
|
||||
if !isDuplicatePackage(p, packages) {
|
||||
packages = append(packages, p)
|
||||
}
|
||||
}
|
||||
|
||||
return packages
|
||||
}
|
||||
|
||||
func cleanDependencyString(s string) string {
|
||||
s = strings.Trim(s, "'\"")
|
||||
s = strings.TrimSpace(s)
|
||||
s = strings.Trim(s, "'\"")
|
||||
return s
|
||||
}
|
||||
|
||||
func validateAndCreatePackage(name, version, line string, reader file.LocationReadCloser) (pkg.Package, bool) {
|
||||
if hasTemplateDirective(name) || hasTemplateDirective(version) {
|
||||
// this can happen in more dynamic setup.py where there is templating
|
||||
return pkg.Package{}, false
|
||||
}
|
||||
|
||||
if name == "" || version == "" {
|
||||
log.WithFields("path", reader.RealPath).Debugf("unable to parse package in setup.py line: %q", line)
|
||||
return pkg.Package{}, false
|
||||
}
|
||||
|
||||
p := newPackageForIndex(
|
||||
name,
|
||||
version,
|
||||
reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
|
||||
)
|
||||
|
||||
return p, true
|
||||
}
|
||||
|
||||
func isDuplicatePackage(p pkg.Package, packages []pkg.Package) bool {
|
||||
for _, existing := range packages {
|
||||
if existing.Name == p.Name && existing.Version == p.Version {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func hasTemplateDirective(s string) bool {
|
||||
return strings.Contains(s, `%s`) || strings.Contains(s, `{`) || strings.Contains(s, `}`)
|
||||
}
|
||||
|
||||
@ -61,6 +61,94 @@ func TestParseSetup(t *testing.T) {
|
||||
fixture: "test-fixtures/setup/dynamic-setup.py",
|
||||
expected: nil,
|
||||
},
|
||||
{
|
||||
fixture: "test-fixtures/setup/multiline-split-setup.py",
|
||||
expected: []pkg.Package{
|
||||
{
|
||||
Name: "black",
|
||||
Version: "23.12.1",
|
||||
PURL: "pkg:pypi/black@23.12.1",
|
||||
Language: pkg.Python,
|
||||
Type: pkg.PythonPkg,
|
||||
},
|
||||
{
|
||||
Name: "cairosvg",
|
||||
Version: "2.7.1",
|
||||
PURL: "pkg:pypi/cairosvg@2.7.1",
|
||||
Language: pkg.Python,
|
||||
Type: pkg.PythonPkg,
|
||||
},
|
||||
{
|
||||
Name: "celery",
|
||||
Version: "5.3.4",
|
||||
PURL: "pkg:pypi/celery@5.3.4",
|
||||
Language: pkg.Python,
|
||||
Type: pkg.PythonPkg,
|
||||
},
|
||||
{
|
||||
Name: "django",
|
||||
Version: "4.2.23",
|
||||
PURL: "pkg:pypi/django@4.2.23",
|
||||
Language: pkg.Python,
|
||||
Type: pkg.PythonPkg,
|
||||
},
|
||||
{
|
||||
Name: "mypy",
|
||||
Version: "1.7.1",
|
||||
PURL: "pkg:pypi/mypy@1.7.1",
|
||||
Language: pkg.Python,
|
||||
Type: pkg.PythonPkg,
|
||||
},
|
||||
{
|
||||
Name: "pillow",
|
||||
Version: "11.0.0",
|
||||
PURL: "pkg:pypi/pillow@11.0.0",
|
||||
Language: pkg.Python,
|
||||
Type: pkg.PythonPkg,
|
||||
},
|
||||
{
|
||||
Name: "pytest",
|
||||
Version: "7.4.3",
|
||||
PURL: "pkg:pypi/pytest@7.4.3",
|
||||
Language: pkg.Python,
|
||||
Type: pkg.PythonPkg,
|
||||
},
|
||||
{
|
||||
Name: "requests",
|
||||
Version: "2.31.0",
|
||||
PURL: "pkg:pypi/requests@2.31.0",
|
||||
Language: pkg.Python,
|
||||
Type: pkg.PythonPkg,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
// Test mixed quoted and unquoted dependencies - ensure no duplicates
|
||||
fixture: "test-fixtures/setup/mixed-format-setup.py",
|
||||
expected: []pkg.Package{
|
||||
{
|
||||
Name: "requests",
|
||||
Version: "2.31.0",
|
||||
PURL: "pkg:pypi/requests@2.31.0",
|
||||
Language: pkg.Python,
|
||||
Type: pkg.PythonPkg,
|
||||
},
|
||||
{
|
||||
Name: "django",
|
||||
Version: "4.2.23",
|
||||
PURL: "pkg:pypi/django@4.2.23",
|
||||
Language: pkg.Python,
|
||||
Type: pkg.PythonPkg,
|
||||
},
|
||||
{
|
||||
Name: "flask",
|
||||
Version: "3.0.0",
|
||||
PURL: "pkg:pypi/flask@3.0.0",
|
||||
Language: pkg.Python,
|
||||
Type: pkg.PythonPkg,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
|
||||
1
syft/pkg/cataloger/python/test-fixtures/glob-paths/src/pdm.lock
generated
Normal file
1
syft/pkg/cataloger/python/test-fixtures/glob-paths/src/pdm.lock
generated
Normal file
@ -0,0 +1 @@
|
||||
bogus
|
||||
137
syft/pkg/cataloger/python/test-fixtures/pdm-lock/pdm.lock
generated
Normal file
137
syft/pkg/cataloger/python/test-fixtures/pdm-lock/pdm.lock
generated
Normal file
@ -0,0 +1,137 @@
|
||||
# This file is @generated by PDM.
|
||||
# It is not intended for manual editing.
|
||||
|
||||
[metadata]
|
||||
groups = ["default", "security", "tests"]
|
||||
strategy = ["inherit_metadata", "static_urls"]
|
||||
lock_version = "4.5.0"
|
||||
content_hash = "sha256:2584886ac58a0ae70aa36bc0318b62c3e2c89acc9c21ebb9aee74147c0a9dc06"
|
||||
|
||||
[[metadata.targets]]
|
||||
requires_python = ">=3.3"
|
||||
|
||||
[[package]]
|
||||
name = "certifi"
|
||||
version = "2025.1.31"
|
||||
requires_python = ">=3.6"
|
||||
summary = "Python package for providing Mozilla's CA Bundle."
|
||||
groups = ["security"]
|
||||
marker = "python_version >= \"3.6\""
|
||||
files = [
|
||||
{url = "https://files.pythonhosted.org/packages/1c/ab/c9f1e32b7b1bf505bf26f0ef697775960db7932abeb7b516de930ba2705f/certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"},
|
||||
{url = "https://files.pythonhosted.org/packages/38/fc/bce832fd4fd99766c04d1ee0eead6b0ec6486fb100ae5e74c1d91292b982/certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "chardet"
|
||||
version = "3.0.4"
|
||||
summary = "Universal encoding detector for Python 2 and 3"
|
||||
groups = ["default"]
|
||||
marker = "os_name == \"nt\""
|
||||
files = [
|
||||
{url = "https://files.pythonhosted.org/packages/bc/a9/01ffebfb562e4274b6487b4bb1ddec7ca55ec7510b22e4c51f14098443b8/chardet-3.0.4-py2.py3-none-any.whl", hash = "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"},
|
||||
{url = "https://files.pythonhosted.org/packages/fc/bb/a5768c230f9ddb03acc9ef3f0d4a3cf93462473795d18e9535498c8f929d/chardet-3.0.4.tar.gz", hash = "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "charset-normalizer"
|
||||
version = "2.0.12"
|
||||
requires_python = ">=3.5.0"
|
||||
summary = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
|
||||
groups = ["security"]
|
||||
marker = "python_version >= \"3.6\""
|
||||
files = [
|
||||
{url = "https://files.pythonhosted.org/packages/06/b3/24afc8868eba069a7f03650ac750a778862dc34941a4bebeb58706715726/charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"},
|
||||
{url = "https://files.pythonhosted.org/packages/56/31/7bcaf657fafb3c6db8c787a865434290b726653c912085fbd371e9b92e1c/charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "colorama"
|
||||
version = "0.3.9"
|
||||
summary = "Cross-platform colored terminal text."
|
||||
groups = ["tests"]
|
||||
marker = "sys_platform == \"win32\""
|
||||
files = [
|
||||
{url = "https://files.pythonhosted.org/packages/db/c8/7dcf9dbcb22429512708fe3a547f8b6101c0d02137acbd892505aee57adf/colorama-0.3.9-py2.py3-none-any.whl", hash = "sha256:463f8483208e921368c9f306094eb6f725c6ca42b0f97e313cb5d5512459feda"},
|
||||
{url = "https://files.pythonhosted.org/packages/e6/76/257b53926889e2835355d74fec73d82662100135293e17d382e2b74d1669/colorama-0.3.9.tar.gz", hash = "sha256:48eb22f4f8461b1df5734a074b57042430fb06e1d61bd1e11b078c0fe6d7a1f1"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "idna"
|
||||
version = "2.7"
|
||||
summary = "Internationalized Domain Names in Applications (IDNA)"
|
||||
groups = ["default", "security"]
|
||||
files = [
|
||||
{url = "https://files.pythonhosted.org/packages/4b/2a/0276479a4b3caeb8a8c1af2f8e4355746a97fab05a372e4a2c6a6b876165/idna-2.7-py2.py3-none-any.whl", hash = "sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e"},
|
||||
{url = "https://files.pythonhosted.org/packages/65/c4/80f97e9c9628f3cac9b98bfca0402ede54e0563b56482e3e6e45c43c4935/idna-2.7.tar.gz", hash = "sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "py"
|
||||
version = "1.4.34"
|
||||
summary = "library with cross-python path, ini-parsing, io, code, log facilities"
|
||||
groups = ["tests"]
|
||||
files = [
|
||||
{url = "https://files.pythonhosted.org/packages/53/67/9620edf7803ab867b175e4fd23c7b8bd8eba11cb761514dcd2e726ef07da/py-1.4.34-py2.py3-none-any.whl", hash = "sha256:2ccb79b01769d99115aa600d7eed99f524bf752bba8f041dc1c184853514655a"},
|
||||
{url = "https://files.pythonhosted.org/packages/68/35/58572278f1c097b403879c1e9369069633d1cbad5239b9057944bb764782/py-1.4.34.tar.gz", hash = "sha256:0f2d585d22050e90c7d293b6451c83db097df77871974d90efd5a30dc12fcde3"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pytest"
|
||||
version = "3.2.5"
|
||||
summary = "pytest: simple powerful testing with Python"
|
||||
groups = ["tests"]
|
||||
dependencies = [
|
||||
"argparse; python_version == \"2.6\"",
|
||||
"colorama; sys_platform == \"win32\"",
|
||||
"ordereddict; python_version == \"2.6\"",
|
||||
"py>=1.4.33",
|
||||
"setuptools",
|
||||
]
|
||||
files = [
|
||||
{url = "https://files.pythonhosted.org/packages/1f/f8/8cd74c16952163ce0db0bd95fdd8810cbf093c08be00e6e665ebf0dc3138/pytest-3.2.5.tar.gz", hash = "sha256:6d5bd4f7113b444c55a3bbb5c738a3dd80d43563d063fc42dcb0aaefbdd78b81"},
|
||||
{url = "https://files.pythonhosted.org/packages/ef/41/d8a61f1b2ba308e96b36106e95024977e30129355fd12087f23e4b9852a1/pytest-3.2.5-py2.py3-none-any.whl", hash = "sha256:241d7e7798d79192a123ceaf64c602b4d233eacf6d6e42ae27caa97f498b7dc6"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "requests"
|
||||
version = "2.27.1"
|
||||
requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
|
||||
summary = "Python HTTP for Humans."
|
||||
groups = ["security"]
|
||||
marker = "python_version >= \"3.6\""
|
||||
dependencies = [
|
||||
"certifi>=2017.4.17",
|
||||
"chardet<5,>=3.0.2; python_version < \"3\"",
|
||||
"charset-normalizer~=2.0.0; python_version >= \"3\"",
|
||||
"idna<3,>=2.5; python_version < \"3\"",
|
||||
"idna<4,>=2.5; python_version >= \"3\"",
|
||||
"urllib3<1.27,>=1.21.1",
|
||||
]
|
||||
files = [
|
||||
{url = "https://files.pythonhosted.org/packages/2d/61/08076519c80041bc0ffa1a8af0cbd3bf3e2b62af10435d269a9d0f40564d/requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"},
|
||||
{url = "https://files.pythonhosted.org/packages/60/f3/26ff3767f099b73e0efa138a9998da67890793bfa475d8278f84a30fec77/requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "setuptools"
|
||||
version = "39.2.0"
|
||||
requires_python = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*"
|
||||
summary = "Easily download, build, install, upgrade, and uninstall Python packages"
|
||||
groups = ["tests"]
|
||||
files = [
|
||||
{url = "https://files.pythonhosted.org/packages/1a/04/d6f1159feaccdfc508517dba1929eb93a2854de729fa68da9d5c6b48fa00/setuptools-39.2.0.zip", hash = "sha256:f7cddbb5f5c640311eb00eab6e849f7701fa70bf6a183fc8a2c33dd1d1672fb2"},
|
||||
{url = "https://files.pythonhosted.org/packages/7f/e1/820d941153923aac1d49d7fc37e17b6e73bfbd2904959fffbad77900cf92/setuptools-39.2.0-py2.py3-none-any.whl", hash = "sha256:8fca9275c89964f13da985c3656cb00ba029d7f3916b37990927ffdf264e7926"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "urllib3"
|
||||
version = "1.26.20"
|
||||
requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
|
||||
summary = "HTTP library with thread-safe connection pooling, file post, and more."
|
||||
groups = ["security"]
|
||||
marker = "python_version >= \"3.6\""
|
||||
files = [
|
||||
{url = "https://files.pythonhosted.org/packages/33/cf/8435d5a7159e2a9c83a95896ed596f68cf798005fe107cc655b5c5c14704/urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"},
|
||||
{url = "https://files.pythonhosted.org/packages/e4/e8/6ff5e6bc22095cfc59b6ea711b687e2b7ed4bdb373f7eeec370a97d7392f/urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"},
|
||||
]
|
||||
@ -0,0 +1,17 @@
|
||||
from setuptools import setup
|
||||
|
||||
# Test case to ensure duplicate detection works correctly
|
||||
# when same dependencies appear in both quoted and unquoted forms
|
||||
|
||||
setup(
|
||||
name='mixed-format-project',
|
||||
version='1.0.0',
|
||||
install_requires=[
|
||||
# Quoted dependencies (should be caught by pinnedDependency regex)
|
||||
"requests==2.31.0",
|
||||
"django==4.2.23",
|
||||
] + """
|
||||
requests==2.31.0
|
||||
flask==3.0.0
|
||||
""".split(),
|
||||
)
|
||||
@ -0,0 +1,23 @@
|
||||
from setuptools import setup
|
||||
|
||||
# Example setup.py using multiline string with .split() pattern
|
||||
# This pattern is commonly seen in projects like mayan-edms
|
||||
|
||||
setup(
|
||||
name='example-project',
|
||||
version='1.0.0',
|
||||
install_requires="""
|
||||
django==4.2.23
|
||||
CairoSVG==2.7.1
|
||||
Pillow==11.0.0
|
||||
requests==2.31.0
|
||||
celery==5.3.4
|
||||
""".split(),
|
||||
extras_require={
|
||||
'dev': """
|
||||
pytest==7.4.3
|
||||
black==23.12.1
|
||||
mypy==1.7.1
|
||||
""".split(),
|
||||
},
|
||||
)
|
||||
@ -13,7 +13,7 @@ import (
|
||||
func Test_noSQLiteDriverError(t *testing.T) {
|
||||
// this test package does must not import the sqlite library
|
||||
file := "../test-fixtures/Packages"
|
||||
resolver, err := fileresolver.NewFromFile(file, file)
|
||||
resolver, err := fileresolver.NewFromFile(file)
|
||||
require.NoError(t, err)
|
||||
|
||||
cataloger := redhat.NewDBCataloger()
|
||||
|
||||
@ -79,6 +79,16 @@ func (m PythonPackage) OwnedFiles() (result []string) {
|
||||
return result
|
||||
}
|
||||
|
||||
// PythonPdmLockEntry represents a single package entry within a pdm.lock file.
|
||||
type PythonPdmLockEntry struct {
|
||||
// Summary provides a description of the package
|
||||
Summary string `mapstructure:"summary" json:"summary" toml:"summary"`
|
||||
// Files are the package files with their paths and hash digests
|
||||
Files []PythonFileRecord `mapstructure:"files" json:"files" toml:"files"`
|
||||
// Dependencies are the dependency specifications, without environment qualifiers
|
||||
Dependencies []string `mapstructure:"dependencies" json:"dependencies" toml:"dependencies"`
|
||||
}
|
||||
|
||||
// PythonPipfileLockEntry represents a single package entry within a Pipfile.lock file.
|
||||
type PythonPipfileLockEntry struct {
|
||||
// Hashes are the package file hash values in the format "algorithm:digest" for integrity verification.
|
||||
|
||||
@ -18,6 +18,9 @@ const (
|
||||
// PURLQualifierUpstream this qualifier is not in the pURL spec, but is used by grype to perform indirect matching based on source information
|
||||
PURLQualifierUpstream = "upstream"
|
||||
|
||||
// PURLQualifierRpmModularity this qualifier is not in the pURL spec, but is used to specify RPM modularity information
|
||||
PURLQualifierRpmModularity = "rpmmod"
|
||||
|
||||
purlCargoPkgType = "cargo"
|
||||
purlGradlePkgType = "gradle"
|
||||
)
|
||||
|
||||
@ -30,24 +30,21 @@ type Config struct {
|
||||
type directorySource struct {
|
||||
id artifact.ID
|
||||
config Config
|
||||
resolver *fileresolver.Directory
|
||||
resolver file.Resolver
|
||||
mutex *sync.Mutex
|
||||
}
|
||||
|
||||
func NewFromPath(path string) (source.Source, error) {
|
||||
cfg := Config{
|
||||
Path: path,
|
||||
}
|
||||
return New(cfg)
|
||||
return New(Config{Path: path})
|
||||
}
|
||||
|
||||
func New(cfg Config) (source.Source, error) {
|
||||
fi, err := os.Stat(cfg.Path)
|
||||
fileMeta, err := os.Stat(cfg.Path)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to stat path=%q: %w", cfg.Path, err)
|
||||
}
|
||||
|
||||
if !fi.IsDir() {
|
||||
if !fileMeta.IsDir() {
|
||||
return nil, fmt.Errorf("given path is not a directory: %q", cfg.Path)
|
||||
}
|
||||
|
||||
@ -58,53 +55,6 @@ func New(cfg Config) (source.Source, error) {
|
||||
}, nil
|
||||
}
|
||||
|
||||
// deriveIDFromDirectory generates an artifact ID from the given directory config. If an alias is provided, then
|
||||
// the artifact ID is derived exclusively from the alias name and version. Otherwise, the artifact ID is derived
|
||||
// from the path provided with an attempt to prune a prefix if a base is given. Since the contents of the directory
|
||||
// are not considered, there is no semantic meaning to the artifact ID -- this is why the alias is preferred without
|
||||
// consideration for the path.
|
||||
func deriveIDFromDirectory(cfg Config) artifact.ID {
|
||||
var info string
|
||||
if !cfg.Alias.IsEmpty() {
|
||||
// don't use any of the path information -- instead use the alias name and version as the artifact ID.
|
||||
// why? this allows the user to set a dependable stable value for the artifact ID in case the
|
||||
// scanning root changes (e.g. a user scans a directory, then moves it to a new location and scans again).
|
||||
info = fmt.Sprintf("%s@%s", cfg.Alias.Name, cfg.Alias.Version)
|
||||
} else {
|
||||
log.Warn("no explicit name and version provided for directory source, deriving artifact ID from the given path (which is not ideal)")
|
||||
info = cleanDirPath(cfg.Path, cfg.Base)
|
||||
}
|
||||
|
||||
return internal.ArtifactIDFromDigest(digest.SHA256.FromString(filepath.Clean(info)).String())
|
||||
}
|
||||
|
||||
func cleanDirPath(path, base string) string {
|
||||
if path == base {
|
||||
return path
|
||||
}
|
||||
|
||||
if base != "" {
|
||||
cleanRoot, rootErr := fileresolver.NormalizeRootDirectory(path)
|
||||
cleanBase, baseErr := fileresolver.NormalizeBaseDirectory(base)
|
||||
|
||||
if rootErr == nil && baseErr == nil {
|
||||
// allows for normalizing inputs:
|
||||
// cleanRoot: /var/folders/8x/gw98pp6535s4r8drc374tb1r0000gn/T/TestDirectoryEncoder1121632790/001/some/path
|
||||
// cleanBase: /var/folders/8x/gw98pp6535s4r8drc374tb1r0000gn/T/TestDirectoryEncoder1121632790/001
|
||||
// normalized: some/path
|
||||
|
||||
relPath, err := filepath.Rel(cleanBase, cleanRoot)
|
||||
if err == nil {
|
||||
path = relPath
|
||||
}
|
||||
// this is odd, but this means we can't use base
|
||||
}
|
||||
// if the base is not a valid chroot, then just use the path as-is
|
||||
}
|
||||
|
||||
return path
|
||||
}
|
||||
|
||||
func (s directorySource) ID() artifact.ID {
|
||||
return s.id
|
||||
}
|
||||
@ -118,9 +68,11 @@ func (s directorySource) Describe() source.Description {
|
||||
if a.Name != "" {
|
||||
name = a.Name
|
||||
}
|
||||
|
||||
if a.Version != "" {
|
||||
version = a.Version
|
||||
}
|
||||
|
||||
if a.Supplier != "" {
|
||||
supplier = a.Supplier
|
||||
}
|
||||
@ -141,7 +93,10 @@ func (s *directorySource) FileResolver(_ source.Scope) (file.Resolver, error) {
|
||||
s.mutex.Lock()
|
||||
defer s.mutex.Unlock()
|
||||
|
||||
if s.resolver == nil {
|
||||
if s.resolver != nil {
|
||||
return s.resolver, nil
|
||||
}
|
||||
|
||||
exclusionFunctions, err := GetDirectoryExclusionFunctions(s.config.Path, s.config.Exclude.Paths)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@ -156,14 +111,13 @@ func (s *directorySource) FileResolver(_ source.Scope) (file.Resolver, error) {
|
||||
}
|
||||
|
||||
s.resolver = res
|
||||
}
|
||||
|
||||
return s.resolver, nil
|
||||
}
|
||||
|
||||
func (s *directorySource) Close() error {
|
||||
s.mutex.Lock()
|
||||
defer s.mutex.Unlock()
|
||||
|
||||
s.resolver = nil
|
||||
return nil
|
||||
}
|
||||
@ -221,3 +175,50 @@ func GetDirectoryExclusionFunctions(root string, exclusions []string) ([]fileres
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
|
||||
// deriveIDFromDirectory generates an artifact ID from the given directory config. If an alias is provided, then
|
||||
// the artifact ID is derived exclusively from the alias name and version. Otherwise, the artifact ID is derived
|
||||
// from the path provided with an attempt to prune a prefix if a base is given. Since the contents of the directory
|
||||
// are not considered, there is no semantic meaning to the artifact ID -- this is why the alias is preferred without
|
||||
// consideration for the path.
|
||||
func deriveIDFromDirectory(cfg Config) artifact.ID {
|
||||
var info string
|
||||
if !cfg.Alias.IsEmpty() {
|
||||
// don't use any of the path information -- instead use the alias name and version as the artifact ID.
|
||||
// why? this allows the user to set a dependable stable value for the artifact ID in case the
|
||||
// scanning root changes (e.g. a user scans a directory, then moves it to a new location and scans again).
|
||||
info = fmt.Sprintf("%s@%s", cfg.Alias.Name, cfg.Alias.Version)
|
||||
} else {
|
||||
log.Warn("no explicit name and version provided for directory source, deriving artifact ID from the given path (which is not ideal)")
|
||||
info = cleanDirPath(cfg.Path, cfg.Base)
|
||||
}
|
||||
|
||||
return internal.ArtifactIDFromDigest(digest.SHA256.FromString(filepath.Clean(info)).String())
|
||||
}
|
||||
|
||||
func cleanDirPath(path, base string) string {
|
||||
if path == base {
|
||||
return path
|
||||
}
|
||||
|
||||
if base != "" {
|
||||
cleanRoot, rootErr := fileresolver.NormalizeRootDirectory(path)
|
||||
cleanBase, baseErr := fileresolver.NormalizeBaseDirectory(base)
|
||||
|
||||
if rootErr == nil && baseErr == nil {
|
||||
// allows for normalizing inputs:
|
||||
// cleanRoot: /var/folders/8x/gw98pp6535s4r8drc374tb1r0000gn/T/TestDirectoryEncoder1121632790/001/some/path
|
||||
// cleanBase: /var/folders/8x/gw98pp6535s4r8drc374tb1r0000gn/T/TestDirectoryEncoder1121632790/001
|
||||
// normalized: some/path
|
||||
|
||||
relPath, err := filepath.Rel(cleanBase, cleanRoot)
|
||||
if err == nil {
|
||||
path = relPath
|
||||
}
|
||||
// this is odd, but this means we can't use base
|
||||
}
|
||||
// if the base is not a valid chroot, then just use the path as-is
|
||||
}
|
||||
|
||||
return path
|
||||
}
|
||||
|
||||
@ -6,7 +6,6 @@ import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"sync"
|
||||
|
||||
"github.com/opencontainers/go-digest"
|
||||
@ -50,7 +49,13 @@ func NewFromPath(path string) (source.Source, error) {
|
||||
}
|
||||
|
||||
func New(cfg Config) (source.Source, error) {
|
||||
fileMeta, err := os.Stat(cfg.Path)
|
||||
f, err := os.Open(cfg.Path)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to open file=%q: %w", cfg.Path, err)
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
fileMeta, err := f.Stat()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to stat path=%q: %w", cfg.Path, err)
|
||||
}
|
||||
@ -59,33 +64,19 @@ func New(cfg Config) (source.Source, error) {
|
||||
return nil, fmt.Errorf("given path is a directory: %q", cfg.Path)
|
||||
}
|
||||
|
||||
analysisPath, cleanupFn, err := fileAnalysisPath(cfg.Path, cfg.SkipExtractArchive)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to extract file analysis path=%q: %w", cfg.Path, err)
|
||||
}
|
||||
|
||||
var digests []file.Digest
|
||||
if len(cfg.DigestAlgorithms) > 0 {
|
||||
fh, err := os.Open(cfg.Path)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to open file=%q: %w", cfg.Path, err)
|
||||
}
|
||||
|
||||
defer fh.Close()
|
||||
|
||||
digests, err = intFile.NewDigestsFromFile(context.TODO(), fh, cfg.DigestAlgorithms)
|
||||
digests, err = intFile.NewDigestsFromFile(context.TODO(), f, cfg.DigestAlgorithms)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to calculate digests for file=%q: %w", cfg.Path, err)
|
||||
}
|
||||
}
|
||||
|
||||
fh, err := os.Open(cfg.Path)
|
||||
analysisPath, cleanupFn, err := fileAnalysisPath(cfg.Path, cfg.SkipExtractArchive)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to open file=%q: %w", cfg.Path, err)
|
||||
return nil, fmt.Errorf("unable to extract file analysis path=%q: %w", cfg.Path, err)
|
||||
}
|
||||
|
||||
defer fh.Close()
|
||||
|
||||
id, versionDigest := deriveIDFromFile(cfg)
|
||||
|
||||
return &fileSource{
|
||||
@ -96,26 +87,10 @@ func New(cfg Config) (source.Source, error) {
|
||||
analysisPath: analysisPath,
|
||||
digestForVersion: versionDigest,
|
||||
digests: digests,
|
||||
mimeType: stereoFile.MIMEType(fh),
|
||||
mimeType: stereoFile.MIMEType(f),
|
||||
}, nil
|
||||
}
|
||||
|
||||
// deriveIDFromFile derives an artifact ID from the contents of a file. If an alias is provided, it will be included
|
||||
// in the ID derivation (along with contents). This way if the user scans the same item but is considered to be
|
||||
// logically different, then ID will express that.
|
||||
func deriveIDFromFile(cfg Config) (artifact.ID, string) {
|
||||
d := digestOfFileContents(cfg.Path)
|
||||
info := d
|
||||
|
||||
if !cfg.Alias.IsEmpty() {
|
||||
// if the user provided an alias, we want to consider that in the artifact ID. This way if the user
|
||||
// scans the same item but is considered to be logically different, then ID will express that.
|
||||
info += fmt.Sprintf(":%s@%s", cfg.Alias.Name, cfg.Alias.Version)
|
||||
}
|
||||
|
||||
return internal.ArtifactIDFromDigest(digest.SHA256.FromString(info).String()), d
|
||||
}
|
||||
|
||||
func (s fileSource) ID() artifact.ID {
|
||||
return s.id
|
||||
}
|
||||
@ -168,52 +143,56 @@ func (s fileSource) FileResolver(_ source.Scope) (file.Resolver, error) {
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to stat path=%q: %w", s.analysisPath, err)
|
||||
}
|
||||
isArchiveAnalysis := fi.IsDir()
|
||||
|
||||
absParentDir, err := absoluteSymlinkFreePathToParent(s.analysisPath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if isArchiveAnalysis {
|
||||
if isArchiveAnalysis := fi.IsDir(); isArchiveAnalysis {
|
||||
// this is an analysis of an archive file... we should scan the directory where the archive contents
|
||||
res, err := fileresolver.NewFromDirectory(s.analysisPath, "", exclusionFunctions...)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to create directory resolver: %w", err)
|
||||
}
|
||||
|
||||
s.resolver = res
|
||||
return s.resolver, nil
|
||||
}
|
||||
|
||||
// This is analysis of a single file. Use file indexer.
|
||||
res, err := fileresolver.NewFromFile(absParentDir, s.analysisPath, exclusionFunctions...)
|
||||
res, err := fileresolver.NewFromFile(s.analysisPath, exclusionFunctions...)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to create file resolver: %w", err)
|
||||
}
|
||||
|
||||
s.resolver = res
|
||||
return s.resolver, nil
|
||||
}
|
||||
|
||||
func absoluteSymlinkFreePathToParent(path string) (string, error) {
|
||||
absAnalysisPath, err := filepath.Abs(path)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("unable to get absolute path for analysis path=%q: %w", path, err)
|
||||
}
|
||||
dereferencedAbsAnalysisPath, err := filepath.EvalSymlinks(absAnalysisPath)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("unable to get absolute path for analysis path=%q: %w", path, err)
|
||||
}
|
||||
return filepath.Dir(dereferencedAbsAnalysisPath), nil
|
||||
}
|
||||
|
||||
func (s *fileSource) Close() error {
|
||||
s.mutex.Lock()
|
||||
defer s.mutex.Unlock()
|
||||
|
||||
if s.closer == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
s.resolver = nil
|
||||
return s.closer()
|
||||
}
|
||||
|
||||
// deriveIDFromFile derives an artifact ID from the contents of a file. If an alias is provided, it will be included
|
||||
// in the ID derivation (along with contents). This way if the user scans the same item but is considered to be
|
||||
// logically different, then ID will express that.
|
||||
func deriveIDFromFile(cfg Config) (artifact.ID, string) {
|
||||
d := digestOfFileContents(cfg.Path)
|
||||
info := d
|
||||
|
||||
if !cfg.Alias.IsEmpty() {
|
||||
// if the user provided an alias, we want to consider that in the artifact ID. This way if the user
|
||||
// scans the same item but is considered to be logically different, then ID will express that.
|
||||
info += fmt.Sprintf(":%s@%s", cfg.Alias.Name, cfg.Alias.Version)
|
||||
}
|
||||
|
||||
return internal.ArtifactIDFromDigest(digest.SHA256.FromString(info).String()), d
|
||||
}
|
||||
|
||||
// fileAnalysisPath returns the path given, or in the case the path is an archive, the location where the archive
|
||||
// contents have been made available. A cleanup function is provided for any temp files created (if any).
|
||||
// Users can disable unpacking archives, allowing individual cataloguers to extract them instead (where
|
||||
@ -231,11 +210,14 @@ func fileAnalysisPath(path string, skipExtractArchive bool) (string, func() erro
|
||||
// unarchived.
|
||||
envelopedUnarchiver, err := archiver.ByExtension(path)
|
||||
if unarchiver, ok := envelopedUnarchiver.(archiver.Unarchiver); err == nil && ok {
|
||||
if tar, ok := unarchiver.(*archiver.Tar); ok {
|
||||
// when tar files are extracted, if there are multiple entries at the same
|
||||
// when tar/zip files are extracted, if there are multiple entries at the same
|
||||
// location, the last entry wins
|
||||
// NOTE: this currently does not display any messages if an overwrite happens
|
||||
tar.OverwriteExisting = true
|
||||
switch v := unarchiver.(type) {
|
||||
case *archiver.Tar:
|
||||
v.OverwriteExisting = true
|
||||
case *archiver.Zip:
|
||||
v.OverwriteExisting = true
|
||||
}
|
||||
|
||||
analysisPath, cleanupFn, err = unarchiveToTmp(path, unarchiver)
|
||||
@ -250,15 +232,17 @@ func fileAnalysisPath(path string, skipExtractArchive bool) (string, func() erro
|
||||
}
|
||||
|
||||
func digestOfFileContents(path string) string {
|
||||
file, err := os.Open(path)
|
||||
f, err := os.Open(path)
|
||||
if err != nil {
|
||||
return digest.SHA256.FromString(path).String()
|
||||
}
|
||||
defer file.Close()
|
||||
di, err := digest.SHA256.FromReader(file)
|
||||
defer f.Close()
|
||||
|
||||
di, err := digest.SHA256.FromReader(f)
|
||||
if err != nil {
|
||||
return digest.SHA256.FromString(path).String()
|
||||
}
|
||||
|
||||
return di.String()
|
||||
}
|
||||
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user