Mirror of https://github.com/anchore/syft.git (synced 2025-11-17 08:23:15 +01:00)

Compare commits: 5853129c07 ... bfe63bb006 (64 commits)
Commits in this range:

bfe63bb006
1a8562554a
ec978f01c5
80319572cf
efcfecb2db
7ed34c81f8
2976df5b92
9609ce2b36
56761cee6f
b80592f735
cdb41b0c76
b18f7bb7a8
6daea43c32
9b31c0480f
9a2a45f91d
38c0e6e899
64dc451345
c689dcfeef
f664f9eaf2
08c0572fb7
b702952c8c
bcd47d109a
1ad4a2752a
f92b7d2fc9
6ceef5fe4a
2e100f33f3
b444f0c2ed
102d362daf
66c78d44af
78a4ab8ced
25ca33d20e
60ca241593
0f475c8bcd
199394934d
8a22d394ed
bbef262b8f
4e06a7ab32
e5711e9b42
f69b1db099
fe1ea443c2
bfcbf266df
a400c675fc
7c154e7c37
4c93394bc2
3e4e82f03e
793b0a346f
a0dac519db
34f5e521c1
774b1e97b9
538430d65d
5db3a9bf55
efc2f0012c
c5c1454848
f5c765192c
728feea620
45fb52dca1
45bf8b14ab
9478cd974b
0d9ea69a66
bee78c0b16
88bbcbe9c6
e0680eb704
16f851c5d9
d5ca1ad543
@@ -26,7 +26,7 @@ tools:
   # used for linting
   - name: golangci-lint
     version:
-      want: v2.5.0
+      want: v2.6.1
     method: github-release
     with:
       repo: golangci/golangci-lint
@@ -58,7 +58,7 @@ tools:
   # used to release all artifacts
   - name: goreleaser
     version:
-      want: v2.12.6
+      want: v2.12.7
     method: github-release
     with:
       repo: goreleaser/goreleaser
@@ -90,7 +90,7 @@ tools:
   # used for running all local and CI tasks
   - name: task
     version:
-      want: v3.45.4
+      want: v3.45.5
     method: github-release
     with:
       repo: go-task/task
@@ -98,7 +98,7 @@ tools:
   # used for triggering a release
   - name: gh
     version:
-      want: v2.82.1
+      want: v2.83.0
     method: github-release
     with:
       repo: cli/cli
.github/workflows/codeql-analysis.yml (vendored, 6 changes)

@@ -47,7 +47,7 @@ jobs:

       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@16140ae1a102900babc80a33c44059580f687047 #v3.29.5
+        uses: github/codeql-action/init@0499de31b99561a6d14a36a5f662c2a54f91beee #v3.29.5
         with:
           languages: ${{ matrix.language }}
           # If you wish to specify custom queries, you can do so here or in a config file.
@@ -58,7 +58,7 @@ jobs:
       # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
       # If this step fails, then you should remove it and run the build manually (see below)
       - name: Autobuild
-        uses: github/codeql-action/autobuild@16140ae1a102900babc80a33c44059580f687047 #v3.29.5
+        uses: github/codeql-action/autobuild@0499de31b99561a6d14a36a5f662c2a54f91beee #v3.29.5

       # ℹ️ Command-line programs to run using the OS shell.
       # 📚 https://git.io/JvXDl
@@ -72,4 +72,4 @@ jobs:
     #   make release

     - name: Perform CodeQL Analysis
-      uses: github/codeql-action/analyze@16140ae1a102900babc80a33c44059580f687047 #v3.29.5
+      uses: github/codeql-action/analyze@0499de31b99561a6d14a36a5f662c2a54f91beee #v3.29.5
.github/workflows/release.yaml (vendored, 12 changes)

@@ -19,6 +19,16 @@ jobs:
         with:
           persist-credentials: false

+      - name: Bootstrap environment
+        uses: ./.github/actions/bootstrap
+
+      - name: Validate Apple notarization credentials
+        run: .tool/quill submission list
+        env:
+          QUILL_NOTARY_ISSUER: ${{ secrets.APPLE_NOTARY_ISSUER }}
+          QUILL_NOTARY_KEY_ID: ${{ secrets.APPLE_NOTARY_KEY_ID }}
+          QUILL_NOTARY_KEY: ${{ secrets.APPLE_NOTARY_KEY }}
+
       - name: Check if running on main
         if: github.ref != 'refs/heads/main'
         # we are using the following flag when running `cosign blob-verify` for checksum signature verification:
@@ -161,7 +171,7 @@ jobs:
           # for updating brew formula in anchore/homebrew-syft
           GITHUB_BREW_TOKEN: ${{ secrets.ANCHOREOPS_GITHUB_OSS_WRITE_TOKEN }}

-      - uses: anchore/sbom-action@aa0e114b2e19480f157109b9922bda359bd98b90 #v0.20.8
+      - uses: anchore/sbom-action@8e94d75ddd33f69f691467e42275782e4bfefe84 #v0.20.9
         continue-on-error: true
         with:
           file: go.mod
@@ -31,11 +31,11 @@ jobs:
         with:
           repos: ${{ github.event.inputs.repos }}

-      - uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a #v2.1.0
+      - uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 #v2.1.4
         id: generate-token
         with:
-          app_id: ${{ secrets.TOKEN_APP_ID }}
-          private_key: ${{ secrets.TOKEN_APP_PRIVATE_KEY }}
+          app-id: ${{ secrets.TOKEN_APP_ID }}
+          private-key: ${{ secrets.TOKEN_APP_PRIVATE_KEY }}

       - uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e #v7.0.8
         with:
.github/workflows/update-bootstrap-tools.yml (vendored, 6 changes)

@@ -45,11 +45,11 @@ jobs:
             echo "\`\`\`"
           } >> $GITHUB_STEP_SUMMARY

-      - uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a #v2.1.0
+      - uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 #v2.1.4
         id: generate-token
         with:
-          app_id: ${{ secrets.TOKEN_APP_ID }}
-          private_key: ${{ secrets.TOKEN_APP_PRIVATE_KEY }}
+          app-id: ${{ secrets.TOKEN_APP_ID }}
+          private-key: ${{ secrets.TOKEN_APP_PRIVATE_KEY }}

       - uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e #v7.0.8
         with:
@@ -14,6 +14,9 @@ env:
 jobs:
   upgrade-cpe-dictionary-index:
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      packages: write
     if: github.repository == 'anchore/syft' # only run for main repo
     steps:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
@@ -22,18 +25,31 @@ jobs:

       - name: Bootstrap environment
         uses: ./.github/actions/bootstrap
+        id: bootstrap

-      - name: Bootstrap environment
-        uses: ./.github/actions/bootstrap
+      - name: Login to GitHub Container Registry
+        run: |
+          echo "${{ secrets.GITHUB_TOKEN }}" | ${{ steps.bootstrap.outputs.oras }} login ghcr.io -u ${{ github.actor }} --password-stdin

-      - run: |
-          make generate-cpe-dictionary-index
+      - name: Pull CPE cache from registry
+        run: make generate:cpe-index:cache:pull
+
+      - name: Update CPE cache from NVD API
+        run: make generate:cpe-index:cache:update
+        env:
+          NVD_API_KEY: ${{ secrets.NVD_API_KEY }}
+
+      - name: Generate CPE dictionary index
+        run: make generate:cpe-index:build
+
+      - name: Push updated CPE cache to registry
+        run: make generate:cpe-index:cache:push

-      - uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a #v2.1.0
+      - uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 #v2.1.4
         id: generate-token
         with:
-          app_id: ${{ secrets.TOKEN_APP_ID }}
-          private_key: ${{ secrets.TOKEN_APP_PRIVATE_KEY }}
+          app-id: ${{ secrets.TOKEN_APP_ID }}
+          private-key: ${{ secrets.TOKEN_APP_PRIVATE_KEY }}

       - uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e #v7.0.8
         with:
.github/workflows/update-spdx-license-list.yaml (vendored, new file, 54 lines)

@@ -0,0 +1,54 @@
+name: PR to update SPDX license list
+on:
+  schedule:
+    - cron: "0 6 * * 1" # every monday at 6 AM UTC
+
+  workflow_dispatch:
+
+permissions:
+  contents: read
+
+env:
+  SLACK_NOTIFICATIONS: true
+
+jobs:
+  upgrade-spdx-license-list:
+    runs-on: ubuntu-latest
+    if: github.repository == 'anchore/syft' # only run for main repo
+    steps:
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
+        with:
+          persist-credentials: false
+
+      - name: Bootstrap environment
+        uses: ./.github/actions/bootstrap
+
+      - run: |
+          make generate-license-list
+
+      - uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a #v2.1.0
+        id: generate-token
+        with:
+          app_id: ${{ secrets.TOKEN_APP_ID }}
+          private_key: ${{ secrets.TOKEN_APP_PRIVATE_KEY }}
+
+      - uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e #v7.0.8
+        with:
+          signoff: true
+          delete-branch: true
+          branch: auto/latest-spdx-license-list
+          labels: dependencies
+          commit-message: "chore(deps): update SPDX license list"
+          title: "chore(deps): update SPDX license list"
+          body: |
+            Update SPDX license list based on the latest available list from spdx.org
+          token: ${{ steps.generate-token.outputs.token }}
+
+      - uses: 8398a7/action-slack@77eaa4f1c608a7d68b38af4e3f739dcd8cba273e #v3.19.0
+        with:
+          status: ${{ job.status }}
+          fields: workflow,eventName,job
+          text: Syft SPDX license list update failed
+        env:
+          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_TOOLBOX_WEBHOOK_URL }}
+        if: ${{ failure() && env.SLACK_NOTIFICATIONS == 'true' }}
.gitignore (vendored, 2 changes)

@@ -73,3 +73,5 @@ cosign.pub
 __pycache__/
 *.py[cod]
 *$py.class
+
+
@@ -116,7 +116,7 @@ Where the `formats` available are:
 - `spdx-json@2.2`: A JSON report conforming to the [SPDX 2.2 JSON Schema](https://github.com/spdx/spdx-spec/blob/v2.2/schemas/spdx-schema.json).
 - `github-json`: A JSON report conforming to GitHub's dependency snapshot format.
 - `syft-table`: A columnar summary (default).
-- `template`: Lets the user specify the output format. See ["Using templates"](#using-templates) below.
+- `template`: Lets the user specify the output format. See ["Using templates"](https://github.com/anchore/syft/wiki/using-templates) below.

 Note that flags using the @<version> can be used for earlier versions of each specification as well.

@@ -135,7 +135,7 @@ Note that flags using the @<version> can be used for earlier versions of each specification as well.
 - Go (go.mod, Go binaries)
 - GitHub (workflows, actions)
 - Haskell (cabal, stack)
-- Java (jar, ear, war, par, sar, nar, native-image)
+- Java (jar, ear, war, par, sar, nar, rar, native-image)
 - JavaScript (npm, yarn)
 - Jenkins Plugins (jpi, hpi)
 - Linux kernel archives (vmlinz)
@@ -1,5 +1,9 @@

 version: "3"

+includes:
+  generate:cpe-index: ./task.d/generate/cpe-index.yaml
+
 vars:
   OWNER: anchore
   PROJECT: syft
@@ -511,10 +515,11 @@ tasks:
       - "gofmt -s -w ./internal/spdxlicense"

   generate-cpe-dictionary-index:
-    desc: Generate the CPE index based off of the latest available CPE dictionary
-    dir: "syft/pkg/cataloger/internal/cpegenerate/dictionary"
+    desc: Generate the CPE index from local cache
     cmds:
-      - "go generate"
+      - task: generate:cpe-index:cache:pull
+      - task: generate:cpe-index:cache:update
+      - task: generate:cpe-index:build


 ## Build-related targets #################################
@@ -253,7 +253,6 @@ func generateSBOMForAttestation(ctx context.Context, id clio.Identification, opt
 	}

 	src, err := getSource(ctx, opts, userInput, stereoscope.RegistryTag)
-
 	if err != nil {
 		return nil, err
 	}
@@ -87,8 +87,8 @@ func runCatalogerList(opts *catalogerListOptions) error {
 }

 func catalogerListReport(opts *catalogerListOptions, allTaskGroups [][]task.Task) (string, error) {
-	defaultCatalogers := options.Flatten(opts.DefaultCatalogers)
-	selectCatalogers := options.Flatten(opts.SelectCatalogers)
+	defaultCatalogers := options.FlattenAndSort(opts.DefaultCatalogers)
+	selectCatalogers := options.FlattenAndSort(opts.SelectCatalogers)
 	selectedTaskGroups, selectionEvidence, err := task.SelectInGroups(
 		allTaskGroups,
 		cataloging.NewSelectionRequest().
@@ -185,7 +185,6 @@ func runScan(ctx context.Context, id clio.Identification, opts *scanOptions, use
 	}

 	src, err := getSource(ctx, &opts.Catalog, userInput, sources...)
-
 	if err != nil {
 		return err
 	}
@@ -198,9 +198,10 @@ func (cfg Catalog) ToPackagesConfig() pkgcataloging.Config {
 		},
 		Nix: nix.DefaultConfig().
 			WithCaptureOwnedFiles(cfg.Nix.CaptureOwnedFiles),
-		Python: python.CatalogerConfig{
-			GuessUnpinnedRequirements: cfg.Python.GuessUnpinnedRequirements,
-		},
+		Python: python.DefaultCatalogerConfig().
+			WithSearchRemoteLicenses(*multiLevelOption(false, enrichmentEnabled(cfg.Enrich, task.Python), cfg.Python.SearchRemoteLicenses)).
+			WithPypiBaseURL(cfg.Python.PypiBaseURL).
+			WithGuessUnpinnedRequirements(*multiLevelOption(false, enrichmentEnabled(cfg.Enrich, task.Python), cfg.Python.GuessUnpinnedRequirements)),
 		JavaArchive: java.DefaultArchiveCatalogerConfig().
 			WithUseMavenLocalRepository(*multiLevelOption(false, enrichmentEnabled(cfg.Enrich, task.Java, task.Maven), cfg.Java.UseMavenLocalRepository)).
 			WithMavenLocalRepositoryDir(cfg.Java.MavenLocalRepositoryDir).
@@ -283,10 +284,10 @@ func (cfg *Catalog) PostLoad() error {

 	cfg.From = Flatten(cfg.From)

-	cfg.Catalogers = Flatten(cfg.Catalogers)
-	cfg.DefaultCatalogers = Flatten(cfg.DefaultCatalogers)
-	cfg.SelectCatalogers = Flatten(cfg.SelectCatalogers)
-	cfg.Enrich = Flatten(cfg.Enrich)
+	cfg.Catalogers = FlattenAndSort(cfg.Catalogers)
+	cfg.DefaultCatalogers = FlattenAndSort(cfg.DefaultCatalogers)
+	cfg.SelectCatalogers = FlattenAndSort(cfg.SelectCatalogers)
+	cfg.Enrich = FlattenAndSort(cfg.Enrich)

 	// for backwards compatibility
 	cfg.DefaultCatalogers = append(cfg.DefaultCatalogers, cfg.Catalogers...)
@@ -311,6 +312,11 @@ func Flatten(commaSeparatedEntries []string) []string {
 			out = append(out, strings.TrimSpace(s))
 		}
 	}
+	return out
+}
+
+func FlattenAndSort(commaSeparatedEntries []string) []string {
+	out := Flatten(commaSeparatedEntries)
 	sort.Strings(out)
 	return out
 }
@@ -320,6 +326,7 @@ var publicisedEnrichmentOptions = []string{
 	task.Golang,
 	task.Java,
 	task.JavaScript,
+	task.Python,
 }

 func enrichmentEnabled(enrichDirectives []string, features ...string) *bool {
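The split between `Flatten` and `FlattenAndSort` above is deliberate: `cfg.From` keeps its order (source resolution order matters), while cataloger and enrichment directives are order-insensitive and are now sorted for stable output. Below is a minimal runnable sketch of the two helpers, pieced together from the hunk and the new tests; the comma-splitting loop is an assumption, since only the `append` line appears in the captured context:

```go
package main

import (
	"fmt"
	"sort"
	"strings"
)

// Flatten splits comma-separated entries and trims whitespace, keeping
// the caller's original ordering (important for --from source order).
func Flatten(commaSeparatedEntries []string) []string {
	var out []string
	for _, entry := range commaSeparatedEntries {
		for _, s := range strings.Split(entry, ",") { // assumed split step
			out = append(out, strings.TrimSpace(s))
		}
	}
	return out
}

// FlattenAndSort is Flatten plus a lexicographic sort, matching the
// helper introduced in this change set.
func FlattenAndSort(commaSeparatedEntries []string) []string {
	out := Flatten(commaSeparatedEntries)
	sort.Strings(out)
	return out
}

func main() {
	in := []string{"registry,docker", "oci-dir"}
	fmt.Println(Flatten(in))        // [registry docker oci-dir]
	fmt.Println(FlattenAndSort(in)) // [docker oci-dir registry]
}
```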
@@ -79,6 +79,98 @@ func TestCatalog_PostLoad(t *testing.T) {
 		}
 	}
 }

+func TestFlatten(t *testing.T) {
+	tests := []struct {
+		name     string
+		input    []string
+		expected []string
+	}{
+		{
+			name:     "preserves order of comma-separated values",
+			input:    []string{"registry,docker,oci-dir"},
+			expected: []string{"registry", "docker", "oci-dir"},
+		},
+		{
+			name:     "preserves order across multiple entries",
+			input:    []string{"registry,docker", "oci-dir"},
+			expected: []string{"registry", "docker", "oci-dir"},
+		},
+		{
+			name:     "trims whitespace",
+			input:    []string{" registry , docker ", " oci-dir "},
+			expected: []string{"registry", "docker", "oci-dir"},
+		},
+		{
+			name:     "handles single value",
+			input:    []string{"registry"},
+			expected: []string{"registry"},
+		},
+		{
+			name:     "handles empty input",
+			input:    []string{},
+			expected: nil,
+		},
+		{
+			name:     "preserves reverse alphabetical order",
+			input:    []string{"zebra,yankee,xray"},
+			expected: []string{"zebra", "yankee", "xray"},
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got := Flatten(tt.input)
+			assert.Equal(t, tt.expected, got)
+		})
+	}
+}
+
+func TestFlattenAndSort(t *testing.T) {
+	tests := []struct {
+		name     string
+		input    []string
+		expected []string
+	}{
+		{
+			name:     "sorts comma-separated values",
+			input:    []string{"registry,docker,oci-dir"},
+			expected: []string{"docker", "oci-dir", "registry"},
+		},
+		{
+			name:     "sorts across multiple entries",
+			input:    []string{"registry,docker", "oci-dir"},
+			expected: []string{"docker", "oci-dir", "registry"},
+		},
+		{
+			name:     "trims whitespace and sorts",
+			input:    []string{" registry , docker ", " oci-dir "},
+			expected: []string{"docker", "oci-dir", "registry"},
+		},
+		{
+			name:     "handles single value",
+			input:    []string{"registry"},
+			expected: []string{"registry"},
+		},
+		{
+			name:     "handles empty input",
+			input:    []string{},
+			expected: nil,
+		},
+		{
+			name:     "sorts reverse alphabetical order",
+			input:    []string{"zebra,yankee,xray"},
+			expected: []string{"xray", "yankee", "zebra"},
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got := FlattenAndSort(tt.input)
+			assert.Equal(t, tt.expected, got)
+		})
+	}
+}
+
 func Test_enrichmentEnabled(t *testing.T) {
 	tests := []struct {
 		directives string
@@ -139,7 +231,7 @@ func Test_enrichmentEnabled(t *testing.T) {

 	for _, test := range tests {
 		t.Run(test.directives, func(t *testing.T) {
-			got := enrichmentEnabled(Flatten([]string{test.directives}), test.test)
+			got := enrichmentEnabled(FlattenAndSort([]string{test.directives}), test.test)
 			assert.Equal(t, test.expected, got)
 		})
 	}
@@ -3,7 +3,9 @@ package options
 import "github.com/anchore/clio"

 type pythonConfig struct {
-	GuessUnpinnedRequirements bool `json:"guess-unpinned-requirements" yaml:"guess-unpinned-requirements" mapstructure:"guess-unpinned-requirements"`
+	SearchRemoteLicenses      *bool  `json:"search-remote-licenses" yaml:"search-remote-licenses" mapstructure:"search-remote-licenses"`
+	PypiBaseURL               string `json:"pypi-base-url" yaml:"pypi-base-url" mapstructure:"pypi-base-url"`
+	GuessUnpinnedRequirements *bool  `json:"guess-unpinned-requirements" yaml:"guess-unpinned-requirements" mapstructure:"guess-unpinned-requirements"`
 }

 var _ interface {
@@ -11,6 +13,8 @@ var _ interface {
 } = (*pythonConfig)(nil)

 func (o *pythonConfig) DescribeFields(descriptions clio.FieldDescriptionSet) {
+	descriptions.Add(&o.SearchRemoteLicenses, `enables Syft to use the network to fill in more detailed license information`)
+	descriptions.Add(&o.PypiBaseURL, `base Pypi url to use`)
 	descriptions.Add(&o.GuessUnpinnedRequirements, `when running across entries in requirements.txt that do not specify a specific version
 (e.g. "sqlalchemy >= 1.0.0, <= 2.0.0, != 3.0.0, <= 3.0.0"), attempt to guess what the version could
 be based on the version requirements specified (e.g. "1.0.0"). When enabled the lowest expressible version
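Changing `GuessUnpinnedRequirements` from `bool` to `*bool` makes "unset" distinguishable from "false", so enrichment directives can supply the default while explicit config still wins. A sketch of that multi-level resolution follows; the real `multiLevelOption` helper lives in syft's options package and is not shown in this compare, so this generic re-implementation is an assumption for illustration:

```go
package main

import "fmt"

// multiLevelOption-style resolution: start from a default, then let each
// non-nil override (enrichment directives first, explicit config last)
// win in turn. Assumed re-implementation for illustration only.
func multiLevelOption[T any](defaultValue T, overrides ...*T) *T {
	result := defaultValue
	for _, override := range overrides {
		if override != nil {
			result = *override
		}
	}
	return &result
}

func boolPtr(b bool) *bool { return &b }

func main() {
	fromEnrich := boolPtr(true) // e.g. an "--enrich python" directive was set
	var fromConfig *bool        // user left search-remote-licenses unset

	resolved := multiLevelOption(false, fromEnrich, fromConfig)
	fmt.Println(*resolved) // true: enrichment enables it, config does not veto
}
```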
@@ -25,7 +25,6 @@ func BenchmarkImagePackageCatalogers(b *testing.B) {
 	// get the source object for the image
 	theSource, err := syft.GetSource(context.Background(), tarPath, syft.DefaultGetSourceConfig().WithSources("docker-archive"))
 	require.NoError(b, err)
-
 	b.Cleanup(func() {
 		require.NoError(b, theSource.Close())
 	})
@@ -88,6 +87,7 @@ func TestPkgCoverageImage(t *testing.T) {
 	definedPkgs.Remove(string(pkg.TerraformPkg))
 	definedPkgs.Remove(string(pkg.PhpPeclPkg)) // we have coverage for pear instead
 	definedPkgs.Remove(string(pkg.CondaPkg))
+	definedPkgs.Remove(string(pkg.ModelPkg))

 	var cases []testCase
 	cases = append(cases, commonTestCases...)
@@ -162,6 +162,7 @@ func TestPkgCoverageDirectory(t *testing.T) {
 	definedPkgs.Remove(string(pkg.UnknownPkg))
 	definedPkgs.Remove(string(pkg.CondaPkg))
 	definedPkgs.Remove(string(pkg.PhpPeclPkg)) // this is covered as pear packages
+	definedPkgs.Remove(string(pkg.ModelPkg))

 	// for directory scans we should not expect to see any of the following package types
 	definedPkgs.Remove(string(pkg.KbPkg))
@@ -38,11 +38,11 @@ func catalogFixtureImageWithConfig(t *testing.T, fixtureImageName string, cfg *s
 	// get the source to build an SBOM against
 	theSource, err := syft.GetSource(context.Background(), tarPath, syft.DefaultGetSourceConfig().WithSources("docker-archive"))
 	require.NoError(t, err)

 	t.Cleanup(func() {
 		require.NoError(t, theSource.Close())
 	})

+	// build the SBOM
 	s, err := syft.CreateSBOM(context.Background(), theSource, cfg)
-
 	require.NoError(t, err)
@@ -66,7 +66,7 @@ func catalogDirectory(t *testing.T, dir string, catalogerSelection ...string) (s
 func catalogDirectoryWithConfig(t *testing.T, dir string, cfg *syft.CreateSBOMConfig) (sbom.SBOM, source.Source) {
 	cfg.CatalogerSelection = cfg.CatalogerSelection.WithDefaults(pkgcataloging.DirectoryTag)

-	// get the source to build an sbom against
+	// get the source to build an SBOM against
 	theSource, err := syft.GetSource(context.Background(), dir, syft.DefaultGetSourceConfig().WithSources("dir"))
 	require.NoError(t, err)
 	t.Cleanup(func() {
@@ -23,6 +23,7 @@ const defaultImage = "alpine:3.19"
 func main() {
 	// automagically get a source.Source for arbitrary string input
 	src := getSource(imageReference())
+	defer src.Close()

 	// will catalog the given source and return a SBOM keeping in mind several configurable options
 	sbom := getSBOM(src)
@@ -46,7 +47,6 @@ func getSource(input string) source.Source {
 	fmt.Println("detecting source type for input:", input, "...")

 	src, err := syft.GetSource(context.Background(), input, nil)
-
 	if err != nil {
 		panic(err)
 	}

@@ -19,6 +19,7 @@ const defaultImage = "alpine:3.19"
 func main() {
 	// automagically get a source.Source for arbitrary string input
 	src := getSource(imageReference())
+	defer src.Close()

 	// catalog the given source and return a SBOM
 	sbom := getSBOM(src)
@@ -40,7 +41,6 @@ func imageReference() string {

 func getSource(input string) source.Source {
 	src, err := syft.GetSource(context.Background(), input, nil)
-
 	if err != nil {
 		panic(err)
 	}

@@ -19,6 +19,7 @@ const defaultImage = "alpine:3.19"
 func main() {
 	// automagically get a source.Source for arbitrary string input
 	src := getSource(imageReference())
+	defer src.Close()

 	// catalog the given source and return a SBOM
 	// let's explicitly use catalogers that are:
@@ -44,7 +45,6 @@ func imageReference() string {

 func getSource(input string) source.Source {
 	src, err := syft.GetSource(context.Background(), input, nil)
-
 	if err != nil {
 		panic(err)
 	}

@@ -15,6 +15,7 @@ func main() {
 	image := "alpine:3.19"

 	src, _ := syft.GetSource(context.Background(), image, syft.DefaultGetSourceConfig().WithSources("registry"))
+	defer src.Close()

 	sbom, _ := syft.CreateSBOM(context.Background(), src, syft.DefaultCreateSBOMConfig())
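The common thread in the example fixes above: a `source.Source` can hold OS resources (temp directories, pulled image tarballs), so each example now pairs `syft.GetSource` with `defer src.Close()`. A self-contained sketch using only the APIs visible in these hunks, plus a package-count accessor that is assumed here rather than shown in the diff:

```go
package main

import (
	"context"
	"fmt"

	"github.com/anchore/syft/syft"
)

func main() {
	ctx := context.Background()

	// resolve the input to a concrete source (a registry pull in this case)
	src, err := syft.GetSource(ctx, "alpine:3.19", syft.DefaultGetSourceConfig().WithSources("registry"))
	if err != nil {
		panic(err)
	}
	// release any resources the source holds: the pattern these hunks add
	defer src.Close()

	// catalog the source into an SBOM with default configuration
	s, err := syft.CreateSBOM(ctx, src, syft.DefaultCreateSBOMConfig())
	if err != nil {
		panic(err)
	}

	// PackageCount is an assumed accessor on the package collection
	fmt.Println("cataloged packages:", s.Artifacts.Packages.PackageCount())
}
```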
go.mod (40 changes)

@@ -24,7 +24,7 @@ require (
 	github.com/anchore/go-testutils v0.0.0-20200925183923-d5f45b0d3c04
 	github.com/anchore/go-version v1.2.2-0.20200701162849-18adb9c92b9b
 	github.com/anchore/packageurl-go v0.1.1-0.20250220190351-d62adb6e1115
-	github.com/anchore/stereoscope v0.1.11
+	github.com/anchore/stereoscope v0.1.12
 	github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be
 	github.com/aquasecurity/go-pep440-version v0.0.1
 	github.com/bitnami/go-version v0.0.0-20250131085805-b1f57a8634ef
@@ -53,19 +53,19 @@ require (
 	github.com/google/uuid v1.6.0
 	github.com/gookit/color v1.6.0
 	github.com/hashicorp/go-cleanhttp v0.5.2
-	github.com/hashicorp/go-getter v1.8.2
+	github.com/hashicorp/go-getter v1.8.3
 	github.com/hashicorp/go-multierror v1.1.1
 	github.com/hashicorp/hcl/v2 v2.24.0
 	github.com/iancoleman/strcase v0.3.0
 	github.com/invopop/jsonschema v0.7.0
-	github.com/jedib0t/go-pretty/v6 v6.6.8
+	github.com/jedib0t/go-pretty/v6 v6.7.1
 	github.com/jinzhu/copier v0.4.0
 	github.com/kastenhq/goversion v0.0.0-20230811215019-93b2f8823953
 	github.com/magiconair/properties v1.8.10
 	github.com/mholt/archives v0.1.5
 	github.com/moby/sys/mountinfo v0.7.2
 	github.com/nix-community/go-nix v0.0.0-20250101154619-4bdde671e0a1
-	github.com/olekukonko/tablewriter v1.0.9
+	github.com/olekukonko/tablewriter v1.1.1
 	github.com/opencontainers/go-digest v1.0.0
 	github.com/pelletier/go-toml v1.9.5
 	github.com/quasilyte/go-ruleguard/dsl v0.3.23
@@ -90,9 +90,9 @@ require (
 	go.uber.org/goleak v1.3.0
 	go.yaml.in/yaml/v3 v3.0.4
 	golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b
-	golang.org/x/mod v0.29.0
+	golang.org/x/mod v0.30.0
 	golang.org/x/net v0.46.0
-	modernc.org/sqlite v1.39.1
+	modernc.org/sqlite v1.40.0
 )

 require (
@@ -131,7 +131,7 @@ require (
 	github.com/charmbracelet/x/term v0.2.1 // indirect
 	github.com/cloudflare/circl v1.6.1 // indirect
 	github.com/containerd/cgroups v1.1.0 // indirect
-	github.com/containerd/containerd v1.7.28 // indirect
+	github.com/containerd/containerd v1.7.29 // indirect
 	github.com/containerd/containerd/api v1.8.0 // indirect
 	github.com/containerd/continuity v0.4.4 // indirect
 	github.com/containerd/errdefs v1.0.0 // indirect
@@ -142,7 +142,7 @@ require (
 	github.com/containerd/stargz-snapshotter/estargz v0.16.3 // indirect
 	github.com/containerd/ttrpc v1.2.7 // indirect
 	github.com/containerd/typeurl/v2 v2.2.0 // indirect
-	github.com/cyphar/filepath-securejoin v0.4.1 // indirect
+	github.com/cyphar/filepath-securejoin v0.6.0 // indirect
 	github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
 	github.com/docker/cli v28.5.1+incompatible // indirect
 	github.com/docker/distribution v2.8.3+incompatible // indirect
@@ -191,7 +191,7 @@ require (
 	github.com/mattn/go-colorable v0.1.14 // indirect
 	github.com/mattn/go-isatty v0.0.20 // indirect
 	github.com/mattn/go-localereader v0.0.2-0.20220822084749-2491eb6c1c75 // indirect
-	github.com/mattn/go-runewidth v0.0.16 // indirect
+	github.com/mattn/go-runewidth v0.0.19 // indirect
 	github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect
 	github.com/mikelolasagasti/xz v1.0.1 // indirect
 	github.com/minio/minlz v1.0.1 // indirect
@@ -212,10 +212,10 @@ require (
 	github.com/nwaples/rardecode v1.1.3 // indirect
 	github.com/nwaples/rardecode/v2 v2.2.0 // indirect
 	github.com/olekukonko/errors v1.1.0 // indirect
-	github.com/olekukonko/ll v0.0.9 // indirect
+	github.com/olekukonko/ll v0.1.2 // indirect
 	github.com/opencontainers/image-spec v1.1.1 // indirect
 	github.com/opencontainers/runtime-spec v1.1.0 // indirect
-	github.com/opencontainers/selinux v1.11.0 // indirect
+	github.com/opencontainers/selinux v1.13.0 // indirect
 	github.com/pborman/indent v1.2.1 // indirect
 	github.com/pelletier/go-toml/v2 v2.2.3 // indirect
 	github.com/pierrec/lz4/v4 v4.1.22 // indirect
@@ -270,7 +270,7 @@ require (
 	golang.org/x/sys v0.37.0 // indirect
 	golang.org/x/term v0.36.0 // indirect
 	golang.org/x/text v0.30.0 // indirect
-	golang.org/x/time v0.12.0 // indirect
+	golang.org/x/time v0.14.0
 	golang.org/x/tools v0.38.0
 	golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 // indirect
 	google.golang.org/api v0.203.0 // indirect
@@ -287,6 +287,12 @@ require (
 )

+require (
+	github.com/cespare/xxhash/v2 v2.3.0
+	github.com/gpustack/gguf-parser-go v0.22.1
+)
+
 require (
+	cyphar.com/go-pathrs v0.2.1 // indirect
 	github.com/aws/aws-sdk-go-v2 v1.36.5 // indirect
 	github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.10 // indirect
 	github.com/aws/aws-sdk-go-v2/config v1.29.17 // indirect
@@ -305,7 +311,17 @@ require (
 	github.com/aws/aws-sdk-go-v2/service/ssooidc v1.30.3 // indirect
 	github.com/aws/aws-sdk-go-v2/service/sts v1.34.0 // indirect
 	github.com/aws/smithy-go v1.22.4 // indirect
+	github.com/clipperhouse/displaywidth v0.3.1 // indirect
+	github.com/clipperhouse/stringish v0.1.1 // indirect
+	github.com/clipperhouse/uax29/v2 v2.2.0 // indirect
 	github.com/hashicorp/aws-sdk-go-base/v2 v2.0.0-beta.65 // indirect
+	github.com/henvic/httpretty v0.1.4 // indirect
+	github.com/json-iterator/go v1.1.12 // indirect
+	github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
+	github.com/modern-go/reflect2 v1.0.2 // indirect
+	github.com/olekukonko/cat v0.0.0-20250911104152-50322a0618f6 // indirect
+	github.com/smallnest/ringbuffer v0.0.0-20241116012123-461381446e3d // indirect
+	gonum.org/v1/gonum v0.15.1 // indirect
 )

 retract (
go.sum (71 changes)

@@ -59,6 +59,8 @@ cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RX
 cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
 cloud.google.com/go/storage v1.43.0 h1:CcxnSohZwizt4LCzQHWvBf1/kvtHUn7gk9QERXPyXFs=
 cloud.google.com/go/storage v1.43.0/go.mod h1:ajvxEa7WmZS1PxvKRq4bq0tFT3vMd502JwstCcYv0Q0=
+cyphar.com/go-pathrs v0.2.1 h1:9nx1vOgwVvX1mNBWDu93+vaceedpbsDqo+XuBGL40b8=
+cyphar.com/go-pathrs v0.2.1/go.mod h1:y8f1EMG7r+hCuFf/rXsKqMJrJAUoADZGNh5/vZPKcGc=
 dario.cat/mergo v1.0.1 h1:Ra4+bf83h2ztPIQYNP99R6m+Y7KfnARDfID+a+vLl4s=
 dario.cat/mergo v1.0.1/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
 dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
@@ -138,8 +140,8 @@ github.com/anchore/go-version v1.2.2-0.20200701162849-18adb9c92b9b h1:e1bmaoJfZV
 github.com/anchore/go-version v1.2.2-0.20200701162849-18adb9c92b9b/go.mod h1:Bkc+JYWjMCF8OyZ340IMSIi2Ebf3uwByOk6ho4wne1E=
 github.com/anchore/packageurl-go v0.1.1-0.20250220190351-d62adb6e1115 h1:ZyRCmiEjnoGJZ1+Ah0ZZ/mKKqNhGcUZBl0s7PTTDzvY=
 github.com/anchore/packageurl-go v0.1.1-0.20250220190351-d62adb6e1115/go.mod h1:KoYIv7tdP5+CC9VGkeZV4/vGCKsY55VvoG+5dadg4YI=
-github.com/anchore/stereoscope v0.1.11 h1:YP/XUNcJyMbOOPAWPkeZNCVlKKTRO2cnBTEeUW6I40Y=
-github.com/anchore/stereoscope v0.1.11/go.mod h1:G3PZlzPbxFhylj9pQwtqfVPaahuWmy/UCtv5FTIIMvg=
+github.com/anchore/stereoscope v0.1.12 h1:4T/10G7Nb98UoJBKVvAIhsAtrR63lZXxMJb/Qfw5inw=
+github.com/anchore/stereoscope v0.1.12/go.mod h1:G3PZlzPbxFhylj9pQwtqfVPaahuWmy/UCtv5FTIIMvg=
 github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8=
 github.com/andybalholm/brotli v1.2.0 h1:ukwgCxwYrmACq68yiUqwIWnGY0cTPox/M94sVwToPjQ=
 github.com/andybalholm/brotli v1.2.0/go.mod h1:rzTDkvFWvIrjDXZHkuS16NPggd91W3kUSvPlQ1pLaKY=
@@ -227,7 +229,6 @@ github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqy
 github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
 github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
 github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
-github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko=
 github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
 github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
 github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
@@ -263,6 +264,12 @@ github.com/chzyer/test v1.0.0/go.mod h1:2JlltgoNkt4TW/z9V/IzDdFaMTM2JPIi26O1pF38
 github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag=
 github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I=
 github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
+github.com/clipperhouse/displaywidth v0.3.1 h1:k07iN9gD32177o1y4O1jQMzbLdCrsGJh+blirVYybsk=
+github.com/clipperhouse/displaywidth v0.3.1/go.mod h1:tgLJKKyaDOCadywag3agw4snxS5kYEuYR6Y9+qWDDYM=
+github.com/clipperhouse/stringish v0.1.1 h1:+NSqMOr3GR6k1FdRhhnXrLfztGzuG+VuFDfatpWHKCs=
+github.com/clipperhouse/stringish v0.1.1/go.mod h1:v/WhFtE1q0ovMta2+m+UbpZ+2/HEXNWYXQgCt4hdOzA=
+github.com/clipperhouse/uax29/v2 v2.2.0 h1:ChwIKnQN3kcZteTXMgb1wztSgaU+ZemkgWdohwgs8tY=
+github.com/clipperhouse/uax29/v2 v2.2.0/go.mod h1:EFJ2TJMRUaplDxHKj1qAEhCtQPW2tJSwu5BF98AuoVM=
 github.com/cloudflare/circl v1.6.1 h1:zqIqSPIndyBh1bjLVVDHMPpVKqp8Su/V+6MeDzzQBQ0=
 github.com/cloudflare/circl v1.6.1/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs=
 github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
@@ -277,8 +284,8 @@ github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWH
 github.com/cncf/xds/go v0.0.0-20211130200136-a8f946100490/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
 github.com/containerd/cgroups v1.1.0 h1:v8rEWFl6EoqHB+swVNjVoCJE8o3jX7e8nqBGPLaDFBM=
 github.com/containerd/cgroups v1.1.0/go.mod h1:6ppBcbh/NOOUU+dMKrykgaBnK9lCIBxHqJDGwsa1mIw=
-github.com/containerd/containerd v1.7.28 h1:Nsgm1AtcmEh4AHAJ4gGlNSaKgXiNccU270Dnf81FQ3c=
-github.com/containerd/containerd v1.7.28/go.mod h1:azUkWcOvHrWvaiUjSQH0fjzuHIwSPg1WL5PshGP4Szs=
+github.com/containerd/containerd v1.7.29 h1:90fWABQsaN9mJhGkoVnuzEY+o1XDPbg9BTC9QTAHnuE=
+github.com/containerd/containerd v1.7.29/go.mod h1:azUkWcOvHrWvaiUjSQH0fjzuHIwSPg1WL5PshGP4Szs=
 github.com/containerd/containerd/api v1.8.0 h1:hVTNJKR8fMc/2Tiw60ZRijntNMd1U+JVMyTRdsD2bS0=
 github.com/containerd/containerd/api v1.8.0/go.mod h1:dFv4lt6S20wTu/hMcP4350RL87qPWLVa/OHOwmmdnYc=
 github.com/containerd/continuity v0.4.4 h1:/fNVfTJ7wIl/YPMHjf+5H32uFhl63JucB34PlCpMKII=
@@ -304,8 +311,8 @@ github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSV
 github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
 github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
 github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
-github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s=
-github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI=
+github.com/cyphar/filepath-securejoin v0.6.0 h1:BtGB77njd6SVO6VztOHfPxKitJvd/VPT+OFBFMOi1Is=
+github.com/cyphar/filepath-securejoin v0.6.0/go.mod h1:A8hd4EnAeyujCJRrICiOWqjS1AX0a9kM5XL+NwKoYSc=
 github.com/dave/jennifer v1.7.1 h1:B4jJJDHelWcDhlRQxWeo0Npa/pYKBLrirAQoTN45txo=
 github.com/dave/jennifer v1.7.1/go.mod h1:nXbxhEmQfOZhWml3D1cDK5M1FLnMSozpbFN/m3RmGZc=
 github.com/davecgh/go-spew v0.0.0-20161028175848-04cdfd42973b/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@@ -541,6 +548,8 @@ github.com/gookit/assert v0.1.1/go.mod h1:jS5bmIVQZTIwk42uXl4lyj4iaaxx32tqH16CFj
 github.com/gookit/color v1.2.5/go.mod h1:AhIE+pS6D4Ql0SQWbBeXPHw7gY0/sjHoA4s/n1KB7xg=
 github.com/gookit/color v1.6.0 h1:JjJXBTk1ETNyqyilJhkTXJYYigHG24TM9Xa2M1xAhRA=
 github.com/gookit/color v1.6.0/go.mod h1:9ACFc7/1IpHGBW8RwuDm/0YEnhg3dwwXpoMsmtyHfjs=
+github.com/gpustack/gguf-parser-go v0.22.1 h1:FRnEDWqT0Rcplr/R9ctCRSN2+3DhVsf6dnR5/i9JA4E=
+github.com/gpustack/gguf-parser-go v0.22.1/go.mod h1:y4TwTtDqFWTK+xvprOjRUh+dowgU2TKCX37vRKvGiZ0=
 github.com/grpc-ecosystem/grpc-gateway v1.16.0 h1:gmcG1KaJ57LophUzW0Hy8NmPhnMZb4M0+kPpLofRdBo=
 github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
 github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0 h1:YBftPWNWd4WwGqtY2yeZL2ef8rHAxPBD8KFhJpmcqms=
@@ -556,8 +565,8 @@ github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtng
 github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
 github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ=
 github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48=
-github.com/hashicorp/go-getter v1.8.2 h1:CGCK+bZQLl44PYiwJweVzfpjg7bBwtuXu3AGcLiod2o=
-github.com/hashicorp/go-getter v1.8.2/go.mod h1:CUTt9x2bCtJ/sV8ihgrITL3IUE+0BE1j/e4n5P/GIM4=
+github.com/hashicorp/go-getter v1.8.3 h1:gIS+oTNv3kyYAvlUVgMR46MiG0bM0KuSON/KZEvRoRg=
+github.com/hashicorp/go-getter v1.8.3/go.mod h1:CUTt9x2bCtJ/sV8ihgrITL3IUE+0BE1j/e4n5P/GIM4=
 github.com/hashicorp/go-hclog v0.12.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ=
 github.com/hashicorp/go-hclog v1.0.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ=
 github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60=
@@ -590,6 +599,8 @@ github.com/hashicorp/memberlist v0.2.2/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOn
 github.com/hashicorp/memberlist v0.3.0/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE=
 github.com/hashicorp/serf v0.9.5/go.mod h1:UWDWwZeL5cuWDJdl0C6wrvrUwEqtQ4ZKBKKENpqIUyk=
 github.com/hashicorp/serf v0.9.6/go.mod h1:TXZNMjZQijwlDvp+r0b63xZ45H7JmCmgg4gpTwn9UV4=
+github.com/henvic/httpretty v0.1.4 h1:Jo7uwIRWVFxkqOnErcoYfH90o3ddQyVrSANeS4cxYmU=
+github.com/henvic/httpretty v0.1.4/go.mod h1:Dn60sQTZfbt2dYsdUSNsCljyF4AfdqnuJFDLJA1I4AM=
 github.com/huandu/xstrings v1.5.0 h1:2ag3IFq9ZDANvthTwTiqSSZLjDc+BedvHPAp5tJy2TI=
 github.com/huandu/xstrings v1.5.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
 github.com/iancoleman/orderedmap v0.0.0-20190318233801-ac98e3ecb4b0/go.mod h1:N0Wam8K1arqPXNWjMo21EXnBPOPp36vB07FNRdD2geA=
@@ -609,14 +620,15 @@ github.com/invopop/jsonschema v0.7.0 h1:2vgQcBz1n256N+FpX3Jq7Y17AjYt46Ig3zIWyy77
 github.com/invopop/jsonschema v0.7.0/go.mod h1:O9uiLokuu0+MGFlyiaqtWxwqJm41/+8Nj0lD7A36YH0=
 github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A=
 github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo=
-github.com/jedib0t/go-pretty/v6 v6.6.8 h1:JnnzQeRz2bACBobIaa/r+nqjvws4yEhcmaZ4n1QzsEc=
-github.com/jedib0t/go-pretty/v6 v6.6.8/go.mod h1:YwC5CE4fJ1HFUDeivSV1r//AmANFHyqczZk+U6BDALU=
+github.com/jedib0t/go-pretty/v6 v6.7.1 h1:bHDSsj93NuJ563hHuM7ohk/wpX7BmRFNIsVv1ssI2/M=
+github.com/jedib0t/go-pretty/v6 v6.7.1/go.mod h1:YwC5CE4fJ1HFUDeivSV1r//AmANFHyqczZk+U6BDALU=
 github.com/jinzhu/copier v0.4.0 h1:w3ciUoD19shMCRargcpm0cm91ytaBhDvuRpz1ODO/U8=
 github.com/jinzhu/copier v0.4.0/go.mod h1:DfbEm0FYsaqBcKcFuvmOZb218JkPGtvSHsKg8S8hyyg=
 github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
 github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
 github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
|
github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
|
||||||
github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
|
github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
|
||||||
|
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
|
||||||
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
|
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
|
||||||
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
|
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
|
||||||
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
|
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
|
||||||
@ -676,8 +688,8 @@ github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWE
|
|||||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||||
github.com/mattn/go-localereader v0.0.2-0.20220822084749-2491eb6c1c75 h1:P8UmIzZMYDR+NGImiFvErt6VWfIRPuGM+vyjiEdkmIw=
|
github.com/mattn/go-localereader v0.0.2-0.20220822084749-2491eb6c1c75 h1:P8UmIzZMYDR+NGImiFvErt6VWfIRPuGM+vyjiEdkmIw=
|
||||||
github.com/mattn/go-localereader v0.0.2-0.20220822084749-2491eb6c1c75/go.mod h1:8fBrzywKY7BI3czFoHkuzRoWE9C+EiG4R1k4Cjx5p88=
|
github.com/mattn/go-localereader v0.0.2-0.20220822084749-2491eb6c1c75/go.mod h1:8fBrzywKY7BI3czFoHkuzRoWE9C+EiG4R1k4Cjx5p88=
|
||||||
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
|
github.com/mattn/go-runewidth v0.0.19 h1:v++JhqYnZuu5jSKrk9RbgF5v4CGUjqRfBm05byFGLdw=
|
||||||
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
|
github.com/mattn/go-runewidth v0.0.19/go.mod h1:XBkDxAl56ILZc9knddidhrOlY5R/pDhgLpndooCuJAs=
|
||||||
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
|
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
|
||||||
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d h1:5PJl274Y63IEHC+7izoQE9x6ikvDFZS2mDVS3drnohI=
|
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d h1:5PJl274Y63IEHC+7izoQE9x6ikvDFZS2mDVS3drnohI=
|
||||||
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
|
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
|
||||||
@ -722,9 +734,11 @@ github.com/moby/sys/userns v0.1.0/go.mod h1:IHUYgu/kao6N8YZlp9Cf444ySSvCmDlmzUcY
|
|||||||
github.com/moby/term v0.0.0-20221205130635-1aeaba878587 h1:HfkjXDfhgVaN5rmueG8cL8KKeFNecRCXFhaJ2qZ5SKA=
|
github.com/moby/term v0.0.0-20221205130635-1aeaba878587 h1:HfkjXDfhgVaN5rmueG8cL8KKeFNecRCXFhaJ2qZ5SKA=
|
||||||
github.com/moby/term v0.0.0-20221205130635-1aeaba878587/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y=
|
github.com/moby/term v0.0.0-20221205130635-1aeaba878587/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y=
|
||||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||||
|
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
|
||||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||||
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
|
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
|
||||||
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
|
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
|
||||||
|
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
|
||||||
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
|
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
|
||||||
github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A=
|
github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A=
|
||||||
github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc=
|
github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc=
|
||||||
@ -745,12 +759,14 @@ github.com/nwaples/rardecode v1.1.3 h1:cWCaZwfM5H7nAD6PyEdcVnczzV8i/JtotnyW/dD9l
|
|||||||
github.com/nwaples/rardecode v1.1.3/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0=
|
github.com/nwaples/rardecode v1.1.3/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0=
|
||||||
github.com/nwaples/rardecode/v2 v2.2.0 h1:4ufPGHiNe1rYJxYfehALLjup4Ls3ck42CWwjKiOqu0A=
|
github.com/nwaples/rardecode/v2 v2.2.0 h1:4ufPGHiNe1rYJxYfehALLjup4Ls3ck42CWwjKiOqu0A=
|
||||||
github.com/nwaples/rardecode/v2 v2.2.0/go.mod h1:7uz379lSxPe6j9nvzxUZ+n7mnJNgjsRNb6IbvGVHRmw=
|
github.com/nwaples/rardecode/v2 v2.2.0/go.mod h1:7uz379lSxPe6j9nvzxUZ+n7mnJNgjsRNb6IbvGVHRmw=
|
||||||
|
github.com/olekukonko/cat v0.0.0-20250911104152-50322a0618f6 h1:zrbMGy9YXpIeTnGj4EljqMiZsIcE09mmF8XsD5AYOJc=
|
||||||
|
github.com/olekukonko/cat v0.0.0-20250911104152-50322a0618f6/go.mod h1:rEKTHC9roVVicUIfZK7DYrdIoM0EOr8mK1Hj5s3JjH0=
|
||||||
github.com/olekukonko/errors v1.1.0 h1:RNuGIh15QdDenh+hNvKrJkmxxjV4hcS50Db478Ou5sM=
|
github.com/olekukonko/errors v1.1.0 h1:RNuGIh15QdDenh+hNvKrJkmxxjV4hcS50Db478Ou5sM=
|
||||||
github.com/olekukonko/errors v1.1.0/go.mod h1:ppzxA5jBKcO1vIpCXQ9ZqgDh8iwODz6OXIGKU8r5m4Y=
|
github.com/olekukonko/errors v1.1.0/go.mod h1:ppzxA5jBKcO1vIpCXQ9ZqgDh8iwODz6OXIGKU8r5m4Y=
|
||||||
github.com/olekukonko/ll v0.0.9 h1:Y+1YqDfVkqMWuEQMclsF9HUR5+a82+dxJuL1HHSRpxI=
|
github.com/olekukonko/ll v0.1.2 h1:lkg/k/9mlsy0SxO5aC+WEpbdT5K83ddnNhAepz7TQc0=
|
||||||
github.com/olekukonko/ll v0.0.9/go.mod h1:En+sEW0JNETl26+K8eZ6/W4UQ7CYSrrgg/EdIYT2H8g=
|
github.com/olekukonko/ll v0.1.2/go.mod h1:b52bVQRRPObe+yyBl0TxNfhesL0nedD4Cht0/zx55Ew=
|
||||||
github.com/olekukonko/tablewriter v1.0.9 h1:XGwRsYLC2bY7bNd93Dk51bcPZksWZmLYuaTHR0FqfL8=
|
github.com/olekukonko/tablewriter v1.1.1 h1:b3reP6GCfrHwmKkYwNRFh2rxidGHcT6cgxj/sHiDDx0=
|
||||||
github.com/olekukonko/tablewriter v1.0.9/go.mod h1:5c+EBPeSqvXnLLgkm9isDdzR3wjfBkHR9Nhfp3NWrzo=
|
github.com/olekukonko/tablewriter v1.1.1/go.mod h1:De/bIcTF+gpBDB3Alv3fEsZA+9unTsSzAg/ZGADCtn4=
|
||||||
github.com/onsi/gomega v1.34.1 h1:EUMJIKUjM8sKjYbtxQI9A4z2o+rruxnzNvpknOXie6k=
|
github.com/onsi/gomega v1.34.1 h1:EUMJIKUjM8sKjYbtxQI9A4z2o+rruxnzNvpknOXie6k=
|
||||||
github.com/onsi/gomega v1.34.1/go.mod h1:kU1QgUvBDLXBJq618Xvm2LUX6rSAfRaFRTcdOeDLwwY=
|
github.com/onsi/gomega v1.34.1/go.mod h1:kU1QgUvBDLXBJq618Xvm2LUX6rSAfRaFRTcdOeDLwwY=
|
||||||
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
|
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
|
||||||
@ -759,8 +775,8 @@ github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJw
|
|||||||
github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M=
|
github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M=
|
||||||
github.com/opencontainers/runtime-spec v1.1.0 h1:HHUyrt9mwHUjtasSbXSMvs4cyFxh+Bll4AjJ9odEGpg=
|
github.com/opencontainers/runtime-spec v1.1.0 h1:HHUyrt9mwHUjtasSbXSMvs4cyFxh+Bll4AjJ9odEGpg=
|
||||||
github.com/opencontainers/runtime-spec v1.1.0/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0=
|
github.com/opencontainers/runtime-spec v1.1.0/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0=
|
||||||
github.com/opencontainers/selinux v1.11.0 h1:+5Zbo97w3Lbmb3PeqQtpmTkMwsW5nRI3YaLpt7tQ7oU=
|
github.com/opencontainers/selinux v1.13.0 h1:Zza88GWezyT7RLql12URvoxsbLfjFx988+LGaWfbL84=
|
||||||
github.com/opencontainers/selinux v1.11.0/go.mod h1:E5dMC3VPuVvVHDYmi78qvhJp8+M586T4DlDRYpFkyec=
|
github.com/opencontainers/selinux v1.13.0/go.mod h1:XxWTed+A/s5NNq4GmYScVy+9jzXhGBVEOAyucdRUY8s=
|
||||||
github.com/orisano/pixelmatch v0.0.0-20220722002657-fb0b55479cde/go.mod h1:nZgzbfBr3hhjoZnS66nKrHmduYNpc34ny7RK4z5/HM0=
|
github.com/orisano/pixelmatch v0.0.0-20220722002657-fb0b55479cde/go.mod h1:nZgzbfBr3hhjoZnS66nKrHmduYNpc34ny7RK4z5/HM0=
|
||||||
github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
|
github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
|
||||||
github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
|
github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
|
||||||
@ -809,7 +825,6 @@ github.com/quasilyte/go-ruleguard/dsl v0.3.23 h1:lxjt5B6ZCiBeeNO8/oQsegE6fLeCzuM
|
|||||||
github.com/quasilyte/go-ruleguard/dsl v0.3.23/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU=
|
github.com/quasilyte/go-ruleguard/dsl v0.3.23/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU=
|
||||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
|
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
|
||||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
|
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
|
||||||
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
|
||||||
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
|
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
|
||||||
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
||||||
github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
|
github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
|
||||||
@ -851,6 +866,8 @@ github.com/sirupsen/logrus v1.9.4-0.20230606125235-dd1b4c2e81af h1:Sp5TG9f7K39yf
|
|||||||
github.com/sirupsen/logrus v1.9.4-0.20230606125235-dd1b4c2e81af/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
|
github.com/sirupsen/logrus v1.9.4-0.20230606125235-dd1b4c2e81af/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
|
||||||
github.com/skeema/knownhosts v1.3.1 h1:X2osQ+RAjK76shCbvhHHHVl3ZlgDm8apHEHFqRjnBY8=
|
github.com/skeema/knownhosts v1.3.1 h1:X2osQ+RAjK76shCbvhHHHVl3ZlgDm8apHEHFqRjnBY8=
|
||||||
github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQNxPwTcfiY=
|
github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQNxPwTcfiY=
|
||||||
|
github.com/smallnest/ringbuffer v0.0.0-20241116012123-461381446e3d h1:3VwvTjiRPA7cqtgOWddEL+JrcijMlXUmj99c/6YyZoY=
|
||||||
|
github.com/smallnest/ringbuffer v0.0.0-20241116012123-461381446e3d/go.mod h1:tAG61zBM1DYRaGIPloumExGvScf08oHuo0kFoOqdbT0=
|
||||||
github.com/sorairolake/lzip-go v0.3.8 h1:j5Q2313INdTA80ureWYRhX+1K78mUXfMoPZCw/ivWik=
|
github.com/sorairolake/lzip-go v0.3.8 h1:j5Q2313INdTA80ureWYRhX+1K78mUXfMoPZCw/ivWik=
|
||||||
github.com/sorairolake/lzip-go v0.3.8/go.mod h1:JcBqGMV0frlxwrsE9sMWXDjqn3EeVf0/54YPsw66qkU=
|
github.com/sorairolake/lzip-go v0.3.8/go.mod h1:JcBqGMV0frlxwrsE9sMWXDjqn3EeVf0/54YPsw66qkU=
|
||||||
github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo=
|
github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo=
|
||||||
@ -1061,8 +1078,8 @@ golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
|||||||
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||||
golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
|
golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
|
||||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||||
golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA=
|
golang.org/x/mod v0.30.0 h1:fDEXFVZ/fmCKProc/yAXXUijritrDzahmwwefnjoPFk=
|
||||||
golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w=
|
golang.org/x/mod v0.30.0/go.mod h1:lAsf5O2EvJeSFMiBxXDki7sCgAxEUcZHXoXMKT4GJKc=
|
||||||
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||||
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||||
golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||||
@ -1239,8 +1256,8 @@ golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM=
|
|||||||
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||||
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||||
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||||
golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE=
|
golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
|
||||||
golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg=
|
golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
|
||||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||||
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||||
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
|
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
|
||||||
@ -1304,6 +1321,8 @@ golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8T
|
|||||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 h1:+cNy6SZtPcJQH3LJVLOSmiC7MMxXNOb3PU/VUEz+EhU=
|
golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 h1:+cNy6SZtPcJQH3LJVLOSmiC7MMxXNOb3PU/VUEz+EhU=
|
||||||
golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90=
|
golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90=
|
||||||
|
gonum.org/v1/gonum v0.15.1 h1:FNy7N6OUZVUaWG9pTiD+jlhdQ3lMP+/LcTpJ6+a8sQ0=
|
||||||
|
gonum.org/v1/gonum v0.15.1/go.mod h1:eZTZuRFrzu5pcyjN5wJhcIhnUdNijYxX1T2IcrOGY0o=
|
||||||
google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
|
google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
|
||||||
google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
|
google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
|
||||||
google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
|
google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
|
||||||
@ -1509,8 +1528,8 @@ modernc.org/opt v0.1.4 h1:2kNGMRiUjrp4LcaPuLY2PzUfqM/w9N23quVwhKt5Qm8=
|
|||||||
modernc.org/opt v0.1.4/go.mod h1:03fq9lsNfvkYSfxrfUhZCWPk1lm4cq4N+Bh//bEtgns=
|
modernc.org/opt v0.1.4/go.mod h1:03fq9lsNfvkYSfxrfUhZCWPk1lm4cq4N+Bh//bEtgns=
|
||||||
modernc.org/sortutil v1.2.1 h1:+xyoGf15mM3NMlPDnFqrteY07klSFxLElE2PVuWIJ7w=
|
modernc.org/sortutil v1.2.1 h1:+xyoGf15mM3NMlPDnFqrteY07klSFxLElE2PVuWIJ7w=
|
||||||
modernc.org/sortutil v1.2.1/go.mod h1:7ZI3a3REbai7gzCLcotuw9AC4VZVpYMjDzETGsSMqJE=
|
modernc.org/sortutil v1.2.1/go.mod h1:7ZI3a3REbai7gzCLcotuw9AC4VZVpYMjDzETGsSMqJE=
|
||||||
modernc.org/sqlite v1.39.1 h1:H+/wGFzuSCIEVCvXYVHX5RQglwhMOvtHSv+VtidL2r4=
|
modernc.org/sqlite v1.40.0 h1:bNWEDlYhNPAUdUdBzjAvn8icAs/2gaKlj4vM+tQ6KdQ=
|
||||||
modernc.org/sqlite v1.39.1/go.mod h1:9fjQZ0mB1LLP0GYrp39oOJXx/I2sxEnZtzCmEQIKvGE=
|
modernc.org/sqlite v1.40.0/go.mod h1:9fjQZ0mB1LLP0GYrp39oOJXx/I2sxEnZtzCmEQIKvGE=
|
||||||
modernc.org/strutil v1.2.1 h1:UneZBkQA+DX2Rp35KcM69cSsNES9ly8mQWD71HKlOA0=
|
modernc.org/strutil v1.2.1 h1:UneZBkQA+DX2Rp35KcM69cSsNES9ly8mQWD71HKlOA0=
|
||||||
modernc.org/strutil v1.2.1/go.mod h1:EHkiggD70koQxjVdSBM3JKM7k6L0FbGE5eymy9i3B9A=
|
modernc.org/strutil v1.2.1/go.mod h1:EHkiggD70koQxjVdSBM3JKM7k6L0FbGE5eymy9i3B9A=
|
||||||
modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y=
|
modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y=
|
||||||
|
|||||||
46
internal/capabilities/pkgtestobservation/model.go
Normal file
@ -0,0 +1,46 @@
package pkgtestobservation

import "time"

// Observations represents capability observations during testing
type Observations struct {
	License       bool         `json:"license"`
	Relationships Relationship `json:"relationships"`
	FileListing   Count        `json:"file_listing"`
	FileDigests   Count        `json:"file_digests"`
	IntegrityHash Count        `json:"integrity_hash"`
}

// Relationship tracks dependency relationship observations
type Relationship struct {
	Found bool `json:"found"`
	Count int  `json:"count"`
}

// Count tracks whether a capability was found and how many times
type Count struct {
	Found bool `json:"found"`
	Count int  `json:"count"`
}

// Test is the root structure for test-observations.json
type Test struct {
	Package    string                `json:"package"`
	UpdatedAt  time.Time             `json:"updated_at"`
	Catalogers map[string]*Cataloger `json:"catalogers"`
	Parsers    map[string]*Parser    `json:"parsers"`
}

// Parser captures all observations for a parser
type Parser struct {
	MetadataTypes []string     `json:"metadata_types"`
	PackageTypes  []string     `json:"package_types"`
	Observations  Observations `json:"observations"`
}

// Cataloger captures all observations for a cataloger
type Cataloger struct {
	MetadataTypes []string     `json:"metadata_types"`
	PackageTypes  []string     `json:"package_types"`
	Observations  Observations `json:"observations"`
}
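As context for the new model above: a minimal sketch of how a test harness inside the syft module might populate and persist these observations (the package is internal, so this only compiles in-tree; the cataloger name and observation values here are hypothetical):

package main

import (
	"encoding/json"
	"os"
	"time"

	"github.com/anchore/syft/internal/capabilities/pkgtestobservation"
)

func main() {
	// record what a hypothetical cataloger test observed
	obs := pkgtestobservation.Test{
		Package:   "syft/pkg/cataloger/ai",
		UpdatedAt: time.Now(),
		Catalogers: map[string]*pkgtestobservation.Cataloger{
			"gguf-cataloger": {
				MetadataTypes: []string{"GGUFFileHeader"},
				PackageTypes:  []string{"model"},
				Observations: pkgtestobservation.Observations{
					License:       true,
					Relationships: pkgtestobservation.Relationship{Found: false},
					FileListing:   pkgtestobservation.Count{Found: false},
				},
			},
		},
		Parsers: map[string]*pkgtestobservation.Parser{},
	}

	// marshal using the json tags declared on the model and write the
	// root structure to its expected file name, test-observations.json
	data, err := json.MarshalIndent(obs, "", "  ")
	if err != nil {
		panic(err)
	}
	if err := os.WriteFile("test-observations.json", data, 0o644); err != nil {
		panic(err)
	}
}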
@ -3,5 +3,5 @@ package internal
const (
	// JSONSchemaVersion is the current schema version output by the JSON encoder
	// This is roughly following the "SchemaVer" guidelines for versioning the JSON schema. Please see schema/json/README.md for details on how to increment.
	JSONSchemaVersion = "16.0.43"
)
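This version bump is what downstream consumers see in the top-level schema block of a syft JSON document. A rough sketch of reading it; the exact field names of that block are an assumption inferred from the emitted output, and the input file name is hypothetical:

package main

import (
	"encoding/json"
	"fmt"
	"os"
)

// schemaRef mirrors the top-level "schema" block of a syft JSON document
// (field names assumed from the emitted output).
type schemaRef struct {
	Version string `json:"version"`
	URL     string `json:"url"`
}

func main() {
	raw, err := os.ReadFile("sbom.syft.json")
	if err != nil {
		panic(err)
	}
	var doc struct {
		Schema schemaRef `json:"schema"`
	}
	if err := json.Unmarshal(raw, &doc); err != nil {
		panic(err)
	}
	fmt.Println("schema version:", doc.Schema.Version) // e.g. "16.0.43"
}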
@ -81,6 +81,10 @@ func Test_EnvironmentTask(t *testing.T) {
	// get the source
	theSource, err := syft.GetSource(context.Background(), tarPath, syft.DefaultGetSourceConfig().WithSources("docker-archive"))
	require.NoError(t, err)
	t.Cleanup(func() {
		require.NoError(t, theSource.Close())
	})

	resolver, err := theSource.FileResolver(source.SquashedScope)
	require.NoError(t, err)

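The added t.Cleanup block guarantees the docker-archive source is closed at test teardown, even when a later assertion fails. The same idiom restated as a small reusable helper (the helper itself is hypothetical, not part of this change):

package example

import (
	"io"
	"testing"

	"github.com/stretchr/testify/require"
)

// requireClose registers any io.Closer acquired in a test to be closed at
// teardown, failing the test if Close returns an error.
func requireClose(t *testing.T, c io.Closer) {
	t.Helper()
	t.Cleanup(func() {
		require.NoError(t, c.Close())
	})
}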
@ -27,6 +27,7 @@ func AllTypes() []any {
		pkg.ELFBinaryPackageNoteJSONPayload{},
		pkg.ElixirMixLockEntry{},
		pkg.ErlangRebarLockEntry{},
		pkg.GGUFFileHeader{},
		pkg.GitHubActionsUseStatement{},
		pkg.GolangBinaryBuildinfoEntry{},
		pkg.GolangModuleEntry{},
@ -49,6 +50,7 @@ func AllTypes() []any {
		pkg.PhpComposerLockEntry{},
		pkg.PhpPearEntry{},
		pkg.PhpPeclEntry{},
		pkg.PnpmLockEntry{},
		pkg.PortageEntry{},
		pkg.PythonPackage{},
		pkg.PythonPdmLockEntry{},
@ -95,10 +95,11 @@ var jsonTypes = makeJSONTypes(
	jsonNames(pkg.NpmPackage{}, "javascript-npm-package", "NpmPackageJsonMetadata"),
	jsonNames(pkg.NpmPackageLockEntry{}, "javascript-npm-package-lock-entry", "NpmPackageLockJsonMetadata"),
	jsonNames(pkg.YarnLockEntry{}, "javascript-yarn-lock-entry", "YarnLockJsonMetadata"),
	jsonNames(pkg.PnpmLockEntry{}, "javascript-pnpm-lock-entry"),
	jsonNames(pkg.PEBinary{}, "pe-binary"),
	jsonNames(pkg.PhpComposerLockEntry{}, "php-composer-lock-entry", "PhpComposerJsonMetadata"),
	jsonNamesWithoutLookup(pkg.PhpComposerInstalledEntry{}, "php-composer-installed-entry", "PhpComposerJsonMetadata"), // the legacy value is split into two types, where the other is preferred
	jsonNames(pkg.PhpPeclEntry{}, "php-pecl-entry", "PhpPeclMetadata"), //nolint:staticcheck
	jsonNames(pkg.PhpPearEntry{}, "php-pear-entry"),
	jsonNames(pkg.PortageEntry{}, "portage-db-entry", "PortageMetadata"),
	jsonNames(pkg.PythonPackage{}, "python-package", "PythonPackageMetadata"),
@ -123,6 +124,7 @@ var jsonTypes = makeJSONTypes(
	jsonNames(pkg.TerraformLockProviderEntry{}, "terraform-lock-provider-entry"),
	jsonNames(pkg.DotnetPackagesLockEntry{}, "dotnet-packages-lock-entry"),
	jsonNames(pkg.CondaMetaPackage{}, "conda-metadata-entry", "CondaPackageMetadata"),
	jsonNames(pkg.GGUFFileHeader{}, "gguf-file-metadata"),
)

func expandLegacyNameVariants(names ...string) []string {
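A sketch of how a consumer might handle the two metadata types registered above once an SBOM is decoded; the switch shape is illustrative, and the field names follow the schema descriptions in this change (Dependencies on PnpmLockEntry; GGUFVersion, ModelName, Architecture, and Quantization on GGUFFileHeader):

package example

import (
	"fmt"

	"github.com/anchore/syft/syft/pkg"
)

// describeMetadata summarizes the new metadata types by type-switching on
// the package's Metadata field.
func describeMetadata(p pkg.Package) string {
	switch m := p.Metadata.(type) {
	case pkg.PnpmLockEntry:
		return fmt.Sprintf("pnpm lock entry with %d dependencies", len(m.Dependencies))
	case pkg.GGUFFileHeader:
		return fmt.Sprintf("GGUF v%d model %q (%s, %s)", m.GGUFVersion, m.ModelName, m.Architecture, m.Quantization)
	default:
		return fmt.Sprintf("%T", m)
	}
}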
@ -10,7 +10,6 @@ import (
	"sort"
	"strings"
	"text/template"
)

// This program generates license_list.go.
@ -20,8 +19,7 @@ const (
)

var tmp = template.Must(template.New("").Parse(`// Code generated by go generate; DO NOT EDIT.
// This file was generated using data from {{ .URL }}
package spdxlicense

const Version = {{ printf "%q" .Version }}
@ -78,13 +76,11 @@ func run() error {
	urlToLicense := buildURLToLicenseMap(result)

	err = tmp.Execute(f, struct {
		URL          string
		Version      string
		LicenseIDs   map[string]string
		URLToLicense map[string]string
	}{
		URL:        url,
		Version:    result.Version,
		LicenseIDs: licenseIDs,
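Dropping {{ .Timestamp }} makes the generated header a pure function of its inputs, so regenerating license_list.go from the same data is byte-identical and diff-stable. A minimal illustration of that property (the template text mirrors this hunk; the URL value is hypothetical):

package main

import (
	"bytes"
	"fmt"
	"text/template"
)

// hdr reproduces the timestamp-free header template from this change.
var hdr = template.Must(template.New("").Parse(
	"// Code generated by go generate; DO NOT EDIT.\n// This file was generated using data from {{ .URL }}\n"))

func render(url string) string {
	var buf bytes.Buffer
	if err := hdr.Execute(&buf, struct{ URL string }{url}); err != nil {
		panic(err)
	}
	return buf.String()
}

func main() {
	a := render("https://example.org/licenses.json")
	b := render("https://example.org/licenses.json")
	fmt.Println(a == b) // true: two runs over the same input are identical
}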
@ -3,6 +3,7 @@ package task
import (
	"github.com/anchore/syft/syft/cataloging/pkgcataloging"
	"github.com/anchore/syft/syft/pkg"
	"github.com/anchore/syft/syft/pkg/cataloger/ai"
	"github.com/anchore/syft/syft/pkg/cataloger/alpine"
	"github.com/anchore/syft/syft/pkg/cataloger/arch"
	"github.com/anchore/syft/syft/pkg/cataloger/binary"
@ -52,6 +53,9 @@ const (
	JavaScript = "javascript"
	Node       = "node"
	NPM        = "npm"

	// Python ecosystem labels
	Python = "python"
)

//nolint:funlen
@ -109,7 +113,7 @@ func DefaultPackageTaskFactories() Factories {
		func(cfg CatalogingFactoryConfig) pkg.Cataloger {
			return python.NewPackageCataloger(cfg.PackagesConfig.Python)
		},
		pkgcataloging.DeclaredTag, pkgcataloging.DirectoryTag, pkgcataloging.LanguageTag, Python,
	),
	newSimplePackageTaskFactory(ruby.NewGemFileLockCataloger, pkgcataloging.DeclaredTag, pkgcataloging.DirectoryTag, pkgcataloging.LanguageTag, "ruby", "gem"),
	newSimplePackageTaskFactory(ruby.NewGemSpecCataloger, pkgcataloging.DeclaredTag, pkgcataloging.DirectoryTag, pkgcataloging.LanguageTag, "ruby", "gem", "gemspec"),
@ -127,7 +131,7 @@ func DefaultPackageTaskFactories() Factories {
		pkgcataloging.InstalledTag, pkgcataloging.ImageTag, pkgcataloging.DirectoryTag, pkgcataloging.LanguageTag, "dotnet", "c#",
	),
	newSimplePackageTaskFactory(dotnet.NewDotnetPackagesLockCataloger, pkgcataloging.DeclaredTag, pkgcataloging.ImageTag, pkgcataloging.DirectoryTag, pkgcataloging.LanguageTag, "dotnet", "c#"),
	newSimplePackageTaskFactory(python.NewInstalledPackageCataloger, pkgcataloging.DirectoryTag, pkgcataloging.InstalledTag, pkgcataloging.ImageTag, pkgcataloging.LanguageTag, Python),
	newPackageTaskFactory(
		func(cfg CatalogingFactoryConfig) pkg.Cataloger {
			return golang.NewGoModuleBinaryCataloger(cfg.PackagesConfig.Golang)
@ -175,12 +179,13 @@ func DefaultPackageTaskFactories() Factories {
	newSimplePackageTaskFactory(homebrew.NewCataloger, pkgcataloging.DirectoryTag, pkgcataloging.InstalledTag, pkgcataloging.ImageTag, "homebrew"),
	newSimplePackageTaskFactory(conda.NewCondaMetaCataloger, pkgcataloging.DirectoryTag, pkgcataloging.InstalledTag, pkgcataloging.PackageTag, "conda"),
	newSimplePackageTaskFactory(snap.NewCataloger, pkgcataloging.DirectoryTag, pkgcataloging.InstalledTag, pkgcataloging.ImageTag, "snap"),
	newSimplePackageTaskFactory(ai.NewGGUFCataloger, pkgcataloging.DirectoryTag, pkgcataloging.ImageTag, "ai", "model", "gguf", "ml"),

	// deprecated catalogers ////////////////////////////////////////
	// these are catalogers that should not be selectable other than specific inclusion via name or "deprecated" tag (to remain backwards compatible)
	newSimplePackageTaskFactory(dotnet.NewDotnetDepsCataloger, pkgcataloging.DeprecatedTag),               //nolint:staticcheck // TODO: remove in syft v2.0
	newSimplePackageTaskFactory(dotnet.NewDotnetPortableExecutableCataloger, pkgcataloging.DeprecatedTag), //nolint:staticcheck // TODO: remove in syft v2.0
	newSimplePackageTaskFactory(php.NewPeclCataloger, pkgcataloging.DeprecatedTag),                        //nolint:staticcheck // TODO: remove in syft v2.0
	newSimplePackageTaskFactory(nix.NewStoreCataloger, pkgcataloging.DeprecatedTag),                       //nolint:staticcheck // TODO: remove in syft v2.0
	}
}
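The tags registered with the new factory ("ai", "model", "gguf", "ml") make the GGUF cataloger addressable through cataloger selection. A sketch of driving that from the public API; GetSource and Close are used exactly as in the test hunk above, while the selection helper names are an assumption about syft's configuration API rather than something confirmed by this change:

package main

import (
	"context"

	"github.com/anchore/syft/syft"
	"github.com/anchore/syft/syft/cataloging/pkgcataloging"
)

func main() {
	ctx := context.Background()

	src, err := syft.GetSource(ctx, "dir:.", syft.DefaultGetSourceConfig())
	if err != nil {
		panic(err)
	}
	defer src.Close()

	// opt the GGUF cataloger in by its new "gguf" tag (helper names assumed)
	cfg := syft.DefaultCreateSBOMConfig().
		WithCatalogerSelection(pkgcataloging.NewSelectionRequest().WithAdditions("gguf"))

	s, err := syft.CreateSBOM(ctx, src, cfg)
	if err != nil {
		panic(err)
	}
	_ = s // the resulting SBOM now includes GGUF model packages, if any were found
}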
4148
schema/json/schema-16.0.42.json
Normal file
File diff suppressed because it is too large
4202
schema/json/schema-16.0.43.json
Normal file
File diff suppressed because it is too large
@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||||
"$id": "anchore.io/schema/syft/json/16.0.41/document",
|
"$id": "anchore.io/schema/syft/json/16.0.43/document",
|
||||||
"$ref": "#/$defs/Document",
|
"$ref": "#/$defs/Document",
|
||||||
"$defs": {
|
"$defs": {
|
||||||
"AlpmDbEntry": {
|
"AlpmDbEntry": {
|
||||||
@ -130,7 +130,8 @@
|
|||||||
"description": "Digests contains file content hashes for integrity verification"
|
"description": "Digests contains file content hashes for integrity verification"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"type": "object"
|
"type": "object",
|
||||||
|
"description": "AlpmFileRecord represents a single file entry within an Arch Linux package with its associated metadata tracked by pacman."
|
||||||
},
|
},
|
||||||
"ApkDbEntry": {
|
"ApkDbEntry": {
|
||||||
"properties": {
|
"properties": {
|
||||||
@ -433,16 +434,19 @@
|
|||||||
"CPE": {
|
"CPE": {
|
||||||
"properties": {
|
"properties": {
|
||||||
"cpe": {
|
"cpe": {
|
||||||
"type": "string"
|
"type": "string",
|
||||||
|
"description": "Value is the CPE string identifier."
|
||||||
},
|
},
|
||||||
"source": {
|
"source": {
|
||||||
"type": "string"
|
"type": "string",
|
||||||
|
"description": "Source is the source where this CPE was obtained or generated from."
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"required": [
|
"required": [
|
||||||
"cpe"
|
"cpe"
|
||||||
]
|
],
|
||||||
|
"description": "CPE represents a Common Platform Enumeration identifier used for matching packages to known vulnerabilities in security databases."
|
||||||
},
|
},
|
||||||
"ClassifierMatch": {
|
"ClassifierMatch": {
|
||||||
"properties": {
|
"properties": {
|
||||||
@ -747,19 +751,23 @@
|
|||||||
"Descriptor": {
|
"Descriptor": {
|
||||||
"properties": {
|
"properties": {
|
||||||
"name": {
|
"name": {
|
||||||
"type": "string"
|
"type": "string",
|
||||||
|
"description": "Name is the name of the tool that generated this SBOM (e.g., \"syft\")."
|
||||||
},
|
},
|
||||||
"version": {
|
"version": {
|
||||||
"type": "string"
|
"type": "string",
|
||||||
|
"description": "Version is the version of the tool that generated this SBOM."
|
||||||
},
|
},
|
||||||
"configuration": true
|
"configuration": {
|
||||||
|
"description": "Configuration contains the tool configuration used during SBOM generation."
|
||||||
|
}
|
||||||
},
|
},
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"required": [
|
"required": [
|
||||||
"name",
|
"name",
|
||||||
"version"
|
"version"
|
||||||
],
|
],
|
||||||
"description": "Descriptor describes what created the document as well as surrounding metadata"
|
"description": "Descriptor identifies the tool that generated this SBOM document, including its name, version, and configuration used during catalog generation."
|
||||||
},
|
},
|
||||||
"Digest": {
|
"Digest": {
|
||||||
"properties": {
|
"properties": {
|
||||||
@ -1285,58 +1293,71 @@
|
|||||||
"File": {
|
"File": {
|
||||||
"properties": {
|
"properties": {
|
||||||
"id": {
|
"id": {
|
||||||
"type": "string"
|
"type": "string",
|
||||||
|
"description": "ID is a unique identifier for this file within the SBOM."
|
||||||
},
|
},
|
||||||
"location": {
|
"location": {
|
||||||
"$ref": "#/$defs/Coordinates"
|
"$ref": "#/$defs/Coordinates",
|
||||||
|
"description": "Location is the file path and layer information where this file was found."
|
||||||
},
|
},
|
||||||
"metadata": {
|
"metadata": {
|
||||||
"$ref": "#/$defs/FileMetadataEntry"
|
"$ref": "#/$defs/FileMetadataEntry",
|
||||||
|
"description": "Metadata contains filesystem metadata such as permissions, ownership, and file type."
|
||||||
},
|
},
|
||||||
"contents": {
|
"contents": {
|
||||||
"type": "string"
|
"type": "string",
|
||||||
|
"description": "Contents is the file contents for small files."
|
||||||
},
|
},
|
||||||
"digests": {
|
"digests": {
|
||||||
"items": {
|
"items": {
|
||||||
"$ref": "#/$defs/Digest"
|
"$ref": "#/$defs/Digest"
|
||||||
},
|
},
|
||||||
"type": "array"
|
"type": "array",
|
||||||
|
"description": "Digests contains cryptographic hashes of the file contents."
|
||||||
},
|
},
|
||||||
"licenses": {
|
"licenses": {
|
||||||
"items": {
|
"items": {
|
||||||
"$ref": "#/$defs/FileLicense"
|
"$ref": "#/$defs/FileLicense"
|
||||||
},
|
},
|
||||||
"type": "array"
|
"type": "array",
|
||||||
|
"description": "Licenses contains license information discovered within this file."
|
||||||
},
|
},
|
||||||
"executable": {
|
"executable": {
|
||||||
"$ref": "#/$defs/Executable"
|
"$ref": "#/$defs/Executable",
|
||||||
|
"description": "Executable contains executable metadata if this file is a binary."
|
||||||
},
|
},
|
||||||
"unknowns": {
|
"unknowns": {
|
||||||
"items": {
|
"items": {
|
||||||
"type": "string"
|
"type": "string"
|
||||||
},
|
},
|
||||||
"type": "array"
|
"type": "array",
|
||||||
|
"description": "Unknowns contains unknown fields for forward compatibility."
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"required": [
|
"required": [
|
||||||
"id",
|
"id",
|
||||||
"location"
|
"location"
|
||||||
]
|
],
|
||||||
|
"description": "File represents a file discovered during cataloging with its metadata, content digests, licenses, and relationships to packages."
|
||||||
},
|
},
|
||||||
"FileLicense": {
|
"FileLicense": {
|
||||||
"properties": {
|
"properties": {
|
||||||
"value": {
|
"value": {
|
||||||
"type": "string"
|
"type": "string",
|
||||||
|
"description": "Value is the raw license identifier or text as found in the file."
|
||||||
},
|
},
|
||||||
"spdxExpression": {
|
"spdxExpression": {
|
||||||
"type": "string"
|
"type": "string",
|
||||||
|
"description": "SPDXExpression is the parsed SPDX license expression."
|
||||||
},
|
},
|
||||||
"type": {
|
"type": {
|
||||||
"type": "string"
|
"type": "string",
|
||||||
|
"description": "Type is the license type classification (e.g., declared, concluded, discovered)."
|
||||||
},
|
},
|
||||||
"evidence": {
|
"evidence": {
|
||||||
"$ref": "#/$defs/FileLicenseEvidence"
|
"$ref": "#/$defs/FileLicenseEvidence",
|
||||||
|
"description": "Evidence contains supporting evidence for this license detection."
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"type": "object",
|
"type": "object",
|
||||||
@ -1344,18 +1365,22 @@
|
|||||||
"value",
|
"value",
|
||||||
"spdxExpression",
|
"spdxExpression",
|
||||||
"type"
|
"type"
|
||||||
]
|
],
|
||||||
|
"description": "FileLicense represents license information discovered within a file's contents or metadata, including the matched license text and SPDX expression."
|
||||||
},
|
},
|
||||||
"FileLicenseEvidence": {
|
"FileLicenseEvidence": {
|
||||||
"properties": {
|
"properties": {
|
||||||
"confidence": {
|
"confidence": {
|
||||||
"type": "integer"
|
"type": "integer",
|
||||||
|
"description": "Confidence is the confidence score for this license detection (0-100)."
|
||||||
},
|
},
|
||||||
"offset": {
|
"offset": {
|
||||||
"type": "integer"
|
"type": "integer",
|
||||||
|
"description": "Offset is the byte offset where the license text starts in the file."
|
||||||
},
|
},
|
||||||
"extent": {
|
"extent": {
|
||||||
"type": "integer"
|
"type": "integer",
|
||||||
|
"description": "Extent is the length of the license text in bytes."
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"type": "object",
|
"type": "object",
|
||||||
@ -1363,30 +1388,38 @@
|
|||||||
"confidence",
|
"confidence",
|
||||||
"offset",
|
"offset",
|
||||||
"extent"
|
"extent"
|
||||||
]
|
],
|
||||||
|
"description": "FileLicenseEvidence contains supporting evidence for a license detection in a file, including the byte offset, extent, and confidence level."
|
||||||
},
|
},
|
||||||
"FileMetadataEntry": {
|
"FileMetadataEntry": {
|
||||||
"properties": {
|
"properties": {
|
||||||
"mode": {
|
"mode": {
|
||||||
"type": "integer"
|
"type": "integer",
|
||||||
|
"description": "Mode is the Unix file permission mode in octal format."
|
||||||
},
|
},
|
||||||
"type": {
|
"type": {
|
||||||
"type": "string"
|
"type": "string",
|
||||||
|
"description": "Type is the file type (e.g., \"RegularFile\", \"Directory\", \"SymbolicLink\")."
|
||||||
},
|
},
|
||||||
"linkDestination": {
|
"linkDestination": {
|
||||||
"type": "string"
|
"type": "string",
|
||||||
|
"description": "LinkDestination is the target path for symbolic links."
|
||||||
},
|
},
|
||||||
"userID": {
|
"userID": {
|
||||||
"type": "integer"
|
"type": "integer",
|
||||||
|
"description": "UserID is the file owner user ID."
|
||||||
},
|
},
|
||||||
"groupID": {
|
"groupID": {
|
||||||
"type": "integer"
|
"type": "integer",
|
||||||
|
"description": "GroupID is the file owner group ID."
|
||||||
},
|
},
|
||||||
"mimeType": {
|
"mimeType": {
|
||||||
"type": "string"
|
"type": "string",
|
||||||
|
"description": "MIMEType is the MIME type of the file contents."
|
||||||
},
|
},
|
||||||
"size": {
|
"size": {
|
||||||
"type": "integer"
|
"type": "integer",
|
||||||
|
"description": "Size is the file size in bytes."
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"type": "object",
|
"type": "object",
|
||||||
@ -1397,7 +1430,59 @@
|
|||||||
"groupID",
|
"groupID",
|
||||||
"mimeType",
|
"mimeType",
|
||||||
"size"
|
"size"
|
||||||
]
|
],
|
||||||
|
"description": "FileMetadataEntry contains filesystem-level metadata attributes such as permissions, ownership, type, and size for a cataloged file."
|
||||||
|
},
|
||||||
|
"GgufFileMetadata": {
|
||||||
|
"properties": {
|
||||||
|
"ggufVersion": {
|
||||||
|
"type": "integer",
|
||||||
|
"description": "GGUFVersion is the GGUF format version (e.g., 3)"
|
||||||
|
},
|
||||||
|
"modelName": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "ModelName is the name of the model (from general.name or filename)"
|
||||||
|
},
|
||||||
|
"fileSize": {
|
||||||
|
"type": "integer",
|
||||||
|
"description": "FileSize is the size of the GGUF file in bytes (best-effort if available from resolver)"
|
||||||
|
},
|
||||||
|
"license": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "License is the license identifier (from general.license if present)"
|
||||||
|
},
|
||||||
|
"architecture": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Architecture is the model architecture (from general.architecture, e.g., \"qwen3moe\", \"llama\")"
|
||||||
|
},
|
||||||
|
"quantization": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Quantization is the quantization type (e.g., \"IQ4_NL\", \"Q4_K_M\")"
|
||||||
|
},
|
||||||
|
"parameters": {
|
||||||
|
"type": "integer",
|
||||||
|
"description": "Parameters is the number of model parameters (if present in header)"
|
||||||
|
},
|
||||||
|
"tensorCount": {
|
||||||
|
"type": "integer",
|
||||||
|
"description": "TensorCount is the number of tensors in the model"
|
||||||
|
},
|
||||||
|
"header": {
|
||||||
|
"type": "object",
|
||||||
|
"description": "Header contains the remaining key-value pairs from the GGUF header that are not already\nrepresented as typed fields above. This preserves additional metadata fields for reference\n(namespaced with general.*, llama.*, etc.) while avoiding duplication."
|
||||||
|
},
|
||||||
|
"metadataHash": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "MetadataHash is a xx64 hash of all key-value pairs from the GGUF header metadata.\nThis hash is computed over the complete header metadata (including the fields extracted\ninto typed fields above) and provides a stable identifier for the model configuration\nacross different file locations or remotes. It allows matching identical models even\nwhen stored in different repositories or with different filenames."
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"type": "object",
|
||||||
|
"required": [
|
||||||
|
"ggufVersion",
|
||||||
|
"modelName",
|
||||||
|
"tensorCount"
|
||||||
|
],
|
||||||
|
"description": "GGUFFileHeader represents metadata extracted from a GGUF (GPT-Generated Unified Format) model file."
|
||||||
},
|
},
|
||||||
"GithubActionsUseStatement": {
|
"GithubActionsUseStatement": {
|
||||||
"properties": {
|
"properties": {
|
||||||
@ -1545,7 +1630,8 @@
|
|||||||
"items": {
|
"items": {
|
||||||
"type": "string"
|
"type": "string"
|
||||||
},
|
},
|
||||||
"type": "array"
|
"type": "array",
|
||||||
|
"description": "IDLikes represents a list of distribution IDs that this Linux distribution is similar to or derived from, as defined in os-release ID_LIKE field."
|
||||||
},
|
},
|
||||||
"JavaArchive": {
|
"JavaArchive": {
|
||||||
"properties": {
|
"properties": {
|
||||||
@ -1876,15 +1962,48 @@
|
|||||||
"integrity": {
|
"integrity": {
|
||||||
"type": "string",
|
"type": "string",
|
||||||
"description": "Integrity is Subresource Integrity hash for verification using standard SRI format (sha512-... or sha1-...). npm changed from SHA-1 to SHA-512 in newer versions. For registry sources this is the integrity from registry, for remote tarballs it's SHA-512 of the file. npm verifies tarball matches this hash before unpacking, throwing EINTEGRITY error if mismatch detected."
|
"description": "Integrity is Subresource Integrity hash for verification using standard SRI format (sha512-... or sha1-...). npm changed from SHA-1 to SHA-512 in newer versions. For registry sources this is the integrity from registry, for remote tarballs it's SHA-512 of the file. npm verifies tarball matches this hash before unpacking, throwing EINTEGRITY error if mismatch detected."
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"patternProperties": {
|
||||||
|
".*": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"type": "object",
|
||||||
|
"description": "Dependencies is a map of dependencies and their version markers, i.e. \"lodash\": \"^1.0.0\""
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"required": [
|
"required": [
|
||||||
"resolved",
|
"resolved",
|
||||||
"integrity"
|
"integrity",
|
||||||
|
"dependencies"
|
||||||
],
|
],
|
||||||
"description": "NpmPackageLockEntry represents a single entry within the \"packages\" section of a package-lock.json file."
|
"description": "NpmPackageLockEntry represents a single entry within the \"packages\" section of a package-lock.json file."
|
||||||
},
|
},
|
||||||
|
"JavascriptPnpmLockEntry": {
|
||||||
|
"properties": {
|
||||||
|
"resolution": {
|
||||||
|
"$ref": "#/$defs/PnpmLockResolution",
|
||||||
|
"description": "Resolution is the resolution information for the package"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"patternProperties": {
|
||||||
|
".*": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"type": "object",
|
||||||
|
"description": "Dependencies is a map of dependencies and their versions"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"type": "object",
|
||||||
|
"required": [
|
||||||
|
"resolution",
|
||||||
|
"dependencies"
|
||||||
|
],
|
||||||
|
"description": "PnpmLockEntry represents a single entry in the \"packages\" section of a pnpm-lock.yaml file."
|
||||||
|
},
|
||||||
"JavascriptYarnLockEntry": {
|
"JavascriptYarnLockEntry": {
|
||||||
"properties": {
|
"properties": {
|
||||||
"resolved": {
|
"resolved": {
|
||||||
@ -1894,12 +2013,22 @@
|
|||||||
"integrity": {
|
"integrity": {
|
||||||
"type": "string",
|
"type": "string",
|
||||||
"description": "Integrity is Subresource Integrity hash for verification (SRI format)"
|
"description": "Integrity is Subresource Integrity hash for verification (SRI format)"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"patternProperties": {
|
||||||
|
".*": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"type": "object",
|
||||||
|
"description": "Dependencies is a map of dependencies and their versions"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"required": [
|
"required": [
|
||||||
"resolved",
|
"resolved",
|
||||||
"integrity"
|
"integrity",
|
||||||
|
"dependencies"
|
||||||
],
|
],
|
||||||
"description": "YarnLockEntry represents a single entry section of a yarn.lock file."
|
"description": "YarnLockEntry represents a single entry section of a yarn.lock file."
|
||||||
},
|
},
|
||||||
@ -1931,28 +2060,34 @@
|
|||||||
"License": {
|
"License": {
|
||||||
"properties": {
|
"properties": {
|
||||||
"value": {
|
"value": {
|
||||||
"type": "string"
|
"type": "string",
|
||||||
|
"description": "Value is the raw license identifier or expression as found."
|
||||||
},
|
},
|
||||||
"spdxExpression": {
|
"spdxExpression": {
|
||||||
"type": "string"
|
"type": "string",
|
||||||
|
"description": "SPDXExpression is the parsed SPDX license expression."
|
||||||
},
|
},
|
||||||
"type": {
|
"type": {
|
||||||
"type": "string"
|
"type": "string",
|
||||||
|
"description": "Type is the license type classification (e.g., declared, concluded, discovered)."
|
||||||
},
|
},
|
||||||
"urls": {
|
"urls": {
|
||||||
"items": {
|
"items": {
|
||||||
"type": "string"
|
"type": "string"
|
||||||
},
|
},
|
||||||
"type": "array"
|
"type": "array",
|
||||||
|
"description": "URLs are URLs where license text or information can be found."
|
||||||
},
|
},
|
||||||
"locations": {
|
"locations": {
|
||||||
"items": {
|
"items": {
|
||||||
"$ref": "#/$defs/Location"
|
"$ref": "#/$defs/Location"
|
||||||
},
|
},
|
||||||
"type": "array"
|
"type": "array",
|
||||||
|
"description": "Locations are file locations where this license was discovered."
|
||||||
},
|
},
|
||||||
"contents": {
|
"contents": {
|
||||||
"type": "string"
|
"type": "string",
|
||||||
|
"description": "Contents is the full license text content."
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"type": "object",
|
"type": "object",
|
||||||
@ -1962,7 +2097,8 @@
|
|||||||
"type",
|
"type",
|
||||||
"urls",
|
"urls",
|
||||||
"locations"
|
"locations"
|
||||||
]
|
],
|
||||||
|
"description": "License represents software license information discovered for a package, including SPDX expressions and supporting evidence locations."
|
||||||
},
|
},
|
||||||
"LinuxKernelArchive": {
|
"LinuxKernelArchive": {
|
||||||
"properties": {
|
"properties": {
|
||||||
@@ -2087,64 +2223,84 @@
    "LinuxRelease": {
      "properties": {
        "prettyName": {
-          "type": "string"
+          "type": "string",
+          "description": "PrettyName is a human-readable operating system name with version."
        },
        "name": {
-          "type": "string"
+          "type": "string",
+          "description": "Name is the operating system name without version information."
        },
        "id": {
-          "type": "string"
+          "type": "string",
+          "description": "ID is the lower-case operating system identifier (e.g., \"ubuntu\", \"rhel\")."
        },
        "idLike": {
-          "$ref": "#/$defs/IDLikes"
+          "$ref": "#/$defs/IDLikes",
+          "description": "IDLike is a list of operating system IDs this distribution is similar to or derived from."
        },
        "version": {
-          "type": "string"
+          "type": "string",
+          "description": "Version is the operating system version including codename if available."
        },
        "versionID": {
-          "type": "string"
+          "type": "string",
+          "description": "VersionID is the operating system version number or identifier."
        },
        "versionCodename": {
-          "type": "string"
+          "type": "string",
+          "description": "VersionCodename is the operating system release codename (e.g., \"jammy\", \"bullseye\")."
        },
        "buildID": {
-          "type": "string"
+          "type": "string",
+          "description": "BuildID is a build identifier for the operating system."
        },
        "imageID": {
-          "type": "string"
+          "type": "string",
+          "description": "ImageID is an identifier for container or cloud images."
        },
        "imageVersion": {
-          "type": "string"
+          "type": "string",
+          "description": "ImageVersion is the version for container or cloud images."
        },
        "variant": {
-          "type": "string"
+          "type": "string",
+          "description": "Variant is the operating system variant name (e.g., \"Server\", \"Workstation\")."
        },
        "variantID": {
-          "type": "string"
+          "type": "string",
+          "description": "VariantID is the lower-case operating system variant identifier."
        },
        "homeURL": {
-          "type": "string"
+          "type": "string",
+          "description": "HomeURL is the homepage URL for the operating system."
        },
        "supportURL": {
-          "type": "string"
+          "type": "string",
+          "description": "SupportURL is the support or help URL for the operating system."
        },
        "bugReportURL": {
-          "type": "string"
+          "type": "string",
+          "description": "BugReportURL is the bug reporting URL for the operating system."
        },
        "privacyPolicyURL": {
-          "type": "string"
+          "type": "string",
+          "description": "PrivacyPolicyURL is the privacy policy URL for the operating system."
        },
        "cpeName": {
-          "type": "string"
+          "type": "string",
+          "description": "CPEName is the Common Platform Enumeration name for the operating system."
        },
        "supportEnd": {
-          "type": "string"
+          "type": "string",
+          "description": "SupportEnd is the end of support date or version identifier."
        },
        "extendedSupport": {
-          "type": "boolean"
+          "type": "boolean",
+          "description": "ExtendedSupport indicates whether extended security or support is available."
        }
      },
-      "type": "object"
+      "type": "object",
+      "description": "LinuxRelease contains Linux distribution identification and version information extracted from /etc/os-release or similar system files."
    },
    "Location": {
      "properties": {
@@ -2240,7 +2396,7 @@
        "product_id",
        "kb"
      ],
-      "description": "MicrosoftKbPatch is slightly odd in how it is expected to map onto data."
+      "description": "MicrosoftKbPatch represents a Windows Knowledge Base patch identifier associated with a specific Microsoft product from the MSRC (Microsoft Security Response Center)."
    },
    "NixDerivation": {
      "properties": {
@@ -2474,6 +2630,9 @@
        {
          "$ref": "#/$defs/ErlangRebarLockEntry"
        },
+        {
+          "$ref": "#/$defs/GgufFileMetadata"
+        },
        {
          "$ref": "#/$defs/GithubActionsUseStatement"
        },
@@ -2507,6 +2666,9 @@
        {
          "$ref": "#/$defs/JavascriptNpmPackageLockEntry"
        },
+        {
+          "$ref": "#/$defs/JavascriptPnpmLockEntry"
+        },
        {
          "$ref": "#/$defs/JavascriptYarnLockEntry"
        },
@@ -2958,6 +3120,19 @@
      ],
      "description": "PhpPeclEntry represents a single package entry found within php pecl metadata files."
    },
+    "PnpmLockResolution": {
+      "properties": {
+        "integrity": {
+          "type": "string",
+          "description": "Integrity is Subresource Integrity hash for verification (SRI format)"
+        }
+      },
+      "type": "object",
+      "required": [
+        "integrity"
+      ],
+      "description": "PnpmLockResolution contains package resolution metadata from pnpm lockfiles, including the integrity hash used for verification."
+    },
    "PortageDbEntry": {
      "properties": {
        "installedSize": {
@@ -3443,22 +3618,28 @@
    "Relationship": {
      "properties": {
        "parent": {
-          "type": "string"
+          "type": "string",
+          "description": "Parent is the ID of the parent artifact in this relationship."
        },
        "child": {
-          "type": "string"
+          "type": "string",
+          "description": "Child is the ID of the child artifact in this relationship."
        },
        "type": {
-          "type": "string"
+          "type": "string",
+          "description": "Type is the relationship type (e.g., \"contains\", \"dependency-of\", \"ancestor-of\")."
        },
-        "metadata": true
+        "metadata": {
+          "description": "Metadata contains additional relationship-specific metadata."
+        }
      },
      "type": "object",
      "required": [
        "parent",
        "child",
        "type"
-      ]
+      ],
+      "description": "Relationship represents a directed relationship between two artifacts in the SBOM, such as package-contains-file or package-depends-on-package."
    },
    "RpmArchive": {
      "properties": {
@@ -3805,17 +3986,20 @@
    "Schema": {
      "properties": {
        "version": {
-          "type": "string"
+          "type": "string",
+          "description": "Version is the JSON schema version for this document format."
        },
        "url": {
-          "type": "string"
+          "type": "string",
+          "description": "URL is the URL to the JSON schema definition document."
        }
      },
      "type": "object",
      "required": [
        "version",
        "url"
-      ]
+      ],
+      "description": "Schema specifies the JSON schema version and URL reference that defines the structure and validation rules for this document format."
    },
    "SnapEntry": {
      "properties": {
@@ -3853,21 +4037,28 @@
    "Source": {
      "properties": {
        "id": {
-          "type": "string"
+          "type": "string",
+          "description": "ID is a unique identifier for the analyzed source artifact."
        },
        "name": {
-          "type": "string"
+          "type": "string",
+          "description": "Name is the name of the analyzed artifact (e.g., image name, directory path)."
        },
        "version": {
-          "type": "string"
+          "type": "string",
+          "description": "Version is the version of the analyzed artifact (e.g., image tag)."
        },
        "supplier": {
-          "type": "string"
+          "type": "string",
+          "description": "Supplier is supplier information, which can be user-provided for NTIA minimum elements compliance."
        },
        "type": {
-          "type": "string"
+          "type": "string",
+          "description": "Type is the source type (e.g., \"image\", \"directory\", \"file\")."
        },
-        "metadata": true
+        "metadata": {
+          "description": "Metadata contains additional source-specific metadata."
+        }
      },
      "type": "object",
      "required": [
@@ -3877,7 +4068,7 @@
        "type",
        "metadata"
      ],
-      "description": "Instead, the Supplier can be determined by the user of syft and passed as a config or flag to help fulfill the NTIA minimum elements."
+      "description": "Source represents the artifact that was analyzed to generate this SBOM, such as a container image, directory, or file archive."
    },
    "SwiftPackageManagerLockEntry": {
      "properties": {
@@ -15,6 +15,7 @@ import (
     "github.com/anchore/syft/syft/file"
     "github.com/anchore/syft/syft/sbom"
     "github.com/anchore/syft/syft/source"
+    "github.com/anchore/syft/syft/source/ocimodelsource"
 )
 
 // CreateSBOMConfig specifies all parameters needed for creating an SBOM.
@@ -483,6 +484,9 @@ func findDefaultTags(src source.Description) ([]string, error) {
         return []string{pkgcataloging.DirectoryTag, filecataloging.FileTag}, nil
     case source.SnapMetadata:
         return []string{pkgcataloging.InstalledTag, filecataloging.FileTag}, nil
+    case *ocimodelsource.OCIModelMetadata:
+        // OCI model artifacts should use image-like catalogers since they provide files to scan
+        return []string{pkgcataloging.ImageTag, filecataloging.FileTag}, nil
     default:
         return nil, fmt.Errorf("unable to determine default cataloger tag for source type=%T", m)
     }
syft/format/cpes/decoder.go (new file, 95 lines)
@@ -0,0 +1,95 @@
+package cpes
+
+import (
+    "bufio"
+    "errors"
+    "fmt"
+    "io"
+    "strings"
+
+    "github.com/anchore/syft/internal/log"
+    "github.com/anchore/syft/syft/cpe"
+    "github.com/anchore/syft/syft/format/internal"
+    "github.com/anchore/syft/syft/pkg"
+    "github.com/anchore/syft/syft/sbom"
+)
+
+const ID sbom.FormatID = "cpes"
+const version = "1"
+
+var _ sbom.FormatDecoder = (*decoder)(nil)
+
+type decoder struct{}
+
+func NewFormatDecoder() sbom.FormatDecoder {
+    return decoder{}
+}
+
+func (d decoder) Decode(r io.Reader) (*sbom.SBOM, sbom.FormatID, string, error) {
+    if r == nil {
+        return nil, "", "", fmt.Errorf("no reader provided")
+    }
+    s, err := toSyftModel(r)
+    return s, ID, version, err
+}
+
+func (d decoder) Identify(r io.Reader) (sbom.FormatID, string) {
+    if r == nil {
+        return "", ""
+    }
+
+    scanner := bufio.NewScanner(r)
+    for scanner.Scan() {
+        line := strings.TrimSpace(scanner.Text())
+        if line == "" {
+            // skip whitespace only lines
+            continue
+        }
+
+        err := cpe.ValidateString(line)
+        if err != nil {
+            return "", ""
+        }
+
+        return ID, version
+    }
+
+    return "", ""
+}
+
+func toSyftModel(r io.Reader) (*sbom.SBOM, error) {
+    var errs []error
+    pkgs := pkg.NewCollection()
+
+    scanner := bufio.NewScanner(r)
+    for scanner.Scan() {
+        line := strings.TrimSpace(scanner.Text())
+
+        if line == "" {
+            continue
+        }
+
+        // skip invalid CPEs
+        c, err := cpe.New(line, "")
+        if err != nil {
+            log.WithFields("error", err, "line", line).Debug("unable to parse cpe")
+            continue
+        }
+
+        p := pkg.Package{
+            Name:    c.Attributes.Product,
+            Version: c.Attributes.Version,
+            CPEs:    []cpe.CPE{c},
+        }
+
+        internal.Backfill(&p)
+        p.SetID()
+        pkgs.Add(p)
+    }
+
+    return &sbom.SBOM{
+        Artifacts: sbom.Artifacts{
+            Packages: pkgs,
+        },
+    }, errors.Join(errs...)
+}
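Taken together with the registration in format.Decoders() further below, the new decoder can also be driven directly. A minimal, hedged sketch (the input literal and the main package are invented for illustration; NewFormatDecoder, Decode, and Sorted are taken from the code above):

package main

import (
    "fmt"
    "strings"

    "github.com/anchore/syft/syft/format/cpes"
)

func main() {
    // a newline-delimited list of CPE strings; both the URI binding and
    // the 2.3 formatted-string binding are accepted by the decoder
    input := "cpe:/a:apache:log4j:2.14.1\ncpe:2.3:a:apache:log4j:2.14.1:*:*:*:*:*:*:*"

    dec := cpes.NewFormatDecoder()
    s, id, version, err := dec.Decode(strings.NewReader(input))
    if err != nil {
        panic(err)
    }

    // each valid CPE line becomes one package, with name/version backfilled
    fmt.Printf("format=%s schema=%s packages=%d\n", id, version, len(s.Artifacts.Packages.Sorted()))
}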
syft/format/cpes/decoder_test.go (new file, 171 lines)
@@ -0,0 +1,171 @@
+package cpes
+
+import (
+    "strings"
+    "testing"
+
+    "github.com/google/go-cmp/cmp"
+    "github.com/google/go-cmp/cmp/cmpopts"
+    "github.com/stretchr/testify/require"
+
+    "github.com/anchore/syft/syft/cpe"
+    "github.com/anchore/syft/syft/file"
+    "github.com/anchore/syft/syft/pkg"
+    "github.com/anchore/syft/syft/sbom"
+)
+
+func Test_CPEProvider(t *testing.T) {
+    tests := []struct {
+        name      string
+        userInput string
+        sbom      *sbom.SBOM
+    }{
+        {
+            name:      "takes a single cpe",
+            userInput: "cpe:/a:apache:log4j:2.14.1",
+            sbom: &sbom.SBOM{
+                Artifacts: sbom.Artifacts{
+                    Packages: pkg.NewCollection(pkg.Package{
+                        Name:    "log4j",
+                        Version: "2.14.1",
+                        CPEs: []cpe.CPE{
+                            cpe.Must("cpe:/a:apache:log4j:2.14.1", ""),
+                        },
+                    }),
+                },
+            },
+        },
+        {
+            name: "takes multiple cpes",
+            userInput: `cpe:/a:apache:log4j:2.14.1
+cpe:2.3:a:f5:nginx:*:*:*:*:*:*:*:*;
+cpe:2.3:a:f5:nginx:0.5.2:*:*:*:*:*:*:*;
+cpe:2.3:a:f5:nginx:0.5.3:*:*:*:*:*:*:*;`,
+            sbom: &sbom.SBOM{
+                Artifacts: sbom.Artifacts{
+                    Packages: pkg.NewCollection(
+                        pkg.Package{
+                            Name:    "log4j",
+                            Version: "2.14.1",
+                            CPEs: []cpe.CPE{
+                                cpe.Must("cpe:/a:apache:log4j:2.14.1", ""),
+                            },
+                        },
+                        pkg.Package{
+                            Name:    "nginx",
+                            Version: "",
+                            CPEs: []cpe.CPE{
+                                cpe.Must("cpe:2.3:a:f5:nginx:*:*:*:*:*:*:*:*;", ""),
+                            },
+                        },
+                        pkg.Package{
+                            Name:    "nginx",
+                            Version: "0.5.2",
+                            CPEs: []cpe.CPE{
+                                cpe.Must("cpe:2.3:a:f5:nginx:0.5.2:*:*:*:*:*:*:*;", ""),
+                            },
+                        },
+                        pkg.Package{
+                            Name:    "nginx",
+                            Version: "0.5.3",
+                            CPEs: []cpe.CPE{
+                                cpe.Must("cpe:2.3:a:f5:nginx:0.5.3:*:*:*:*:*:*:*;", ""),
+                            },
+                        },
+                    ),
+                },
+            },
+        },
+        {
+            name:      "takes cpe with no version",
+            userInput: "cpe:/a:apache:log4j",
+            sbom: &sbom.SBOM{
+                Artifacts: sbom.Artifacts{
+                    Packages: pkg.NewCollection(pkg.Package{
+                        Name: "log4j",
+                        CPEs: []cpe.CPE{
+                            cpe.Must("cpe:/a:apache:log4j", ""),
+                        },
+                    }),
+                },
+            },
+        },
+        {
+            name:      "takes CPE 2.3 format",
+            userInput: "cpe:2.3:a:apache:log4j:2.14.1:*:*:*:*:*:*:*",
+            sbom: &sbom.SBOM{
+                Artifacts: sbom.Artifacts{
+                    Packages: pkg.NewCollection(pkg.Package{
+                        Name:    "log4j",
+                        Version: "2.14.1",
+                        CPEs: []cpe.CPE{
+                            cpe.Must("cpe:2.3:a:apache:log4j:2.14.1:*:*:*:*:*:*:*", ""),
+                        },
+                    }),
+                },
+            },
+        },
+        {
+            name:      "deduces target SW from CPE - known target_sw",
+            userInput: "cpe:2.3:a:amazon:opensearch:*:*:*:*:*:ruby:*:*",
+            sbom: &sbom.SBOM{
+                Artifacts: sbom.Artifacts{
+                    Packages: pkg.NewCollection(pkg.Package{
+                        Name: "opensearch",
+                        Type: pkg.GemPkg,
+                        CPEs: []cpe.CPE{
+                            cpe.Must("cpe:2.3:a:amazon:opensearch:*:*:*:*:*:ruby:*:*", ""),
+                        },
+                    }),
+                },
+            },
+        },
+        {
+            name:      "handles unknown target_sw CPE field",
+            userInput: "cpe:2.3:a:amazon:opensearch:*:*:*:*:*:loremipsum:*:*",
+            sbom: &sbom.SBOM{
+                Artifacts: sbom.Artifacts{
+                    Packages: pkg.NewCollection(pkg.Package{
+                        Name: "opensearch",
+                        Type: "",
+                        CPEs: []cpe.CPE{
+                            cpe.Must("cpe:2.3:a:amazon:opensearch:*:*:*:*:*:loremipsum:*:*", ""),
+                        },
+                    }),
+                },
+            },
+        },
+        {
+            name:      "invalid prefix",
+            userInput: "dir:test-fixtures/cpe",
+            sbom: &sbom.SBOM{
+                Artifacts: sbom.Artifacts{
+                    Packages: pkg.NewCollection(),
+                },
+            },
+        },
+    }
+
+    syftPkgOpts := []cmp.Option{
+        cmpopts.IgnoreFields(pkg.Package{}, "id", "Language"),
+        cmpopts.IgnoreUnexported(pkg.Package{}, file.LocationSet{}, pkg.LicenseSet{}),
+    }
+
+    for _, tc := range tests {
+        t.Run(tc.name, func(t *testing.T) {
+            dec := NewFormatDecoder()
+
+            decodedSBOM, _, _, err := dec.Decode(strings.NewReader(tc.userInput))
+            require.NoError(t, err)
+
+            gotSyftPkgs := decodedSBOM.Artifacts.Packages.Sorted()
+            wantSyftPkgs := tc.sbom.Artifacts.Packages.Sorted()
+            require.Equal(t, len(gotSyftPkgs), len(wantSyftPkgs))
+            for idx, wantPkg := range wantSyftPkgs {
+                if d := cmp.Diff(wantPkg, gotSyftPkgs[idx], syftPkgOpts...); d != "" {
+                    t.Errorf("unexpected Syft Package (-want +got):\n%s", d)
+                }
+            }
+        })
+    }
+}
@@ -3,6 +3,7 @@ package format
 import (
     "io"
 
+    "github.com/anchore/syft/syft/format/cpes"
     "github.com/anchore/syft/syft/format/cyclonedxjson"
     "github.com/anchore/syft/syft/format/cyclonedxxml"
     "github.com/anchore/syft/syft/format/purls"
@@ -26,6 +27,7 @@ func Decoders() []sbom.FormatDecoder {
         spdxtagvalue.NewFormatDecoder(),
         spdxjson.NewFormatDecoder(),
         purls.NewFormatDecoder(),
+        cpes.NewFormatDecoder(),
     }
 }
 
@@ -87,6 +87,9 @@ func toGithubManifests(s *sbom.SBOM) Manifests {
         }
 
         name := dependencyName(p)
+        if name == "" || p.PURL == "" {
+            continue
+        }
         manifest.Resolved[name] = DependencyNode{
             PackageURL: p.PURL,
             Metadata:   toDependencyMetadata(p),
@@ -16,11 +16,6 @@
      "source_location": "redacted/some/path/some/path/pkg1"
    },
    "resolved": {
-      "": {
-        "package_url": "a-purl-2",
-        "relationship": "direct",
-        "scope": "runtime"
-      },
      "pkg:deb/debian/package-2@2.0.1": {
        "package_url": "pkg:deb/debian/package-2@2.0.1",
        "relationship": "direct",
@@ -17,13 +17,6 @@
    },
    "metadata": {
      "syft:filesystem":"redacted"
-    },
-    "resolved": {
-      "": {
-        "package_url": "a-purl-1",
-        "relationship": "direct",
-        "scope": "runtime"
-      }
    }
  },
  "user-image-input:/somefile-2.txt": {
@@ -10,13 +10,31 @@ import (
     "github.com/anchore/syft/internal/log"
     "github.com/anchore/syft/syft/cpe"
     "github.com/anchore/syft/syft/pkg"
+    cataloger "github.com/anchore/syft/syft/pkg/cataloger/common/cpe"
 )
 
 // Backfill takes all information present in the package and attempts to fill in any missing information
-// from any available sources, such as the Metadata and PURL.
+// from any available sources, such as the Metadata, PURL, or CPEs.
 //
 // Backfill does not call p.SetID(), but this needs to be called later to ensure it's up to date
 func Backfill(p *pkg.Package) {
+    backfillFromPurl(p)
+    backfillFromCPE(p)
+}
+
+func backfillFromCPE(p *pkg.Package) {
+    if len(p.CPEs) == 0 {
+        return
+    }
+
+    c := p.CPEs[0]
+
+    if p.Type == "" {
+        p.Type = cataloger.TargetSoftwareToPackageType(c.Attributes.TargetSW)
+    }
+}
+
+func backfillFromPurl(p *pkg.Package) {
     if p.PURL == "" {
         return
     }
@@ -29,6 +47,7 @@ func Backfill(p *pkg.Package) {
 
     var cpes []cpe.CPE
     epoch := ""
+    rpmmod := ""
 
     for _, qualifier := range purl.Qualifiers {
         switch qualifier.Key {
@@ -44,6 +63,8 @@ func Backfill(p *pkg.Package) {
             }
         case pkg.PURLQualifierEpoch:
             epoch = qualifier.Value
+        case pkg.PURLQualifierRpmModularity:
+            rpmmod = qualifier.Value
         }
     }
 
@@ -63,6 +84,10 @@ func Backfill(p *pkg.Package) {
         setJavaMetadataFromPurl(p, purl)
     }
 
+    if p.Type == pkg.RpmPkg {
+        setRpmMetadataFromPurl(p, rpmmod)
+    }
+
     for _, c := range cpes {
         if slices.Contains(p.CPEs, c) {
             continue
@@ -82,6 +107,35 @@ func setJavaMetadataFromPurl(p *pkg.Package, _ packageurl.PackageURL) {
     }
 }
 
+func setRpmMetadataFromPurl(p *pkg.Package, rpmmod string) {
+    if p.Type != pkg.RpmPkg {
+        return
+    }
+    if rpmmod == "" {
+        return
+    }
+
+    if p.Metadata == nil {
+        p.Metadata = pkg.RpmDBEntry{
+            ModularityLabel: &rpmmod,
+        }
+        return
+    }
+
+    switch m := p.Metadata.(type) {
+    case pkg.RpmDBEntry:
+        if m.ModularityLabel == nil {
+            m.ModularityLabel = &rpmmod
+            p.Metadata = m
+        }
+    case pkg.RpmArchive:
+        if m.ModularityLabel == nil {
+            m.ModularityLabel = &rpmmod
+            p.Metadata = m
+        }
+    }
+}
+
 func setVersionFromPurl(p *pkg.Package, purl packageurl.PackageURL, epoch string) {
     if p.Version == "" {
         p.Version = purl.Version
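A hedged sketch of the resulting behavior, mirroring the new test case below. Backfill lives in an internal package, so this is only callable from within the syft module; the surrounding package and function are hypothetical:

package example // hypothetical in-module package

import (
    "github.com/anchore/syft/syft/format/internal"
    "github.com/anchore/syft/syft/pkg"
)

func demoRpmModularityBackfill() pkg.Package {
    p := pkg.Package{
        PURL: "pkg:rpm/redhat/httpd@2.4.37-51?arch=x86_64&distro=rhel-8.7&rpmmod=httpd:2.4",
    }
    internal.Backfill(&p)
    p.SetID()
    // after backfilling: p.Name == "httpd", p.Version == "2.4.37-51",
    // p.Type == pkg.RpmPkg, and p.Metadata is a pkg.RpmDBEntry whose
    // ModularityLabel points at "httpd:2.4" (per the test case below)
    return p
}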
@@ -53,6 +53,21 @@ func Test_Backfill(t *testing.T) {
                 Version: "1:1.12.8-26.el8",
             },
         },
+        {
+            name: "rpm with rpmmod",
+            in: pkg.Package{
+                PURL: "pkg:rpm/redhat/httpd@2.4.37-51?arch=x86_64&distro=rhel-8.7&rpmmod=httpd:2.4",
+            },
+            expected: pkg.Package{
+                PURL:    "pkg:rpm/redhat/httpd@2.4.37-51?arch=x86_64&distro=rhel-8.7&rpmmod=httpd:2.4",
+                Type:    pkg.RpmPkg,
+                Name:    "httpd",
+                Version: "2.4.37-51",
+                Metadata: pkg.RpmDBEntry{
+                    ModularityLabel: strRef("httpd:2.4"),
+                },
+            },
+        },
         {
             name: "bad cpe",
             in: pkg.Package{
@@ -106,6 +121,20 @@ func Test_Backfill(t *testing.T) {
                 Metadata: pkg.JavaArchive{},
             },
         },
+        {
+            name: "target-sw from CPE",
+            in: pkg.Package{
+                CPEs: []cpe.CPE{
+                    cpe.Must("cpe:2.3:a:amazon:opensearch:*:*:*:*:*:ruby:*:*", ""),
+                },
+            },
+            expected: pkg.Package{
+                CPEs: []cpe.CPE{
+                    cpe.Must("cpe:2.3:a:amazon:opensearch:*:*:*:*:*:ruby:*:*", ""),
+                },
+                Type: pkg.GemPkg,
+            },
+        },
     }
     for _, tt := range tests {
         t.Run(tt.name, func(t *testing.T) {
@@ -171,3 +200,7 @@ func Test_nameFromPurl(t *testing.T) {
         })
     }
 }
+
+func strRef(s string) *string {
+    return &s
+}
@@ -40,8 +40,11 @@ func EncodeComponent(p pkg.Package, supplier string, locationSorter func(a, b fi
     }
 
     componentType := cyclonedx.ComponentTypeLibrary
-    if p.Type == pkg.BinaryPkg {
+    switch p.Type {
+    case pkg.BinaryPkg:
         componentType = cyclonedx.ComponentTypeApplication
+    case pkg.ModelPkg:
+        componentType = cyclonedx.ComponentTypeMachineLearningModel
     }
 
     return cyclonedx.Component{
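For reference, a hedged sketch of what the new mapping produces (the constant comes from the cyclonedx-go library that the encoder already uses; the program is illustrative only):

package main

import (
    "fmt"

    "github.com/CycloneDX/cyclonedx-go"
)

func main() {
    // pkg.BinaryPkg maps to "application", pkg.ModelPkg now maps to
    // "machine-learning-model", and everything else remains "library"
    fmt.Println(cyclonedx.ComponentTypeMachineLearningModel)
}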
@@ -62,7 +62,7 @@ func collectPackages(component *cyclonedx.Component, s *sbom.SBOM, idMap map[str
     switch component.Type {
     case cyclonedx.ComponentTypeOS:
     case cyclonedx.ComponentTypeContainer:
-    case cyclonedx.ComponentTypeApplication, cyclonedx.ComponentTypeFramework, cyclonedx.ComponentTypeLibrary:
+    case cyclonedx.ComponentTypeApplication, cyclonedx.ComponentTypeFramework, cyclonedx.ComponentTypeLibrary, cyclonedx.ComponentTypeMachineLearningModel:
         p := decodeComponent(component)
         idMap[component.BOMRef] = p
         if component.BOMRef != "" {
@@ -40,6 +40,7 @@ func Test_OriginatorSupplier(t *testing.T) {
         pkg.PhpComposerInstalledEntry{},
         pkg.PhpPearEntry{},
         pkg.PhpPeclEntry{},
+        pkg.PnpmLockEntry{},
         pkg.PortageEntry{},
         pkg.PythonPipfileLockEntry{},
         pkg.PythonPdmLockEntry{},
@@ -54,6 +55,7 @@ func Test_OriginatorSupplier(t *testing.T) {
         pkg.OpamPackage{},
         pkg.YarnLockEntry{},
         pkg.TerraformLockProviderEntry{},
+        pkg.GGUFFileHeader{},
     )
     tests := []struct {
         name string
@@ -82,6 +82,8 @@ func SourceInfo(p pkg.Package) string {
         answer = "acquired package info from Homebrew formula"
     case pkg.TerraformPkg:
         answer = "acquired package info from Terraform dependency lock file"
+    case pkg.ModelPkg:
+        answer = "acquired package info from AI artifact (e.g. GGUF File)"
     default:
         answer = "acquired package info from the following paths"
     }
@@ -351,6 +351,14 @@ func Test_SourceInfo(t *testing.T) {
                 "acquired package info from Terraform dependency lock file",
             },
         },
+        {
+            input: pkg.Package{
+                Type: pkg.ModelPkg,
+            },
+            expected: []string{
+                "",
+            },
+        },
     }
     var pkgTypes []pkg.Type
     for _, test := range tests {
@@ -35,14 +35,23 @@ func (d *Document) UnmarshalJSON(data []byte) error {
     return nil
 }
 
-// Descriptor describes what created the document as well as surrounding metadata
+// Descriptor identifies the tool that generated this SBOM document, including its name, version, and configuration used during catalog generation.
 type Descriptor struct {
-    Name          string      `json:"name"`
-    Version       string      `json:"version"`
+    // Name is the name of the tool that generated this SBOM (e.g., "syft").
+    Name string `json:"name"`
+
+    // Version is the version of the tool that generated this SBOM.
+    Version string `json:"version"`
+
+    // Configuration contains the tool configuration used during SBOM generation.
     Configuration interface{} `json:"configuration,omitempty"`
 }
 
+// Schema specifies the JSON schema version and URL reference that defines the structure and validation rules for this document format.
 type Schema struct {
+    // Version is the JSON schema version for this document format.
     Version string `json:"version"`
-    URL     string `json:"url"`
+
+    // URL is the URL to the JSON schema definition document.
+    URL string `json:"url"`
 }
@@ -10,25 +10,55 @@ import (
     "github.com/anchore/syft/syft/license"
 )
 
+// File represents a file discovered during cataloging with its metadata, content digests, licenses, and relationships to packages.
 type File struct {
-    ID         string             `json:"id"`
-    Location   file.Coordinates   `json:"location"`
-    Metadata   *FileMetadataEntry `json:"metadata,omitempty"`
-    Contents   string             `json:"contents,omitempty"`
-    Digests    []file.Digest      `json:"digests,omitempty"`
-    Licenses   []FileLicense      `json:"licenses,omitempty"`
-    Executable *file.Executable   `json:"executable,omitempty"`
-    Unknowns   []string           `json:"unknowns,omitempty"`
+    // ID is a unique identifier for this file within the SBOM.
+    ID string `json:"id"`
+
+    // Location is the file path and layer information where this file was found.
+    Location file.Coordinates `json:"location"`
+
+    // Metadata contains filesystem metadata such as permissions, ownership, and file type.
+    Metadata *FileMetadataEntry `json:"metadata,omitempty"`
+
+    // Contents is the file contents for small files.
+    Contents string `json:"contents,omitempty"`
+
+    // Digests contains cryptographic hashes of the file contents.
+    Digests []file.Digest `json:"digests,omitempty"`
+
+    // Licenses contains license information discovered within this file.
+    Licenses []FileLicense `json:"licenses,omitempty"`
+
+    // Executable contains executable metadata if this file is a binary.
+    Executable *file.Executable `json:"executable,omitempty"`
+
+    // Unknowns contains unknown fields for forward compatibility.
+    Unknowns []string `json:"unknowns,omitempty"`
 }
 
+// FileMetadataEntry contains filesystem-level metadata attributes such as permissions, ownership, type, and size for a cataloged file.
 type FileMetadataEntry struct {
-    Mode            int    `json:"mode"`
-    Type            string `json:"type"`
+    // Mode is the Unix file permission mode in octal format.
+    Mode int `json:"mode"`
+
+    // Type is the file type (e.g., "RegularFile", "Directory", "SymbolicLink").
+    Type string `json:"type"`
+
+    // LinkDestination is the target path for symbolic links.
     LinkDestination string `json:"linkDestination,omitempty"`
-    UserID          int    `json:"userID"`
-    GroupID         int    `json:"groupID"`
-    MIMEType        string `json:"mimeType"`
-    Size            int64  `json:"size"`
+
+    // UserID is the file owner user ID.
+    UserID int `json:"userID"`
+
+    // GroupID is the file owner group ID.
+    GroupID int `json:"groupID"`
+
+    // MIMEType is the MIME type of the file contents.
+    MIMEType string `json:"mimeType"`
+
+    // Size is the file size in bytes.
+    Size int64 `json:"size"`
 }
 
 type auxFileMetadataEntry FileMetadataEntry
@@ -82,17 +112,31 @@ type sbomImportLegacyFileMetadataEntry struct {
     Size int64 `json:"Size"`
 }
 
+// FileLicense represents license information discovered within a file's contents or metadata, including the matched license text and SPDX expression.
 type FileLicense struct {
-    Value          string               `json:"value"`
-    SPDXExpression string               `json:"spdxExpression"`
-    Type           license.Type         `json:"type"`
-    Evidence       *FileLicenseEvidence `json:"evidence,omitempty"`
+    // Value is the raw license identifier or text as found in the file.
+    Value string `json:"value"`
+
+    // SPDXExpression is the parsed SPDX license expression.
+    SPDXExpression string `json:"spdxExpression"`
+
+    // Type is the license type classification (e.g., declared, concluded, discovered).
+    Type license.Type `json:"type"`
+
+    // Evidence contains supporting evidence for this license detection.
+    Evidence *FileLicenseEvidence `json:"evidence,omitempty"`
 }
 
+// FileLicenseEvidence contains supporting evidence for a license detection in a file, including the byte offset, extent, and confidence level.
 type FileLicenseEvidence struct {
+    // Confidence is the confidence score for this license detection (0-100).
     Confidence int `json:"confidence"`
-    Offset     int `json:"offset"`
-    Extent     int `json:"extent"`
+
+    // Offset is the byte offset where the license text starts in the file.
+    Offset int `json:"offset"`
+
+    // Extent is the length of the license text in bytes.
+    Extent int `json:"extent"`
 }
 
 type intOrStringFileType struct {
|||||||
@ -4,28 +4,67 @@ import (
|
|||||||
"encoding/json"
|
"encoding/json"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
// IDLikes represents a list of distribution IDs that this Linux distribution is similar to or derived from, as defined in os-release ID_LIKE field.
|
||||||
type IDLikes []string
|
type IDLikes []string
|
||||||
|
|
||||||
|
// LinuxRelease contains Linux distribution identification and version information extracted from /etc/os-release or similar system files.
|
||||||
type LinuxRelease struct {
|
type LinuxRelease struct {
|
||||||
PrettyName string `json:"prettyName,omitempty"`
|
// PrettyName is a human-readable operating system name with version.
|
||||||
Name string `json:"name,omitempty"`
|
PrettyName string `json:"prettyName,omitempty"`
|
||||||
ID string `json:"id,omitempty"`
|
|
||||||
IDLike IDLikes `json:"idLike,omitempty"`
|
// Name is the operating system name without version information.
|
||||||
Version string `json:"version,omitempty"`
|
Name string `json:"name,omitempty"`
|
||||||
VersionID string `json:"versionID,omitempty"`
|
|
||||||
VersionCodename string `json:"versionCodename,omitempty"`
|
// ID is the lower-case operating system identifier (e.g., "ubuntu", "rhel").
|
||||||
BuildID string `json:"buildID,omitempty"`
|
ID string `json:"id,omitempty"`
|
||||||
ImageID string `json:"imageID,omitempty"`
|
|
||||||
ImageVersion string `json:"imageVersion,omitempty"`
|
// IDLike is a list of operating system IDs this distribution is similar to or derived from.
|
||||||
Variant string `json:"variant,omitempty"`
|
IDLike IDLikes `json:"idLike,omitempty"`
|
||||||
VariantID string `json:"variantID,omitempty"`
|
|
||||||
HomeURL string `json:"homeURL,omitempty"`
|
// Version is the operating system version including codename if available.
|
||||||
SupportURL string `json:"supportURL,omitempty"`
|
Version string `json:"version,omitempty"`
|
||||||
BugReportURL string `json:"bugReportURL,omitempty"`
|
|
||||||
PrivacyPolicyURL string `json:"privacyPolicyURL,omitempty"`
|
// VersionID is the operating system version number or identifier.
|
||||||
CPEName string `json:"cpeName,omitempty"`
|
VersionID string `json:"versionID,omitempty"`
|
||||||
SupportEnd string `json:"supportEnd,omitempty"`
|
|
||||||
ExtendedSupport bool `json:"extendedSupport,omitempty"`
|
// VersionCodename is the operating system release codename (e.g., "jammy", "bullseye").
|
||||||
|
VersionCodename string `json:"versionCodename,omitempty"`
|
||||||
|
|
||||||
|
// BuildID is a build identifier for the operating system.
|
||||||
|
BuildID string `json:"buildID,omitempty"`
|
||||||
|
|
||||||
|
// ImageID is an identifier for container or cloud images.
|
||||||
|
ImageID string `json:"imageID,omitempty"`
|
||||||
|
|
||||||
|
// ImageVersion is the version for container or cloud images.
|
||||||
|
ImageVersion string `json:"imageVersion,omitempty"`
|
||||||
|
|
||||||
|
// Variant is the operating system variant name (e.g., "Server", "Workstation").
|
||||||
|
Variant string `json:"variant,omitempty"`
|
||||||
|
|
||||||
|
// VariantID is the lower-case operating system variant identifier.
|
||||||
|
VariantID string `json:"variantID,omitempty"`
|
||||||
|
|
||||||
|
// HomeURL is the homepage URL for the operating system.
|
||||||
|
HomeURL string `json:"homeURL,omitempty"`
|
||||||
|
|
||||||
|
// SupportURL is the support or help URL for the operating system.
|
||||||
|
SupportURL string `json:"supportURL,omitempty"`
|
||||||
|
|
||||||
|
// BugReportURL is the bug reporting URL for the operating system.
|
||||||
|
BugReportURL string `json:"bugReportURL,omitempty"`
|
||||||
|
|
||||||
|
// PrivacyPolicyURL is the privacy policy URL for the operating system.
|
||||||
|
PrivacyPolicyURL string `json:"privacyPolicyURL,omitempty"`
|
||||||
|
|
||||||
|
// CPEName is the Common Platform Enumeration name for the operating system.
|
||||||
|
CPEName string `json:"cpeName,omitempty"`
|
||||||
|
|
||||||
|
// SupportEnd is the end of support date or version identifier.
|
||||||
|
SupportEnd string `json:"supportEnd,omitempty"`
|
||||||
|
|
||||||
|
// ExtendedSupport indicates whether extended security or support is available.
|
||||||
|
ExtendedSupport bool `json:"extendedSupport,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *IDLikes) UnmarshalJSON(data []byte) error {
|
func (s *IDLikes) UnmarshalJSON(data []byte) error {
|
||||||
|
|||||||
@ -36,22 +36,40 @@ type PackageBasicData struct {
|
|||||||
PURL string `json:"purl"`
|
PURL string `json:"purl"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// cpes is a collection of Common Platform Enumeration identifiers for a package.
|
||||||
type cpes []CPE
|
type cpes []CPE
|
||||||
|
|
||||||
|
// CPE represents a Common Platform Enumeration identifier used for matching packages to known vulnerabilities in security databases.
|
||||||
type CPE struct {
|
type CPE struct {
|
||||||
Value string `json:"cpe"`
|
// Value is the CPE string identifier.
|
||||||
|
Value string `json:"cpe"`
|
||||||
|
|
||||||
|
// Source is the source where this CPE was obtained or generated from.
|
||||||
Source string `json:"source,omitempty"`
|
Source string `json:"source,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// licenses is a collection of license findings associated with a package.
|
||||||
type licenses []License
|
type licenses []License
|
||||||
|
|
||||||
|
// License represents software license information discovered for a package, including SPDX expressions and supporting evidence locations.
|
||||||
type License struct {
|
type License struct {
|
||||||
Value string `json:"value"`
|
// Value is the raw license identifier or expression as found.
|
||||||
SPDXExpression string `json:"spdxExpression"`
|
Value string `json:"value"`
|
||||||
Type license.Type `json:"type"`
|
|
||||||
URLs []string `json:"urls"`
|
// SPDXExpression is the parsed SPDX license expression.
|
||||||
Locations []file.Location `json:"locations"`
|
SPDXExpression string `json:"spdxExpression"`
|
||||||
Contents string `json:"contents,omitempty"`
|
|
||||||
|
// Type is the license type classification (e.g., declared, concluded, discovered).
|
||||||
|
Type license.Type `json:"type"`
|
||||||
|
|
||||||
|
// URLs are URLs where license text or information can be found.
|
||||||
|
URLs []string `json:"urls"`
|
||||||
|
|
||||||
|
// Locations are file locations where this license was discovered.
|
||||||
|
Locations []file.Location `json:"locations"`
|
||||||
|
|
||||||
|
// Contents is the full license text content.
|
||||||
|
Contents string `json:"contents,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func newModelLicensesFromValues(licenses []string) (ml []License) {
|
func newModelLicensesFromValues(licenses []string) (ml []License) {
|
||||||
|
|||||||
@ -1,8 +1,16 @@
|
|||||||
package model
|
package model
|
||||||
|
|
||||||
|
// Relationship represents a directed relationship between two artifacts in the SBOM, such as package-contains-file or package-depends-on-package.
|
||||||
type Relationship struct {
|
type Relationship struct {
|
||||||
Parent string `json:"parent"`
|
// Parent is the ID of the parent artifact in this relationship.
|
||||||
Child string `json:"child"`
|
Parent string `json:"parent"`
|
||||||
Type string `json:"type"`
|
|
||||||
|
// Child is the ID of the child artifact in this relationship.
|
||||||
|
Child string `json:"child"`
|
||||||
|
|
||||||
|
// Type is the relationship type (e.g., "contains", "dependency-of", "ancestor-of").
|
||||||
|
Type string `json:"type"`
|
||||||
|
|
||||||
|
// Metadata contains additional relationship-specific metadata.
|
||||||
Metadata interface{} `json:"metadata,omitempty"`
|
Metadata interface{} `json:"metadata,omitempty"`
|
||||||
}
|
}
|
||||||
|
|||||||
@ -11,18 +11,25 @@ import (
|
|||||||
"github.com/anchore/syft/syft/source"
|
"github.com/anchore/syft/syft/source"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Source object represents the thing that was cataloged
|
// Source represents the artifact that was analyzed to generate this SBOM, such as a container image, directory, or file archive.
|
||||||
// Note: syft currently makes no claims or runs any logic to determine the Supplier field below
|
// The Supplier field can be provided by users to fulfill NTIA minimum elements requirements.
|
||||||
|
|
||||||
// Instead, the Supplier can be determined by the user of syft and passed as a config or flag to help fulfill
|
|
||||||
// the NTIA minimum elements. For mor information see the NTIA framing document below
|
|
||||||
// https://www.ntia.gov/files/ntia/publications/framingsbom_20191112.pdf
|
|
||||||
type Source struct {
|
type Source struct {
|
||||||
ID string `json:"id"`
|
// ID is a unique identifier for the analyzed source artifact.
|
||||||
Name string `json:"name"`
|
ID string `json:"id"`
|
||||||
Version string `json:"version"`
|
|
||||||
Supplier string `json:"supplier,omitempty"`
|
// Name is the name of the analyzed artifact (e.g., image name, directory path).
|
||||||
Type string `json:"type"`
|
Name string `json:"name"`
|
||||||
|
|
||||||
|
// Version is the version of the analyzed artifact (e.g., image tag).
|
||||||
|
Version string `json:"version"`
|
||||||
|
|
||||||
|
// Supplier is supplier information, which can be user-provided for NTIA minimum elements compliance.
|
||||||
|
Supplier string `json:"supplier,omitempty"`
|
||||||
|
|
||||||
|
// Type is the source type (e.g., "image", "directory", "file").
|
||||||
|
Type string `json:"type"`
|
||||||
|
|
||||||
|
// Metadata contains additional source-specific metadata.
|
||||||
Metadata interface{} `json:"metadata"`
|
Metadata interface{} `json:"metadata"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@ -19,16 +19,16 @@ type Directory struct {
|
|||||||
indexer *directoryIndexer
|
indexer *directoryIndexer
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewFromDirectory(root string, base string, pathFilters ...PathIndexVisitor) (*Directory, error) {
|
func NewFromDirectory(root, base string, pathFilters ...PathIndexVisitor) (*Directory, error) {
|
||||||
r, err := newFromDirectoryWithoutIndex(root, base, pathFilters...)
|
resolver, err := newFromDirectoryWithoutIndex(root, base, pathFilters...)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
return r, r.buildIndex()
|
return resolver, resolver.buildIndex()
|
||||||
}
|
}
|
||||||
|
|
||||||
func newFromDirectoryWithoutIndex(root string, base string, pathFilters ...PathIndexVisitor) (*Directory, error) {
|
func newFromDirectoryWithoutIndex(root, base string, pathFilters ...PathIndexVisitor) (*Directory, error) {
|
||||||
chroot, err := NewChrootContextFromCWD(root, base)
|
chroot, err := NewChrootContextFromCWD(root, base)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("unable to interpret chroot context: %w", err)
|
return nil, fmt.Errorf("unable to interpret chroot context: %w", err)
|
||||||
@ -66,6 +66,6 @@ func (r *Directory) buildIndex() error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Stringer to represent a directory path data source
|
// Stringer to represent a directory path data source
|
||||||
func (r Directory) String() string {
|
func (r *Directory) String() string {
|
||||||
return fmt.Sprintf("dir:%s", r.path)
|
return fmt.Sprintf("dir:%s", r.path)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -17,17 +17,31 @@ type File struct {
     indexer *fileIndexer
 }
 
-// parent should be the symlink free absolute path to the parent directory
+// NewFromFile single file analyser
 // path is the filepath of the file we're creating content access for
-func NewFromFile(parent, path string, pathFilters ...PathIndexVisitor) (*File, error) {
-    chroot, err := NewChrootContextFromCWD(parent, parent)
+func NewFromFile(path string, pathFilters ...PathIndexVisitor) (*File, error) {
+    resolver, err := newFromFileWithoutIndex(path, pathFilters...)
+    if err != nil {
+        return nil, err
+    }
+
+    return resolver, resolver.buildIndex()
+}
+
+func newFromFileWithoutIndex(path string, pathFilters ...PathIndexVisitor) (*File, error) {
+    absParentDir, err := absoluteSymlinkFreePathToParent(path)
+    if err != nil {
+        return nil, err
+    }
+
+    chroot, err := NewChrootContextFromCWD(absParentDir, absParentDir)
     if err != nil {
         return nil, fmt.Errorf("unable to interpret chroot context: %w", err)
     }
 
     cleanBase := chroot.Base()
 
-    file := &File{
+    return &File{
         path: path,
         FiletreeResolver: FiletreeResolver{
             Chroot: *chroot,
@@ -36,9 +50,7 @@ func NewFromFile(parent, path string, pathFilters ...PathIndexVisitor) (*File, e
             Opener: nativeOSFileOpener,
         },
         indexer: newFileIndexer(path, cleanBase, pathFilters...),
-    }
-
-    return file, file.buildIndex()
+    }, nil
 }
 
 func (r *File) buildIndex() error {
@@ -58,6 +70,6 @@ func (r *File) buildIndex() error {
 }
 
 // Stringer to represent a file path data source
-func (r File) String() string {
+func (r *File) String() string {
     return fmt.Sprintf("file:%s", r.path)
 }
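A hedged migration sketch for call sites of the changed constructor (the import path is assumed from the repository layout; the package is internal, so only code inside the module is affected):

package example // hypothetical in-module package

import "github.com/anchore/syft/internal/fileresolver"

func openFileResolver(path string) (*fileresolver.File, error) {
    // before: fileresolver.NewFromFile(parentDir, path) — callers had to
    // supply the symlink-free parent directory themselves
    // after: the parent directory is derived internally from path
    return fileresolver.NewFromFile(path)
}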
@ -1384,9 +1384,10 @@ func TestFileResolver_FilesByPath(t *testing.T) {
|
|||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
require.NotNil(t, parentPath)
|
require.NotNil(t, parentPath)
|
||||||
|
|
||||||
resolver, err := NewFromFile(parentPath, tt.filePath)
|
resolver, err := NewFromFile(tt.filePath)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
require.NotNil(t, resolver)
|
require.NotNil(t, resolver)
|
||||||
|
assert.Equal(t, resolver.Chroot.Base(), parentPath)
|
||||||
|
|
||||||
refs, err := resolver.FilesByPath(tt.fileByPathInput)
|
refs, err := resolver.FilesByPath(tt.fileByPathInput)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
@ -1431,8 +1432,11 @@ func TestFileResolver_MultipleFilesByPath(t *testing.T) {
|
|||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
require.NotNil(t, parentPath)
|
require.NotNil(t, parentPath)
|
||||||
|
|
||||||
resolver, err := NewFromFile(parentPath, filePath)
|
resolver, err := NewFromFile(filePath)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
|
require.NotNil(t, resolver)
|
||||||
|
assert.Equal(t, resolver.Chroot.Base(), parentPath)
|
||||||
|
|
||||||
refs, err := resolver.FilesByPath(tt.input...)
|
refs, err := resolver.FilesByPath(tt.input...)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
|
|
||||||
@ -1449,8 +1453,11 @@ func TestFileResolver_FilesByGlob(t *testing.T) {
|
|||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
require.NotNil(t, parentPath)
|
require.NotNil(t, parentPath)
|
||||||
|
|
||||||
resolver, err := NewFromFile(parentPath, filePath)
|
resolver, err := NewFromFile(filePath)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
|
require.NotNil(t, resolver)
|
||||||
|
assert.Equal(t, resolver.Chroot.Base(), parentPath)
|
||||||
|
|
||||||
refs, err := resolver.FilesByGlob("**/*.txt")
|
refs, err := resolver.FilesByGlob("**/*.txt")
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
|
|
||||||
@ -1476,8 +1483,11 @@ func Test_fileResolver_FilesByMIMEType(t *testing.T) {
|
|||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
require.NotNil(t, parentPath)
|
require.NotNil(t, parentPath)
|
||||||
|
|
||||||
resolver, err := NewFromFile(parentPath, filePath)
|
resolver, err := NewFromFile(filePath)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
|
require.NotNil(t, resolver)
|
||||||
|
assert.Equal(t, resolver.Chroot.Base(), parentPath)
|
||||||
|
|
||||||
locations, err := resolver.FilesByMIMEType(test.mimeType)
|
locations, err := resolver.FilesByMIMEType(test.mimeType)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Equal(t, test.expectedPaths.Size(), len(locations))
|
assert.Equal(t, test.expectedPaths.Size(), len(locations))
|
||||||
@ -1497,10 +1507,12 @@ func Test_fileResolver_FileContentsByLocation(t *testing.T) {
|
|||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
require.NotNil(t, parentPath)
|
require.NotNil(t, parentPath)
|
||||||
|
|
||||||
r, err := NewFromFile(parentPath, filePath)
|
resolver, err := NewFromFile(filePath)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
require.NotNil(t, resolver)
|
||||||
|
assert.Equal(t, resolver.Chroot.Base(), parentPath)
|
||||||
|
|
||||||
exists, existingPath, err := r.Tree.File(stereoscopeFile.Path(filepath.Join(cwd, "test-fixtures/image-simple/file-1.txt")))
|
exists, existingPath, err := resolver.Tree.File(stereoscopeFile.Path(filepath.Join(cwd, "test-fixtures/image-simple/file-1.txt")))
|
||||||
require.True(t, exists)
|
require.True(t, exists)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
require.True(t, existingPath.HasReference())
|
require.True(t, existingPath.HasReference())
|
||||||
@ -1525,7 +1537,7 @@ func Test_fileResolver_FileContentsByLocation(t *testing.T) {
|
|||||||
for _, test := range tests {
|
for _, test := range tests {
|
||||||
t.Run(test.name, func(t *testing.T) {
|
t.Run(test.name, func(t *testing.T) {
|
||||||
|
|
||||||
actual, err := r.FileContentsByLocation(test.location)
|
actual, err := resolver.FileContentsByLocation(test.location)
|
||||||
if test.err {
|
if test.err {
|
||||||
require.Error(t, err)
|
require.Error(t, err)
|
||||||
return
|
return
|
||||||
@@ -1546,8 +1558,11 @@ func TestFileResolver_AllLocations_errorOnDirRequest(t *testing.T) {
 	parentPath, err := absoluteSymlinkFreePathToParent(filePath)
 	require.NoError(t, err)
 	require.NotNil(t, parentPath)
-	resolver, err := NewFromFile(parentPath, filePath)
+
+	resolver, err := NewFromFile(filePath)
 	require.NoError(t, err)
+	require.NotNil(t, resolver)
+	assert.Equal(t, resolver.Chroot.Base(), parentPath)
 
 	var dirLoc *file.Location
 	ctx, cancel := context.WithCancel(context.Background())
@@ -1575,8 +1590,11 @@ func TestFileResolver_AllLocations(t *testing.T) {
 	parentPath, err := absoluteSymlinkFreePathToParent(filePath)
 	require.NoError(t, err)
 	require.NotNil(t, parentPath)
-	resolver, err := NewFromFile(parentPath, filePath)
+
+	resolver, err := NewFromFile(filePath)
 	require.NoError(t, err)
+	require.NotNil(t, resolver)
+	assert.Equal(t, resolver.Chroot.Base(), parentPath)
 
 	paths := strset.New()
 	for loc := range resolver.AllLocations(context.Background()) {
@@ -1600,8 +1618,11 @@ func Test_FileResolver_AllLocationsDoesNotLeakGoRoutine(t *testing.T) {
 	parentPath, err := absoluteSymlinkFreePathToParent(filePath)
 	require.NoError(t, err)
 	require.NotNil(t, parentPath)
-	resolver, err := NewFromFile(parentPath, filePath)
+
+	resolver, err := NewFromFile(filePath)
 	require.NoError(t, err)
+	require.NotNil(t, resolver)
+	assert.Equal(t, resolver.Chroot.Base(), parentPath)
 
 	require.NoError(t, err)
 	ctx, cancel := context.WithCancel(context.Background())
@@ -4,9 +4,10 @@ import (
 	"os"
 	"testing"
 
-	"github.com/anchore/stereoscope/pkg/file"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
+
+	"github.com/anchore/stereoscope/pkg/file"
 )
 
 func TestFileMetadataFromPath(t *testing.T) {
@@ -58,6 +58,7 @@ type AlpmDBEntry struct {
 	Depends []string `mapstructure:"depends" json:"depends,omitempty"`
 }
 
+// AlpmFileRecord represents a single file entry within an Arch Linux package with its associated metadata tracked by pacman.
 type AlpmFileRecord struct {
 	// Path is the file path relative to the filesystem root
 	Path string `mapstruture:"path" json:"path,omitempty"`
2 syft/pkg/cataloger/.gitignore vendored Normal file
@@ -0,0 +1,2 @@
+# these are generated by pkgtest helpers, no need to check them in
+**/test-fixtures/test-observations.json
16 syft/pkg/cataloger/ai/cataloger.go Normal file
@@ -0,0 +1,16 @@
+/*
+Package ai provides concrete Cataloger implementations for AI artifacts and machine learning models,
+including support for GGUF (GPT-Generated Unified Format) model files.
+*/
+package ai
+
+import (
+	"github.com/anchore/syft/syft/pkg"
+	"github.com/anchore/syft/syft/pkg/cataloger/generic"
+)
+
+// NewGGUFCataloger returns a new cataloger instance for GGUF model files.
+func NewGGUFCataloger() pkg.Cataloger {
+	return generic.NewCataloger("gguf-cataloger").
+		WithParserByGlobs(parseGGUFModel, "**/*.gguf")
+}
107 syft/pkg/cataloger/ai/cataloger_test.go Normal file
@@ -0,0 +1,107 @@
+package ai
+
+import (
+	"os"
+	"path/filepath"
+	"testing"
+
+	"github.com/google/go-cmp/cmp/cmpopts"
+
+	"github.com/anchore/syft/syft/artifact"
+	"github.com/anchore/syft/syft/pkg"
+	"github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest"
+)
+
+func TestGGUFCataloger_Globs(t *testing.T) {
+	tests := []struct {
+		name     string
+		fixture  string
+		expected []string
+	}{
+		{
+			name:    "obtain gguf files",
+			fixture: "test-fixtures/glob-paths",
+			expected: []string{
+				"models/model.gguf",
+			},
+		},
+	}
+
+	for _, test := range tests {
+		t.Run(test.name, func(t *testing.T) {
+			pkgtest.NewCatalogTester().
+				FromDirectory(t, test.fixture).
+				ExpectsResolverContentQueries(test.expected).
+				TestCataloger(t, NewGGUFCataloger())
+		})
+	}
+}
+
+func TestGGUFCataloger_Integration(t *testing.T) {
+	tests := []struct {
+		name                  string
+		setup                 func(t *testing.T) string
+		expectedPackages      []pkg.Package
+		expectedRelationships []artifact.Relationship
+	}{
+		{
+			name: "catalog single GGUF file",
+			setup: func(t *testing.T) string {
+				dir := t.TempDir()
+				data := newTestGGUFBuilder().
+					withVersion(3).
+					withStringKV("general.architecture", "llama").
+					withStringKV("general.name", "llama3-8b").
+					withStringKV("general.version", "3.0").
+					withStringKV("general.license", "Apache-2.0").
+					withStringKV("general.quantization", "Q4_K_M").
+					withUint64KV("general.parameter_count", 8030000000).
+					build()
+
+				path := filepath.Join(dir, "llama3-8b.gguf")
+				os.WriteFile(path, data, 0644)
+				return dir
+			},
+			expectedPackages: []pkg.Package{
+				{
+					Name:    "llama3-8b",
+					Version: "3.0",
+					Type:    pkg.ModelPkg,
+					Licenses: pkg.NewLicenseSet(
+						pkg.NewLicenseFromFields("Apache-2.0", "", nil),
+					),
+					Metadata: pkg.GGUFFileHeader{
+						ModelName:    "llama3-8b",
+						License:      "Apache-2.0",
+						Architecture: "llama",
+						Quantization: "Unknown",
+						Parameters:   0,
+						GGUFVersion:  3,
+						TensorCount:  0,
+						Header:       map[string]interface{}{},
+					},
+				},
+			},
+			expectedRelationships: nil,
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			fixtureDir := tt.setup(t)
+
+			// Use pkgtest to catalog and compare
+			tester := pkgtest.NewCatalogTester().
+				FromDirectory(t, fixtureDir).
+				Expects(tt.expectedPackages, tt.expectedRelationships).
+				IgnoreLocationLayer().
+				IgnorePackageFields("FoundBy", "Locations"). // These are set by the cataloger
+				WithCompareOptions(
+					// Ignore MetadataHash as it's computed dynamically
+					cmpopts.IgnoreFields(pkg.GGUFFileHeader{}, "MetadataHash"),
+				)
+
+			tester.TestCataloger(t, NewGGUFCataloger())
+		})
+	}
+}
28 syft/pkg/cataloger/ai/package.go Normal file
@@ -0,0 +1,28 @@
+package ai
+
+import (
+	"github.com/anchore/syft/syft/file"
+	"github.com/anchore/syft/syft/pkg"
+)
+
+func newGGUFPackage(metadata *pkg.GGUFFileHeader, version string, locations ...file.Location) pkg.Package {
+	p := pkg.Package{
+		Name:      metadata.ModelName,
+		Version:   version,
+		Locations: file.NewLocationSet(locations...),
+		Type:      pkg.ModelPkg,
+		Licenses:  pkg.NewLicenseSet(),
+		Metadata:  *metadata,
+		// NOTE: PURL is intentionally not set as the package-url spec
+		// has not yet finalized support for ML model packages
+	}
+
+	// Add license to the package if present in metadata
+	if metadata.License != "" {
+		p.Licenses.Add(pkg.NewLicenseFromFields(metadata.License, "", nil))
+	}
+
+	p.SetID()
+
+	return p
+}
123 syft/pkg/cataloger/ai/package_test.go Normal file
@@ -0,0 +1,123 @@
+package ai
+
+import (
+	"testing"
+
+	"github.com/google/go-cmp/cmp"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+
+	"github.com/anchore/syft/syft/file"
+	"github.com/anchore/syft/syft/pkg"
+)
+
+func TestNewGGUFPackage(t *testing.T) {
+	tests := []struct {
+		name      string
+		metadata  *pkg.GGUFFileHeader
+		version   string
+		locations []file.Location
+		checkFunc func(t *testing.T, p pkg.Package)
+	}{
+		{
+			name:    "complete GGUF package with all fields",
+			version: "3.0",
+			metadata: &pkg.GGUFFileHeader{
+				ModelName:    "llama3-8b-instruct",
+				License:      "Apache-2.0",
+				Architecture: "llama",
+				Quantization: "Q4_K_M",
+				Parameters:   8030000000,
+				GGUFVersion:  3,
+				TensorCount:  291,
+				Header:       map[string]any{},
+			},
+			locations: []file.Location{file.NewLocation("/models/llama3-8b.gguf")},
+			checkFunc: func(t *testing.T, p pkg.Package) {
+				if d := cmp.Diff("llama3-8b-instruct", p.Name); d != "" {
+					t.Errorf("Name mismatch (-want +got):\n%s", d)
+				}
+				if d := cmp.Diff("3.0", p.Version); d != "" {
+					t.Errorf("Version mismatch (-want +got):\n%s", d)
+				}
+				if d := cmp.Diff(pkg.ModelPkg, p.Type); d != "" {
+					t.Errorf("Type mismatch (-want +got):\n%s", d)
+				}
+				assert.Empty(t, p.PURL, "PURL should not be set for model packages")
+				assert.Len(t, p.Licenses.ToSlice(), 1)
+				if d := cmp.Diff("Apache-2.0", p.Licenses.ToSlice()[0].Value); d != "" {
+					t.Errorf("License value mismatch (-want +got):\n%s", d)
+				}
+				assert.NotEmpty(t, p.ID())
+			},
+		},
+		{
+			name:    "minimal GGUF package",
+			version: "1.0",
+			metadata: &pkg.GGUFFileHeader{
+				ModelName:    "simple-model",
+				Architecture: "gpt2",
+				GGUFVersion:  3,
+				TensorCount:  50,
+			},
+			locations: []file.Location{file.NewLocation("/models/simple.gguf")},
+			checkFunc: func(t *testing.T, p pkg.Package) {
+				if d := cmp.Diff("simple-model", p.Name); d != "" {
+					t.Errorf("Name mismatch (-want +got):\n%s", d)
+				}
+				if d := cmp.Diff("1.0", p.Version); d != "" {
+					t.Errorf("Version mismatch (-want +got):\n%s", d)
+				}
+				if d := cmp.Diff(pkg.ModelPkg, p.Type); d != "" {
+					t.Errorf("Type mismatch (-want +got):\n%s", d)
+				}
+				assert.Empty(t, p.PURL, "PURL should not be set for model packages")
+				assert.Empty(t, p.Licenses.ToSlice())
+			},
+		},
+		{
+			name:    "GGUF package with multiple locations",
+			version: "1.5",
+			metadata: &pkg.GGUFFileHeader{
+				ModelName:    "multi-location-model",
+				Architecture: "llama",
+				GGUFVersion:  3,
+				TensorCount:  150,
+			},
+			locations: []file.Location{
+				file.NewLocation("/models/model1.gguf"),
+				file.NewLocation("/models/model2.gguf"),
+			},
+			checkFunc: func(t *testing.T, p pkg.Package) {
+				assert.Len(t, p.Locations.ToSlice(), 2)
+			},
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			p := newGGUFPackage(tt.metadata, tt.version, tt.locations...)
+
+			if d := cmp.Diff(tt.metadata.ModelName, p.Name); d != "" {
+				t.Errorf("Name mismatch (-want +got):\n%s", d)
+			}
+			if d := cmp.Diff(tt.version, p.Version); d != "" {
+				t.Errorf("Version mismatch (-want +got):\n%s", d)
+			}
+			if d := cmp.Diff(pkg.ModelPkg, p.Type); d != "" {
+				t.Errorf("Type mismatch (-want +got):\n%s", d)
+			}
+
+			// Verify metadata is attached
+			metadata, ok := p.Metadata.(pkg.GGUFFileHeader)
+			require.True(t, ok, "metadata should be GGUFFileHeader")
+			if d := cmp.Diff(*tt.metadata, metadata); d != "" {
+				t.Errorf("Metadata mismatch (-want +got):\n%s", d)
+			}
+
+			if tt.checkFunc != nil {
+				tt.checkFunc(t, p)
+			}
+		})
+	}
+}
74 syft/pkg/cataloger/ai/parse_gguf.go Normal file
@@ -0,0 +1,74 @@
+package ai
+
+import (
+	"encoding/binary"
+	"fmt"
+	"io"
+
+	gguf_parser "github.com/gpustack/gguf-parser-go"
+)
+
+// GGUF file format constants
+const (
+	ggufMagicNumber = 0x46554747       // "GGUF" in little-endian
+	maxHeaderSize   = 50 * 1024 * 1024 // 50MB for large tokenizer vocabularies
+)
+
+// readHeader reads only the GGUF header (metadata) without reading tensor data.
+// This is much more efficient than reading the entire file.
+// The reader should be wrapped with io.LimitedReader to prevent OOM issues.
+func readHeader(r io.Reader) ([]byte, error) {
+	// Read initial chunk to determine header size
+	// GGUF format: magic(4) + version(4) + tensor_count(8) + metadata_kv_count(8) + metadata_kvs + tensors_info
+	initialBuf := make([]byte, 24) // Enough for magic, version, tensor count, and kv count
+	if _, err := io.ReadFull(r, initialBuf); err != nil {
+		return nil, fmt.Errorf("failed to read GGUF header prefix: %w", err)
+	}
+
+	// Verify magic number
+	magic := binary.LittleEndian.Uint32(initialBuf[0:4])
+	if magic != ggufMagicNumber {
+		return nil, fmt.Errorf("invalid GGUF magic number: 0x%08X", magic)
+	}
+
+	// We need to read the metadata KV pairs to know the full header size
+	// The io.LimitedReader wrapping this reader ensures we don't read more than maxHeaderSize
+	headerData := make([]byte, 0, 1024*1024) // Start with 1MB capacity
+	headerData = append(headerData, initialBuf...)
+
+	// Read the rest of the header in larger chunks for efficiency
+	// The LimitedReader will return EOF once maxHeaderSize is reached
+	buf := make([]byte, 64*1024) // 64KB chunks
+	for {
+		n, err := r.Read(buf)
+		if n > 0 {
+			headerData = append(headerData, buf[:n]...)
+		}
+		if err == io.EOF {
+			// Reached end of file or limit, we have all available data
+			break
+		}
+		if err != nil {
+			return nil, fmt.Errorf("failed to read GGUF header: %w", err)
+		}
+	}
+
+	return headerData, nil
+}
+
+// Helper to convert gguf_parser metadata to simpler types
+func convertGGUFMetadataKVs(kvs gguf_parser.GGUFMetadataKVs) map[string]interface{} {
+	result := make(map[string]interface{})
+
+	for _, kv := range kvs {
+		// Skip standard fields that are extracted separately
+		switch kv.Key {
+		case "general.architecture", "general.name", "general.license",
+			"general.version", "general.parameter_count", "general.quantization":
+			continue
+		}
+		result[kv.Key] = kv.Value
+	}
+
+	return result
+}
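As background for readHeader's fixed-width prefix parsing: the first 24 bytes of a GGUF file are four little-endian fields (magic, version, tensor count, metadata KV count). A self-contained sketch of decoding just that prefix with the standard library — the struct name and sample buffer are illustrative, not part of this change:

package main

import (
	"encoding/binary"
	"fmt"
)

// ggufPrefix mirrors the fixed-width fields at the start of a GGUF file:
// magic(4) + version(4) + tensor_count(8) + metadata_kv_count(8) = 24 bytes.
type ggufPrefix struct {
	Magic       uint32
	Version     uint32
	TensorCount uint64
	KVCount     uint64
}

func decodePrefix(buf []byte) (ggufPrefix, error) {
	if len(buf) < 24 {
		return ggufPrefix{}, fmt.Errorf("need 24 bytes, got %d", len(buf))
	}
	p := ggufPrefix{
		Magic:       binary.LittleEndian.Uint32(buf[0:4]),
		Version:     binary.LittleEndian.Uint32(buf[4:8]),
		TensorCount: binary.LittleEndian.Uint64(buf[8:16]),
		KVCount:     binary.LittleEndian.Uint64(buf[16:24]),
	}
	if p.Magic != 0x46554747 { // "GGUF" read as a little-endian uint32
		return ggufPrefix{}, fmt.Errorf("invalid GGUF magic: 0x%08X", p.Magic)
	}
	return p, nil
}

func main() {
	// "GGUF", version 3, zero tensors, one KV pair
	buf := []byte{
		0x47, 0x47, 0x55, 0x46, // magic
		0x03, 0x00, 0x00, 0x00, // version
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // tensor count
		0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // KV count
	}
	fmt.Println(decodePrefix(buf))
}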
141 syft/pkg/cataloger/ai/parse_gguf_model.go Normal file
@@ -0,0 +1,141 @@
+package ai
+
+import (
+	"context"
+	"encoding/json"
+	"fmt"
+	"io"
+	"os"
+	"path/filepath"
+	"sort"
+	"strings"
+
+	"github.com/cespare/xxhash/v2"
+	gguf_parser "github.com/gpustack/gguf-parser-go"
+
+	"github.com/anchore/syft/internal"
+	"github.com/anchore/syft/internal/log"
+	"github.com/anchore/syft/internal/unknown"
+	"github.com/anchore/syft/syft/artifact"
+	"github.com/anchore/syft/syft/file"
+	"github.com/anchore/syft/syft/pkg"
+	"github.com/anchore/syft/syft/pkg/cataloger/generic"
+)
+
+// parseGGUFModel parses a GGUF model file and returns the discovered package.
+// This implementation only reads the header portion of the file, not the entire model.
+func parseGGUFModel(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
+	defer internal.CloseAndLogError(reader, reader.Path())
+
+	// Read and validate the GGUF file header using LimitedReader to prevent OOM
+	// We use LimitedReader to cap reads at maxHeaderSize (50MB)
+	limitedReader := &io.LimitedReader{R: reader, N: maxHeaderSize}
+	headerData, err := readHeader(limitedReader)
+	if err != nil {
+		return nil, nil, fmt.Errorf("failed to read GGUF header: %w", err)
+	}
+
+	// Create a temporary file for the library to parse
+	// The library requires a file path, so we create a temp file
+	tempFile, err := os.CreateTemp("", "syft-gguf-*.gguf")
+	if err != nil {
+		return nil, nil, fmt.Errorf("failed to create temp file: %w", err)
+	}
+	tempPath := tempFile.Name()
+	defer os.Remove(tempPath)
+
+	// Write the validated header data to temp file
+	if _, err := tempFile.Write(headerData); err != nil {
+		tempFile.Close()
+		return nil, nil, fmt.Errorf("failed to write to temp file: %w", err)
+	}
+	tempFile.Close()
+
+	// Parse using gguf-parser-go with options to skip unnecessary data
+	ggufFile, err := gguf_parser.ParseGGUFFile(tempPath,
+		gguf_parser.SkipLargeMetadata(),
+	)
+	if err != nil {
+		return nil, nil, fmt.Errorf("failed to parse GGUF file: %w", err)
+	}
+
+	// Extract metadata
+	metadata := ggufFile.Metadata()
+
+	// Extract version separately (will be set on Package.Version)
+	modelVersion := extractVersion(ggufFile.Header.MetadataKV)
+
+	// Convert to syft metadata structure
+	syftMetadata := &pkg.GGUFFileHeader{
+		ModelName:    metadata.Name,
+		License:      metadata.License,
+		Architecture: metadata.Architecture,
+		Quantization: metadata.FileTypeDescriptor,
+		Parameters:   uint64(metadata.Parameters),
+		GGUFVersion:  uint32(ggufFile.Header.Version),
+		TensorCount:  ggufFile.Header.TensorCount,
+		Header:       convertGGUFMetadataKVs(ggufFile.Header.MetadataKV),
+		MetadataHash: computeKVMetadataHash(ggufFile.Header.MetadataKV),
+	}
+
+	// If model name is not in metadata, use filename
+	if syftMetadata.ModelName == "" {
+		syftMetadata.ModelName = extractModelNameFromPath(reader.Path())
+	}
+
+	// Create package from metadata
+	p := newGGUFPackage(
+		syftMetadata,
+		modelVersion,
+		reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
+	)
+
+	return []pkg.Package{p}, nil, unknown.IfEmptyf([]pkg.Package{p}, "unable to parse GGUF file")
+}
+
+// computeKVMetadataHash computes a stable hash of the KV metadata for use as a global identifier
+func computeKVMetadataHash(metadata gguf_parser.GGUFMetadataKVs) string {
+	// Sort the KV pairs by key for stable hashing
+	sortedKVs := make([]gguf_parser.GGUFMetadataKV, len(metadata))
+	copy(sortedKVs, metadata)
+	sort.Slice(sortedKVs, func(i, j int) bool {
+		return sortedKVs[i].Key < sortedKVs[j].Key
+	})
+
+	// Marshal sorted KVs to JSON for stable hashing
+	jsonBytes, err := json.Marshal(sortedKVs)
+	if err != nil {
+		log.Debugf("failed to marshal metadata for hashing: %v", err)
+		return ""
+	}
+
+	// Compute xxhash
+	hash := xxhash.Sum64(jsonBytes)
+	return fmt.Sprintf("%016x", hash) // 16 hex chars (64 bits)
+}
+
+// extractVersion attempts to extract version from metadata KV pairs
+func extractVersion(kvs gguf_parser.GGUFMetadataKVs) string {
+	for _, kv := range kvs {
+		if kv.Key == "general.version" {
+			if v, ok := kv.Value.(string); ok && v != "" {
+				return v
+			}
+		}
+	}
+	return ""
+}
+
+// extractModelNameFromPath extracts the model name from the file path
+func extractModelNameFromPath(path string) string {
+	// Get the base filename
+	base := filepath.Base(path)
+
+	// Remove .gguf extension
+	name := strings.TrimSuffix(base, ".gguf")
+
+	return name
+}
+
+// integrity check
+var _ generic.Parser = parseGGUFModel
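computeKVMetadataHash above keys the identifier on a canonical ordering, so two files whose metadata differs only in KV order hash identically. A reduced sketch of the same sort-marshal-hash idea, using a simplified KV type in place of gguf_parser.GGUFMetadataKV (names here are illustrative):

package main

import (
	"encoding/json"
	"fmt"
	"sort"

	"github.com/cespare/xxhash/v2"
)

type kv struct {
	Key   string      `json:"key"`
	Value interface{} `json:"value"`
}

// stableHash sorts the pairs by key before marshaling so the resulting
// digest is independent of the order the pairs were read from the file.
func stableHash(pairs []kv) string {
	sorted := make([]kv, len(pairs))
	copy(sorted, pairs)
	sort.Slice(sorted, func(i, j int) bool { return sorted[i].Key < sorted[j].Key })

	b, err := json.Marshal(sorted)
	if err != nil {
		return ""
	}
	return fmt.Sprintf("%016x", xxhash.Sum64(b)) // 16 hex chars (64 bits)
}

func main() {
	a := []kv{{"general.name", "llama3-8b"}, {"general.architecture", "llama"}}
	b := []kv{{"general.architecture", "llama"}, {"general.name", "llama3-8b"}}
	fmt.Println(stableHash(a) == stableHash(b)) // true
}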
41 syft/pkg/cataloger/ai/test_builder_test.go Normal file
@@ -0,0 +1,41 @@
+package ai
+
+import (
+	"fmt"
+	"os"
+
+	gguf_parser "github.com/gpustack/gguf-parser-go"
+)
+
+func main() {
+	// Create a test GGUF file
+	data := newTestGGUFBuilder().
+		withVersion(3).
+		withStringKV("general.architecture", "llama").
+		withStringKV("general.name", "test-model").
+		build()
+
+	// Write to temp file
+	tempFile, err := os.CreateTemp("", "test-*.gguf")
+	if err != nil {
+		panic(err)
+	}
+	defer os.Remove(tempFile.Name())
+
+	if _, err := tempFile.Write(data); err != nil {
+		panic(err)
+	}
+	tempFile.Close()
+
+	fmt.Printf("Wrote %d bytes to %s\n", len(data), tempFile.Name())
+
+	// Try to parse it
+	fmt.Println("Attempting to parse...")
+	gf, err := gguf_parser.ParseGGUFFile(tempFile.Name(), gguf_parser.SkipLargeMetadata())
+	if err != nil {
+		fmt.Printf("Parse error: %v\n", err)
+		return
+	}
+
+	fmt.Printf("Success! Model: %s\n", gf.Metadata().Name)
+}
127 syft/pkg/cataloger/ai/test_helpers_test.go Normal file
@@ -0,0 +1,127 @@
+package ai
+
+import (
+	"bytes"
+	"encoding/binary"
+)
+
+// GGUF type constants for test builder
+const (
+	ggufMagic       = 0x46554747 // "GGUF" in little-endian
+	ggufTypeUint8   = 0
+	ggufTypeInt8    = 1
+	ggufTypeUint16  = 2
+	ggufTypeInt16   = 3
+	ggufTypeUint32  = 4
+	ggufTypeInt32   = 5
+	ggufTypeFloat32 = 6
+	ggufTypeBool    = 7
+	ggufTypeString  = 8
+	ggufTypeArray   = 9
+	ggufTypeUint64  = 10
+	ggufTypeInt64   = 11
+	ggufTypeFloat64 = 12
+)
+
+// testGGUFBuilder helps build GGUF files for testing
+type testGGUFBuilder struct {
+	buf         *bytes.Buffer
+	version     uint32
+	tensorCount uint64
+	kvPairs     []testKVPair
+}
+
+type testKVPair struct {
+	key       string
+	valueType uint32
+	value     interface{}
+}
+
+func newTestGGUFBuilder() *testGGUFBuilder {
+	return &testGGUFBuilder{
+		buf:         new(bytes.Buffer),
+		version:     3,
+		tensorCount: 0,
+		kvPairs:     []testKVPair{},
+	}
+}
+
+func (b *testGGUFBuilder) withVersion(v uint32) *testGGUFBuilder {
+	b.version = v
+	return b
+}
+
+func (b *testGGUFBuilder) withTensorCount(count uint64) *testGGUFBuilder {
+	b.tensorCount = count
+	return b
+}
+
+func (b *testGGUFBuilder) withStringKV(key, value string) *testGGUFBuilder {
+	b.kvPairs = append(b.kvPairs, testKVPair{key: key, valueType: ggufTypeString, value: value})
+	return b
+}
+
+func (b *testGGUFBuilder) withUint64KV(key string, value uint64) *testGGUFBuilder {
+	b.kvPairs = append(b.kvPairs, testKVPair{key: key, valueType: ggufTypeUint64, value: value})
+	return b
+}
+
+func (b *testGGUFBuilder) withUint32KV(key string, value uint32) *testGGUFBuilder {
+	b.kvPairs = append(b.kvPairs, testKVPair{key: key, valueType: ggufTypeUint32, value: value})
+	return b
+}
+
+func (b *testGGUFBuilder) writeString(s string) {
+	binary.Write(b.buf, binary.LittleEndian, uint64(len(s)))
+	b.buf.WriteString(s)
+}
+
+func (b *testGGUFBuilder) build() []byte {
+	// Write magic number "GGUF"
+	binary.Write(b.buf, binary.LittleEndian, uint32(ggufMagic))
+
+	// Write version
+	binary.Write(b.buf, binary.LittleEndian, b.version)
+
+	// Write tensor count
+	binary.Write(b.buf, binary.LittleEndian, b.tensorCount)
+
+	// Write KV count
+	binary.Write(b.buf, binary.LittleEndian, uint64(len(b.kvPairs)))
+
+	// Write KV pairs
+	for _, kv := range b.kvPairs {
+		// Write key
+		b.writeString(kv.key)
+		// Write value type
+		binary.Write(b.buf, binary.LittleEndian, kv.valueType)
+		// Write value based on type
+		switch kv.valueType {
+		case ggufTypeString:
+			b.writeString(kv.value.(string))
+		case ggufTypeUint32:
+			binary.Write(b.buf, binary.LittleEndian, kv.value.(uint32))
+		case ggufTypeUint64:
+			binary.Write(b.buf, binary.LittleEndian, kv.value.(uint64))
+		case ggufTypeUint8:
+			binary.Write(b.buf, binary.LittleEndian, kv.value.(uint8))
+		case ggufTypeInt32:
+			binary.Write(b.buf, binary.LittleEndian, kv.value.(int32))
+		case ggufTypeBool:
+			var v uint8
+			if kv.value.(bool) {
+				v = 1
+			}
+			binary.Write(b.buf, binary.LittleEndian, v)
+		}
+	}
+
+	return b.buf.Bytes()
+}
+
+// buildInvalidMagic creates a file with invalid magic number
+func (b *testGGUFBuilder) buildInvalidMagic() []byte {
+	buf := new(bytes.Buffer)
+	binary.Write(buf, binary.LittleEndian, uint32(0x12345678))
+	return buf.Bytes()
+}
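The builder's writeString emits GGUF's length-prefixed string encoding (a uint64 little-endian length followed by the raw bytes). For reference, a hedged sketch of the matching decoder; the helper name and sanity limit are invented for illustration:

package main

import (
	"bytes"
	"encoding/binary"
	"fmt"
	"io"
)

// readGGUFString reads a uint64 little-endian length followed by that
// many bytes, matching the encoding produced by testGGUFBuilder.writeString.
func readGGUFString(r io.Reader) (string, error) {
	var n uint64
	if err := binary.Read(r, binary.LittleEndian, &n); err != nil {
		return "", err
	}
	if n > 1<<20 { // arbitrary sanity bound for the sketch
		return "", fmt.Errorf("implausible string length %d", n)
	}
	buf := make([]byte, n)
	if _, err := io.ReadFull(r, buf); err != nil {
		return "", err
	}
	return string(buf), nil
}

func main() {
	var b bytes.Buffer
	binary.Write(&b, binary.LittleEndian, uint64(5))
	b.WriteString("llama")
	s, err := readGGUFString(&b)
	fmt.Println(s, err) // llama <nil>
}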
@@ -1403,6 +1403,22 @@ func Test_Cataloger_PositiveCases(t *testing.T) {
 				Metadata: metadata("ffmpeg-library"),
 			},
 		},
+		{
+			logicalFixture: "elixir/1.19.1/linux-amd64",
+			expected: pkg.Package{
+				Name:      "elixir",
+				Version:   "1.19.1",
+				Type:      "binary",
+				PURL:      "pkg:generic/elixir@1.19.1",
+				Locations: locations("elixir", "lib/elixir/ebin/elixir.app"),
+				Metadata: pkg.BinarySignature{
+					Matches: []pkg.ClassifierMatch{
+						match("elixir-binary", "elixir"),
+						match("elixir-library", "lib/elixir/ebin/elixir.app"),
+					},
+				},
+			},
+		},
 	}
 
 	for _, test := range tests {
@@ -663,6 +663,26 @@ func DefaultClassifiers() []binutils.Classifier {
 			PURL: mustPURL("pkg:generic/ffmpeg@version"),
 			CPEs: singleCPE("cpe:2.3:a:ffmpeg:ffmpeg:*:*:*:*:*:*:*:*", cpe.NVDDictionaryLookupSource),
 		},
+		{
+			Class:    "elixir-binary",
+			FileGlob: "**/elixir",
+			EvidenceMatcher: m.FileContentsVersionMatcher(
+				`(?m)ELIXIR_VERSION=(?P<version>[0-9]+\.[0-9]+\.[0-9]+)`),
+			Package: "elixir",
+			PURL:    mustPURL("pkg:generic/elixir@version"),
+			CPEs: []cpe.CPE{
+				cpe.Must("cpe:2.3:a:elixir-lang:elixir:*:*:*:*:*:*:*:*", cpe.NVDDictionaryLookupSource),
+			},
+		},
+		{
+			Class:    "elixir-library",
+			FileGlob: "**/elixir/ebin/elixir.app",
+			EvidenceMatcher: m.FileContentsVersionMatcher(
+				`(?m)\{vsn,"(?P<version>[0-9]+\.[0-9]+\.[0-9]+(-[a-z0-9]+)?)"\}`),
+			Package: "elixir",
+			PURL:    mustPURL("pkg:generic/elixir@version"),
+			CPEs:    singleCPE("cpe:2.3:a:elixir-lang:elixir:*:*:*:*:*:*:*:*", cpe.NVDDictionaryLookupSource),
+		},
 	}
 
 	return append(classifiers, defaultJavaClassifiers()...)
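Both classifiers hinge on a named version capture group. A quick stdlib check of the two expressions against strings shaped like the fixtures added below (inputs inlined for illustration):

package main

import (
	"fmt"
	"regexp"
)

func main() {
	binRe := regexp.MustCompile(`(?m)ELIXIR_VERSION=(?P<version>[0-9]+\.[0-9]+\.[0-9]+)`)
	libRe := regexp.MustCompile(`(?m)\{vsn,"(?P<version>[0-9]+\.[0-9]+\.[0-9]+(-[a-z0-9]+)?)"\}`)

	// lines as they appear in the elixir launcher script and elixir.app fixtures
	fmt.Println(binRe.FindStringSubmatch("ELIXIR_VERSION=1.19.1")[1]) // 1.19.1
	fmt.Println(libRe.FindStringSubmatch(`{vsn,"1.19.1"},`)[1])      // 1.19.1
}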
@@ -6,6 +6,7 @@ import (
 	"sort"
 	"strings"
 
+	packageurl "github.com/anchore/packageurl-go"
 	"github.com/anchore/syft/syft/file"
 	"github.com/anchore/syft/syft/pkg"
 )
@@ -32,6 +33,19 @@ func newPEPackage(versionResources map[string]string, f file.Location) pkg.Package {
 		Metadata: newPEBinaryVersionResourcesFromMap(versionResources),
 	}
 
+	// If this appears to be Ghostscript, emit a canonical generic purl
+	// Example expected: pkg:generic/ghostscript@<version>
+	prod := strings.ToLower(spaceNormalize(versionResources["ProductName"]))
+	if prod == "" {
+		// fall back to FileDescription if ProductName is missing
+		prod = strings.ToLower(spaceNormalize(versionResources["FileDescription"]))
+	}
+	if p.Version != "" && strings.Contains(prod, "ghostscript") {
+		// build a generic PURL for ghostscript
+		purl := packageurl.NewPackageURL(packageurl.TypeGeneric, "", "ghostscript", p.Version, nil, "").ToString()
+		p.PURL = purl
+	}
+
 	p.SetID()
 
 	return p
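For reference, the PURL built above comes straight from packageurl-go; in isolation (the version is an example value):

package main

import (
	"fmt"

	packageurl "github.com/anchore/packageurl-go"
)

func main() {
	// namespace, qualifiers, and subpath are left empty for a generic PURL
	purl := packageurl.NewPackageURL(packageurl.TypeGeneric, "", "ghostscript", "9.54.0", nil, "").ToString()
	fmt.Println(purl) // pkg:generic/ghostscript@9.54.0
}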
24 syft/pkg/cataloger/binary/pe_package_test.go Normal file
@@ -0,0 +1,24 @@
+package binary
+
+import (
+	"testing"
+
+	"github.com/anchore/syft/syft/file"
+)
+
+func TestGhostscriptPEGeneratesGenericPURL(t *testing.T) {
+	vr := map[string]string{
+		"CompanyName":     "Artifex Software, Inc.",
+		"ProductName":     "GPL Ghostscript",
+		"FileDescription": "Ghostscript Interpreter",
+		"ProductVersion":  "9.54.0",
+	}
+
+	loc := file.NewLocation("/usr/bin/gswin64c.exe")
+	p := newPEPackage(vr, loc)
+
+	expected := "pkg:generic/ghostscript@9.54.0"
+	if p.PURL != expected {
+		t.Fatalf("expected purl %q, got %q", expected, p.PURL)
+	}
+}
@@ -0,0 +1,20 @@
+#!/bin/sh
+
+# SPDX-License-Identifier: Apache-2.0
+# SPDX-FileCopyrightText: 2021 The Elixir Team
+# SPDX-FileCopyrightText: 2012 Plataformatec
+
+set -e
+
+ELIXIR_VERSION=1.19.1
+
+if [ $# -eq 0 ] || { [ $# -eq 1 ] && { [ "$1" = "--help" ] || [ "$1" = "-h" ]; }; }; then
+  cat <<USAGE >&2
+Usage: $(basename "$0") [options] [.exs file] [data]
+
+## General options
+
+  -e "COMMAND"                 Evaluates the given command (*)
+  -h, --help                   Prints this message (standalone)
+  -r "FILE"                    Requires the given files/patterns (*)
+  -S SCRIPT                    Finds and executes the given script in \$PATH
@@ -0,0 +1,19 @@
+{application,elixir,
+    [{description,"elixir"},
+     {vsn,"1.19.1"},
+     {modules,
+         ['Elixir.Access','Elixir.Agent.Server','Elixir.Agent',
+          'Elixir.Application','Elixir.ArgumentError',
+          elixir_overridable,elixir_parser,elixir_quote,elixir_rewrite,
+          elixir_sup,elixir_tokenizer,elixir_utils,iex]},
+     {registered,[elixir_sup,elixir_config,elixir_code_server]},
+     {applications,[kernel,stdlib,compiler]},
+     {mod,{elixir,[]}},
+     {env,
+         [{ansi_syntax_colors,
+              [{atom,cyan},
+               {binary,default_color},
+               {operator,default_color}]},
+          {check_endianness,true},
+          {dbg_callback,{'Elixir.Macro',dbg,[]}},
+          {time_zone_database,'Elixir.Calendar.UTCOnlyTimeZoneDatabase'}]}]}.
58 syft/pkg/cataloger/common/cpe/target_software_to_pkg_type.go Normal file
@@ -0,0 +1,58 @@
+package cpe
+
+import (
+	"strings"
+
+	"github.com/anchore/syft/syft/pkg"
+)
+
+// TargetSoftwareToPackageType is derived from looking at target_software attributes in the NVD dataset
+// TODO: ideally this would be driven from the store, where we can resolve ecosystem aliases directly
+func TargetSoftwareToPackageType(tsw string) pkg.Type {
+	tsw = strings.NewReplacer("-", "_", " ", "_").Replace(strings.ToLower(tsw))
+	switch tsw {
+	case "alpine", "apk":
+		return pkg.ApkPkg
+	case "debian", "dpkg":
+		return pkg.DebPkg
+	case "java", "maven", "ant", "gradle", "jenkins", "jenkins_ci", "kafka", "logstash", "mule", "nifi", "solr", "spark", "storm", "struts", "tomcat", "zookeeper", "log4j":
+		return pkg.JavaPkg
+	case "javascript", "node", "nodejs", "node.js", "npm", "yarn", "apache", "jquery", "next.js", "prismjs":
+		return pkg.NpmPkg
+	case "c", "c++", "c/c++", "conan", "gnu_c++", "qt":
+		return pkg.ConanPkg
+	case "dart":
+		return pkg.DartPubPkg
+	case "redhat", "rpm", "redhat_enterprise_linux", "rhel", "suse", "suse_linux", "opensuse", "opensuse_linux", "fedora", "centos", "oracle_linux", "ol":
+		return pkg.RpmPkg
+	case "elixir", "hex":
+		return pkg.HexPkg
+	case "erlang":
+		return pkg.ErlangOTPPkg
+	case ".net", ".net_framework", "asp", "asp.net", "dotnet", "dotnet_framework", "c#", "csharp", "nuget":
+		return pkg.DotnetPkg
+	case "ruby", "gem", "nokogiri", "ruby_on_rails":
+		return pkg.GemPkg
+	case "rust", "cargo", "crates":
+		return pkg.RustPkg
+	case "python", "pip", "pypi", "flask":
+		return pkg.PythonPkg
+	case "kb", "knowledgebase", "msrc", "mskb", "microsoft":
+		return pkg.KbPkg
+	case "portage", "gentoo":
+		return pkg.PortagePkg
+	case "go", "golang", "gomodule":
+		return pkg.GoModulePkg
+	case "linux_kernel", "linux", "z/linux":
+		return pkg.LinuxKernelPkg
+	case "php":
+		return pkg.PhpComposerPkg
+	case "swift":
+		return pkg.SwiftPkg
+	case "wordpress", "wordpress_plugin", "wordpress_":
+		return pkg.WordpressPluginPkg
+	case "lua", "luarocks":
+		return pkg.LuaRocksPkg
+	}
+	return ""
+}
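A brief usage sketch of the new mapping; the inputs are arbitrary examples, and "zig" is included only to show the unmapped fallback:

package main

import (
	"fmt"

	"github.com/anchore/syft/syft/pkg/cataloger/common/cpe"
)

func main() {
	// normalization lowercases and folds "-" and " " to "_" before matching
	for _, tsw := range []string{"Node.js", "Jenkins CI", "ruby-on-rails", "zig"} {
		fmt.Printf("%-14s -> %q\n", tsw, cpe.TargetSoftwareToPackageType(tsw))
	}
	// "zig" has no mapping, so the empty pkg.Type is returned
}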
@@ -1,9 +1,12 @@
 package dart
 
 import (
+	"context"
+
 	"github.com/anchore/packageurl-go"
 	"github.com/anchore/syft/syft/file"
 	"github.com/anchore/syft/syft/pkg"
+	"github.com/anchore/syft/syft/pkg/cataloger/internal/licenses"
 )
 
 func newPubspecLockPackage(name string, raw pubspecLockPackage, locations ...file.Location) pkg.Package {
@@ -29,7 +32,7 @@ func newPubspecLockPackage(name string, raw pubspecLockPackage, locations ...fil
 	return p
 }
 
-func newPubspecPackage(raw pubspecPackage, locations ...file.Location) pkg.Package {
+func newPubspecPackage(ctx context.Context, resolver file.Resolver, raw pubspecPackage, locations ...file.Location) pkg.Package {
 	var env *pkg.DartPubspecEnvironment
 	if raw.Environment.SDK != "" || raw.Environment.Flutter != "" {
 		// this is required only after pubspec v2, but might have been optional before this
@@ -58,6 +61,8 @@ func newPubspecPackage(raw pubspecPackage, locations ...file.Location) pkg.Packa
 
 	p.SetID()
 
+	p = licenses.RelativeToPackage(ctx, resolver, p)
+
 	return p
 }
@@ -29,7 +29,7 @@ type dartPubspecEnvironment struct {
 	Flutter string `mapstructure:"flutter" yaml:"flutter"`
 }
 
-func parsePubspec(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
+func parsePubspec(ctx context.Context, resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
 	var pkgs []pkg.Package
 
 	dec := yaml.NewDecoder(reader)
@@ -41,6 +41,8 @@ func parsePubspec(_ context.Context, _ file.Resolver, _ *generic.Environment, re
 
 	pkgs = append(pkgs,
 		newPubspecPackage(
+			ctx,
+			resolver,
 			p,
 			reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
 		),
@@ -24,6 +24,10 @@ import (
 	"github.com/anchore/syft/syft/pkg/cataloger/generic"
 )
 
+const (
+	deinstallStatus string = "deinstall"
+)
+
 var (
 	errEndOfPackages = fmt.Errorf("no more packages to read")
 	sourceRegexp     = regexp.MustCompile(`(?P<name>\S+)( \((?P<version>.*)\))?`)
@@ -112,6 +116,7 @@ type dpkgExtractedMetadata struct {
 	Provides   string `mapstructure:"Provides"`
 	Depends    string `mapstructure:"Depends"`
 	PreDepends string `mapstructure:"PreDepends"` // note: original doc is Pre-Depends
+	Status     string `mapstructure:"Status"`
 }
 
 // parseDpkgStatusEntry returns an individual Dpkg entry, or returns errEndOfPackages if there are no more packages to parse from the reader.
@@ -134,6 +139,11 @@ func parseDpkgStatusEntry(reader *bufio.Reader) (*pkg.DpkgDBEntry, error) {
 		return nil, err
 	}
 
+	// Skip entries which have been removed but not purged, e.g. "rc" status in dpkg -l
+	if strings.Contains(raw.Status, deinstallStatus) {
+		return nil, nil
+	}
+
 	sourceName, sourceVersion := extractSourceVersion(raw.Source)
 	if sourceVersion != "" {
 		raw.SourceVersion = sourceVersion
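dpkg's Status field is three words (want, flag, status); packages removed but not purged carry "deinstall ok config-files", which `dpkg -l` renders as "rc". A minimal sketch of the skip decision in isolation (the helper name is illustrative):

package main

import (
	"fmt"
	"strings"
)

// shouldSkip mirrors the new cataloger check: any entry whose Status
// mentions "deinstall" is dropped rather than reported as installed.
func shouldSkip(status string) bool {
	return strings.Contains(status, "deinstall")
}

func main() {
	fmt.Println(shouldSkip("install ok installed"))      // false
	fmt.Println(shouldSkip("deinstall ok config-files")) // true
}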
@@ -237,6 +237,37 @@ func Test_parseDpkgStatus(t *testing.T) {
 				},
 			},
 		},
+		{
+			name:        "deinstall status packages are ignored",
+			fixturePath: "test-fixtures/var/lib/dpkg/status.d/deinstall",
+			expected: []pkg.DpkgDBEntry{
+				{
+					Package:       "linux-image-6.14.0-1012-aws",
+					Source:        "linux-signed-aws-6.14",
+					Version:       "6.14.0-1012.12~24.04.1",
+					Architecture:  "amd64",
+					InstalledSize: 15221,
+					Maintainer:    "Canonical Kernel Team <kernel-team@lists.ubuntu.com>",
+					Description: `Signed kernel image aws
+ A kernel image for aws. This version of it is signed with
+ Canonical's signing key.`,
+					Provides: []string{"fuse-module",
+						"linux-image",
+						"spl-dkms",
+						"spl-modules",
+						"v4l2loopback-dkms",
+						"v4l2loopback-modules",
+						"zfs-dkms",
+						"zfs-modules"},
+					Depends: []string{
+						"kmod",
+						"linux-base (>= 4.5ubuntu1~16.04.1)",
+						"linux-modules-6.14.0-1012-aws",
+					},
+					Files: []pkg.DpkgFileRecord{},
+				},
+			},
+		},
 	}
 
 	for _, test := range tests {
@@ -0,0 +1,38 @@
+Package: linux-image-6.14.0-1012-aws
+Status: install ok installed
+Priority: optional
+Section: kernel
+Installed-Size: 15221
+Maintainer: Canonical Kernel Team <kernel-team@lists.ubuntu.com>
+Architecture: amd64
+Source: linux-signed-aws-6.14
+Version: 6.14.0-1012.12~24.04.1
+Provides: fuse-module, linux-image, spl-dkms, spl-modules, v4l2loopback-dkms, v4l2loopback-modules, zfs-dkms, zfs-modules
+Depends: kmod, linux-base (>= 4.5ubuntu1~16.04.1), linux-modules-6.14.0-1012-aws
+Recommends: grub-pc | grub-efi-amd64 | grub-efi-ia32 | grub | lilo, initramfs-tools | linux-initramfs-tool
+Suggests: bpftool, linux-perf, linux-aws-6.14-doc-6.14.0 | linux-aws-6.14-source-6.14.0, linux-aws-6.14-tools, linux-headers-6.14.0-1012-aws
+Conflicts: linux-image-unsigned-6.14.0-1012-aws
+Description: Signed kernel image aws
+ A kernel image for aws. This version of it is signed with
+ Canonical's signing key.
+Built-Using: linux-aws-6.14 (= 6.14.0-1012.12~24.04.1)
+
+Package: linux-image-6.8.0-1029-aws
+Status: deinstall ok config-files
+Priority: optional
+Section: kernel
+Installed-Size: 14591
+Maintainer: Canonical Kernel Team <kernel-team@lists.ubuntu.com>
+Architecture: amd64
+Source: linux-signed-aws
+Version: 6.8.0-1029.31
+Config-Version: 6.8.0-1029.31
+Provides: fuse-module, linux-image, spl-dkms, spl-modules, v4l2loopback-dkms, v4l2loopback-modules, zfs-dkms, zfs-modules
+Depends: kmod, linux-base (>= 4.5ubuntu1~16.04.1), linux-modules-6.8.0-1029-aws
+Recommends: grub-pc | grub-efi-amd64 | grub-efi-ia32 | grub | lilo, initramfs-tools | linux-initramfs-tool
+Suggests: fdutils, linux-aws-doc-6.8.0 | linux-aws-source-6.8.0, linux-aws-tools, linux-headers-6.8.0-1029-aws
+Conflicts: linux-image-unsigned-6.8.0-1029-aws
+Description: Signed kernel image aws
+ A kernel image for aws. This version of it is signed with
+ Canonical's signing key.
+Built-Using: linux-aws (= 6.8.0-1029.31)
@@ -12,6 +12,7 @@ import (
 // binary cataloger will search for .dll and .exe files and create packages based off of the version resources embedded
 // as a resource directory within the executable. If there is no evidence of a .NET runtime (a CLR header) then no
 // package will be created.
+//
 // Deprecated: use depsBinaryCataloger instead which combines the PE and deps.json data which yields more accurate results (will be removed in syft v2.0).
 type binaryCataloger struct {
 }
@@ -13,12 +13,14 @@ func NewDotnetDepsBinaryCataloger(config CatalogerConfig) pkg.Cataloger {
 }
 
 // NewDotnetDepsCataloger returns a cataloger based on deps.json file contents.
+//
 // Deprecated: use NewDotnetDepsBinaryCataloger instead which combines the PE and deps.json data which yields more accurate results (will be removed in syft v2.0).
 func NewDotnetDepsCataloger() pkg.Cataloger {
 	return &depsCataloger{}
 }
 
 // NewDotnetPortableExecutableCataloger returns a cataloger based on PE file contents.
+//
 // Deprecated: use NewDotnetDepsBinaryCataloger instead which combines the PE and deps.json data which yields more accurate results (will be removed in syft v2.0).
 func NewDotnetPortableExecutableCataloger() pkg.Cataloger {
 	return &binaryCataloger{}
@@ -9,6 +9,7 @@ import (
 )
 
 // depsCataloger will search for deps.json file contents.
+//
 // Deprecated: use depsBinaryCataloger instead which combines the PE and deps.json data which yields more accurate results (will be removed in syft v2.0).
 type depsCataloger struct {
 }
@ -4,18 +4,20 @@ import (
|
|||||||
"context"
|
"context"
|
||||||
"fmt"
|
"fmt"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"regexp"
|
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/spf13/afero"
|
"github.com/spf13/afero"
|
||||||
|
|
||||||
|
"github.com/anchore/syft/internal"
|
||||||
|
"github.com/anchore/syft/internal/log"
|
||||||
"github.com/anchore/syft/syft/file"
|
"github.com/anchore/syft/syft/file"
|
||||||
"github.com/anchore/syft/syft/pkg"
|
"github.com/anchore/syft/syft/pkg"
|
||||||
|
"github.com/anchore/syft/syft/pkg/cataloger/internal/licenses"
|
||||||
)
|
)
|
||||||
|
|
||||||
// resolveModuleLicenses finds and parses license files for Go modules
|
// resolveModuleLicenses finds and parses license files for Go modules
|
||||||
func resolveModuleLicenses(ctx context.Context, pkgInfos []pkgInfo, fs afero.Fs) pkg.LicenseSet {
|
func resolveModuleLicenses(ctx context.Context, scanRoot string, pkgInfos []pkgInfo, fs afero.Fs) pkg.LicenseSet {
|
||||||
licenses := pkg.NewLicenseSet()
|
out := pkg.NewLicenseSet()
|
||||||
|
|
||||||
for _, info := range pkgInfos {
|
for _, info := range pkgInfos {
|
||||||
modDir, pkgDir, err := getAbsolutePkgPaths(info)
|
modDir, pkgDir, err := getAbsolutePkgPaths(info)
|
||||||
@ -23,22 +25,32 @@ func resolveModuleLicenses(ctx context.Context, pkgInfos []pkgInfo, fs afero.Fs)
|
|||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
licenseFiles, err := findAllLicenseCandidatesUpwards(pkgDir, licenseRegexp, modDir, fs)
|
licenseFiles, err := findAllLicenseCandidatesUpwards(pkgDir, modDir, fs)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, f := range licenseFiles {
|
for _, f := range licenseFiles {
|
||||||
contents, err := fs.Open(f)
|
out.Add(readLicenses(ctx, scanRoot, fs, f)...)
|
||||||
if err != nil {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
licenses.Add(pkg.NewLicensesFromReadCloserWithContext(ctx, file.NewLocationReadCloser(file.Location{}, contents))...)
|
|
||||||
_ = contents.Close()
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return licenses
|
return out
|
||||||
|
}
|
||||||
|
|
||||||
|
func readLicenses(ctx context.Context, scanRoot string, fs afero.Fs, f string) []pkg.License {
|
||||||
|
contents, err := fs.Open(f)
|
||||||
|
if err != nil {
|
||||||
|
log.WithFields("file", f, "error", err).Debug("unable to read license file")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
defer internal.CloseAndLogError(contents, f)
|
||||||
|
location := file.Location{}
|
||||||
|
if scanRoot != "" && strings.HasPrefix(f, scanRoot) {
|
||||||
|
// include location when licenses are found within the scan target
|
||||||
|
location = file.NewLocation(strings.TrimPrefix(f, scanRoot))
|
||||||
|
}
|
||||||
|
return pkg.NewLicensesFromReadCloserWithContext(ctx, file.NewLocationReadCloser(location, contents))
|
||||||
}
|
}
|
||||||
|
|
||||||
/*
|
/*
|
||||||
@ -60,7 +72,7 @@ When we should consider redesign tip to stem:
|
|||||||
- We need to consider the case here where nested modules are visited by accident and licenses
|
- We need to consider the case here where nested modules are visited by accident and licenses
|
||||||
are erroneously associated to a 'parent module'; bubble up currently prevents this
|
are erroneously associated to a 'parent module'; bubble up currently prevents this
|
||||||
*/
|
*/
|
||||||
func findAllLicenseCandidatesUpwards(dir string, r *regexp.Regexp, stopAt string, fs afero.Fs) ([]string, error) {
|
func findAllLicenseCandidatesUpwards(dir string, stopAt string, fs afero.Fs) ([]string, error) {
|
||||||
// Validate that both paths are absolute
|
 	// Validate that both paths are absolute
 	if !filepath.IsAbs(dir) {
 		return nil, fmt.Errorf("dir must be an absolute path, got: %s", dir)
@@ -69,25 +81,16 @@ func findAllLicenseCandidatesUpwards(dir string, r *regexp.Regexp, stopAt string
 		return nil, fmt.Errorf("stopAt must be an absolute path, got: %s", stopAt)
 	}
 
-	licenses, err := findLicenseCandidates(dir, r, stopAt, fs)
-	if err != nil {
-		return nil, err
-	}
-
-	// Ensure we return an empty slice rather than nil for consistency
-	if licenses == nil {
-		return []string{}, nil
-	}
-	return licenses, nil
+	return findLicenseCandidates(dir, stopAt, fs)
 }
 
-func findLicenseCandidates(dir string, r *regexp.Regexp, stopAt string, fs afero.Fs) ([]string, error) {
+func findLicenseCandidates(dir string, stopAt string, fs afero.Fs) ([]string, error) {
 	// stop if we've gone outside the stopAt directory
 	if !strings.HasPrefix(dir, stopAt) {
 		return []string{}, nil
 	}
 
-	licenses, err := findLicensesInDir(dir, r, fs)
+	out, err := findLicensesInDir(dir, fs)
 	if err != nil {
 		return nil, err
 	}
@@ -95,17 +98,17 @@ func findLicenseCandidates(dir string, r *regexp.Regexp, stopAt string, fs afero
 	parent := filepath.Dir(dir)
 	// can't go any higher up the directory tree: "/" case
 	if parent == dir {
-		return licenses, nil
+		return out, nil
 	}
 
 	// search parent directory and combine results
-	parentLicenses, err := findLicenseCandidates(parent, r, stopAt, fs)
+	parentLicenses, err := findLicenseCandidates(parent, stopAt, fs)
 	if err != nil {
 		return nil, err
 	}
 
 	// Combine current directory licenses with parent directory licenses
-	return append(licenses, parentLicenses...), nil
+	return append(out, parentLicenses...), nil
 }
 
 func getAbsolutePkgPaths(info pkgInfo) (modDir string, pkgDir string, err error) {
@@ -126,8 +129,8 @@ func getAbsolutePkgPaths(info pkgInfo) (modDir string, pkgDir string, err error)
 	return modDir, pkgDir, nil
 }
 
-func findLicensesInDir(dir string, r *regexp.Regexp, fs afero.Fs) ([]string, error) {
-	var licenses []string
+func findLicensesInDir(dir string, fs afero.Fs) ([]string, error) {
+	var out []string
 
 	dirContents, err := afero.ReadDir(fs, dir)
 	if err != nil {
@@ -139,11 +142,11 @@ func findLicensesInDir(dir string, r *regexp.Regexp, fs afero.Fs) ([]string, err
 			continue
 		}
 
-		if r.MatchString(f.Name()) {
+		if licenses.IsLicenseFile(f.Name()) {
 			path := filepath.Join(dir, f.Name())
-			licenses = append(licenses, path)
+			out = append(out, path)
 		}
 	}
 
-	return licenses, nil
+	return out, nil
 }
@@ -70,8 +70,8 @@ func TestFindAllLicenseCandidatesUpwards(t *testing.T) {
 				fs.MkdirAll("/empty/dir/tree", 0755)
 				// No license files
 			},
-			expectedFiles: []string{},
-			description:   "Should return empty slice when no license files found",
+			expectedFiles: nil,
+			description:   "Should return nil when no license files found",
 		},
 		{
 			name: "handles directory at filesystem root",
@@ -205,7 +205,7 @@ func TestFindAllLicenseCandidatesUpwards(t *testing.T) {
 			tt.setupFS(fs)
 
 			// Run the function
-			result, err := findAllLicenseCandidatesUpwards(tt.startDir, licenseRegexp, tt.stopAt, fs)
+			result, err := findAllLicenseCandidatesUpwards(tt.startDir, tt.stopAt, fs)
 
 			// Check error expectation
 			if tt.expectedError {
@@ -19,23 +19,21 @@ import (
 	"github.com/go-git/go-git/v5"
 	"github.com/go-git/go-git/v5/plumbing"
 	"github.com/go-git/go-git/v5/storage/memory"
-	"github.com/scylladb/go-set/strset"
 
 	"github.com/anchore/syft/internal"
 	"github.com/anchore/syft/internal/cache"
-	"github.com/anchore/syft/internal/licenses"
 	"github.com/anchore/syft/internal/log"
 	"github.com/anchore/syft/syft/file"
 	"github.com/anchore/syft/syft/pkg"
+	"github.com/anchore/syft/syft/pkg/cataloger/internal/licenses"
 )
 
 type goLicenseResolver struct {
 	catalogerName    string
 	opts             CatalogerConfig
 	localModCacheDir fs.FS
 	localVendorDir   fs.FS
 	licenseCache     cache.Resolver[[]pkg.License]
-	lowerLicenseFileNames *strset.Set
 }
 
 func newGoLicenseResolver(catalogerName string, opts CatalogerConfig) goLicenseResolver {
@@ -59,23 +57,14 @@ func newGoLicenseResolver(catalogerName string, opts CatalogerConfig) goLicenseR
 	}
 
 	return goLicenseResolver{
 		catalogerName:    catalogerName,
 		opts:             opts,
 		localModCacheDir: localModCacheDir,
 		localVendorDir:   localVendorDir,
 		licenseCache:     cache.GetResolverCachingErrors[[]pkg.License]("golang", "v2"),
-		lowerLicenseFileNames: strset.New(lowercaseLicenseFiles()...),
 	}
 }
 
-func lowercaseLicenseFiles() []string {
-	fileNames := licenses.FileNames()
-	for i := range fileNames {
-		fileNames[i] = strings.ToLower(fileNames[i])
-	}
-	return fileNames
-}
-
 func remotesForModule(proxies []string, noProxy []string, module string) []string {
 	for _, pattern := range noProxy {
 		if matched, err := path.Match(pattern, module); err == nil && matched {
@@ -194,7 +183,7 @@ func (c *goLicenseResolver) findLicensesInFS(ctx context.Context, urlPrefix stri
 		log.Debugf("nil entry for %s#%s", urlPrefix, filePath)
 		return nil
 	}
-	if !c.lowerLicenseFileNames.Has(strings.ToLower(d.Name())) {
+	if !licenses.IsLicenseFile(d.Name()) {
 		return nil
 	}
 	rdr, err := fsys.Open(filePath)
@@ -203,11 +192,11 @@ func (c *goLicenseResolver) findLicensesInFS(ctx context.Context, urlPrefix stri
 		return nil
 	}
 	defer internal.CloseAndLogError(rdr, filePath)
-	licenses := pkg.NewLicensesFromReadCloserWithContext(ctx, file.NewLocationReadCloser(file.NewLocation(filePath), rdr))
+	foundLicenses := pkg.NewLicensesFromReadCloserWithContext(ctx, file.NewLocationReadCloser(file.NewLocation(filePath), rdr))
 	// since these licenses are found in an external fs.FS, not in the scanned source,
 	// get rid of the locations but keep information about the where the license was found
 	// by prepending the urlPrefix to the internal path for an accurate representation
-	for _, l := range licenses {
+	for _, l := range foundLicenses {
 		l.URLs = []string{urlPrefix + filePath}
 		l.Locations = file.NewLocationSet()
 		out = append(out, l)
@@ -246,7 +235,7 @@ func (c *goLicenseResolver) findLicensesInSource(ctx context.Context, resolver f
 func (c *goLicenseResolver) parseLicenseFromLocation(ctx context.Context, l file.Location, resolver file.Resolver) ([]pkg.License, error) {
 	var out []pkg.License
 	fileName := path.Base(l.RealPath)
-	if c.lowerLicenseFileNames.Has(strings.ToLower(fileName)) {
+	if licenses.IsLicenseFile(fileName) {
 		contents, err := resolver.FileContentsByLocation(l)
 		if err != nil {
 			return nil, err
@@ -10,7 +10,14 @@ import (
 )
 
 func (c *goBinaryCataloger) newGoBinaryPackage(dep *debug.Module, m pkg.GolangBinaryBuildinfoEntry, licenses []pkg.License, locations ...file.Location) pkg.Package {
+	// Similar to syft/pkg/cataloger/golang/parse_go_mod.go logic - use original path for relative replacements
+	finalPath := dep.Path
 	if dep.Replace != nil {
+		if strings.HasPrefix(dep.Replace.Path, ".") || strings.HasPrefix(dep.Replace.Path, "/") {
+			finalPath = dep.Path
+		} else {
+			finalPath = dep.Replace.Path
+		}
 		dep = dep.Replace
 	}
 
@@ -23,10 +30,10 @@ func (c *goBinaryCataloger) newGoBinaryPackage(dep *debug.Module, m pkg.GolangBi
 	}
 
 	p := pkg.Package{
-		Name:      dep.Path,
+		Name:      finalPath,
 		Version:   version,
 		Licenses:  pkg.NewLicenseSet(licenses...),
-		PURL:      packageURL(dep.Path, version),
+		PURL:      packageURL(finalPath, version),
 		Language:  pkg.Go,
 		Type:      pkg.GoModulePkg,
 		Locations: file.NewLocationSet(locations...),
@@ -1,6 +1,7 @@
 package golang
 
 import (
+	"runtime/debug"
 	"testing"
 
 	"github.com/stretchr/testify/assert"
@@ -54,3 +55,67 @@ func Test_packageURL(t *testing.T) {
 		})
 	}
 }
+
+func Test_newGoBinaryPackage_relativeReplace(t *testing.T) {
+	tests := []struct {
+		name         string
+		dep          *debug.Module
+		expectedName string
+	}{
+		{
+			name: "relative replace with ../",
+			dep: &debug.Module{
+				Path:    "github.com/aws/aws-sdk-go-v2",
+				Version: "(devel)",
+				Replace: &debug.Module{
+					Path:    "../../",
+					Version: "(devel)",
+				},
+			},
+			expectedName: "github.com/aws/aws-sdk-go-v2", // should use original path, not relative
+		},
+		{
+			name: "relative replace with ./",
+			dep: &debug.Module{
+				Path:    "github.com/example/module",
+				Version: "v1.0.0",
+				Replace: &debug.Module{
+					Path:    "./local",
+					Version: "v0.0.0",
+				},
+			},
+			expectedName: "github.com/example/module", // should use original path
+		},
+		{
+			name: "absolute replace",
+			dep: &debug.Module{
+				Path:    "github.com/old/module",
+				Version: "v1.0.0",
+				Replace: &debug.Module{
+					Path:    "github.com/new/module",
+					Version: "v2.0.0",
+				},
+			},
+			expectedName: "github.com/new/module", // should use replacement path
+		},
+		{
+			name: "no replace",
+			dep: &debug.Module{
+				Path:    "github.com/normal/module",
+				Version: "v1.0.0",
+			},
+			expectedName: "github.com/normal/module", // should use original path
+		},
+	}
+
+	for _, test := range tests {
+		t.Run(test.name, func(t *testing.T) {
+			cataloger := &goBinaryCataloger{}
+			result := cataloger.newGoBinaryPackage(test.dep, pkg.GolangBinaryBuildinfoEntry{}, nil)
+
+			assert.Equal(t, test.expectedName, result.Name)
+			assert.Equal(t, pkg.Go, result.Language)
+			assert.Equal(t, pkg.GoModulePkg, result.Type)
+		})
+	}
+}
@@ -7,7 +7,7 @@ import (
 	"go/build"
 	"io"
 	"path/filepath"
-	"regexp"
+	"slices"
 	"sort"
 	"strings"
 
@@ -20,14 +20,11 @@ import (
 	"github.com/anchore/syft/internal/unknown"
 	"github.com/anchore/syft/syft/artifact"
 	"github.com/anchore/syft/syft/file"
+	"github.com/anchore/syft/syft/internal/fileresolver"
 	"github.com/anchore/syft/syft/pkg"
 	"github.com/anchore/syft/syft/pkg/cataloger/generic"
 )
 
-var (
-	licenseRegexp = regexp.MustCompile(`^(?i)((UN)?LICEN(S|C)E|COPYING|NOTICE).*$`)
-)
-
 type goModCataloger struct {
 	licenseResolver goLicenseResolver
 }
@@ -46,9 +43,14 @@ func (c *goModCataloger) parseGoModFile(ctx context.Context, resolver file.Resol
 		log.Debugf("unable to get go.sum: %v", err)
 	}
 
+	scanRoot := ""
+	if dir, ok := resolver.(*fileresolver.Directory); ok && dir != nil {
+		scanRoot = dir.Chroot.Base()
+	}
+
 	// source analysis using go toolchain if available
 	syftSourcePackages, sourceModules, sourceDependencies, unknownErr := c.loadPackages(modDir, reader.Location)
-	catalogedModules, sourceModuleToPkg := c.catalogModules(ctx, syftSourcePackages, sourceModules, reader, digests)
+	catalogedModules, sourceModuleToPkg := c.catalogModules(ctx, scanRoot, syftSourcePackages, sourceModules, reader, digests)
 	relationships := buildModuleRelationships(catalogedModules, sourceDependencies, sourceModuleToPkg)
 
 	// base case go.mod file parsing
@@ -208,12 +210,16 @@ func (c *goModCataloger) visitPackages(
 				}
 			}
 		}
-		pkgs[module.Path] = append(pkgs[module.Path], pkgInfo{
+		info := pkgInfo{
 			pkgPath:    p.PkgPath,
 			modulePath: module.Path,
 			pkgDir:     pkgDir,
 			moduleDir:  module.Dir,
-		})
+		}
+		if !slices.Contains(pkgs[module.Path], info) { // avoid duplicates
+			pkgs[module.Path] = append(pkgs[module.Path], info)
+		}
 		modules[p.Module.Path] = module
 
 		return true
@@ -224,6 +230,7 @@ func (c *goModCataloger) visitPackages(
 // create syft packages from Go modules found by the go toolchain
 func (c *goModCataloger) catalogModules(
 	ctx context.Context,
+	scanRoot string,
 	pkgs map[string][]pkgInfo,
 	modules map[string]*packages.Module,
 	reader file.LocationReadCloser,
@@ -243,7 +250,7 @@ func (c *goModCataloger) catalogModules(
 	}
 
 	pkgInfos := pkgs[m.Path]
-	moduleLicenses := resolveModuleLicenses(ctx, pkgInfos, afero.NewOsFs())
+	moduleLicenses := resolveModuleLicenses(ctx, scanRoot, pkgInfos, afero.NewOsFs())
 	// we do out of source lookups for module parsing
 	// locations are NOT included in the SBOM because of this
 	goModulePkg := pkg.Package{
@@ -1,15 +1,22 @@
 package homebrew
 
 import (
+	"context"
+	"path"
+
 	"github.com/anchore/packageurl-go"
 	"github.com/anchore/syft/syft/file"
 	"github.com/anchore/syft/syft/pkg"
+	"github.com/anchore/syft/syft/pkg/cataloger/internal/licenses"
 )
 
-func newHomebrewPackage(pd parsedHomebrewData, formulaLocation file.Location) pkg.Package {
-	var licenses []string
+func newHomebrewPackage(ctx context.Context, resolver file.Resolver, pd parsedHomebrewData, formulaLocation file.Location) pkg.Package {
+	var lics []pkg.License
 	if pd.License != "" {
-		licenses = append(licenses, pd.License)
+		lics = append(lics, pkg.NewLicensesFromValues(pd.License)...)
+	} else {
+		// sometimes licenses are included in the parent directory
+		lics = licenses.FindInDirs(ctx, resolver, path.Dir(formulaLocation.Path()))
 	}
 
 	p := pkg.Package{
@@ -17,7 +24,7 @@ func newHomebrewPackage(pd parsedHomebrewData, formulaLocation file.Location) pk
 		Version:   pd.Version,
 		Type:      pkg.HomebrewPkg,
 		Locations: file.NewLocationSet(formulaLocation.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)),
-		Licenses:  pkg.NewLicenseSet(pkg.NewLicensesFromValues(licenses...)...),
+		Licenses:  pkg.NewLicenseSet(lics...),
 		FoundBy:   "homebrew-cataloger",
 		PURL:      packageURL(pd.Name, pd.Version),
 		Metadata: pkg.HomebrewFormula{
@@ -22,7 +22,7 @@ type parsedHomebrewData struct {
 	License string
 }
 
-func parseHomebrewFormula(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
+func parseHomebrewFormula(ctx context.Context, resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
 	pd, err := parseFormulaFile(reader)
 	if err != nil {
 		log.WithFields("path", reader.RealPath).Trace("failed to parse formula")
@@ -35,6 +35,8 @@ func parseHomebrewFormula(_ context.Context, _ file.Resolver, _ *generic.Environ
 
 	return []pkg.Package{
 		newHomebrewPackage(
+			ctx,
+			resolver,
 			*pd,
 			reader.Location,
 		),
syft/pkg/cataloger/internal/cpegenerate/README.md (new file, 167 lines)
@@ -0,0 +1,167 @@

# CPE Generation

This package generates Common Platform Enumeration (CPE) identifiers for software packages discovered by Syft.
CPEs are standardized identifiers that enable vulnerability matching by linking packages to known vulnerabilities in databases like the National Vulnerability Database (NVD).

## Overview

CPE generation in Syft uses a **two-tier approach** to balance accuracy and coverage:

1. **Dictionary Lookups** (Authoritative): Pre-validated CPEs from the official NIST CPE dictionary
2. **Heuristic Generation** (Fallback): Intelligent generation based on package metadata and ecosystem-specific patterns

This dual approach ensures:
- **High accuracy** for packages in the NIST dictionary (no false positives)
- **Broad coverage** for packages not yet in the dictionary (maximizes vulnerability detection)
- **Fast performance** with an embedded, indexed CPE dictionary (~814KB)

## Why It Matters

CPEs link discovered packages to security vulnerabilities (CVEs) in tools like Grype. Without accurate CPE generation, vulnerability scanning misses security issues.

## How It Works

### Architecture

```
┌─────────────────────────────────────────────────────────┐
│                 Syft Package Discovery                  │
└───────────────────┬─────────────────────────────────────┘
                    │
                    ▼
         ┌─────────────────────┐
         │   CPE Generation    │
         │   (this package)    │
         └──────────┬──────────┘
                    │
        ┌───────────┴────────────┐
        │                        │
        ▼                        ▼
┌──────────────────┐   ┌─────────────────────┐
│   Dictionary     │   │     Heuristic       │
│     Lookup       │   │     Generation      │
│                  │   │                     │
│ • Embedded index │   │ • Ecosystem rules   │
│ • ~22K entries   │   │ • Vendor/product    │
│ • 11 ecosystems  │   │   candidates        │
└──────────────────┘   │ • Curated mappings  │
                       │ • Smart filters     │
                       └─────────────────────┘
```

### Dictionary Generation Process

The dictionary is generated offline and embedded into the Syft binary for fast, offline lookups.

**Location**: `dictionary/index-generator/`

**Process**:
1. **Fetch**: Retrieves CPE data from the NVD Products API using incremental updates
2. **Cache**: Stores raw API responses in an ORAS registry for reuse (`.cpe-cache/`)
3. **Filter**:
   - Removes CPEs without reference URLs
   - Excludes hardware (`h`) and OS (`o`) CPEs (keeps only applications, `a`)
4. **Index by Ecosystem**:
   - Extracts package names from reference URLs (npm, pypi, rubygems, etc.; see the sketch after this list)
   - Creates the index: `ecosystem → package_name → [CPE strings]`
5. **Embed**: Generates `data/cpe-index.json`, embedded via a `go:embed` directive
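The URL-based mapping in step 4 can be pictured with a small stand-alone sketch. The helper name and URL prefixes below are illustrative assumptions; the real logic lives in `dictionary/index-generator/generate.go` and covers more ecosystems:

```go
package main

import (
	"fmt"
	"strings"
)

// ecosystemForReferenceURL guesses (ecosystem, packageName) from a CPE reference URL.
// Hypothetical helper mirroring the README's description, not the generator's actual code.
func ecosystemForReferenceURL(url string) (ecosystem, name string, ok bool) {
	patterns := map[string]string{
		"https://www.npmjs.com/package/": "npm",
		"https://pypi.org/project/":      "pypi",
		"https://rubygems.org/gems/":     "rubygems",
	}
	for prefix, eco := range patterns {
		if strings.HasPrefix(url, prefix) {
			n := strings.Trim(strings.TrimPrefix(url, prefix), "/")
			if n != "" {
				return eco, n, true
			}
		}
	}
	return "", "", false
}

func main() {
	eco, name, ok := ecosystemForReferenceURL("https://pypi.org/project/Django/")
	fmt.Println(eco, name, ok) // pypi Django true
}
```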
### Runtime CPE Lookup/Generation

**Entry Point**: `generate.go`

When Syft discovers a package:

1. **Check for Declared CPEs**: If package metadata already contains CPEs (from SBOM imports), skip generation
2. **Try Dictionary Lookup** (`FromDictionaryFind`):
   - Loads the embedded CPE index (singleton, loaded once)
   - Looks up by ecosystem + package name
   - Returns pre-validated CPEs if found
   - Marks the source as `NVDDictionaryLookupSource`
3. **Fallback to Heuristic Generation** (`FromPackageAttributes`; sketched below):
   - Generates vendor/product/targetSW candidates using ecosystem-specific logic
   - Creates CPE permutations from the candidates
   - Applies filters to remove known false positives
   - Marks the source as `GeneratedSource`
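A minimal sketch of that decision flow follows. The `FromDictionaryFind`/`FromPackageAttributes` names come from this README, but the signatures, types, and source labels below are simplified stand-ins, not this package's actual API:

```go
package main

import "fmt"

type CPE struct {
	Value  string
	Source string // stand-in for NVDDictionaryLookupSource / GeneratedSource
}

// fromDictionaryFind stands in for the embedded-index lookup (ecosystem + name -> CPEs).
func fromDictionaryFind(ecosystem, name string) ([]CPE, bool) {
	index := map[string][]string{
		"npm/lodash": {"cpe:2.3:a:lodash:lodash:*:*:*:*:*:node.js:*:*"},
	}
	values, ok := index[ecosystem+"/"+name]
	if !ok {
		return nil, false
	}
	var out []CPE
	for _, v := range values {
		out = append(out, CPE{Value: v, Source: "nvd-cpe-dictionary"})
	}
	return out, true
}

// fromPackageAttributes stands in for heuristic candidate generation and permutation.
func fromPackageAttributes(name string) []CPE {
	return []CPE{{Value: "cpe:2.3:a:" + name + ":" + name + ":*:*:*:*:*:*:*:*", Source: "syft-generated"}}
}

func generateCPEs(ecosystem, name string, declared []CPE) []CPE {
	if len(declared) > 0 {
		return declared // step 1: keep CPEs already declared in package metadata
	}
	if found, ok := fromDictionaryFind(ecosystem, name); ok {
		return found // step 2: authoritative dictionary hit
	}
	return fromPackageAttributes(name) // step 3: heuristic fallback
}

func main() {
	fmt.Println(generateCPEs("npm", "lodash", nil))   // dictionary hit
	fmt.Println(generateCPEs("npm", "left-pad", nil)) // heuristic fallback
}
```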
### Supported Ecosystems

**Dictionary Lookups** (11 ecosystems):
npm, RubyGems, PyPI, Jenkins Plugins, crates.io, PHP, Go Modules, WordPress Plugins/Themes

**Heuristic Generation** (all package types):
All dictionary ecosystems plus Java, .NET/NuGet, Alpine APK, Debian/RPM, and any other package type Syft discovers

### Ecosystem-Specific Intelligence

The heuristic generator uses per-ecosystem strategies:

- **Java**: Extracts vendor from groupId, product from artifactId
- **Python**: Parses author fields, adds `_project` suffix variants
- **Go**: Extracts org/repo from module paths (`github.com/org/repo`; see the sketch after this list)
- **JavaScript**: Handles npm scope patterns (`@scope/package`)
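For instance, the Go strategy amounts to splitting the module path. This is a hypothetical helper illustrating the idea, not this package's actual code:

```go
package main

import (
	"fmt"
	"strings"
)

// candidatesForGoModule derives vendor/product candidates from a Go module path.
func candidatesForGoModule(modulePath string) (vendor, product string) {
	parts := strings.Split(modulePath, "/")
	if len(parts) >= 3 && parts[0] == "github.com" {
		return parts[1], parts[2] // org -> vendor, repo -> product
	}
	last := parts[len(parts)-1]
	return last, last // fall back to the final path element
}

func main() {
	v, p := candidatesForGoModule("github.com/anchore/syft")
	fmt.Println(v, p) // anchore syft
}
```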
### Curated Mappings & Filters

- **500+ curated mappings**: `curl` → `haxx`, `spring-boot` → `pivotal`, etc.
- **Filters**: Prevent false positives (Jenkins plugins vs. core, Jira client vs. server)
- **Validation**: Ensures CPE syntax correctness before returning

## Implementation Details

### Embedded Index Format

```json
{
  "ecosystems": {
    "npm": {
      "lodash": ["cpe:2.3:a:lodash:lodash:*:*:*:*:*:node.js:*:*"]
    },
    "pypi": {
      "Django": ["cpe:2.3:a:djangoproject:django:*:*:*:*:*:python:*:*"]
    }
  }
}
```

The dictionary generator maps packages to ecosystems using reference URL patterns (npmjs.com, pypi.org, rubygems.org, etc.).
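Consuming that format is a plain JSON decode into nested maps. A stand-alone sketch (the real package embeds `data/cpe-index.json` via `go:embed`; here the JSON is inlined for illustration):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// cpeIndex mirrors the ecosystem -> package_name -> [CPE strings] layout shown above.
type cpeIndex struct {
	Ecosystems map[string]map[string][]string `json:"ecosystems"`
}

const sample = `{
  "ecosystems": {
    "npm": {"lodash": ["cpe:2.3:a:lodash:lodash:*:*:*:*:*:node.js:*:*"]}
  }
}`

func main() {
	var idx cpeIndex
	if err := json.Unmarshal([]byte(sample), &idx); err != nil {
		panic(err)
	}
	fmt.Println(idx.Ecosystems["npm"]["lodash"]) // dictionary hit for npm/lodash
}
```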
## Maintenance

### Updating the CPE Dictionary

The CPE dictionary should be updated periodically to include new packages:

```bash
# Full workflow: pull cache → update from NVD → build index
make generate:cpe-index

# Or run individual steps:
make generate:cpe-index:cache:pull    # Pull cached CPE data from ORAS
make generate:cpe-index:cache:update  # Fetch updates from NVD Products API
make generate:cpe-index:build         # Generate cpe-index.json from cache
```

**Optional**: Set `NVD_API_KEY` for faster updates (50 req/30s vs 5 req/30s)

This workflow:
1. Pulls the existing cache from the ORAS registry (avoids re-fetching all ~1.5M CPEs)
2. Fetches only products modified since the last update from the NVD Products API
3. Builds the indexed dictionary (~814KB, ~22K entries)
4. Pushes the updated cache for team reuse

### Extending CPE Generation

**Add dictionary support for a new ecosystem:**
1. Add a URL pattern in `index-generator/generate.go`
2. Regenerate the index with `make generate:cpe-index`

**Improve heuristic generation:**
1. Modify the ecosystem-specific file (e.g., `java.go`, `python.go`)
2. Add curated mappings to `candidate_by_package_type.go`

**Key files:**
- `generate.go` - Main generation logic
- `dictionary/` - Dictionary generator and embedded index
- `candidate_by_package_type.go` - Ecosystem-specific candidates
- `filter.go` - Filtering rules
syft/pkg/cataloger/internal/cpegenerate/candidate_for_pe.go (new file, 63 lines)
@@ -0,0 +1,63 @@
package cpegenerate

import (
	"strings"

	"github.com/anchore/syft/syft/pkg"
)

// candidateVendorsForPE returns vendor candidates for PE (BinaryPkg) packages based on common metadata hints.
// Specifically, normalize Ghostscript binaries to vendor "artifex" when detected.
func candidateVendorsForPE(p pkg.Package) fieldCandidateSet {
	candidates := newFieldCandidateSet()

	meta, ok := p.Metadata.(pkg.PEBinary)
	if !ok {
		return candidates
	}

	var company, product, fileDesc string
	for _, kv := range meta.VersionResources {
		switch strings.ToLower(kv.Key) {
		case "companyname":
			company = strings.ToLower(kv.Value)
		case "productname":
			product = strings.ToLower(kv.Value)
		case "filedescription":
			fileDesc = strings.ToLower(kv.Value)
		}
	}

	if strings.Contains(product, "ghostscript") || strings.Contains(fileDesc, "ghostscript") || strings.Contains(company, "artifex") {
		candidates.addValue("artifex")
	}

	return candidates
}

// candidateProductsForPE returns product candidates for PE (BinaryPkg) packages based on common metadata hints.
// Specifically, normalize Ghostscript binaries to product "ghostscript" when detected.
func candidateProductsForPE(p pkg.Package) fieldCandidateSet {
	candidates := newFieldCandidateSet()

	meta, ok := p.Metadata.(pkg.PEBinary)
	if !ok {
		return candidates
	}

	var product, fileDesc string
	for _, kv := range meta.VersionResources {
		switch strings.ToLower(kv.Key) {
		case "productname":
			product = strings.ToLower(kv.Value)
		case "filedescription":
			fileDesc = strings.ToLower(kv.Value)
		}
	}

	if strings.Contains(product, "ghostscript") || strings.Contains(fileDesc, "ghostscript") {
		candidates.addValue("ghostscript")
	}

	return candidates
}
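A hypothetical in-package test sketch for these helpers. `pkg.PEBinary` and its `VersionResources` key/value pairs are taken from the file above, but the `pkg.KeyValues` type name and the `values()` accessor on the unexported `fieldCandidateSet` are assumptions here, not confirmed API:

```go
package cpegenerate

import (
	"testing"

	"github.com/anchore/syft/syft/pkg"
)

func Test_candidateProductsForPE_ghostscript(t *testing.T) {
	p := pkg.Package{
		Type: pkg.BinaryPkg,
		Metadata: pkg.PEBinary{
			// pkg.KeyValues is an assumed type name for the version-resource pairs
			VersionResources: pkg.KeyValues{
				{Key: "ProductName", Value: "GPL Ghostscript"},
			},
		},
	}

	// matching is case-insensitive, so "GPL Ghostscript" should yield "ghostscript";
	// values() stands in for however fieldCandidateSet exposes its contents.
	candidates := candidateProductsForPE(p)
	if len(candidates.values()) == 0 {
		t.Fatal("expected a ghostscript product candidate")
	}
}
```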
@@ -653,6 +653,9 @@
 "dbCharts": [
 "cpe:2.3:a:jenkins:dbcharts:*:*:*:*:*:jenkins:*:*"
 ],
+"deadmanssnitch": [
+"cpe:2.3:a:jenkins:dead_man\\'s_snitch:*:*:*:*:*:jenkins:*:*"
+],
 "debian-package-builder": [
 "cpe:2.3:a:jenkins:debian_package_builder:*:*:*:*:*:jenkins:*:*"
 ],
@@ -1360,6 +1363,9 @@
 "oic-auth": [
 "cpe:2.3:a:jenkins:openid_connect_authentication:*:*:*:*:*:jenkins:*:*"
 ],
+"oidc-provider": [
+"cpe:2.3:a:jenkins:openid_connect_provider:*:*:*:*:*:jenkins:*:*"
+],
 "ontrack": [
 "cpe:2.3:a:jenkins:ontrack:*:*:*:*:*:jenkins:*:*"
 ],
@@ -1531,6 +1537,9 @@
 "qualys-pc": [
 "cpe:2.3:a:qualys:policy_compliance:*:*:*:*:*:jenkins:*:*"
 ],
+"qualys-was": [
+"cpe:2.3:a:qualys:web_application_screening:*:*:*:*:*:jenkins:*:*"
+],
 "quayio-trigger": [
 "cpe:2.3:a:jenkins:quay.io_trigger:*:*:*:*:*:jenkins:*:*"
 ],
@@ -2164,6 +2173,9 @@
 "@azure/ms-rest-nodeauth": [
 "cpe:2.3:a:microsoft:ms-rest-nodeauth:*:*:*:*:*:node.js:*:*"
 ],
+"@backstage/backend-common": [
+"cpe:2.3:a:linuxfoundation:backstage_backend-common:*:*:*:*:*:node.js:*:*"
+],
 "@backstage/plugin-auth-backend": [
 "cpe:2.3:a:linuxfoundation:auth_backend:*:*:*:*:*:node.js:*:*"
 ],
@@ -3035,6 +3047,9 @@
 "electron-packager": [
 "cpe:2.3:a:electron-packager_project:electron-packager:*:*:*:*:*:node.js:*:*"
 ],
+"electron-pdf": [
+"cpe:2.3:a:fraserxu:electron-pdf:*:*:*:*:*:node.js:*:*"
+],
 "elliptic": [
 "cpe:2.3:a:indutny:elliptic:*:*:*:*:*:node.js:*:*"
 ],
@@ -5284,6 +5299,9 @@
 "ts-process-promises": [
 "cpe:2.3:a:ts-process-promises_project:ts-process-promises:*:*:*:*:*:node.js:*:*"
 ],
+"tsup": [
+"cpe:2.3:a:egoist:tsup:*:*:*:*:*:node.js:*:*"
+],
 "ua-parser": [
 "cpe:2.3:a:ua-parser_project:ua-parser:*:*:*:*:*:node.js:*:*"
 ],
@@ -5552,6 +5570,9 @@
 "alfnru/password_recovery": [
 "cpe:2.3:a:password_recovery_project:password_recovery:*:*:*:*:*:roundcube:*:*"
 ],
+"couleurcitron/tarteaucitron-wp": [
+"cpe:2.3:a:couleurcitron:tarteaucitron-wp:*:*:*:*:*:wordpress:*:*"
+],
 "dev-lancer/minecraft-motd-parser": [
 "cpe:2.3:a:jgniecki:minecraft_motd_parser:*:*:*:*:*:*:*:*"
 ],
@@ -7259,6 +7280,9 @@
 "ab-press-optimizer-lite": [
 "cpe:2.3:a:abpressoptimizer:ab_press_optimizer:*:*:*:*:*:wordpress:*:*"
 ],
+"abitgone-commentsafe": [
+"cpe:2.3:a:abitgone:abitgone_commentsafe:*:*:*:*:*:wordpress:*:*"
+],
 "about-me": [
 "cpe:2.3:a:about-me_project:about-me:*:*:*:*:*:wordpress:*:*"
 ],
@@ -7605,6 +7629,9 @@
 "advanced-backgrounds": [
 "cpe:2.3:a:wpbackgrounds:advanced_wordpress_backgrounds:*:*:*:*:*:wordpress:*:*"
 ],
+"advanced-blocks-pro": [
+"cpe:2.3:a:essamamdani:advanced_blocks_pro:*:*:*:*:*:wordpress:*:*"
+],
 "advanced-booking-calendar": [
 "cpe:2.3:a:elbtide:advanced_booking_calendar:*:*:*:*:*:wordpress:*:*"
 ],
@@ -7702,6 +7729,9 @@
 "affiliatebooster-blocks": [
 "cpe:2.3:a:affiliatebooster:affiliate_booster:*:*:*:*:*:wordpress:*:*"
 ],
+"affiliateimportereb": [
+"cpe:2.3:a:cr1000:affiliateimportereb:*:*:*:*:*:wordpress:*:*"
+],
 "affiliates-manager": [
 "cpe:2.3:a:wpaffiliatemanager:affiliates_manager:*:*:*:*:*:wordpress:*:*"
 ],
@@ -8408,6 +8438,9 @@
 "cpe:2.3:a:dotstore:woocommerce_category_banner_management:*:*:*:*:*:wordpress:*:*",
 "cpe:2.3:a:multidots:banner_management_for_woocommerce:*:*:*:*:*:wordpress:*:*"
 ],
+"bannerlid": [
+"cpe:2.3:a:web_lid:bannerlid:*:*:*:*:*:wordpress:*:*"
+],
 "barcode-scanner-lite-pos-to-manage-products-inventory-and-orders": [
 "cpe:2.3:a:ukrsolution:barcode_scanner_and_inventory_manager:*:*:*:*:*:wordpress:*:*"
 ],
@@ -8516,6 +8549,9 @@
 "better-elementor-addons": [
 "cpe:2.3:a:kitforest:better_elementor_addons:*:*:*:*:*:wordpress:*:*"
 ],
+"better-follow-button-for-jetpack": [
+"cpe:2.3:a:antonpug:better_flow_button_for_jetpack:*:*:*:*:*:wordpress:*:*"
+],
 "better-font-awesome": [
 "cpe:2.3:a:better_font_awesome_project:better_font_awesome:*:*:*:*:*:wordpress:*:*"
 ],
@@ -8770,6 +8806,9 @@
 "bp-cover": [
 "cpe:2.3:a:buddypress_cover_project:buddypress_cover:*:*:*:*:*:wordpress:*:*"
 ],
+"bp-email-assign-templates": [
+"cpe:2.3:a:shanebp:bp_email_assign_templates:*:*:*:*:*:wordpress:*:*"
+],
 "bp-profile-search": [
 "cpe:2.3:a:dontdream:bp_profile_search:*:*:*:*:*:wordpress:*:*"
 ],
@@ -9240,6 +9279,9 @@
 "chained-quiz": [
 "cpe:2.3:a:kibokolabs:chained_quiz:*:*:*:*:*:wordpress:*:*"
 ],
+"chalet-montagne-com-tools": [
+"cpe:2.3:a:alpium:chalet-montagne.com_tools:*:*:*:*:*:wordpress:*:*"
+],
 "chamber-dashboard-business-directory": [
 "cpe:2.3:a:chamber_dashboard_business_directory_project:chamber_dashboard_business_directory:*:*:*:*:*:wordpress:*:*"
 ],
@@ -9252,6 +9294,9 @@
 "change-memory-limit": [
 "cpe:2.3:a:simon99:change_memory_limit:*:*:*:*:*:wordpress:*:*"
 ],
+"change-table-prefix": [
+"cpe:2.3:a:youngtechleads:change_table_prefix:*:*:*:*:*:wordpress:*:*"
+],
 "change-uploaded-file-permissions": [
 "cpe:2.3:a:change_uploaded_file_permissions_project:change_uploaded_file_permissions:*:*:*:*:*:wordpress:*:*"
 ],
@@ -9550,6 +9595,9 @@
 "commenttweets": [
 "cpe:2.3:a:theresehansen:commenttweets:*:*:*:*:*:wordpress:*:*"
 ],
+"common-tools-for-site": [
+"cpe:2.3:a:chetanvaghela:common_tools_for_site:*:*:*:*:*:wordpress:*:*"
+],
 "commonsbooking": [
 "cpe:2.3:a:wielebenwir:commonsbooking:*:*:*:*:*:wordpress:*:*"
 ],
@@ -10041,6 +10089,9 @@
 "csv-importer": [
 "cpe:2.3:a:deniskobozev:csv_importer:*:*:*:*:*:wordpress:*:*"
 ],
+"csv-mass-importer": [
+"cpe:2.3:a:aleapp:csv_mass_importer:*:*:*:*:*:wordpress:*:*"
+],
 "ct-commerce": [
 "cpe:2.3:a:ujwolbastakoti:ct_commerce:*:*:*:*:*:wordpress:*:*"
 ],
@@ -10798,6 +10849,9 @@
 "easy-svg": [
 "cpe:2.3:a:benjaminzekavica:easy_svg_support:*:*:*:*:*:wordpress:*:*"
 ],
+"easy-svg-upload": [
+"cpe:2.3:a:delowerhossain:easy_svg_upload:*:*:*:*:*:wordpress:*:*"
+],
 "easy-table": [
 "cpe:2.3:a:easy_table_project:easy_table:*:*:*:*:*:wordpress:*:*"
 ],
@@ -11286,6 +11340,9 @@
 "exit-intent-popups-by-optimonk": [
 "cpe:2.3:a:optimonk:optimonk\\:popups\\,_personalization_\\\u0026_a\\/b_testing:*:*:*:*:*:wordpress:*:*"
 ],
+"exit-notifier": [
+"cpe:2.3:a:cvstech:exit_notifier:*:*:*:*:*:wordpress:*:*"
+],
 "exmage-wp-image-links": [
 "cpe:2.3:a:villatheme:exmage:*:*:*:*:*:wordpress:*:*"
 ],
@@ -11325,6 +11382,9 @@
 "exquisite-paypal-donation": [
 "cpe:2.3:a:exquisite_paypal_donation_project:exquisite_paypal_donation:*:*:*:*:*:wordpress:*:*"
 ],
+"extended-search-plugin": [
+"cpe:2.3:a:jakesnyder:enhanced_search_box:*:*:*:*:*:wordpress:*:*"
+],
 "extensions-for-cf7": [
 "cpe:2.3:a:hasthemes:extensions_for_cf7:*:*:*:*:*:wordpress:*:*"
 ],
@@ -11571,6 +11631,7 @@
 "cpe:2.3:a:five_minute_webshop_project:five_minute_webshop:*:*:*:*:*:wordpress:*:*"
 ],
 "fl3r-feelbox": [
+"cpe:2.3:a:armandofiore:fl3r_feelbox:*:*:*:*:*:wordpress:*:*",
 "cpe:2.3:a:fl3r-feelbox_project:fl3r-feelbox:*:*:*:*:*:wordpress:*:*"
 ],
 "flash-album-gallery": [
@@ -12235,6 +12296,9 @@
 "google-sitemap-plugin": [
 "cpe:2.3:a:bestwebsoft:google_sitemap:*:*:*:*:*:wordpress:*:*"
 ],
+"google-website-translator": [
+"cpe:2.3:a:prisna:google_website_translator:*:*:*:*:*:wordpress:*:*"
+],
 "googleanalytics": [
 "cpe:2.3:a:sharethis:dashboard_for_google_analytics:*:*:*:*:*:wordpress:*:*"
 ],
@@ -12634,6 +12698,9 @@
 "hunk-companion": [
 "cpe:2.3:a:themehunk:hunk_companion:*:*:*:*:*:wordpress:*:*"
 ],
+"hurrytimer": [
+"cpe:2.3:a:nabillemsieh:hurrytimer:*:*:*:*:*:wordpress:*:*"
+],
 "hyphenator": [
 "cpe:2.3:a:benedictb\\/maciejgryniuk:hyphenator:*:*:*:*:*:wordpress:*:*"
 ],
@@ -12907,6 +12974,9 @@
 "cpe:2.3:a:cm-wp:woody_code_snippets:*:*:*:*:*:wordpress:*:*",
 "cpe:2.3:a:webcraftic:woody_ad_snippets:*:*:*:*:*:wordpress:*:*"
 ],
+"insert-php-code-snippet": [
+"cpe:2.3:a:f1logic:insert_php_code_snippet:*:*:*:*:*:wordpress:*:*"
+],
 "insight-core": [
 "cpe:2.3:a:thememove:insight_core:*:*:*:*:*:wordpress:*:*"
 ],
@@ -13011,6 +13081,9 @@
 "ip-blacklist-cloud": [
 "cpe:2.3:a:ip_blacklist_cloud_project:ip_blacklist_cloud:*:*:*:*:*:wordpress:*:*"
 ],
+"ip-vault-wp-firewall": [
+"cpe:2.3:a:youtag:two-factor_authentication:*:*:*:*:*:wordpress:*:*"
+],
 "ip2location-country-blocker": [
 "cpe:2.3:a:ip2location:country_blocker:*:*:*:*:*:wordpress:*:*"
 ],
@@ -13557,6 +13630,9 @@
 "list-category-posts": [
 "cpe:2.3:a:fernandobriano:list_category_posts:*:*:*:*:*:wordpress:*:*"
 ],
+"list-children": [
+"cpe:2.3:a:sizeable:list_children:*:*:*:*:*:wordpress:*:*"
+],
 "list-last-changes": [
 "cpe:2.3:a:rolandbaer:list_last_changes:*:*:*:*:*:wordpress:*:*"
 ],
@@ -13854,6 +13930,9 @@
 "manual-image-crop": [
 "cpe:2.3:a:manual_image_crop_project:manual_image_crop:*:*:*:*:*:wordpress:*:*"
 ],
+"mapfig-studio": [
+"cpe:2.3:a:acugis:mapfig_studio:*:*:*:*:*:wordpress:*:*"
+],
 "mapping-multiple-urls-redirect-same-page": [
 "cpe:2.3:a:mapping_multiple_urls_redirect_same_page_project:mapping_multiple_urls_redirect_same_page:*:*:*:*:*:wordpress:*:*"
 ],
@@ -14237,6 +14316,9 @@
 "monetize": [
 "cpe:2.3:a:monetize_project:monetize:*:*:*:*:*:wordpress:*:*"
 ],
+"monitor-chat": [
+"cpe:2.3:a:edwardstoever:monitor.chat:*:*:*:*:*:wordpress:*:*"
+],
 "month-name-translation-benaceur": [
 "cpe:2.3:a:benaceur-php:month_name_translation_benaceur:*:*:*:*:*:wordpress:*:*"
 ],
@@ -14306,6 +14388,9 @@
 "mq-woocommerce-products-price-bulk-edit": [
 "cpe:2.3:a:mq-woocommerce-products-price-bulk-edit_project:mq-woocommerce-products-price-bulk-edit:*:*:*:*:*:wordpress:*:*"
 ],
+"ms-registration": [
+"cpe:2.3:a:alphaefficiencyteam:custom_login_and_registration:*:*:*:*:*:wordpress:*:*"
+],
 "ms-reviews": [
 "cpe:2.3:a:ms-reviews_project:ms-reviews:*:*:*:*:*:wordpress:*:*"
 ],
@@ -14438,7 +14523,7 @@
 "cpe:2.3:a:stormhillmedia:mybook_table_bookstore:*:*:*:*:*:wordpress:*:*"
 ],
 "mycred": [
-"cpe:2.3:a:mycred:mycred:*:*:*:*:*:wordpress:*:*"
+"cpe:2.3:a:wpexperts:mycred:*:*:*:*:*:wordpress:*:*"
 ],
 "mycryptocheckout": [
 "cpe:2.3:a:plainviewplugins:mycryptocheckout:*:*:*:*:*:wordpress:*:*"
@@ -14625,12 +14710,18 @@
 "ninjafirewall": [
 "cpe:2.3:a:nintechnet:ninjafirewall:*:*:*:*:*:wordpress:*:*"
 ],
+"ninjateam-telegram": [
+"cpe:2.3:a:ninjateam:chat_for_telegram:*:*:*:*:*:wordpress:*:*"
+],
 "nirweb-support": [
 "cpe:2.3:a:nirweb:nirweb_support:*:*:*:*:*:wordpress:*:*"
 ],
 "nitropack": [
 "cpe:2.3:a:nitropack:nitropack:*:*:*:*:*:wordpress:*:*"
 ],
+"nix-anti-spam-light": [
+"cpe:2.3:a:nixsolutions:nix_anti-spam_light:*:*:*:*:*:wordpress:*:*"
+],
 "nktagcloud": [
 "cpe:2.3:a:better_tag_cloud_project:better_tag_cloud:*:*:*:*:*:wordpress:*:*"
 ],
@@ -15186,6 +15277,9 @@
 "cpe:2.3:a:greentreelabs:gallery_photoblocks:*:*:*:*:*:wordpress:*:*",
 "cpe:2.3:a:wpchill:gallery_photoblocks:*:*:*:*:*:wordpress:*:*"
 ],
+"photokit": [
+"cpe:2.3:a:jackzhu:photokit:*:*:*:*:*:wordpress:*:*"
+],
 "photoshow": [
 "cpe:2.3:a:codepeople:smart_image_gallery:*:*:*:*:*:wordpress:*:*"
 ],
@@ -15511,6 +15605,9 @@
 "postman-smtp": [
 "cpe:2.3:a:postman-smtp_project:postman-smtp:*:*:*:*:*:wordpress:*:*"
 ],
+"postmash": [
+"cpe:2.3:a:jmash:postmash:*:*:*:*:*:wordpress:*:*"
+],
 "postmatic": [
 "cpe:2.3:a:gopostmatic:replyable:*:*:*:*:*:wordpress:*:*"
 ],
@@ -15761,6 +15858,9 @@
 "pure-chat": [
 "cpe:2.3:a:purechat:pure_chat:*:*:*:*:*:*:*:*"
 ],
+"pure-css-circle-progress-bar": [
+"cpe:2.3:a:shafayat:pure_css_circle_progress_bar:*:*:*:*:*:wordpress:*:*"
+],
 "purple-xmls-google-product-feed-for-woocommerce": [
 "cpe:2.3:a:dpl:product_feed_on_woocommerce_for_google\\,_awin\\,_shareasale\\,_bing\\,_and_more:*:*:*:*:*:wordpress:*:*"
 ],
@@ -15964,6 +16064,9 @@
 "react-webcam": [
 "cpe:2.3:a:react_webcam_project:react_webcam:*:*:*:*:*:wordpress:*:*"
 ],
+"reaction-buttons": [
+"cpe:2.3:a:jakob42:reaction_buttons:*:*:*:*:*:wordpress:*:*"
+],
 "read-and-understood": [
 "cpe:2.3:a:read_and_understood_project:read_and_understood:*:*:*:*:*:wordpress:*:*"
 ],
@@ -16124,6 +16227,9 @@
 "reservation-studio-widget": [
 "cpe:2.3:a:pvmg:reservation.studio:*:*:*:*:*:wordpress:*:*"
 ],
+"reset": [
+"cpe:2.3:a:smartzminds:reset:*:*:*:*:*:wordpress:*:*"
+],
 "resize-at-upload-plus": [
 "cpe:2.3:a:resize_at_upload_plus_project:resize_at_upload_plus:*:*:*:*:*:wordpress:*:*"
 ],
@@ -16527,6 +16633,9 @@
 "sellkit": [
 "cpe:2.3:a:artbees:sellkit:*:*:*:*:*:wordpress:*:*"
 ],
+"send-email-only-on-reply-to-my-comment": [
+"cpe:2.3:a:yasirwazir:send_email_only_on_reply_to_my_comment:*:*:*:*:*:wordpress:*:*"
+],
 "send-emails-with-mandrill": [
 "cpe:2.3:a:millermedia:mandrill:*:*:*:*:*:wordpress:*:*"
 ],
@@ -17092,6 +17201,9 @@
 "site-editor": [
 "cpe:2.3:a:siteeditor:site_editor:*:*:*:*:*:wordpress:*:*"
 ],
+"site-mailer": [
+"cpe:2.3:a:elementor:site_mailer:*:*:*:*:*:wordpress:*:*"
+],
 "site-offline": [
 "cpe:2.3:a:freehtmldesigns:site_offline:*:*:*:*:*:wordpress:*:*"
 ],
@@ -17780,6 +17892,9 @@
 "svg-support": [
 "cpe:2.3:a:benbodhi:svg_support:*:*:*:*:*:wordpress:*:*"
 ],
+"svg-uploads-support": [
+"cpe:2.3:a:ablyperu:svg_uploads_support:*:*:*:*:*:wordpress:*:*"
+],
 "svg-vector-icon-plugin": [
 "cpe:2.3:a:wp_svg_icons_project:wp_svg_icons:*:*:*:*:*:wordpress:*:*"
 ],
@@ -17859,6 +17974,7 @@
 "cpe:2.3:a:tainacan:tainacan:*:*:*:*:*:wordpress:*:*"
 ],
 "tarteaucitronjs": [
+"cpe:2.3:a:amauri:tarteaucitron.io:*:*:*:*:*:wordpress:*:*",
 "cpe:2.3:a:tarteaucitron.js_-_cookies_legislation_\\\u0026_gdpr_project:tarteaucitron.js_-_cookies_legislation_\\\u0026_gdpr:*:*:*:*:*:wordpress:*:*"
 ],
 "taskbuilder": [
@@ -18106,6 +18222,9 @@
 "timeline-widget-addon-for-elementor": [
 "cpe:2.3:a:coolplugins:timeline_widget_for_elementor:*:*:*:*:*:wordpress:*:*"
 ],
+"timer-countdown": [
+"cpe:2.3:a:yaidier:countdown_timer:*:*:*:*:*:wordpress:*:*"
+],
 "timesheet": [
 "cpe:2.3:a:bestwebsoft:timesheet:*:*:*:*:*:wordpress:*:*"
 ],
@@ -18249,9 +18368,15 @@
 "tripetto": [
 "cpe:2.3:a:tripetto:tripetto:*:*:*:*:*:wordpress:*:*"
 ],
+"tripplan": [
+"cpe:2.3:a:checklist:trip_plan:*:*:*:*:*:wordpress:*:*"
+],
 "truebooker-appointment-booking": [
 "cpe:2.3:a:themetechmount:truebooker:*:*:*:*:*:wordpress:*:*"
 ],
+"trx_addons": [
+"cpe:2.3:a:themerex:addons:*:*:*:*:*:wordpress:*:*"
+],
 "ts-webfonts-for-conoha": [
 "cpe:2.3:a:gmo:typesquare_webfonts_for_conoha:*:*:*:*:*:wordpress:*:*"
 ],
@@ -18457,9 +18582,15 @@
 "ultimate-weather-plugin": [
 "cpe:2.3:a:ultimate-weather_project:ultimate-weather:*:*:*:*:*:wordpress:*:*"
 ],
+"ultimate-woocommerce-auction-pro": [
+"cpe:2.3:a:auctionplugin:ultimate_wordpress_auction_plugin:*:*:*:*:pro:wordpress:*:*"
+],
 "ultimate-wp-query-search-filter": [
 "cpe:2.3:a:ultimate_wp_query_search_filter_project:ultimate_wp_query_search_filter:*:*:*:*:*:wordpress:*:*"
 ],
+"ultimate-youtube-video-player": [
+"cpe:2.3:a:codelizar:ultimate_youtube_video_\\\u0026_shorts_player_with_vimeo:*:*:*:*:*:wordpress:*:*"
+],
 "ultra-companion": [
 "cpe:2.3:a:wpoperation:ultra_companion:*:*:*:*:*:wordpress:*:*"
 ],
@@ -19198,6 +19329,9 @@
 "woo-esto": [
 "cpe:2.3:a:rebing:woocommerce_esto:*:*:*:*:*:wordpress:*:*"
 ],
+"woo-exfood": [
+"cpe:2.3:a:exthemes:woocommerce_food:*:*:*:*:*:wordpress:*:*"
+],
 "woo-floating-cart-lite": [
 "cpe:2.3:a:xplodedthemes:xt_floating_cart_for_woocommerce:*:*:*:*:*:wordpress:*:*"
 ],
@@ -19267,6 +19401,9 @@
 "woo-shipping-dpd-baltic": [
 "cpe:2.3:a:dpdgroup:woocommerce_shipping:*:*:*:*:*:wordpress:*:*"
 ],
+"woo-slider-pro-drag-drop-slider-builder-for-woocommerce": [
+"cpe:2.3:a:binarycarpenter:woo_slider_pro:*:*:*:*:*:wordpress:*:*"
+],
 "woo-smart-compare": [
 "cpe:2.3:a:wpclever:wpc_smart_compare_for_woocommerce:*:*:*:*:*:wordpress:*:*"
 ],
@@ -19820,6 +19957,9 @@
 "cpe:2.3:a:kigurumi:csv_exporter:*:*:*:*:*:wordpress:*:*",
 "cpe:2.3:a:wp_csv_exporter_project:wp_csv_exporter:*:*:*:*:*:wordpress:*:*"
 ],
+"wp-curriculo-vitae": [
+"cpe:2.3:a:williamluis:wp-curriculo_vitae_free:*:*:*:*:*:wordpress:*:*"
+],
 "wp-custom-admin-interface": [
 "cpe:2.3:a:wp_custom_admin_interface_project:wp_custom_admin_interface:*:*:*:*:*:*:*:*"
 ],
@@ -19891,7 +20031,8 @@
 "cpe:2.3:a:display_users_project:display_users:*:*:*:*:*:wordpress:*:*"
 ],
 "wp-docs": [
-"cpe:2.3:a:androidbubble:wp_docs:*:*:*:*:*:wordpress:*:*"
+"cpe:2.3:a:androidbubble:wp_docs:*:*:*:*:*:wordpress:*:*",
+"cpe:2.3:a:fahadmahmood:wp_docs:*:*:*:*:*:wordpress:*:*"
 ],
 "wp-domain-redirect": [
 "cpe:2.3:a:wp_domain_redirect_project:wp_domain_redirect:*:*:*:*:*:wordpress:*:*"
@@ -20795,6 +20936,9 @@
 "wp-table-builder": [
 "cpe:2.3:a:dotcamp:wp_table_builder:*:*:*:*:*:wordpress:*:*"
 ],
+"wp-table-manager": [
+"cpe:2.3:a:joomunited:wp_table_manager:*:*:*:*:*:wordpress:*:*"
+],
 "wp-table-reloaded": [
 "cpe:2.3:a:wp-table_reloaded_project:wp-table_reloaded:*:*:*:*:*:wordpress:*:*"
|
||||||
],
|
],
|
||||||
@ -21139,6 +21283,9 @@
|
|||||||
"wppizza": [
|
"wppizza": [
|
||||||
"cpe:2.3:a:wp-pizza:wppizza:*:*:*:*:*:wordpress:*:*"
|
"cpe:2.3:a:wp-pizza:wppizza:*:*:*:*:*:wordpress:*:*"
|
||||||
],
|
],
|
||||||
|
"wpquiz": [
|
||||||
|
"cpe:2.3:a:bauc:wpquiz:*:*:*:*:*:wordpress:*:*"
|
||||||
|
],
|
||||||
"wprequal": [
|
"wprequal": [
|
||||||
"cpe:2.3:a:kevinbrent:wprequal:*:*:*:*:*:wordpress:*:*"
|
"cpe:2.3:a:kevinbrent:wprequal:*:*:*:*:*:wordpress:*:*"
|
||||||
],
|
],
|
||||||
@ -21169,6 +21316,9 @@
|
|||||||
"wpsolr-search-engine": [
|
"wpsolr-search-engine": [
|
||||||
"cpe:2.3:a:wpsolr:wpsolr-search-engine:*:*:*:*:*:wordpress:*:*"
|
"cpe:2.3:a:wpsolr:wpsolr-search-engine:*:*:*:*:*:wordpress:*:*"
|
||||||
],
|
],
|
||||||
|
"wpstickybar-sticky-bar-sticky-header": [
|
||||||
|
"cpe:2.3:a:a17lab:wpstickybar:*:*:*:*:*:wordpress:*:*"
|
||||||
|
],
|
||||||
"wpstream": [
|
"wpstream": [
|
||||||
"cpe:2.3:a:wpstream:wpstream:*:*:*:*:*:wordpress:*:*"
|
"cpe:2.3:a:wpstream:wpstream:*:*:*:*:*:wordpress:*:*"
|
||||||
],
|
],
|
||||||
@ -21276,6 +21426,9 @@
|
|||||||
"xtremelocator": [
|
"xtremelocator": [
|
||||||
"cpe:2.3:a:xtremelocator:xtremelocator:*:*:*:*:*:wordpress:*:*"
|
"cpe:2.3:a:xtremelocator:xtremelocator:*:*:*:*:*:wordpress:*:*"
|
||||||
],
|
],
|
||||||
|
"xv-random-quotes": [
|
||||||
|
"cpe:2.3:a:xavivars:xv_random_quotes:*:*:*:*:*:wordpress:*:*"
|
||||||
|
],
|
||||||
"yabp": [
|
"yabp": [
|
||||||
"cpe:2.3:a:tromit:yabp:*:*:*:*:*:wordpress:*:*"
|
"cpe:2.3:a:tromit:yabp:*:*:*:*:*:wordpress:*:*"
|
||||||
],
|
],
|
||||||
@ -21362,6 +21515,9 @@
|
|||||||
"yotpo-social-reviews-for-woocommerce": [
|
"yotpo-social-reviews-for-woocommerce": [
|
||||||
"cpe:2.3:a:yotpo:yotpo:*:*:*:*:*:wordpress:*:*"
|
"cpe:2.3:a:yotpo:yotpo:*:*:*:*:*:wordpress:*:*"
|
||||||
],
|
],
|
||||||
|
"yotuwp-easy-youtube-embed": [
|
||||||
|
"cpe:2.3:a:yotuwp:video_gallery:*:*:*:*:*:wordpress:*:*"
|
||||||
|
],
|
||||||
"yourchannel": [
|
"yourchannel": [
|
||||||
"cpe:2.3:a:plugin:yourchannel:*:*:*:*:*:wordpress:*:*"
|
"cpe:2.3:a:plugin:yourchannel:*:*:*:*:*:wordpress:*:*"
|
||||||
],
|
],
|
||||||
@ -21782,6 +21938,9 @@
|
|||||||
"pressmart": [
|
"pressmart": [
|
||||||
"cpe:2.3:a:presslayouts:pressmart:*:*:*:*:*:wordpress:*:*"
|
"cpe:2.3:a:presslayouts:pressmart:*:*:*:*:*:wordpress:*:*"
|
||||||
],
|
],
|
||||||
|
"puzzles": [
|
||||||
|
"cpe:2.3:a:themerex:puzzles:*:*:*:*:*:wordpress:*:*"
|
||||||
|
],
|
||||||
"regina-lite": [
|
"regina-lite": [
|
||||||
"cpe:2.3:a:machothemes:regina_lite:*:*:*:*:*:wordpress:*:*"
|
"cpe:2.3:a:machothemes:regina_lite:*:*:*:*:*:wordpress:*:*"
|
||||||
],
|
],
|
||||||
|
|||||||
@ -1,3 +0,0 @@
package dictionary

//go:generate go run ./index-generator/ -o data/cpe-index.json
6
syft/pkg/cataloger/internal/cpegenerate/dictionary/index-generator/.gitignore
vendored
Normal file

@ -0,0 +1,6 @@
# ORAS cache directory - raw CPE data from NVD API
.cpe-cache/

# Build artifacts
index-generator
.tmp-*
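For orientation, the ignored .cpe-cache/ directory is populated by the cache manager added in the next file. Its layout follows the filenames used there (metadata.json, initial.json, and one file per modified month); the month files shown here are illustrative examples, not taken from this diff:

.cpe-cache/
  metadata.json    # CacheMetadata: last full refresh, resume index, per-month batch records
  initial.json     # full-refresh snapshot of all NVD products
  2024-11.json     # products last modified in 2024-11
  2024-12.json     # products last modified in 2024-12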
@ -0,0 +1,370 @@
package main

import (
	"encoding/json"
	"fmt"
	"os"
	"path/filepath"
	"time"
)

const cacheDir = ".cpe-cache"

// IncrementMetadata tracks a single fetch increment for a monthly batch
type IncrementMetadata struct {
	FetchedAt        time.Time `json:"fetchedAt"`
	LastModStartDate time.Time `json:"lastModStartDate"`
	LastModEndDate   time.Time `json:"lastModEndDate"`
	Products         int       `json:"products"`
	StartIndex       int       `json:"startIndex"` // API pagination start index
	EndIndex         int       `json:"endIndex"`   // API pagination end index (last fetched)
}

// MonthlyBatchMetadata tracks all increments for a specific month
type MonthlyBatchMetadata struct {
	Complete      bool                `json:"complete"`
	TotalProducts int                 `json:"totalProducts"`
	Increments    []IncrementMetadata `json:"increments"`
}

// CacheMetadata tracks the state of the CPE cache using monthly time-based organization
type CacheMetadata struct {
	LastFullRefresh time.Time                        `json:"lastFullRefresh"`
	LastStartIndex  int                              `json:"lastStartIndex"` // last successfully processed startIndex for resume
	TotalProducts   int                              `json:"totalProducts"`
	MonthlyBatches  map[string]*MonthlyBatchMetadata `json:"monthlyBatches"` // key is "YYYY-MM"
}
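Given the json struct tags above, a metadata.json written by SaveMetadata (below) would look roughly like this; the values are illustrative, borrowed from the test fixtures later in this diff, not real cache state:

{
  "lastFullRefresh": "2024-11-01T00:00:00Z",
  "lastStartIndex": 4000,
  "totalProducts": 1500,
  "monthlyBatches": {
    "2024-11": {
      "complete": true,
      "totalProducts": 1000,
      "increments": [
        {
          "fetchedAt": "2024-11-15T10:00:00Z",
          "lastModStartDate": "2024-11-14T10:00:00Z",
          "lastModEndDate": "2024-11-15T10:00:00Z",
          "products": 1000,
          "startIndex": 0,
          "endIndex": 2000
        }
      ]
    }
  }
}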
// CacheManager handles local caching of CPE data
type CacheManager struct {
	cacheDir string
}

// NewCacheManager creates a new cache manager
func NewCacheManager() *CacheManager {
	return &CacheManager{
		cacheDir: cacheDir,
	}
}

// EnsureCacheDir ensures the cache directory exists
func (m *CacheManager) EnsureCacheDir() error {
	if err := os.MkdirAll(m.cacheDir, 0755); err != nil {
		return fmt.Errorf("failed to create cache directory: %w", err)
	}
	return nil
}

// LoadMetadata loads the cache metadata
func (m *CacheManager) LoadMetadata() (*CacheMetadata, error) {
	metadataPath := filepath.Join(m.cacheDir, "metadata.json")

	// check if metadata file exists
	if _, err := os.Stat(metadataPath); os.IsNotExist(err) {
		// return empty metadata for first run
		return &CacheMetadata{
			LastFullRefresh: time.Time{},
			TotalProducts:   0,
			MonthlyBatches:  make(map[string]*MonthlyBatchMetadata),
		}, nil
	}

	data, err := os.ReadFile(metadataPath)
	if err != nil {
		return nil, fmt.Errorf("failed to read metadata: %w", err)
	}

	var metadata CacheMetadata
	if err := json.Unmarshal(data, &metadata); err != nil {
		return nil, fmt.Errorf("failed to unmarshal metadata: %w", err)
	}

	// ensure MonthlyBatches map is initialized
	if metadata.MonthlyBatches == nil {
		metadata.MonthlyBatches = make(map[string]*MonthlyBatchMetadata)
	}

	return &metadata, nil
}

// SaveMetadata saves the cache metadata
func (m *CacheManager) SaveMetadata(metadata *CacheMetadata) error {
	if err := m.EnsureCacheDir(); err != nil {
		return err
	}

	metadataPath := filepath.Join(m.cacheDir, "metadata.json")

	data, err := json.MarshalIndent(metadata, "", "  ")
	if err != nil {
		return fmt.Errorf("failed to marshal metadata: %w", err)
	}

	if err := os.WriteFile(metadataPath, data, 0600); err != nil {
		return fmt.Errorf("failed to write metadata: %w", err)
	}

	return nil
}

// SaveProductsToMonthlyFile saves products to a monthly file (initial.json or YYYY-MM.json)
// uses atomic write pattern with temp file + rename for safety
func (m *CacheManager) SaveProductsToMonthlyFile(filename string, products []NVDProduct) error {
	if err := m.EnsureCacheDir(); err != nil {
		return err
	}

	filePath := filepath.Join(m.cacheDir, filename)
	tempPath := filePath + ".tmp"

	// marshal products to JSON
	data, err := json.MarshalIndent(products, "", "  ")
	if err != nil {
		return fmt.Errorf("failed to marshal products: %w", err)
	}

	// write to temp file first
	if err := os.WriteFile(tempPath, data, 0600); err != nil {
		return fmt.Errorf("failed to write temp file: %w", err)
	}

	// atomic rename
	if err := os.Rename(tempPath, filePath); err != nil {
		// cleanup temp file on error
		_ = os.Remove(tempPath)
		return fmt.Errorf("failed to rename temp file: %w", err)
	}

	return nil
}

// LoadMonthlyFile loads products from a monthly file
func (m *CacheManager) LoadMonthlyFile(filename string) ([]NVDProduct, error) {
	filePath := filepath.Join(m.cacheDir, filename)

	data, err := os.ReadFile(filePath)
	if err != nil {
		if os.IsNotExist(err) {
			return []NVDProduct{}, nil
		}
		return nil, fmt.Errorf("failed to read %s: %w", filename, err)
	}

	var products []NVDProduct
	if err := json.Unmarshal(data, &products); err != nil {
		return nil, fmt.Errorf("failed to unmarshal %s: %w", filename, err)
	}

	return products, nil
}

// GetMonthKey returns the "YYYY-MM" key for a given time
func GetMonthKey(t time.Time) string {
	return t.Format("2006-01")
}

// SaveProducts saves products grouped by modification month
// this is called after fetching from the API to organize products into monthly files
func (m *CacheManager) SaveProducts(products []NVDProduct, isFullRefresh bool, metadata *CacheMetadata, increment IncrementMetadata) error {
	if len(products) == 0 {
		return nil
	}

	if isFullRefresh {
		return m.saveFullRefresh(products, metadata)
	}

	return m.saveIncrementalUpdate(products, metadata, increment)
}

// saveFullRefresh saves all products to initial.json
func (m *CacheManager) saveFullRefresh(products []NVDProduct, metadata *CacheMetadata) error {
	if err := m.SaveProductsToMonthlyFile("initial.json", products); err != nil {
		return fmt.Errorf("failed to save initial.json: %w", err)
	}

	metadata.LastFullRefresh = time.Now()
	metadata.TotalProducts = len(products)
	metadata.LastStartIndex = 0 // reset on full refresh
	metadata.MonthlyBatches = make(map[string]*MonthlyBatchMetadata)

	return nil
}

// saveIncrementalUpdate saves products grouped by modification month to monthly files
func (m *CacheManager) saveIncrementalUpdate(products []NVDProduct, metadata *CacheMetadata, increment IncrementMetadata) error {
	productsByMonth, err := groupProductsByMonth(products)
	if err != nil {
		return err
	}

	for monthKey, monthProducts := range productsByMonth {
		if err := m.saveMonthlyBatch(monthKey, monthProducts, metadata, increment); err != nil {
			return err
		}
	}

	// update last processed index for resume capability
	metadata.LastStartIndex = increment.EndIndex

	return nil
}

// groupProductsByMonth groups products by their lastModified month
func groupProductsByMonth(products []NVDProduct) (map[string][]NVDProduct, error) {
	productsByMonth := make(map[string][]NVDProduct)

	for _, product := range products {
		lastMod, err := time.Parse(time.RFC3339, product.CPE.LastModified)
		if err != nil {
			return nil, fmt.Errorf("failed to parse lastModified for %s: %w", product.CPE.CPENameID, err)
		}

		monthKey := GetMonthKey(lastMod)
		productsByMonth[monthKey] = append(productsByMonth[monthKey], product)
	}

	return productsByMonth, nil
}

// saveMonthlyBatch saves products for a specific month, merging with existing data
func (m *CacheManager) saveMonthlyBatch(monthKey string, monthProducts []NVDProduct, metadata *CacheMetadata, increment IncrementMetadata) error {
	filename := monthKey + ".json"

	// load existing products for this month
	existing, err := m.LoadMonthlyFile(filename)
	if err != nil {
		return fmt.Errorf("failed to load existing %s: %w", filename, err)
	}

	// merge products (newer wins)
	merged := mergeProducts(existing, monthProducts)

	// atomically save merged products
	if err := m.SaveProductsToMonthlyFile(filename, merged); err != nil {
		return fmt.Errorf("failed to save %s: %w", filename, err)
	}

	// update metadata
	updateMonthlyBatchMetadata(metadata, monthKey, monthProducts, merged, increment)

	return nil
}

// mergeProducts deduplicates products by CPENameID, with newer products overwriting older ones
func mergeProducts(existing, updated []NVDProduct) []NVDProduct {
	productMap := make(map[string]NVDProduct)

	for _, p := range existing {
		productMap[p.CPE.CPENameID] = p
	}
	for _, p := range updated {
		productMap[p.CPE.CPENameID] = p
	}

	merged := make([]NVDProduct, 0, len(productMap))
	for _, p := range productMap {
		merged = append(merged, p)
	}

	return merged
}

// updateMonthlyBatchMetadata updates the metadata for a monthly batch
func updateMonthlyBatchMetadata(metadata *CacheMetadata, monthKey string, newProducts, allProducts []NVDProduct, increment IncrementMetadata) {
	if metadata.MonthlyBatches[monthKey] == nil {
		metadata.MonthlyBatches[monthKey] = &MonthlyBatchMetadata{
			Complete:   false,
			Increments: []IncrementMetadata{},
		}
	}

	batchMeta := metadata.MonthlyBatches[monthKey]
	batchMeta.Increments = append(batchMeta.Increments, IncrementMetadata{
		FetchedAt:        increment.FetchedAt,
		LastModStartDate: increment.LastModStartDate,
		LastModEndDate:   increment.LastModEndDate,
		Products:         len(newProducts),
		StartIndex:       increment.StartIndex,
		EndIndex:         increment.EndIndex,
	})
	batchMeta.TotalProducts = len(allProducts)
}

// LoadAllProducts loads and merges all cached products from monthly files
// returns a deduplicated slice of products (newer products override older ones by CPENameID)
func (m *CacheManager) LoadAllProducts() ([]NVDProduct, error) {
	// check if cache directory exists
	if _, err := os.Stat(m.cacheDir); os.IsNotExist(err) {
		return []NVDProduct{}, nil
	}

	productMap := make(map[string]NVDProduct)

	// load initial.json first (if it exists)
	initial, err := m.LoadMonthlyFile("initial.json")
	if err != nil {
		return nil, fmt.Errorf("failed to load initial.json: %w", err)
	}

	for _, p := range initial {
		productMap[p.CPE.CPENameID] = p
	}

	// load all monthly files (YYYY-MM.json)
	entries, err := os.ReadDir(m.cacheDir)
	if err != nil {
		return nil, fmt.Errorf("failed to read cache directory: %w", err)
	}

	for _, entry := range entries {
		if entry.IsDir() || filepath.Ext(entry.Name()) != ".json" {
			continue
		}

		// skip metadata.json and initial.json
		if entry.Name() == "metadata.json" || entry.Name() == "initial.json" {
			continue
		}

		// load monthly file
		products, err := m.LoadMonthlyFile(entry.Name())
		if err != nil {
			return nil, fmt.Errorf("failed to load %s: %w", entry.Name(), err)
		}

		// merge products (newer wins based on lastModified)
		for _, p := range products {
			existing, exists := productMap[p.CPE.CPENameID]
			if !exists {
				productMap[p.CPE.CPENameID] = p
				continue
			}

			// compare lastModified timestamps to keep the newer one
			newMod, _ := time.Parse(time.RFC3339, p.CPE.LastModified)
			existingMod, _ := time.Parse(time.RFC3339, existing.CPE.LastModified)

			if newMod.After(existingMod) {
				productMap[p.CPE.CPENameID] = p
			}
		}
	}

	// convert map to slice
	allProducts := make([]NVDProduct, 0, len(productMap))
	for _, p := range productMap {
		allProducts = append(allProducts, p)
	}

	return allProducts, nil
}

// CleanCache removes the local cache directory
func (m *CacheManager) CleanCache() error {
	if err := os.RemoveAll(m.cacheDir); err != nil {
		return fmt.Errorf("failed to clean cache: %w", err)
	}
	fmt.Println("Cache cleaned successfully")
	return nil
}
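To make the intended call sequence concrete, here is a minimal sketch of a driver for the cache manager above. fetchModifiedProducts is a hypothetical stand-in for the NVD API paging code, which is not part of this excerpt; every CacheManager call is real and taken from the file above:

// minimal usage sketch; fetchModifiedProducts is hypothetical, not from this diff
func updateCache() error {
	cm := NewCacheManager()

	metadata, err := cm.LoadMetadata()
	if err != nil {
		return err
	}

	start := metadata.LastFullRefresh // window: everything modified since the last refresh
	end := time.Now()

	// hypothetical helper: pages the NVD products API, resuming at LastStartIndex
	products, lastIndex, err := fetchModifiedProducts(start, end, metadata.LastStartIndex)
	if err != nil {
		return err
	}

	increment := IncrementMetadata{
		FetchedAt:        time.Now(),
		LastModStartDate: start,
		LastModEndDate:   end,
		Products:         len(products),
		StartIndex:       metadata.LastStartIndex,
		EndIndex:         lastIndex,
	}

	// isFullRefresh=false: products are grouped and merged into per-month YYYY-MM.json files
	if err := cm.SaveProducts(products, false, metadata, increment); err != nil {
		return err
	}
	if err := cm.SaveMetadata(metadata); err != nil {
		return err
	}

	// the index generator then reads the deduplicated union of the whole cache
	all, err := cm.LoadAllProducts()
	if err != nil {
		return err
	}
	fmt.Printf("cache now holds %d products\n", len(all))
	return nil
}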
@ -0,0 +1,319 @@
package main

import (
	"os"
	"path/filepath"
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func TestCacheManager_MonthlyFileOperations(t *testing.T) {
	tmpDir := t.TempDir()
	cacheManager := &CacheManager{cacheDir: tmpDir}

	testProducts := []NVDProduct{
		{
			CPE: NVDProductDetails{
				CPEName:      "cpe:2.3:a:vendor:product1:1.0:*:*:*:*:*:*:*",
				CPENameID:    "product1-id",
				LastModified: "2024-11-15T10:00:00.000Z",
				Titles:       []NVDTitle{{Title: "Test Product 1", Lang: "en"}},
			},
		},
		{
			CPE: NVDProductDetails{
				CPEName:      "cpe:2.3:a:vendor:product2:2.0:*:*:*:*:*:*:*",
				CPENameID:    "product2-id",
				LastModified: "2024-11-20T10:00:00.000Z",
				Titles:       []NVDTitle{{Title: "Test Product 2", Lang: "en"}},
			},
		},
	}

	t.Run("save and load monthly file", func(t *testing.T) {
		err := cacheManager.SaveProductsToMonthlyFile("2024-11.json", testProducts)
		require.NoError(t, err)

		expectedPath := filepath.Join(tmpDir, "2024-11.json")
		require.FileExists(t, expectedPath)

		loaded, err := cacheManager.LoadMonthlyFile("2024-11.json")
		require.NoError(t, err)
		require.Len(t, loaded, 2)
		assert.Equal(t, testProducts[0].CPE.CPEName, loaded[0].CPE.CPEName)
		assert.Equal(t, testProducts[1].CPE.CPEName, loaded[1].CPE.CPEName)
	})

	t.Run("atomic save with temp file", func(t *testing.T) {
		err := cacheManager.SaveProductsToMonthlyFile("2024-12.json", testProducts)
		require.NoError(t, err)

		// temp file should not exist after successful save
		tempPath := filepath.Join(tmpDir, "2024-12.json.tmp")
		require.NoFileExists(t, tempPath)

		// actual file should exist
		finalPath := filepath.Join(tmpDir, "2024-12.json")
		require.FileExists(t, finalPath)
	})

	t.Run("load non-existent file returns empty", func(t *testing.T) {
		loaded, err := cacheManager.LoadMonthlyFile("2025-01.json")
		require.NoError(t, err)
		assert.Empty(t, loaded)
	})
}

func TestCacheManager_Metadata(t *testing.T) {
	tmpDir := t.TempDir()
	cacheManager := &CacheManager{cacheDir: tmpDir}

	t.Run("load metadata on first run", func(t *testing.T) {
		metadata, err := cacheManager.LoadMetadata()
		require.NoError(t, err)
		require.NotNil(t, metadata)

		assert.NotNil(t, metadata.MonthlyBatches)
		assert.True(t, metadata.LastFullRefresh.IsZero())
		assert.Equal(t, 0, metadata.LastStartIndex)
		assert.Equal(t, 0, metadata.TotalProducts)
	})

	t.Run("save and load metadata with monthly batches", func(t *testing.T) {
		now := time.Now()
		metadata := &CacheMetadata{
			LastFullRefresh: now,
			LastStartIndex:  4000,
			TotalProducts:   1500,
			MonthlyBatches: map[string]*MonthlyBatchMetadata{
				"2024-11": {
					Complete:      true,
					TotalProducts: 1000,
					Increments: []IncrementMetadata{
						{
							FetchedAt:        now,
							LastModStartDate: now.Add(-24 * time.Hour),
							LastModEndDate:   now,
							Products:         1000,
							StartIndex:       0,
							EndIndex:         2000,
						},
					},
				},
				"2024-12": {
					Complete:      false,
					TotalProducts: 500,
					Increments: []IncrementMetadata{
						{
							FetchedAt:        now,
							LastModStartDate: now.Add(-12 * time.Hour),
							LastModEndDate:   now,
							Products:         500,
							StartIndex:       0,
							EndIndex:         1000,
						},
					},
				},
			},
		}

		err := cacheManager.SaveMetadata(metadata)
		require.NoError(t, err)

		loadedMetadata, err := cacheManager.LoadMetadata()
		require.NoError(t, err)

		assert.Equal(t, metadata.TotalProducts, loadedMetadata.TotalProducts)
		assert.Equal(t, metadata.LastStartIndex, loadedMetadata.LastStartIndex)
		assert.Equal(t, 2, len(loadedMetadata.MonthlyBatches))
		assert.True(t, loadedMetadata.MonthlyBatches["2024-11"].Complete)
		assert.False(t, loadedMetadata.MonthlyBatches["2024-12"].Complete)
		assert.Equal(t, 1000, loadedMetadata.MonthlyBatches["2024-11"].TotalProducts)
		assert.Len(t, loadedMetadata.MonthlyBatches["2024-11"].Increments, 1)
	})
}

func TestCacheManager_LoadAllProducts(t *testing.T) {
	tmpDir := t.TempDir()
	cacheManager := &CacheManager{cacheDir: tmpDir}

	t.Run("load and merge monthly files", func(t *testing.T) {
		// save initial.json with base products
		initialProducts := []NVDProduct{
			{CPE: NVDProductDetails{
				CPEName:      "cpe:2.3:a:vendor:product1:*:*:*:*:*:*:*:*",
				CPENameID:    "product1-id",
				LastModified: "2024-10-01T10:00:00.000Z",
			}},
			{CPE: NVDProductDetails{
				CPEName:      "cpe:2.3:a:vendor:product2:*:*:*:*:*:*:*:*",
				CPENameID:    "product2-id",
				LastModified: "2024-10-15T10:00:00.000Z",
			}},
		}
		err := cacheManager.SaveProductsToMonthlyFile("initial.json", initialProducts)
		require.NoError(t, err)

		// save 2024-11.json with updated product2 and new product3
		novemberProducts := []NVDProduct{
			{CPE: NVDProductDetails{
				CPEName:      "cpe:2.3:a:vendor:product2:*:*:*:*:*:*:*:*",
				CPENameID:    "product2-id",
				LastModified: "2024-11-05T10:00:00.000Z", // newer version
			}},
			{CPE: NVDProductDetails{
				CPEName:      "cpe:2.3:a:vendor:product3:*:*:*:*:*:*:*:*",
				CPENameID:    "product3-id",
				LastModified: "2024-11-10T10:00:00.000Z",
			}},
		}
		err = cacheManager.SaveProductsToMonthlyFile("2024-11.json", novemberProducts)
		require.NoError(t, err)

		// load all products
		allProducts, err := cacheManager.LoadAllProducts()
		require.NoError(t, err)

		// should have 3 unique products (product2 from Nov overwrites Oct version)
		require.Len(t, allProducts, 3)

		// verify we got all products
		cpeNames := make(map[string]string) // CPENameID -> LastModified
		for _, product := range allProducts {
			cpeNames[product.CPE.CPENameID] = product.CPE.LastModified
		}

		assert.Contains(t, cpeNames, "product1-id")
		assert.Contains(t, cpeNames, "product2-id")
		assert.Contains(t, cpeNames, "product3-id")

		// product2 should be the newer version from November
		assert.Equal(t, "2024-11-05T10:00:00.000Z", cpeNames["product2-id"])
	})

	t.Run("empty directory", func(t *testing.T) {
		emptyDir := t.TempDir()
		emptyCache := &CacheManager{cacheDir: emptyDir}

		allProducts, err := emptyCache.LoadAllProducts()
		require.NoError(t, err)
		assert.Empty(t, allProducts)
	})
}

func TestCacheManager_CleanCache(t *testing.T) {
	tmpDir := t.TempDir()
	cacheManager := &CacheManager{cacheDir: tmpDir}

	// create some cache files
	testProducts := []NVDProduct{
		{CPE: NVDProductDetails{
			CPEName:      "cpe:2.3:a:vendor:product:*:*:*:*:*:*:*:*",
			CPENameID:    "test-id",
			LastModified: "2024-11-01T10:00:00.000Z",
		}},
	}
	err := cacheManager.SaveProductsToMonthlyFile("initial.json", testProducts)
	require.NoError(t, err)

	// verify cache exists
	require.DirExists(t, tmpDir)

	// clean cache
	err = cacheManager.CleanCache()
	require.NoError(t, err)

	// verify cache is removed
	_, err = os.Stat(tmpDir)
	assert.True(t, os.IsNotExist(err))
}

func TestCacheManager_SaveProducts(t *testing.T) {
	tmpDir := t.TempDir()
	cacheManager := &CacheManager{cacheDir: tmpDir}

	t.Run("full refresh saves to initial.json", func(t *testing.T) {
		metadata := &CacheMetadata{
			MonthlyBatches: make(map[string]*MonthlyBatchMetadata),
		}

		products := []NVDProduct{
			{CPE: NVDProductDetails{
				CPEName:      "cpe:2.3:a:vendor:product1:*:*:*:*:*:*:*:*",
				CPENameID:    "p1",
				LastModified: "2024-10-01T10:00:00.000Z",
			}},
		}

		increment := IncrementMetadata{
			FetchedAt: time.Now(),
			Products:  1,
		}

		err := cacheManager.SaveProducts(products, true, metadata, increment)
		require.NoError(t, err)

		// verify initial.json exists
		initialPath := filepath.Join(tmpDir, "initial.json")
		require.FileExists(t, initialPath)

		// verify metadata updated
		assert.NotZero(t, metadata.LastFullRefresh)
		assert.Equal(t, 1, metadata.TotalProducts)
		assert.Empty(t, metadata.MonthlyBatches)
	})

	t.Run("incremental update groups by month", func(t *testing.T) {
		metadata := &CacheMetadata{
			LastFullRefresh: time.Now().Add(-30 * 24 * time.Hour),
			MonthlyBatches:  make(map[string]*MonthlyBatchMetadata),
		}

		products := []NVDProduct{
			{CPE: NVDProductDetails{
				CPEName:      "cpe:2.3:a:vendor:product1:*:*:*:*:*:*:*:*",
				CPENameID:    "p1",
				LastModified: "2024-11-05T10:00:00.000Z",
			}},
			{CPE: NVDProductDetails{
				CPEName:      "cpe:2.3:a:vendor:product2:*:*:*:*:*:*:*:*",
				CPENameID:    "p2",
				LastModified: "2024-11-15T10:00:00.000Z",
			}},
			{CPE: NVDProductDetails{
				CPEName:      "cpe:2.3:a:vendor:product3:*:*:*:*:*:*:*:*",
				CPENameID:    "p3",
				LastModified: "2024-12-01T10:00:00.000Z",
			}},
		}

		increment := IncrementMetadata{
			FetchedAt: time.Now(),
			Products:  3,
		}

		err := cacheManager.SaveProducts(products, false, metadata, increment)
		require.NoError(t, err)

		// verify monthly files exist
		nov2024Path := filepath.Join(tmpDir, "2024-11.json")
		dec2024Path := filepath.Join(tmpDir, "2024-12.json")
		require.FileExists(t, nov2024Path)
		require.FileExists(t, dec2024Path)

		// verify metadata has monthly batches
		assert.Len(t, metadata.MonthlyBatches, 2)
		assert.Contains(t, metadata.MonthlyBatches, "2024-11")
		assert.Contains(t, metadata.MonthlyBatches, "2024-12")

		// verify 2024-11 has 2 products
		assert.Equal(t, 2, metadata.MonthlyBatches["2024-11"].TotalProducts)
		assert.Len(t, metadata.MonthlyBatches["2024-11"].Increments, 1)

		// verify 2024-12 has 1 product
		assert.Equal(t, 1, metadata.MonthlyBatches["2024-12"].TotalProducts)
	})
}
@ -1,11 +1,6 @@
package main

import (
	"compress/gzip"
	"encoding/json"
	"encoding/xml"
	"fmt"
	"io"
	"log"
	"slices"
	"strings"
@ -15,39 +10,6 @@ import (
	"github.com/anchore/syft/syft/pkg/cataloger/internal/cpegenerate/dictionary"
)

func generateIndexedDictionaryJSON(rawGzipData io.Reader) ([]byte, error) {
	gzipReader, err := gzip.NewReader(rawGzipData)
	if err != nil {
		return nil, fmt.Errorf("unable to decompress CPE dictionary: %w", err)
	}
	defer gzipReader.Close()

	// Read XML data
	data, err := io.ReadAll(gzipReader)
	if err != nil {
		return nil, fmt.Errorf("unable to read CPE dictionary: %w", err)
	}

	// Unmarshal XML
	var cpeList CpeList
	if err := xml.Unmarshal(data, &cpeList); err != nil {
		return nil, fmt.Errorf("unable to unmarshal CPE dictionary XML: %w", err)
	}

	// Filter out data that's not applicable here
	cpeList = filterCpeList(cpeList)

	// Create indexed dictionary to help with looking up CPEs
	indexedDictionary := indexCPEList(cpeList)

	// Convert to JSON
	jsonData, err := json.MarshalIndent(indexedDictionary, "", "  ")
	if err != nil {
		return nil, fmt.Errorf("unable to marshal CPE dictionary to JSON: %w", err)
	}
	return jsonData, nil
}

// filterCpeList removes CPE items that are not applicable to software packages.
func filterCpeList(cpeList CpeList) CpeList {
	var processedCpeList CpeList
Some files were not shown because too many files have changed in this diff.