Compare commits


64 Commits

Author SHA1 Message Date
Christopher Phillips
bfe63bb006
fix: add green fixes before pr fixes
Signed-off-by: Christopher Phillips <32073428+spiffcs@users.noreply.github.com>
2025-11-13 01:59:46 -05:00
Christopher Phillips
1a8562554a
fix: update after rebase
Signed-off-by: Christopher Phillips <32073428+spiffcs@users.noreply.github.com>
2025-11-13 01:47:49 -05:00
Christopher Phillips
ec978f01c5
fix: use OCI title annotation for virtual path in GGUF layer extraction
Signed-off-by: Christopher Phillips <32073428+spiffcs@users.noreply.github.com>
2025-11-13 01:44:47 -05:00
Christopher Phillips
80319572cf
wip: wip
Signed-off-by: Christopher Phillips <32073428+spiffcs@users.noreply.github.com>
2025-11-13 01:19:05 -05:00
Christopher Phillips
efcfecb2db
wip: wip no lrg file oci client
Signed-off-by: Christopher Phillips <32073428+spiffcs@users.noreply.github.com>
2025-11-13 01:18:59 -05:00
Christopher Phillips
7ed34c81f8
chore: refactor to use gguf-parser-go; 50mb limit
Signed-off-by: Christopher Phillips <32073428+spiffcs@users.noreply.github.com>
2025-11-13 01:10:44 -05:00
Christopher Phillips
2976df5b92
chore: schema and test additions
Signed-off-by: Christopher Phillips <32073428+spiffcs@users.noreply.github.com>
2025-11-13 01:04:13 -05:00
Christopher Phillips
9609ce2b36
chore: remove test-binary
Signed-off-by: Christopher Phillips <32073428+spiffcs@users.noreply.github.com>
2025-11-13 00:46:38 -05:00
Christopher Phillips
56761cee6f
fix: raise model version on package
Signed-off-by: Christopher Phillips <32073428+spiffcs@users.noreply.github.com>
2025-11-13 00:44:19 -05:00
Christopher Phillips
b80592f735
chore: pr comments
Signed-off-by: Christopher Phillips <32073428+spiffcs@users.noreply.github.com>
2025-11-13 00:32:08 -05:00
Christopher Phillips
cdb41b0c76
chore: ignore local agent files
Signed-off-by: Christopher Phillips <32073428+spiffcs@users.noreply.github.com>
2025-11-13 00:12:15 -05:00
Christopher Phillips
b18f7bb7a8
chore: regenerate json schema
Signed-off-by: Christopher Phillips <32073428+spiffcs@users.noreply.github.com>
2025-11-13 00:02:44 -05:00
Christopher Phillips
6daea43c32
fix: pr comments
Signed-off-by: Christopher Phillips <32073428+spiffcs@users.noreply.github.com>
2025-11-13 00:02:21 -05:00
Christopher Phillips
9b31c0480f
wip: wip
Signed-off-by: Christopher Phillips <32073428+spiffcs@users.noreply.github.com>
2025-11-13 00:01:27 -05:00
Christopher Phillips
9a2a45f91d
chore: pr feedback
Signed-off-by: Christopher Phillips <32073428+spiffcs@users.noreply.github.com>
2025-11-13 00:01:27 -05:00
Christopher Phillips
38c0e6e899
chore: warn -> debug
Signed-off-by: Christopher Phillips <32073428+spiffcs@users.noreply.github.com>
2025-11-13 00:01:26 -05:00
Christopher Phillips
64dc451345
fix: update gguf data to be GGUFFileHeader
Signed-off-by: Christopher Phillips <32073428+spiffcs@users.noreply.github.com>
2025-11-12 23:59:56 -05:00
Christopher Phillips
c689dcfeef
chore: refactor to use gguf-parser-go; 50mb limit
Signed-off-by: Christopher Phillips <32073428+spiffcs@users.noreply.github.com>
2025-11-12 23:59:13 -05:00
Christopher Phillips
f664f9eaf2
fix: first pass pr fixes
Signed-off-by: Christopher Phillips <32073428+spiffcs@users.noreply.github.com>
2025-11-12 23:58:44 -05:00
Christopher Phillips
08c0572fb7
test: fix local flake
Signed-off-by: Christopher Phillips <32073428+spiffcs@users.noreply.github.com>
2025-11-12 23:58:44 -05:00
Christopher Phillips
b702952c8c
tests: account for epoch in dedupe test
Signed-off-by: Christopher Phillips <32073428+spiffcs@users.noreply.github.com>
2025-11-12 23:58:44 -05:00
Christopher Phillips
bcd47d109a
chore: schema and test additions
Signed-off-by: Christopher Phillips <32073428+spiffcs@users.noreply.github.com>
2025-11-12 23:58:39 -05:00
Christopher Phillips
1ad4a2752a
test: migrate gguf tests over
Signed-off-by: Christopher Phillips <32073428+spiffcs@users.noreply.github.com>
2025-11-12 23:56:52 -05:00
Christopher Phillips
f92b7d2fc9
chore: lint-fix
Signed-off-by: Christopher Phillips <32073428+spiffcs@users.noreply.github.com>
2025-11-12 23:56:52 -05:00
Christopher Phillips
6ceef5fe4a
feat: migrate gguf parser to separate PR from oci
Signed-off-by: Christopher Phillips <32073428+spiffcs@users.noreply.github.com>
2025-11-12 23:56:52 -05:00
anchore-actions-token-generator[bot]
2e100f33f3
chore(deps): update tools to latest versions (#4358)
Signed-off-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: spiffcs <32073428+spiffcs@users.noreply.github.com>
2025-11-12 13:27:47 -05:00
dependabot[bot]
b444f0c2ed
chore(deps): bump golang.org/x/mod from 0.29.0 to 0.30.0 (#4359)
Bumps [golang.org/x/mod](https://github.com/golang/mod) from 0.29.0 to 0.30.0.
- [Commits](https://github.com/golang/mod/compare/v0.29.0...v0.30.0)

---
updated-dependencies:
- dependency-name: golang.org/x/mod
  dependency-version: 0.30.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-12 13:27:33 -05:00
Adam Chovanec
102d362daf
feat: CPEs format decoder (#4207)
Signed-off-by: Adam Chovanec <git@adamchovanec.cz>
2025-11-12 10:45:09 -05:00
Alex Goodman
66c78d44af
Document additional json schema fields (#4356)
* add documentation to key fields

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

* regenerate json schema

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

---------

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>
2025-11-10 16:29:06 -05:00
dependabot[bot]
78a4ab8ced
chore(deps): bump github.com/olekukonko/tablewriter from 1.0.9 to 1.1.1 (#4354)
Bumps [github.com/olekukonko/tablewriter](https://github.com/olekukonko/tablewriter) from 1.0.9 to 1.1.1.
- [Commits](https://github.com/olekukonko/tablewriter/compare/v1.0.9...v1.1.1)

---
updated-dependencies:
- dependency-name: github.com/olekukonko/tablewriter
  dependency-version: 1.1.1
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-10 13:31:15 -05:00
dependabot[bot]
25ca33d20e
chore(deps): bump github.com/jedib0t/go-pretty/v6 from 6.7.0 to 6.7.1 (#4355)
Bumps [github.com/jedib0t/go-pretty/v6](https://github.com/jedib0t/go-pretty) from 6.7.0 to 6.7.1.
- [Release notes](https://github.com/jedib0t/go-pretty/releases)
- [Commits](https://github.com/jedib0t/go-pretty/compare/v6.7.0...v6.7.1)

---
updated-dependencies:
- dependency-name: github.com/jedib0t/go-pretty/v6
  dependency-version: 6.7.1
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-10 13:30:56 -05:00
anchore-actions-token-generator[bot]
60ca241593
chore(deps): update tools to latest versions (#4347)
* chore: new tool checks
---------
Signed-off-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Signed-off-by: Christopher Phillips <32073428+spiffcs@users.noreply.github.com>
Co-authored-by: spiffcs <32073428+spiffcs@users.noreply.github.com>
2025-11-07 20:56:44 +00:00
dependabot[bot]
0f475c8bcd
chore(deps): bump github.com/opencontainers/selinux (#4349)
Bumps [github.com/opencontainers/selinux](https://github.com/opencontainers/selinux) from 1.11.0 to 1.13.0.
- [Release notes](https://github.com/opencontainers/selinux/releases)
- [Commits](https://github.com/opencontainers/selinux/compare/v1.11.0...v1.13.0)

---
updated-dependencies:
- dependency-name: github.com/opencontainers/selinux
  dependency-version: 1.13.0
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-07 15:21:35 -05:00
Alex Goodman
199394934d
preserve --from order (#4350)
Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>
2025-11-07 10:17:10 -05:00
dependabot[bot]
8a22d394ed
chore(deps): bump golang.org/x/time from 0.12.0 to 0.14.0 (#4348)
Bumps [golang.org/x/time](https://github.com/golang/time) from 0.12.0 to 0.14.0.
- [Commits](https://github.com/golang/time/compare/v0.12.0...v0.14.0)

---
updated-dependencies:
- dependency-name: golang.org/x/time
  dependency-version: 0.14.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-07 08:48:20 -05:00
Tim Olshansky
bbef262b8f
feat: Add license enrichment from pypi to python packages (#4295)
* feat: Add license enrichment from pypi to python packages
* Implement license caching and improve test coverage
---------
Signed-off-by: Tim Olshansky <456103+timols@users.noreply.github.com>
2025-11-06 16:05:08 -05:00
Tim Olshansky
4e06a7ab32
feat(javascript): Add dependency parsing (#4304)
* feat: Add dependency parsing to javascript package locks

Signed-off-by: Tim Olshansky <456103+timols@users.noreply.github.com>

* Bump schema version

Signed-off-by: Tim Olshansky <456103+timols@users.noreply.github.com>

* Add support for yarn and pnpm, excl. yarn v1

Signed-off-by: Tim Olshansky <456103+timols@users.noreply.github.com>

* Add support for dependencies for v1 yarn lock files

Signed-off-by: Tim Olshansky <456103+timols@users.noreply.github.com>

* Ensure schema is correctly generated

Signed-off-by: Tim Olshansky <456103+timols@users.noreply.github.com>

* Fix tests

Signed-off-by: Tim Olshansky <456103+timols@users.noreply.github.com>

* PR feedback

Signed-off-by: Tim Olshansky <456103+timols@users.noreply.github.com>

---------

Signed-off-by: Tim Olshansky <456103+timols@users.noreply.github.com>
2025-11-06 16:03:43 -05:00
Alex Goodman
e5711e9b42
Update CPE processing to use NVD API (#4332)
* update NVD CPE dictionary processor to use API

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

* pass linting with exceptions

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

---------

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>
2025-11-06 16:02:26 -05:00
Rez Moss
f69b1db099
feat: detect elixir bin (#4334)
* Elixir detection, fixed #4333
---------
Signed-off-by: Rez Moss <hi@rezmoss.com>
2025-11-06 16:02:02 -05:00
dependabot[bot]
fe1ea443c2
chore(deps): bump github.com/jedib0t/go-pretty/v6 from 6.6.9 to 6.7.0 (#4337)
Bumps [github.com/jedib0t/go-pretty/v6](https://github.com/jedib0t/go-pretty) from 6.6.9 to 6.7.0.
- [Release notes](https://github.com/jedib0t/go-pretty/releases)
- [Commits](https://github.com/jedib0t/go-pretty/compare/v6.6.9...v6.7.0)

---
updated-dependencies:
- dependency-name: github.com/jedib0t/go-pretty/v6
  dependency-version: 6.7.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-06 15:47:49 -05:00
dependabot[bot]
bfcbf266df
chore(deps): bump github.com/containerd/containerd from 1.7.28 to 1.7.29 (#4340)
Bumps [github.com/containerd/containerd](https://github.com/containerd/containerd) from 1.7.28 to 1.7.29.
- [Release notes](https://github.com/containerd/containerd/releases)
- [Changelog](https://github.com/containerd/containerd/blob/main/RELEASES.md)
- [Commits](https://github.com/containerd/containerd/compare/v1.7.28...v1.7.29)

---
updated-dependencies:
- dependency-name: github.com/containerd/containerd
  dependency-version: 1.7.29
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-06 15:46:32 -05:00
Keith Zantow
a400c675fc
feat: license file search (#4327)
Signed-off-by: Keith Zantow <kzantow@gmail.com>
2025-11-03 14:16:05 -05:00
Alex Goodman
7c154e7c37
use official action for token generation (#4331)
Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>
2025-11-03 13:08:42 -05:00
anchore-actions-token-generator[bot]
4c93394bc2
chore(deps): update anchore dependencies (#4330)
Signed-off-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: spiffcs <32073428+spiffcs@users.noreply.github.com>
2025-11-03 12:44:07 -05:00
kdt523
3e4e82f03e
Canonicalize Ghostscript CPE/PURL for ghostscript packages from PE Binaries (#4308)
* binary(pe): canonicalize Ghostscript CPE to artifex:ghostscript and add generic purl for PE (#4275)

- Detect Ghostscript via PE version resources and set purl pkg:generic/ghostscript@<version>
- Add PE-specific CPE candidates: vendor 'artifex', product 'ghostscript'
- Add focused unit tests for purl and CPE generation

Signed-off-by: kdt523 <krushna.datir231@vit.edu>

* fix: gofmt formatting for static analysis pass (pe-ghostscript-cpe-purl-4275)

Signed-off-by: kdt523 <krushna.datir231@vit.edu>

---------

Signed-off-by: kdt523 <krushna.datir231@vit.edu>
2025-11-03 14:54:48 +00:00
dependabot[bot]
793b0a346f
chore(deps): bump github/codeql-action from 4.31.1 to 4.31.2 (#4325)
Bumps [github/codeql-action](https://github.com/github/codeql-action) from 4.31.1 to 4.31.2.
- [Release notes](https://github.com/github/codeql-action/releases)
- [Changelog](https://github.com/github/codeql-action/blob/main/CHANGELOG.md)
- [Commits](5fe9434cd2...0499de31b9)

---
updated-dependencies:
- dependency-name: github/codeql-action
  dependency-version: 4.31.2
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-03 09:11:20 -05:00
dependabot[bot]
a0dac519db
chore(deps): bump github.com/hashicorp/go-getter from 1.8.2 to 1.8.3 (#4326)
Bumps [github.com/hashicorp/go-getter](https://github.com/hashicorp/go-getter) from 1.8.2 to 1.8.3.
- [Release notes](https://github.com/hashicorp/go-getter/releases)
- [Changelog](https://github.com/hashicorp/go-getter/blob/main/.goreleaser.yml)
- [Commits](https://github.com/hashicorp/go-getter/compare/v1.8.2...v1.8.3)

---
updated-dependencies:
- dependency-name: github.com/hashicorp/go-getter
  dependency-version: 1.8.3
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-03 09:11:12 -05:00
dependabot[bot]
34f5e521c1
chore(deps): bump modernc.org/sqlite from 1.39.1 to 1.40.0 (#4329)
Bumps [modernc.org/sqlite](https://gitlab.com/cznic/sqlite) from 1.39.1 to 1.40.0.
- [Commits](https://gitlab.com/cznic/sqlite/compare/v1.39.1...v1.40.0)

---
updated-dependencies:
- dependency-name: modernc.org/sqlite
  dependency-version: 1.40.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-03 09:11:05 -05:00
dependabot[bot]
774b1e97b9
chore(deps): bump github/codeql-action from 4.31.0 to 4.31.1 (#4321)
Bumps [github/codeql-action](https://github.com/github/codeql-action) from 4.31.0 to 4.31.1.
- [Release notes](https://github.com/github/codeql-action/releases)
- [Changelog](https://github.com/github/codeql-action/blob/main/CHANGELOG.md)
- [Commits](4e94bd11f7...5fe9434cd2)

---
updated-dependencies:
- dependency-name: github/codeql-action
  dependency-version: 4.31.1
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-10-30 13:19:57 -04:00
Alex Goodman
538430d65d
describe cataloger capabilities via test observations (#4318)
Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>
2025-10-30 13:19:42 -04:00
Alex Goodman
5db3a9bf55
add workflow to create PR for spdx license list updates (#4319)
Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>
2025-10-30 12:14:13 -04:00
Stepan
efc2f0012c
fix: go binary replace handling in path (#4156)
* Fix issue with relative paths on go binary

Signed-off-by: Stepan <stepworm@yandex.ru>

* Linting

Signed-off-by: Stepan <stepworm@yandex.ru>

---------

Signed-off-by: Stepan <stepworm@yandex.ru>
Co-authored-by: Alex Goodman <wagoodman@users.noreply.github.com>
2025-10-29 15:59:47 +00:00
kyounghoonJang
c5c1454848
feat(java): Add support for .far (Feature Archive) files (#4193)
* feat(java): add support for .far archives

Enables the Java cataloger to recognize and catalog dependencies within .far files, which are used in Apache Sling applications.

Signed-off-by: Kyounghoon Jang <matkimchi_@naver.com>

* feat(java): Add tests for .far (Feature Archive) file support

Signed-off-by: Kyounghoon Jang <matkimchi_@naver.com>

---------

Signed-off-by: Kyounghoon Jang <matkimchi_@naver.com>
Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>
Co-authored-by: Alex Goodman <wagoodman@users.noreply.github.com>
2025-10-29 15:41:27 +00:00
Kudryavcev Nikolay
f5c765192c
Refactor fileresolver to not require base path (#4298)
* ref: close source in test and examples

Signed-off-by: Kudryavcev Nikolay <kydry.nikolau@gmail.com>

* ref: pretty file/directory source resolver (make them more similar)

Signed-off-by: Kudryavcev Nikolay <kydry.nikolau@gmail.com>

* ref: move absoluteSymlinkFreePathToParent to file resolver

Signed-off-by: Kudryavcev Nikolay <kydry.nikolau@gmail.com>

* revert breaking change

Signed-off-by: Kudryavcev Nikolay <kydry.nikolau@gmail.com>

---------

Signed-off-by: Kudryavcev Nikolay <kydry.nikolau@gmail.com>
2025-10-29 10:41:18 -04:00
Will Murphy
728feea620
ci: use apple creds before pushing tags (#4313)
We have had a few releases fail because the Apple credentials needed
some sort of fix. These releases were operationally more interesting
because they failed after pushing a git tag (which effectively releases
the golang package). Therefore, try to use these creds early, before
there's a tag pushed.

Signed-off-by: Will Murphy <willmurphyscode@users.noreply.github.com>
2025-10-29 10:07:47 -04:00
dependabot[bot]
45fb52dca1
chore(deps): bump github.com/jedib0t/go-pretty/v6 from 6.6.8 to 6.6.9 (#4315)
Bumps [github.com/jedib0t/go-pretty/v6](https://github.com/jedib0t/go-pretty) from 6.6.8 to 6.6.9.
- [Release notes](https://github.com/jedib0t/go-pretty/releases)
- [Commits](https://github.com/jedib0t/go-pretty/compare/v6.6.8...v6.6.9)

---
updated-dependencies:
- dependency-name: github.com/jedib0t/go-pretty/v6
  dependency-version: 6.6.9
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-10-29 10:06:37 -04:00
Rez Moss
45bf8b14ab
fix: omit records with empty PURL in GitHub format (#4312)
Signed-off-by: Rez Moss <hi@rezmoss.com>
2025-10-28 18:34:10 -04:00
Brian Muenzenmeyer
9478cd974b
docs: update template link in README.md (#4306)
Signed-off-by: Brian Muenzenmeyer <brian.muenzenmeyer@gmail.com>
2025-10-28 11:29:07 -04:00
Will Murphy
0d9ea69a66
Respect "rpmmod" PURL qualifier (#4314)
Red Hat puts the RPM modularity info in a query param in the PURLs in
their vulnerability data. It would be nice if Syft respected this
qualifier so that Grype can use it when a Red Hat PURL is passed.

Signed-off-by: Will Murphy <willmurphyscode@users.noreply.github.com>
2025-10-28 09:35:11 -04:00
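
For the commit above, a hedged illustration of the qualifier in question (the package name, version, and module stream are made up for the example):

```
pkg:rpm/redhat/nodejs@12.22.12?arch=x86_64&rpmmod=nodejs:12
```

Treating `rpmmod` as a recognized qualifier lets Grype match against Red Hat vulnerability data keyed on the same module stream.
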
dependabot[bot]
bee78c0b16
chore(deps): bump github/codeql-action from 4.30.9 to 4.31.0 (#4310)
Bumps [github/codeql-action](https://github.com/github/codeql-action) from 4.30.9 to 4.31.0.
- [Release notes](https://github.com/github/codeql-action/releases)
- [Changelog](https://github.com/github/codeql-action/blob/main/CHANGELOG.md)
- [Commits](16140ae1a1...4e94bd11f7)

---
updated-dependencies:
- dependency-name: github/codeql-action
  dependency-version: 4.31.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-10-27 10:43:04 -04:00
dependabot[bot]
88bbcbe9c6
chore(deps): bump anchore/sbom-action from 0.20.8 to 0.20.9 (#4305)
2025-10-27 02:03:09 -04:00
anchore-actions-token-generator[bot]
e0680eb704
chore(deps): update tools to latest versions (#4307)
2025-10-27 02:02:47 -04:00
Marc
16f851c5d9
feat: include .rar files as Java archives for Java resource adapters (#4137)
Signed-off-by: Marc Thomas <marc.thomas@t-systems.com>
2025-10-24 11:55:02 -04:00
Ross Kirk
d5ca1ad543
fix: ignore dpkg entries with "deinstall" status (#4231)
Signed-off-by: Ross Kirk <ross.kirk@upwind.io>
2025-10-23 16:23:58 -04:00
188 changed files with 11943 additions and 1692 deletions

View File

@@ -26,7 +26,7 @@ tools:
   # used for linting
   - name: golangci-lint
     version:
-      want: v2.5.0
+      want: v2.6.1
     method: github-release
     with:
       repo: golangci/golangci-lint
@@ -58,7 +58,7 @@ tools:
   # used to release all artifacts
   - name: goreleaser
     version:
-      want: v2.12.6
+      want: v2.12.7
     method: github-release
     with:
       repo: goreleaser/goreleaser
@@ -90,7 +90,7 @@ tools:
   # used for running all local and CI tasks
   - name: task
     version:
-      want: v3.45.4
+      want: v3.45.5
     method: github-release
     with:
       repo: go-task/task
@@ -98,7 +98,7 @@ tools:
   # used for triggering a release
   - name: gh
     version:
-      want: v2.82.1
+      want: v2.83.0
     method: github-release
     with:
       repo: cli/cli

View File

@@ -47,7 +47,7 @@ jobs:
     # Initializes the CodeQL tools for scanning.
     - name: Initialize CodeQL
-      uses: github/codeql-action/init@16140ae1a102900babc80a33c44059580f687047 #v3.29.5
+      uses: github/codeql-action/init@0499de31b99561a6d14a36a5f662c2a54f91beee #v3.29.5
       with:
         languages: ${{ matrix.language }}
       # If you wish to specify custom queries, you can do so here or in a config file.
@@ -58,7 +58,7 @@ jobs:
     # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
     # If this step fails, then you should remove it and run the build manually (see below)
     - name: Autobuild
-      uses: github/codeql-action/autobuild@16140ae1a102900babc80a33c44059580f687047 #v3.29.5
+      uses: github/codeql-action/autobuild@0499de31b99561a6d14a36a5f662c2a54f91beee #v3.29.5
     # Command-line programs to run using the OS shell.
     # 📚 https://git.io/JvXDl
@@ -72,4 +72,4 @@ jobs:
     #   make release
     - name: Perform CodeQL Analysis
-      uses: github/codeql-action/analyze@16140ae1a102900babc80a33c44059580f687047 #v3.29.5
+      uses: github/codeql-action/analyze@0499de31b99561a6d14a36a5f662c2a54f91beee #v3.29.5

View File

@@ -19,6 +19,16 @@ jobs:
         with:
           persist-credentials: false
+      - name: Bootstrap environment
+        uses: ./.github/actions/bootstrap
+      - name: Validate Apple notarization credentials
+        run: .tool/quill submission list
+        env:
+          QUILL_NOTARY_ISSUER: ${{ secrets.APPLE_NOTARY_ISSUER }}
+          QUILL_NOTARY_KEY_ID: ${{ secrets.APPLE_NOTARY_KEY_ID }}
+          QUILL_NOTARY_KEY: ${{ secrets.APPLE_NOTARY_KEY }}
       - name: Check if running on main
         if: github.ref != 'refs/heads/main'
         # we are using the following flag when running `cosign blob-verify` for checksum signature verification:
@@ -161,7 +171,7 @@ jobs:
           # for updating brew formula in anchore/homebrew-syft
           GITHUB_BREW_TOKEN: ${{ secrets.ANCHOREOPS_GITHUB_OSS_WRITE_TOKEN }}
-      - uses: anchore/sbom-action@aa0e114b2e19480f157109b9922bda359bd98b90 #v0.20.8
+      - uses: anchore/sbom-action@8e94d75ddd33f69f691467e42275782e4bfefe84 #v0.20.9
         continue-on-error: true
         with:
           file: go.mod

View File

@@ -31,11 +31,11 @@ jobs:
         with:
           repos: ${{ github.event.inputs.repos }}
-      - uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a #v2.1.0
+      - uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 #v2.1.4
         id: generate-token
         with:
-          app_id: ${{ secrets.TOKEN_APP_ID }}
-          private_key: ${{ secrets.TOKEN_APP_PRIVATE_KEY }}
+          app-id: ${{ secrets.TOKEN_APP_ID }}
+          private-key: ${{ secrets.TOKEN_APP_PRIVATE_KEY }}
       - uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e #v7.0.8
         with:

View File

@@ -45,11 +45,11 @@ jobs:
            echo "\`\`\`"
          } >> $GITHUB_STEP_SUMMARY
-      - uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a #v2.1.0
+      - uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 #v2.1.4
         id: generate-token
         with:
-          app_id: ${{ secrets.TOKEN_APP_ID }}
-          private_key: ${{ secrets.TOKEN_APP_PRIVATE_KEY }}
+          app-id: ${{ secrets.TOKEN_APP_ID }}
+          private-key: ${{ secrets.TOKEN_APP_PRIVATE_KEY }}
       - uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e #v7.0.8
         with:

View File

@@ -14,6 +14,9 @@ env:
 jobs:
   upgrade-cpe-dictionary-index:
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      packages: write
     if: github.repository == 'anchore/syft' # only run for main repo
     steps:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
@@ -22,18 +25,31 @@ jobs:
       - name: Bootstrap environment
         uses: ./.github/actions/bootstrap
-      - name: Bootstrap environment
-        uses: ./.github/actions/bootstrap
-      - run: |
-          make generate-cpe-dictionary-index
-      - uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a #v2.1.0
+        id: bootstrap
+      - name: Login to GitHub Container Registry
+        run: |
+          echo "${{ secrets.GITHUB_TOKEN }}" | ${{ steps.bootstrap.outputs.oras }} login ghcr.io -u ${{ github.actor }} --password-stdin
+      - name: Pull CPE cache from registry
+        run: make generate:cpe-index:cache:pull
+      - name: Update CPE cache from NVD API
+        run: make generate:cpe-index:cache:update
+        env:
+          NVD_API_KEY: ${{ secrets.NVD_API_KEY }}
+      - name: Generate CPE dictionary index
+        run: make generate:cpe-index:build
+      - name: Push updated CPE cache to registry
+        run: make generate:cpe-index:cache:push
+      - uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 #v2.1.4
         id: generate-token
         with:
-          app_id: ${{ secrets.TOKEN_APP_ID }}
-          private_key: ${{ secrets.TOKEN_APP_PRIVATE_KEY }}
+          app-id: ${{ secrets.TOKEN_APP_ID }}
+          private-key: ${{ secrets.TOKEN_APP_PRIVATE_KEY }}
       - uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e #v7.0.8
         with:

View File

@@ -0,0 +1,54 @@
+name: PR to update SPDX license list
+on:
+  schedule:
+    - cron: "0 6 * * 1" # every monday at 6 AM UTC
+  workflow_dispatch:
+permissions:
+  contents: read
+env:
+  SLACK_NOTIFICATIONS: true
+jobs:
+  upgrade-spdx-license-list:
+    runs-on: ubuntu-latest
+    if: github.repository == 'anchore/syft' # only run for main repo
+    steps:
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
+        with:
+          persist-credentials: false
+      - name: Bootstrap environment
+        uses: ./.github/actions/bootstrap
+      - run: |
+          make generate-license-list
+      - uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a #v2.1.0
+        id: generate-token
+        with:
+          app_id: ${{ secrets.TOKEN_APP_ID }}
+          private_key: ${{ secrets.TOKEN_APP_PRIVATE_KEY }}
+      - uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e #v7.0.8
+        with:
+          signoff: true
+          delete-branch: true
+          branch: auto/latest-spdx-license-list
+          labels: dependencies
+          commit-message: "chore(deps): update SPDX license list"
+          title: "chore(deps): update SPDX license list"
+          body: |
+            Update SPDX license list based on the latest available list from spdx.org
+          token: ${{ steps.generate-token.outputs.token }}
+      - uses: 8398a7/action-slack@77eaa4f1c608a7d68b38af4e3f739dcd8cba273e #v3.19.0
+        with:
+          status: ${{ job.status }}
+          fields: workflow,eventName,job
+          text: Syft SPDX license list update failed
+        env:
+          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_TOOLBOX_WEBHOOK_URL }}
+        if: ${{ failure() && env.SLACK_NOTIFICATIONS == 'true' }}

View File

@@ -116,7 +116,7 @@ Where the `formats` available are:
 - `spdx-json@2.2`: A JSON report conforming to the [SPDX 2.2 JSON Schema](https://github.com/spdx/spdx-spec/blob/v2.2/schemas/spdx-schema.json).
 - `github-json`: A JSON report conforming to GitHub's dependency snapshot format.
 - `syft-table`: A columnar summary (default).
-- `template`: Lets the user specify the output format. See ["Using templates"](#using-templates) below.
+- `template`: Lets the user specify the output format. See ["Using templates"](https://github.com/anchore/syft/wiki/using-templates) below.
 
 Note that flags using the @<version> can be used for earlier versions of each specification as well.
@@ -135,7 +135,7 @@ Note that flags using the @<version> can be used for earlier versions of each specification as well.
 - Go (go.mod, Go binaries)
 - GitHub (workflows, actions)
 - Haskell (cabal, stack)
-- Java (jar, ear, war, par, sar, nar, native-image)
+- Java (jar, ear, war, par, sar, nar, rar, native-image)
 - JavaScript (npm, yarn)
 - Jenkins Plugins (jpi, hpi)
 - Linux kernel archives (vmlinz)
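
A quick way to exercise the versioned format flags documented above (a sketch; the image is just a placeholder):

```
syft scan alpine:3.19 -o spdx-json@2.2
```

The same `@<version>` suffix applies to the other specification-backed formats.
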

View File

@@ -1,5 +1,9 @@
 version: "3"
 
+includes:
+  generate:cpe-index: ./task.d/generate/cpe-index.yaml
+
 vars:
   OWNER: anchore
   PROJECT: syft
@@ -511,10 +515,11 @@ tasks:
       - "gofmt -s -w ./internal/spdxlicense"
 
   generate-cpe-dictionary-index:
-    desc: Generate the CPE index based off of the latest available CPE dictionary
-    dir: "syft/pkg/cataloger/internal/cpegenerate/dictionary"
+    desc: Generate the CPE index from local cache
     cmds:
-      - "go generate"
+      - task: generate:cpe-index:cache:pull
+      - task: generate:cpe-index:cache:update
+      - task: generate:cpe-index:build
 
 ## Build-related targets #################################
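
With the new include wired in, the wrapper task simply chains the three sub-tasks; a minimal invocation sketch (assuming a bootstrapped dev environment with go-task installed):

```
task generate-cpe-dictionary-index   # pull cache, update from the NVD API, then build the index
```
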

View File

@@ -253,7 +253,6 @@ func generateSBOMForAttestation(ctx context.Context, id clio.Identification, opt
 	}
 
 	src, err := getSource(ctx, opts, userInput, stereoscope.RegistryTag)
 	if err != nil {
-
 		return nil, err
 	}

View File

@@ -87,8 +87,8 @@ func runCatalogerList(opts *catalogerListOptions) error {
 }
 
 func catalogerListReport(opts *catalogerListOptions, allTaskGroups [][]task.Task) (string, error) {
-	defaultCatalogers := options.Flatten(opts.DefaultCatalogers)
-	selectCatalogers := options.Flatten(opts.SelectCatalogers)
+	defaultCatalogers := options.FlattenAndSort(opts.DefaultCatalogers)
+	selectCatalogers := options.FlattenAndSort(opts.SelectCatalogers)
 	selectedTaskGroups, selectionEvidence, err := task.SelectInGroups(
 		allTaskGroups,
 		cataloging.NewSelectionRequest().

View File

@@ -185,7 +185,6 @@ func runScan(ctx context.Context, id clio.Identification, opts *scanOptions, use
 	}
 
 	src, err := getSource(ctx, &opts.Catalog, userInput, sources...)
 	if err != nil {
-
 		return err
 	}

View File

@@ -198,9 +198,10 @@ func (cfg Catalog) ToPackagesConfig() pkgcataloging.Config {
 		},
 		Nix: nix.DefaultConfig().
 			WithCaptureOwnedFiles(cfg.Nix.CaptureOwnedFiles),
-		Python: python.CatalogerConfig{
-			GuessUnpinnedRequirements: cfg.Python.GuessUnpinnedRequirements,
-		},
+		Python: python.DefaultCatalogerConfig().
+			WithSearchRemoteLicenses(*multiLevelOption(false, enrichmentEnabled(cfg.Enrich, task.Python), cfg.Python.SearchRemoteLicenses)).
+			WithPypiBaseURL(cfg.Python.PypiBaseURL).
+			WithGuessUnpinnedRequirements(*multiLevelOption(false, enrichmentEnabled(cfg.Enrich, task.Python), cfg.Python.GuessUnpinnedRequirements)),
 		JavaArchive: java.DefaultArchiveCatalogerConfig().
 			WithUseMavenLocalRepository(*multiLevelOption(false, enrichmentEnabled(cfg.Enrich, task.Java, task.Maven), cfg.Java.UseMavenLocalRepository)).
 			WithMavenLocalRepositoryDir(cfg.Java.MavenLocalRepositoryDir).
@@ -283,10 +284,10 @@ func (cfg *Catalog) PostLoad() error {
 	cfg.From = Flatten(cfg.From)
 
-	cfg.Catalogers = Flatten(cfg.Catalogers)
-	cfg.DefaultCatalogers = Flatten(cfg.DefaultCatalogers)
-	cfg.SelectCatalogers = Flatten(cfg.SelectCatalogers)
-	cfg.Enrich = Flatten(cfg.Enrich)
+	cfg.Catalogers = FlattenAndSort(cfg.Catalogers)
+	cfg.DefaultCatalogers = FlattenAndSort(cfg.DefaultCatalogers)
+	cfg.SelectCatalogers = FlattenAndSort(cfg.SelectCatalogers)
+	cfg.Enrich = FlattenAndSort(cfg.Enrich)
 
 	// for backwards compatibility
 	cfg.DefaultCatalogers = append(cfg.DefaultCatalogers, cfg.Catalogers...)
@@ -311,6 +312,11 @@ func Flatten(commaSeparatedEntries []string) []string {
 			out = append(out, strings.TrimSpace(s))
 		}
 	}
+	return out
+}
+
+func FlattenAndSort(commaSeparatedEntries []string) []string {
+	out := Flatten(commaSeparatedEntries)
 	sort.Strings(out)
 	return out
 }
@@ -320,6 +326,7 @@ var publicisedEnrichmentOptions = []string{
 	task.Golang,
 	task.Java,
 	task.JavaScript,
+	task.Python,
 }
 
 func enrichmentEnabled(enrichDirectives []string, features ...string) *bool {
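
The practical difference between the two helpers is easiest to see side by side; a small standalone sketch (local lowercase copies of the helpers shown in the diff, so the snippet compiles on its own):

```go
package main

import (
	"fmt"
	"sort"
	"strings"
)

// flatten mirrors options.Flatten above: split on commas, trim whitespace,
// and preserve the order the user wrote (which --from now relies on).
func flatten(entries []string) []string {
	var out []string
	for _, entry := range entries {
		for _, s := range strings.Split(entry, ",") {
			out = append(out, strings.TrimSpace(s))
		}
	}
	return out
}

// flattenAndSort mirrors options.FlattenAndSort: the same split, then sorted,
// for selectors like cataloger lists where order carries no meaning.
func flattenAndSort(entries []string) []string {
	out := flatten(entries)
	sort.Strings(out)
	return out
}

func main() {
	fmt.Println(flatten([]string{"registry,docker", "oci-dir"}))        // [registry docker oci-dir]
	fmt.Println(flattenAndSort([]string{"registry,docker", "oci-dir"})) // [docker oci-dir registry]
}
```
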

View File

@@ -79,6 +79,98 @@ func TestCatalog_PostLoad(t *testing.T) {
 	}
 }
 
+func TestFlatten(t *testing.T) {
+	tests := []struct {
+		name     string
+		input    []string
+		expected []string
+	}{
+		{
+			name:     "preserves order of comma-separated values",
+			input:    []string{"registry,docker,oci-dir"},
+			expected: []string{"registry", "docker", "oci-dir"},
+		},
+		{
+			name:     "preserves order across multiple entries",
+			input:    []string{"registry,docker", "oci-dir"},
+			expected: []string{"registry", "docker", "oci-dir"},
+		},
+		{
+			name:     "trims whitespace",
+			input:    []string{" registry , docker ", " oci-dir "},
+			expected: []string{"registry", "docker", "oci-dir"},
+		},
+		{
+			name:     "handles single value",
+			input:    []string{"registry"},
+			expected: []string{"registry"},
+		},
+		{
+			name:     "handles empty input",
+			input:    []string{},
+			expected: nil,
+		},
+		{
+			name:     "preserves reverse alphabetical order",
+			input:    []string{"zebra,yankee,xray"},
+			expected: []string{"zebra", "yankee", "xray"},
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got := Flatten(tt.input)
+			assert.Equal(t, tt.expected, got)
+		})
+	}
+}
+
+func TestFlattenAndSort(t *testing.T) {
+	tests := []struct {
+		name     string
+		input    []string
+		expected []string
+	}{
+		{
+			name:     "sorts comma-separated values",
+			input:    []string{"registry,docker,oci-dir"},
+			expected: []string{"docker", "oci-dir", "registry"},
+		},
+		{
+			name:     "sorts across multiple entries",
+			input:    []string{"registry,docker", "oci-dir"},
+			expected: []string{"docker", "oci-dir", "registry"},
+		},
+		{
+			name:     "trims whitespace and sorts",
+			input:    []string{" registry , docker ", " oci-dir "},
+			expected: []string{"docker", "oci-dir", "registry"},
+		},
+		{
+			name:     "handles single value",
+			input:    []string{"registry"},
+			expected: []string{"registry"},
+		},
+		{
+			name:     "handles empty input",
+			input:    []string{},
+			expected: nil,
+		},
+		{
+			name:     "sorts reverse alphabetical order",
+			input:    []string{"zebra,yankee,xray"},
+			expected: []string{"xray", "yankee", "zebra"},
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got := FlattenAndSort(tt.input)
+			assert.Equal(t, tt.expected, got)
+		})
+	}
+}
 
 func Test_enrichmentEnabled(t *testing.T) {
 	tests := []struct {
 		directives string
@@ -139,7 +231,7 @@ func Test_enrichmentEnabled(t *testing.T) {
 
 	for _, test := range tests {
 		t.Run(test.directives, func(t *testing.T) {
-			got := enrichmentEnabled(Flatten([]string{test.directives}), test.test)
+			got := enrichmentEnabled(FlattenAndSort([]string{test.directives}), test.test)
 			assert.Equal(t, test.expected, got)
 		})
 	}

View File

@@ -3,7 +3,9 @@ package options
 import "github.com/anchore/clio"
 
 type pythonConfig struct {
-	GuessUnpinnedRequirements bool `json:"guess-unpinned-requirements" yaml:"guess-unpinned-requirements" mapstructure:"guess-unpinned-requirements"`
+	SearchRemoteLicenses      *bool  `json:"search-remote-licenses" yaml:"search-remote-licenses" mapstructure:"search-remote-licenses"`
+	PypiBaseURL               string `json:"pypi-base-url" yaml:"pypi-base-url" mapstructure:"pypi-base-url"`
+	GuessUnpinnedRequirements *bool  `json:"guess-unpinned-requirements" yaml:"guess-unpinned-requirements" mapstructure:"guess-unpinned-requirements"`
 }
 
 var _ interface {
@@ -11,6 +13,8 @@ var _ interface {
 } = (*pythonConfig)(nil)
 
 func (o *pythonConfig) DescribeFields(descriptions clio.FieldDescriptionSet) {
+	descriptions.Add(&o.SearchRemoteLicenses, `enables Syft to use the network to fill in more detailed license information`)
+	descriptions.Add(&o.PypiBaseURL, `base Pypi url to use`)
 	descriptions.Add(&o.GuessUnpinnedRequirements, `when running across entries in requirements.txt that do not specify a specific version
 (e.g. "sqlalchemy >= 1.0.0, <= 2.0.0, != 3.0.0, <= 3.0.0"), attempt to guess what the version could
 be based on the version requirements specified (e.g. "1.0.0"). When enabled the lowest expressible version
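
The new options flow into the cataloger through the builder methods seen in the catalog.go diff above; a sketch of direct library use (the import path is assumed from the package name, and the values are illustrative only):

```go
package main

import (
	"fmt"

	"github.com/anchore/syft/syft/pkg/cataloger/python" // assumed import path
)

func main() {
	cfg := python.DefaultCatalogerConfig().
		WithSearchRemoteLicenses(true).           // allow network calls for richer license data
		WithPypiBaseURL("https://pypi.org/pypi"). // illustrative endpoint, not a confirmed default
		WithGuessUnpinnedRequirements(false)

	fmt.Printf("%+v\n", cfg)
}
```
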

View File

@@ -25,7 +25,6 @@ func BenchmarkImagePackageCatalogers(b *testing.B) {
 	// get the source object for the image
 	theSource, err := syft.GetSource(context.Background(), tarPath, syft.DefaultGetSourceConfig().WithSources("docker-archive"))
 	require.NoError(b, err)
-
 	b.Cleanup(func() {
 		require.NoError(b, theSource.Close())
 	})

View File

@@ -38,11 +38,11 @@ func catalogFixtureImageWithConfig(t *testing.T, fixtureImageName string, cfg *s
 	// get the source to build an SBOM against
 	theSource, err := syft.GetSource(context.Background(), tarPath, syft.DefaultGetSourceConfig().WithSources("docker-archive"))
 	require.NoError(t, err)
-
 	t.Cleanup(func() {
 		require.NoError(t, theSource.Close())
 	})
 
+	// build the SBOM
 	s, err := syft.CreateSBOM(context.Background(), theSource, cfg)
 	require.NoError(t, err)
@@ -66,7 +66,7 @@ func catalogDirectory(t *testing.T, dir string, catalogerSelection ...string) (s
 func catalogDirectoryWithConfig(t *testing.T, dir string, cfg *syft.CreateSBOMConfig) (sbom.SBOM, source.Source) {
 	cfg.CatalogerSelection = cfg.CatalogerSelection.WithDefaults(pkgcataloging.DirectoryTag)
 
-	// get the source to build an sbom against
+	// get the source to build an SBOM against
 	theSource, err := syft.GetSource(context.Background(), dir, syft.DefaultGetSourceConfig().WithSources("dir"))
 	require.NoError(t, err)
 	t.Cleanup(func() {

View File

@@ -23,6 +23,7 @@ const defaultImage = "alpine:3.19"
 func main() {
 	// automagically get a source.Source for arbitrary string input
 	src := getSource(imageReference())
+	defer src.Close()
 
 	// will catalog the given source and return a SBOM keeping in mind several configurable options
 	sbom := getSBOM(src)
@@ -46,7 +47,6 @@ func getSource(input string) source.Source {
 	fmt.Println("detecting source type for input:", input, "...")
 
 	src, err := syft.GetSource(context.Background(), input, nil)
 	if err != nil {
-
 		panic(err)
 	}

View File

@@ -19,6 +19,7 @@ const defaultImage = "alpine:3.19"
 func main() {
 	// automagically get a source.Source for arbitrary string input
 	src := getSource(imageReference())
+	defer src.Close()
 
 	// catalog the given source and return a SBOM
 	sbom := getSBOM(src)
@@ -40,7 +41,6 @@ func imageReference() string {
 func getSource(input string) source.Source {
 	src, err := syft.GetSource(context.Background(), input, nil)
 	if err != nil {
-
 		panic(err)
 	}

View File

@@ -19,6 +19,7 @@ const defaultImage = "alpine:3.19"
 func main() {
 	// automagically get a source.Source for arbitrary string input
 	src := getSource(imageReference())
+	defer src.Close()
 
 	// catalog the given source and return a SBOM
 	// let's explicitly use catalogers that are:
@@ -44,7 +45,6 @@ func imageReference() string {
 func getSource(input string) source.Source {
 	src, err := syft.GetSource(context.Background(), input, nil)
 	if err != nil {
-
 		panic(err)
 	}

View File

@@ -15,6 +15,7 @@ func main() {
 	image := "alpine:3.19"
 
 	src, _ := syft.GetSource(context.Background(), image, syft.DefaultGetSourceConfig().WithSources("registry"))
+	defer src.Close()
 
 	sbom, _ := syft.CreateSBOM(context.Background(), src, syft.DefaultCreateSBOMConfig())

go.mod (34 changes)
View File

@@ -24,7 +24,7 @@ require (
 	github.com/anchore/go-testutils v0.0.0-20200925183923-d5f45b0d3c04
 	github.com/anchore/go-version v1.2.2-0.20200701162849-18adb9c92b9b
 	github.com/anchore/packageurl-go v0.1.1-0.20250220190351-d62adb6e1115
-	github.com/anchore/stereoscope v0.1.11
+	github.com/anchore/stereoscope v0.1.12
 	github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be
 	github.com/aquasecurity/go-pep440-version v0.0.1
 	github.com/bitnami/go-version v0.0.0-20250131085805-b1f57a8634ef
@@ -53,19 +53,19 @@ require (
 	github.com/google/uuid v1.6.0
 	github.com/gookit/color v1.6.0
 	github.com/hashicorp/go-cleanhttp v0.5.2
-	github.com/hashicorp/go-getter v1.8.2
+	github.com/hashicorp/go-getter v1.8.3
 	github.com/hashicorp/go-multierror v1.1.1
 	github.com/hashicorp/hcl/v2 v2.24.0
 	github.com/iancoleman/strcase v0.3.0
 	github.com/invopop/jsonschema v0.7.0
-	github.com/jedib0t/go-pretty/v6 v6.6.8
+	github.com/jedib0t/go-pretty/v6 v6.7.1
 	github.com/jinzhu/copier v0.4.0
 	github.com/kastenhq/goversion v0.0.0-20230811215019-93b2f8823953
 	github.com/magiconair/properties v1.8.10
 	github.com/mholt/archives v0.1.5
 	github.com/moby/sys/mountinfo v0.7.2
 	github.com/nix-community/go-nix v0.0.0-20250101154619-4bdde671e0a1
-	github.com/olekukonko/tablewriter v1.0.9
+	github.com/olekukonko/tablewriter v1.1.1
 	github.com/opencontainers/go-digest v1.0.0
 	github.com/pelletier/go-toml v1.9.5
 	github.com/quasilyte/go-ruleguard/dsl v0.3.23
@@ -90,9 +90,9 @@ require (
 	go.uber.org/goleak v1.3.0
 	go.yaml.in/yaml/v3 v3.0.4
 	golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b
-	golang.org/x/mod v0.29.0
+	golang.org/x/mod v0.30.0
 	golang.org/x/net v0.46.0
-	modernc.org/sqlite v1.39.1
+	modernc.org/sqlite v1.40.0
 )
 
 require (
@@ -131,7 +131,7 @@ require (
 	github.com/charmbracelet/x/term v0.2.1 // indirect
 	github.com/cloudflare/circl v1.6.1 // indirect
 	github.com/containerd/cgroups v1.1.0 // indirect
-	github.com/containerd/containerd v1.7.28 // indirect
+	github.com/containerd/containerd v1.7.29 // indirect
 	github.com/containerd/containerd/api v1.8.0 // indirect
 	github.com/containerd/continuity v0.4.4 // indirect
 	github.com/containerd/errdefs v1.0.0 // indirect
@@ -142,7 +142,7 @@ require (
 	github.com/containerd/stargz-snapshotter/estargz v0.16.3 // indirect
 	github.com/containerd/ttrpc v1.2.7 // indirect
 	github.com/containerd/typeurl/v2 v2.2.0 // indirect
-	github.com/cyphar/filepath-securejoin v0.4.1 // indirect
+	github.com/cyphar/filepath-securejoin v0.6.0 // indirect
 	github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
 	github.com/docker/cli v28.5.1+incompatible // indirect
 	github.com/docker/distribution v2.8.3+incompatible // indirect
@@ -191,7 +191,7 @@ require (
 	github.com/mattn/go-colorable v0.1.14 // indirect
 	github.com/mattn/go-isatty v0.0.20 // indirect
 	github.com/mattn/go-localereader v0.0.2-0.20220822084749-2491eb6c1c75 // indirect
-	github.com/mattn/go-runewidth v0.0.16 // indirect
+	github.com/mattn/go-runewidth v0.0.19 // indirect
 	github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect
 	github.com/mikelolasagasti/xz v1.0.1 // indirect
 	github.com/minio/minlz v1.0.1 // indirect
@@ -212,10 +212,10 @@ require (
 	github.com/nwaples/rardecode v1.1.3 // indirect
 	github.com/nwaples/rardecode/v2 v2.2.0 // indirect
 	github.com/olekukonko/errors v1.1.0 // indirect
-	github.com/olekukonko/ll v0.0.9 // indirect
+	github.com/olekukonko/ll v0.1.2 // indirect
 	github.com/opencontainers/image-spec v1.1.1 // indirect
 	github.com/opencontainers/runtime-spec v1.1.0 // indirect
-	github.com/opencontainers/selinux v1.11.0 // indirect
+	github.com/opencontainers/selinux v1.13.0 // indirect
 	github.com/pborman/indent v1.2.1 // indirect
 	github.com/pelletier/go-toml/v2 v2.2.3 // indirect
 	github.com/pierrec/lz4/v4 v4.1.22 // indirect
@@ -270,7 +270,7 @@ require (
 	golang.org/x/sys v0.37.0 // indirect
 	golang.org/x/term v0.36.0 // indirect
 	golang.org/x/text v0.30.0 // indirect
-	golang.org/x/time v0.12.0 // indirect
+	golang.org/x/time v0.14.0
 	golang.org/x/tools v0.38.0
 	golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 // indirect
 	google.golang.org/api v0.203.0 // indirect
@@ -286,9 +286,13 @@ require (
 	modernc.org/memory v1.11.0 // indirect
 )
 
-require github.com/gpustack/gguf-parser-go v0.22.1
+require (
+	github.com/cespare/xxhash/v2 v2.3.0
+	github.com/gpustack/gguf-parser-go v0.22.1
+)
 
 require (
+	cyphar.com/go-pathrs v0.2.1 // indirect
 	github.com/aws/aws-sdk-go-v2 v1.36.5 // indirect
 	github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.10 // indirect
 	github.com/aws/aws-sdk-go-v2/config v1.29.17 // indirect
@@ -307,11 +311,15 @@ require (
 	github.com/aws/aws-sdk-go-v2/service/ssooidc v1.30.3 // indirect
 	github.com/aws/aws-sdk-go-v2/service/sts v1.34.0 // indirect
 	github.com/aws/smithy-go v1.22.4 // indirect
+	github.com/clipperhouse/displaywidth v0.3.1 // indirect
+	github.com/clipperhouse/stringish v0.1.1 // indirect
+	github.com/clipperhouse/uax29/v2 v2.2.0 // indirect
 	github.com/hashicorp/aws-sdk-go-base/v2 v2.0.0-beta.65 // indirect
 	github.com/henvic/httpretty v0.1.4 // indirect
 	github.com/json-iterator/go v1.1.12 // indirect
 	github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
 	github.com/modern-go/reflect2 v1.0.2 // indirect
+	github.com/olekukonko/cat v0.0.0-20250911104152-50322a0618f6 // indirect
 	github.com/smallnest/ringbuffer v0.0.0-20241116012123-461381446e3d // indirect
 	gonum.org/v1/gonum v0.15.1 // indirect
 )

go.sum (60 changes)
View File

@@ -59,6 +59,8 @@ cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RX
cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
cloud.google.com/go/storage v1.43.0 h1:CcxnSohZwizt4LCzQHWvBf1/kvtHUn7gk9QERXPyXFs=
cloud.google.com/go/storage v1.43.0/go.mod h1:ajvxEa7WmZS1PxvKRq4bq0tFT3vMd502JwstCcYv0Q0=
+cyphar.com/go-pathrs v0.2.1 h1:9nx1vOgwVvX1mNBWDu93+vaceedpbsDqo+XuBGL40b8=
+cyphar.com/go-pathrs v0.2.1/go.mod h1:y8f1EMG7r+hCuFf/rXsKqMJrJAUoADZGNh5/vZPKcGc=
dario.cat/mergo v1.0.1 h1:Ra4+bf83h2ztPIQYNP99R6m+Y7KfnARDfID+a+vLl4s=
dario.cat/mergo v1.0.1/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
@@ -138,8 +140,8 @@ github.com/anchore/go-version v1.2.2-0.20200701162849-18adb9c92b9b h1:e1bmaoJfZV
github.com/anchore/go-version v1.2.2-0.20200701162849-18adb9c92b9b/go.mod h1:Bkc+JYWjMCF8OyZ340IMSIi2Ebf3uwByOk6ho4wne1E=
github.com/anchore/packageurl-go v0.1.1-0.20250220190351-d62adb6e1115 h1:ZyRCmiEjnoGJZ1+Ah0ZZ/mKKqNhGcUZBl0s7PTTDzvY=
github.com/anchore/packageurl-go v0.1.1-0.20250220190351-d62adb6e1115/go.mod h1:KoYIv7tdP5+CC9VGkeZV4/vGCKsY55VvoG+5dadg4YI=
-github.com/anchore/stereoscope v0.1.11 h1:YP/XUNcJyMbOOPAWPkeZNCVlKKTRO2cnBTEeUW6I40Y=
-github.com/anchore/stereoscope v0.1.11/go.mod h1:G3PZlzPbxFhylj9pQwtqfVPaahuWmy/UCtv5FTIIMvg=
+github.com/anchore/stereoscope v0.1.12 h1:4T/10G7Nb98UoJBKVvAIhsAtrR63lZXxMJb/Qfw5inw=
+github.com/anchore/stereoscope v0.1.12/go.mod h1:G3PZlzPbxFhylj9pQwtqfVPaahuWmy/UCtv5FTIIMvg=
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8=
github.com/andybalholm/brotli v1.2.0 h1:ukwgCxwYrmACq68yiUqwIWnGY0cTPox/M94sVwToPjQ=
github.com/andybalholm/brotli v1.2.0/go.mod h1:rzTDkvFWvIrjDXZHkuS16NPggd91W3kUSvPlQ1pLaKY=
@@ -227,7 +229,6 @@ github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqy
github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
-github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko=
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
@@ -263,6 +264,12 @@ github.com/chzyer/test v1.0.0/go.mod h1:2JlltgoNkt4TW/z9V/IzDdFaMTM2JPIi26O1pF38
github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag=
github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
+github.com/clipperhouse/displaywidth v0.3.1 h1:k07iN9gD32177o1y4O1jQMzbLdCrsGJh+blirVYybsk=
+github.com/clipperhouse/displaywidth v0.3.1/go.mod h1:tgLJKKyaDOCadywag3agw4snxS5kYEuYR6Y9+qWDDYM=
+github.com/clipperhouse/stringish v0.1.1 h1:+NSqMOr3GR6k1FdRhhnXrLfztGzuG+VuFDfatpWHKCs=
+github.com/clipperhouse/stringish v0.1.1/go.mod h1:v/WhFtE1q0ovMta2+m+UbpZ+2/HEXNWYXQgCt4hdOzA=
+github.com/clipperhouse/uax29/v2 v2.2.0 h1:ChwIKnQN3kcZteTXMgb1wztSgaU+ZemkgWdohwgs8tY=
+github.com/clipperhouse/uax29/v2 v2.2.0/go.mod h1:EFJ2TJMRUaplDxHKj1qAEhCtQPW2tJSwu5BF98AuoVM=
github.com/cloudflare/circl v1.6.1 h1:zqIqSPIndyBh1bjLVVDHMPpVKqp8Su/V+6MeDzzQBQ0=
github.com/cloudflare/circl v1.6.1/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs=
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
@@ -277,8 +284,8 @@ github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWH
github.com/cncf/xds/go v0.0.0-20211130200136-a8f946100490/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
github.com/containerd/cgroups v1.1.0 h1:v8rEWFl6EoqHB+swVNjVoCJE8o3jX7e8nqBGPLaDFBM=
github.com/containerd/cgroups v1.1.0/go.mod h1:6ppBcbh/NOOUU+dMKrykgaBnK9lCIBxHqJDGwsa1mIw=
-github.com/containerd/containerd v1.7.28 h1:Nsgm1AtcmEh4AHAJ4gGlNSaKgXiNccU270Dnf81FQ3c=
-github.com/containerd/containerd v1.7.28/go.mod h1:azUkWcOvHrWvaiUjSQH0fjzuHIwSPg1WL5PshGP4Szs=
+github.com/containerd/containerd v1.7.29 h1:90fWABQsaN9mJhGkoVnuzEY+o1XDPbg9BTC9QTAHnuE=
+github.com/containerd/containerd v1.7.29/go.mod h1:azUkWcOvHrWvaiUjSQH0fjzuHIwSPg1WL5PshGP4Szs=
github.com/containerd/containerd/api v1.8.0 h1:hVTNJKR8fMc/2Tiw60ZRijntNMd1U+JVMyTRdsD2bS0=
github.com/containerd/containerd/api v1.8.0/go.mod h1:dFv4lt6S20wTu/hMcP4350RL87qPWLVa/OHOwmmdnYc=
github.com/containerd/continuity v0.4.4 h1:/fNVfTJ7wIl/YPMHjf+5H32uFhl63JucB34PlCpMKII=
@@ -304,8 +311,8 @@ github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSV
github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
-github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s=
-github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI=
+github.com/cyphar/filepath-securejoin v0.6.0 h1:BtGB77njd6SVO6VztOHfPxKitJvd/VPT+OFBFMOi1Is=
+github.com/cyphar/filepath-securejoin v0.6.0/go.mod h1:A8hd4EnAeyujCJRrICiOWqjS1AX0a9kM5XL+NwKoYSc=
github.com/dave/jennifer v1.7.1 h1:B4jJJDHelWcDhlRQxWeo0Npa/pYKBLrirAQoTN45txo=
github.com/dave/jennifer v1.7.1/go.mod h1:nXbxhEmQfOZhWml3D1cDK5M1FLnMSozpbFN/m3RmGZc=
github.com/davecgh/go-spew v0.0.0-20161028175848-04cdfd42973b/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@@ -558,8 +565,8 @@ github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtng
github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ=
github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48=
-github.com/hashicorp/go-getter v1.8.2 h1:CGCK+bZQLl44PYiwJweVzfpjg7bBwtuXu3AGcLiod2o=
-github.com/hashicorp/go-getter v1.8.2/go.mod h1:CUTt9x2bCtJ/sV8ihgrITL3IUE+0BE1j/e4n5P/GIM4=
+github.com/hashicorp/go-getter v1.8.3 h1:gIS+oTNv3kyYAvlUVgMR46MiG0bM0KuSON/KZEvRoRg=
+github.com/hashicorp/go-getter v1.8.3/go.mod h1:CUTt9x2bCtJ/sV8ihgrITL3IUE+0BE1j/e4n5P/GIM4=
github.com/hashicorp/go-hclog v0.12.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ=
github.com/hashicorp/go-hclog v1.0.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ=
github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60=
@@ -613,8 +620,8 @@ github.com/invopop/jsonschema v0.7.0 h1:2vgQcBz1n256N+FpX3Jq7Y17AjYt46Ig3zIWyy77
github.com/invopop/jsonschema v0.7.0/go.mod h1:O9uiLokuu0+MGFlyiaqtWxwqJm41/+8Nj0lD7A36YH0=
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A=
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo=
-github.com/jedib0t/go-pretty/v6 v6.6.8 h1:JnnzQeRz2bACBobIaa/r+nqjvws4yEhcmaZ4n1QzsEc=
-github.com/jedib0t/go-pretty/v6 v6.6.8/go.mod h1:YwC5CE4fJ1HFUDeivSV1r//AmANFHyqczZk+U6BDALU=
+github.com/jedib0t/go-pretty/v6 v6.7.1 h1:bHDSsj93NuJ563hHuM7ohk/wpX7BmRFNIsVv1ssI2/M=
+github.com/jedib0t/go-pretty/v6 v6.7.1/go.mod h1:YwC5CE4fJ1HFUDeivSV1r//AmANFHyqczZk+U6BDALU=
github.com/jinzhu/copier v0.4.0 h1:w3ciUoD19shMCRargcpm0cm91ytaBhDvuRpz1ODO/U8=
github.com/jinzhu/copier v0.4.0/go.mod h1:DfbEm0FYsaqBcKcFuvmOZb218JkPGtvSHsKg8S8hyyg=
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
@@ -681,8 +688,8 @@ github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWE
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-localereader v0.0.2-0.20220822084749-2491eb6c1c75 h1:P8UmIzZMYDR+NGImiFvErt6VWfIRPuGM+vyjiEdkmIw=
github.com/mattn/go-localereader v0.0.2-0.20220822084749-2491eb6c1c75/go.mod h1:8fBrzywKY7BI3czFoHkuzRoWE9C+EiG4R1k4Cjx5p88=
-github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
-github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
+github.com/mattn/go-runewidth v0.0.19 h1:v++JhqYnZuu5jSKrk9RbgF5v4CGUjqRfBm05byFGLdw=
+github.com/mattn/go-runewidth v0.0.19/go.mod h1:XBkDxAl56ILZc9knddidhrOlY5R/pDhgLpndooCuJAs=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d h1:5PJl274Y63IEHC+7izoQE9x6ikvDFZS2mDVS3drnohI=
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
@@ -752,12 +759,14 @@ github.com/nwaples/rardecode v1.1.3 h1:cWCaZwfM5H7nAD6PyEdcVnczzV8i/JtotnyW/dD9l
github.com/nwaples/rardecode v1.1.3/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0=
github.com/nwaples/rardecode/v2 v2.2.0 h1:4ufPGHiNe1rYJxYfehALLjup4Ls3ck42CWwjKiOqu0A=
github.com/nwaples/rardecode/v2 v2.2.0/go.mod h1:7uz379lSxPe6j9nvzxUZ+n7mnJNgjsRNb6IbvGVHRmw=
+github.com/olekukonko/cat v0.0.0-20250911104152-50322a0618f6 h1:zrbMGy9YXpIeTnGj4EljqMiZsIcE09mmF8XsD5AYOJc=
+github.com/olekukonko/cat v0.0.0-20250911104152-50322a0618f6/go.mod h1:rEKTHC9roVVicUIfZK7DYrdIoM0EOr8mK1Hj5s3JjH0=
github.com/olekukonko/errors v1.1.0 h1:RNuGIh15QdDenh+hNvKrJkmxxjV4hcS50Db478Ou5sM=
github.com/olekukonko/errors v1.1.0/go.mod h1:ppzxA5jBKcO1vIpCXQ9ZqgDh8iwODz6OXIGKU8r5m4Y=
-github.com/olekukonko/ll v0.0.9 h1:Y+1YqDfVkqMWuEQMclsF9HUR5+a82+dxJuL1HHSRpxI=
-github.com/olekukonko/ll v0.0.9/go.mod h1:En+sEW0JNETl26+K8eZ6/W4UQ7CYSrrgg/EdIYT2H8g=
+github.com/olekukonko/ll v0.1.2 h1:lkg/k/9mlsy0SxO5aC+WEpbdT5K83ddnNhAepz7TQc0=
+github.com/olekukonko/ll v0.1.2/go.mod h1:b52bVQRRPObe+yyBl0TxNfhesL0nedD4Cht0/zx55Ew=
-github.com/olekukonko/tablewriter v1.0.9 h1:XGwRsYLC2bY7bNd93Dk51bcPZksWZmLYuaTHR0FqfL8=
-github.com/olekukonko/tablewriter v1.0.9/go.mod h1:5c+EBPeSqvXnLLgkm9isDdzR3wjfBkHR9Nhfp3NWrzo=
+github.com/olekukonko/tablewriter v1.1.1 h1:b3reP6GCfrHwmKkYwNRFh2rxidGHcT6cgxj/sHiDDx0=
+github.com/olekukonko/tablewriter v1.1.1/go.mod h1:De/bIcTF+gpBDB3Alv3fEsZA+9unTsSzAg/ZGADCtn4=
github.com/onsi/gomega v1.34.1 h1:EUMJIKUjM8sKjYbtxQI9A4z2o+rruxnzNvpknOXie6k=
github.com/onsi/gomega v1.34.1/go.mod h1:kU1QgUvBDLXBJq618Xvm2LUX6rSAfRaFRTcdOeDLwwY=
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
@@ -766,8 +775,8 @@ github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJw
github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M=
github.com/opencontainers/runtime-spec v1.1.0 h1:HHUyrt9mwHUjtasSbXSMvs4cyFxh+Bll4AjJ9odEGpg=
github.com/opencontainers/runtime-spec v1.1.0/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0=
-github.com/opencontainers/selinux v1.11.0 h1:+5Zbo97w3Lbmb3PeqQtpmTkMwsW5nRI3YaLpt7tQ7oU=
-github.com/opencontainers/selinux v1.11.0/go.mod h1:E5dMC3VPuVvVHDYmi78qvhJp8+M586T4DlDRYpFkyec=
+github.com/opencontainers/selinux v1.13.0 h1:Zza88GWezyT7RLql12URvoxsbLfjFx988+LGaWfbL84=
+github.com/opencontainers/selinux v1.13.0/go.mod h1:XxWTed+A/s5NNq4GmYScVy+9jzXhGBVEOAyucdRUY8s=
github.com/orisano/pixelmatch v0.0.0-20220722002657-fb0b55479cde/go.mod h1:nZgzbfBr3hhjoZnS66nKrHmduYNpc34ny7RK4z5/HM0=
github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
@@ -816,7 +825,6 @@ github.com/quasilyte/go-ruleguard/dsl v0.3.23 h1:lxjt5B6ZCiBeeNO8/oQsegE6fLeCzuM
github.com/quasilyte/go-ruleguard/dsl v0.3.23/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
-github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
@@ -1070,8 +1078,8 @@ golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
-golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA=
-golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w=
+golang.org/x/mod v0.30.0 h1:fDEXFVZ/fmCKProc/yAXXUijritrDzahmwwefnjoPFk=
+golang.org/x/mod v0.30.0/go.mod h1:lAsf5O2EvJeSFMiBxXDki7sCgAxEUcZHXoXMKT4GJKc=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -1248,8 +1256,8 @@ golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
-golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE=
-golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg=
+golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
+golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
@@ -1520,8 +1528,8 @@ modernc.org/opt v0.1.4 h1:2kNGMRiUjrp4LcaPuLY2PzUfqM/w9N23quVwhKt5Qm8=
modernc.org/opt v0.1.4/go.mod h1:03fq9lsNfvkYSfxrfUhZCWPk1lm4cq4N+Bh//bEtgns=
modernc.org/sortutil v1.2.1 h1:+xyoGf15mM3NMlPDnFqrteY07klSFxLElE2PVuWIJ7w=
modernc.org/sortutil v1.2.1/go.mod h1:7ZI3a3REbai7gzCLcotuw9AC4VZVpYMjDzETGsSMqJE=
-modernc.org/sqlite v1.39.1 h1:H+/wGFzuSCIEVCvXYVHX5RQglwhMOvtHSv+VtidL2r4=
-modernc.org/sqlite v1.39.1/go.mod h1:9fjQZ0mB1LLP0GYrp39oOJXx/I2sxEnZtzCmEQIKvGE=
+modernc.org/sqlite v1.40.0 h1:bNWEDlYhNPAUdUdBzjAvn8icAs/2gaKlj4vM+tQ6KdQ=
+modernc.org/sqlite v1.40.0/go.mod h1:9fjQZ0mB1LLP0GYrp39oOJXx/I2sxEnZtzCmEQIKvGE=
modernc.org/strutil v1.2.1 h1:UneZBkQA+DX2Rp35KcM69cSsNES9ly8mQWD71HKlOA0=
modernc.org/strutil v1.2.1/go.mod h1:EHkiggD70koQxjVdSBM3JKM7k6L0FbGE5eymy9i3B9A=
modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y=


@@ -0,0 +1,46 @@
+package pkgtestobservation
+
+import "time"
+
+// Observations represents capability observations during testing
+type Observations struct {
+    License       bool         `json:"license"`
+    Relationships Relationship `json:"relationships"`
+    FileListing   Count        `json:"file_listing"`
+    FileDigests   Count        `json:"file_digests"`
+    IntegrityHash Count        `json:"integrity_hash"`
+}
+
+// Relationship tracks dependency relationship observations
+type Relationship struct {
+    Found bool `json:"found"`
+    Count int  `json:"count"`
+}
+
+// Count tracks whether a capability was found and how many times
+type Count struct {
+    Found bool `json:"found"`
+    Count int  `json:"count"`
+}
+
+// Test is the root structure for test-observations.json
+type Test struct {
+    Package    string                `json:"package"`
+    UpdatedAt  time.Time             `json:"updated_at"`
+    Catalogers map[string]*Cataloger `json:"catalogers"`
+    Parsers    map[string]*Parser    `json:"parsers"`
+}
+
+// Parser captures all observations for a parser
+type Parser struct {
+    MetadataTypes []string     `json:"metadata_types"`
+    PackageTypes  []string     `json:"package_types"`
+    Observations  Observations `json:"observations"`
+}
+
+// Cataloger captures all observations for a cataloger
+type Cataloger struct {
+    MetadataTypes []string     `json:"metadata_types"`
+    PackageTypes  []string     `json:"package_types"`
+    Observations  Observations `json:"observations"`
+}
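The new pkgtestobservation package above defines the JSON shape of test-observations.json. A minimal sketch of how these types could be populated and serialized follows; the import alias and path, the cataloger name, and the counts are illustrative assumptions, and only the type definitions themselves come from the diff.

package main

import (
    "encoding/json"
    "fmt"
    "time"

    // hypothetical import path; the real package lives inside the syft repo
    obs "example.invalid/internal/pkgtestobservation"
)

func main() {
    t := obs.Test{
        Package:   "syft/pkg/cataloger/javascript", // illustrative package name
        UpdatedAt: time.Now(),
        Catalogers: map[string]*obs.Cataloger{
            "javascript-lock-cataloger": {
                MetadataTypes: []string{"PnpmLockEntry"},
                PackageTypes:  []string{"npm"},
                Observations: obs.Observations{
                    License:       true,
                    Relationships: obs.Relationship{Found: true, Count: 12},
                    FileListing:   obs.Count{Found: true, Count: 3},
                },
            },
        },
        Parsers: map[string]*obs.Parser{},
    }

    // serialize as indented JSON so the committed file diffs cleanly
    data, err := json.MarshalIndent(t, "", "  ")
    if err != nil {
        panic(err)
    }
    fmt.Println(string(data))
}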


@@ -3,5 +3,5 @@ package internal
const (
    // JSONSchemaVersion is the current schema version output by the JSON encoder
    // This is roughly following the "SchemaVer" guidelines for versioning the JSON schema. Please see schema/json/README.md for details on how to increment.
-    JSONSchemaVersion = "16.0.42"
+    JSONSchemaVersion = "16.0.43"
)
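This version bump is what ties generated documents to their schema: the constant is embedded in the versioned $id URL that appears in the schema file diffs later on this page. A small sketch of that composition, with the formatting helper assumed rather than taken from the diff:

package main

import "fmt"

// mirrors the constant bumped in the diff above
const JSONSchemaVersion = "16.0.43"

func main() {
    // yields the $id seen in the schema files later in this compare view:
    // anchore.io/schema/syft/json/16.0.43/document
    fmt.Printf("anchore.io/schema/syft/json/%s/document\n", JSONSchemaVersion)
}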


@@ -81,6 +81,10 @@ func Test_EnvironmentTask(t *testing.T) {
    // get the source
    theSource, err := syft.GetSource(context.Background(), tarPath, syft.DefaultGetSourceConfig().WithSources("docker-archive"))
    require.NoError(t, err)
+    t.Cleanup(func() {
+        require.NoError(t, theSource.Close())
+    })

    resolver, err := theSource.FileResolver(source.SquashedScope)
    require.NoError(t, err)


@@ -50,6 +50,7 @@ func AllTypes() []any {
        pkg.PhpComposerLockEntry{},
        pkg.PhpPearEntry{},
        pkg.PhpPeclEntry{},
+       pkg.PnpmLockEntry{},
        pkg.PortageEntry{},
        pkg.PythonPackage{},
        pkg.PythonPdmLockEntry{},


@@ -95,10 +95,11 @@ var jsonTypes = makeJSONTypes(
    jsonNames(pkg.NpmPackage{}, "javascript-npm-package", "NpmPackageJsonMetadata"),
    jsonNames(pkg.NpmPackageLockEntry{}, "javascript-npm-package-lock-entry", "NpmPackageLockJsonMetadata"),
    jsonNames(pkg.YarnLockEntry{}, "javascript-yarn-lock-entry", "YarnLockJsonMetadata"),
+   jsonNames(pkg.PnpmLockEntry{}, "javascript-pnpm-lock-entry"),
    jsonNames(pkg.PEBinary{}, "pe-binary"),
    jsonNames(pkg.PhpComposerLockEntry{}, "php-composer-lock-entry", "PhpComposerJsonMetadata"),
    jsonNamesWithoutLookup(pkg.PhpComposerInstalledEntry{}, "php-composer-installed-entry", "PhpComposerJsonMetadata"), // the legacy value is split into two types, where the other is preferred
-   jsonNames(pkg.PhpPeclEntry{}, "php-pecl-entry", "PhpPeclMetadata"),
+   jsonNames(pkg.PhpPeclEntry{}, "php-pecl-entry", "PhpPeclMetadata"), //nolint:staticcheck
    jsonNames(pkg.PhpPearEntry{}, "php-pear-entry"),
    jsonNames(pkg.PortageEntry{}, "portage-db-entry", "PortageMetadata"),
    jsonNames(pkg.PythonPackage{}, "python-package", "PythonPackageMetadata"),
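The jsonNames registration above maps pkg.PnpmLockEntry to the "javascript-pnpm-lock-entry" metadata type. The Go shape implied by the JavascriptPnpmLockEntry and PnpmLockResolution definitions in the schema diffs further down this page looks roughly like the following sketch; the actual declarations in syft's pkg package are not shown in this excerpt, so the field layout here is inferred:

// Inferred from the schema definitions added later in this diff; illustrative only.
type PnpmLockResolution struct {
    // Integrity is Subresource Integrity hash for verification (SRI format)
    Integrity string `json:"integrity"`
}

type PnpmLockEntry struct {
    // Resolution is the resolution information for the package
    Resolution PnpmLockResolution `json:"resolution"`
    // Dependencies is a map of dependencies and their versions
    Dependencies map[string]string `json:"dependencies"`
}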


@@ -10,7 +10,6 @@ import (
    "sort"
    "strings"
    "text/template"
-   "time"
)

// This program generates license_list.go.
@@ -20,8 +19,7 @@ const (
)
var tmp = template.Must(template.New("").Parse(`// Code generated by go generate; DO NOT EDIT.
-// This file was generated by robots at {{ .Timestamp }}
-// using data from {{ .URL }}
+// This file was generated using data from {{ .URL }}

package spdxlicense

const Version = {{ printf "%q" .Version }}
@@ -78,13 +76,11 @@ func run() error {
    urlToLicense := buildURLToLicenseMap(result)

    err = tmp.Execute(f, struct {
-       Timestamp    time.Time
        URL          string
        Version      string
        LicenseIDs   map[string]string
        URLToLicense map[string]string
    }{
-       Timestamp:    time.Now(),
        URL:          url,
        Version:      result.Version,
        LicenseIDs:   licenseIDs,


@@ -53,6 +53,9 @@ const (
    JavaScript = "javascript"
    Node       = "node"
    NPM        = "npm"
+
+   // Python ecosystem labels
+   Python = "python"
)

//nolint:funlen
@@ -110,7 +113,7 @@ func DefaultPackageTaskFactories() Factories {
        func(cfg CatalogingFactoryConfig) pkg.Cataloger {
            return python.NewPackageCataloger(cfg.PackagesConfig.Python)
        },
-       pkgcataloging.DeclaredTag, pkgcataloging.DirectoryTag, pkgcataloging.LanguageTag, "python",
+       pkgcataloging.DeclaredTag, pkgcataloging.DirectoryTag, pkgcataloging.LanguageTag, Python,
    ),
    newSimplePackageTaskFactory(ruby.NewGemFileLockCataloger, pkgcataloging.DeclaredTag, pkgcataloging.DirectoryTag, pkgcataloging.LanguageTag, "ruby", "gem"),
    newSimplePackageTaskFactory(ruby.NewGemSpecCataloger, pkgcataloging.DeclaredTag, pkgcataloging.DirectoryTag, pkgcataloging.LanguageTag, "ruby", "gem", "gemspec"),
@@ -128,7 +131,7 @@ func DefaultPackageTaskFactories() Factories {
        pkgcataloging.InstalledTag, pkgcataloging.ImageTag, pkgcataloging.DirectoryTag, pkgcataloging.LanguageTag, "dotnet", "c#",
    ),
    newSimplePackageTaskFactory(dotnet.NewDotnetPackagesLockCataloger, pkgcataloging.DeclaredTag, pkgcataloging.ImageTag, pkgcataloging.DirectoryTag, pkgcataloging.LanguageTag, "dotnet", "c#"),
-   newSimplePackageTaskFactory(python.NewInstalledPackageCataloger, pkgcataloging.DirectoryTag, pkgcataloging.InstalledTag, pkgcataloging.ImageTag, pkgcataloging.LanguageTag, "python"),
+   newSimplePackageTaskFactory(python.NewInstalledPackageCataloger, pkgcataloging.DirectoryTag, pkgcataloging.InstalledTag, pkgcataloging.ImageTag, pkgcataloging.LanguageTag, Python),
    newPackageTaskFactory(
        func(cfg CatalogingFactoryConfig) pkg.Cataloger {
            return golang.NewGoModuleBinaryCataloger(cfg.PackagesConfig.Golang)
@@ -180,9 +183,9 @@ func DefaultPackageTaskFactories() Factories {

    // deprecated catalogers ////////////////////////////////////////
    // these are catalogers that should not be selectable other than specific inclusion via name or "deprecated" tag (to remain backwards compatible)
-   newSimplePackageTaskFactory(dotnet.NewDotnetDepsCataloger, pkgcataloging.DeprecatedTag), // TODO: remove in syft v2.0
-   newSimplePackageTaskFactory(dotnet.NewDotnetPortableExecutableCataloger, pkgcataloging.DeprecatedTag), // TODO: remove in syft v2.0
-   newSimplePackageTaskFactory(php.NewPeclCataloger, pkgcataloging.DeprecatedTag), // TODO: remove in syft v2.0
-   newSimplePackageTaskFactory(nix.NewStoreCataloger, pkgcataloging.DeprecatedTag), // TODO: remove in syft v2.0
+   newSimplePackageTaskFactory(dotnet.NewDotnetDepsCataloger, pkgcataloging.DeprecatedTag), //nolint:staticcheck // TODO: remove in syft v2.0
+   newSimplePackageTaskFactory(dotnet.NewDotnetPortableExecutableCataloger, pkgcataloging.DeprecatedTag), //nolint:staticcheck // TODO: remove in syft v2.0
+   newSimplePackageTaskFactory(php.NewPeclCataloger, pkgcataloging.DeprecatedTag), //nolint:staticcheck // TODO: remove in syft v2.0
+   newSimplePackageTaskFactory(nix.NewStoreCataloger, pkgcataloging.DeprecatedTag), //nolint:staticcheck // TODO: remove in syft v2.0
    }
}


@@ -130,7 +130,8 @@
"description": "Digests contains file content hashes for integrity verification"
}
},
-"type": "object"
+"type": "object",
+"description": "AlpmFileRecord represents a single file entry within an Arch Linux package with its associated metadata tracked by pacman."
},
"ApkDbEntry": {
"properties": {
@@ -433,16 +434,19 @@
"CPE": {
"properties": {
"cpe": {
-"type": "string"
+"type": "string",
+"description": "Value is the CPE string identifier."
},
"source": {
-"type": "string"
+"type": "string",
+"description": "Source is the source where this CPE was obtained or generated from."
}
},
"type": "object",
"required": [
"cpe"
-]
+],
+"description": "CPE represents a Common Platform Enumeration identifier used for matching packages to known vulnerabilities in security databases."
},
"ClassifierMatch": {
"properties": {
@@ -747,19 +751,23 @@
"Descriptor": {
"properties": {
"name": {
-"type": "string"
+"type": "string",
+"description": "Name is the name of the tool that generated this SBOM (e.g., \"syft\")."
},
"version": {
-"type": "string"
+"type": "string",
+"description": "Version is the version of the tool that generated this SBOM."
},
-"configuration": true
+"configuration": {
+"description": "Configuration contains the tool configuration used during SBOM generation."
+}
},
"type": "object",
"required": [
"name",
"version"
],
-"description": "Descriptor describes what created the document as well as surrounding metadata"
+"description": "Descriptor identifies the tool that generated this SBOM document, including its name, version, and configuration used during catalog generation."
},
"Digest": {
"properties": {
@@ -1285,58 +1293,71 @@
"File": {
"properties": {
"id": {
-"type": "string"
+"type": "string",
+"description": "ID is a unique identifier for this file within the SBOM."
},
"location": {
-"$ref": "#/$defs/Coordinates"
+"$ref": "#/$defs/Coordinates",
+"description": "Location is the file path and layer information where this file was found."
},
"metadata": {
-"$ref": "#/$defs/FileMetadataEntry"
+"$ref": "#/$defs/FileMetadataEntry",
+"description": "Metadata contains filesystem metadata such as permissions, ownership, and file type."
},
"contents": {
-"type": "string"
+"type": "string",
+"description": "Contents is the file contents for small files."
},
"digests": {
"items": {
"$ref": "#/$defs/Digest"
},
-"type": "array"
+"type": "array",
+"description": "Digests contains cryptographic hashes of the file contents."
},
"licenses": {
"items": {
"$ref": "#/$defs/FileLicense"
},
-"type": "array"
+"type": "array",
+"description": "Licenses contains license information discovered within this file."
},
"executable": {
-"$ref": "#/$defs/Executable"
+"$ref": "#/$defs/Executable",
+"description": "Executable contains executable metadata if this file is a binary."
},
"unknowns": {
"items": {
"type": "string"
},
-"type": "array"
+"type": "array",
+"description": "Unknowns contains unknown fields for forward compatibility."
}
},
"type": "object",
"required": [
"id",
"location"
-]
+],
+"description": "File represents a file discovered during cataloging with its metadata, content digests, licenses, and relationships to packages."
},
"FileLicense": {
"properties": {
"value": {
-"type": "string"
+"type": "string",
+"description": "Value is the raw license identifier or text as found in the file."
},
"spdxExpression": {
-"type": "string"
+"type": "string",
+"description": "SPDXExpression is the parsed SPDX license expression."
},
"type": {
-"type": "string"
+"type": "string",
+"description": "Type is the license type classification (e.g., declared, concluded, discovered)."
},
"evidence": {
-"$ref": "#/$defs/FileLicenseEvidence"
+"$ref": "#/$defs/FileLicenseEvidence",
+"description": "Evidence contains supporting evidence for this license detection."
}
},
"type": "object",
@@ -1344,18 +1365,22 @@
"value",
"spdxExpression",
"type"
-]
+],
+"description": "FileLicense represents license information discovered within a file's contents or metadata, including the matched license text and SPDX expression."
},
"FileLicenseEvidence": {
"properties": {
"confidence": {
-"type": "integer"
+"type": "integer",
+"description": "Confidence is the confidence score for this license detection (0-100)."
},
"offset": {
-"type": "integer"
+"type": "integer",
+"description": "Offset is the byte offset where the license text starts in the file."
},
"extent": {
-"type": "integer"
+"type": "integer",
+"description": "Extent is the length of the license text in bytes."
}
},
"type": "object",
@@ -1363,30 +1388,38 @@
"confidence",
"offset",
"extent"
-]
+],
+"description": "FileLicenseEvidence contains supporting evidence for a license detection in a file, including the byte offset, extent, and confidence level."
},
"FileMetadataEntry": {
"properties": {
"mode": {
-"type": "integer"
+"type": "integer",
+"description": "Mode is the Unix file permission mode in octal format."
},
"type": {
-"type": "string"
+"type": "string",
+"description": "Type is the file type (e.g., \"RegularFile\", \"Directory\", \"SymbolicLink\")."
},
"linkDestination": {
-"type": "string"
+"type": "string",
+"description": "LinkDestination is the target path for symbolic links."
},
"userID": {
-"type": "integer"
+"type": "integer",
+"description": "UserID is the file owner user ID."
},
"groupID": {
-"type": "integer"
+"type": "integer",
+"description": "GroupID is the file owner group ID."
},
"mimeType": {
-"type": "string"
+"type": "string",
+"description": "MIMEType is the MIME type of the file contents."
},
"size": {
-"type": "integer"
+"type": "integer",
+"description": "Size is the file size in bytes."
}
},
"type": "object",
@@ -1397,71 +1430,8 @@
"groupID",
"mimeType",
"size"
-]
-},
-"GgufFileMetadata": {
-"properties": {
-"modelFormat": {
-"type": "string",
-"description": "ModelFormat is always \"gguf\""
-},
-"modelName": {
-"type": "string",
-"description": "ModelName is the name of the model (from general.name or filename)"
-},
-"modelVersion": {
-"type": "string",
-"description": "ModelVersion is the version of the model (if available in header, else \"unknown\")"
-},
-"fileSize": {
-"type": "integer",
-"description": "FileSize is the size of the GGUF file in bytes (best-effort if available from resolver)"
-},
-"hash": {
-"type": "string",
-"description": "Hash is a content hash of the metadata (for stable global identifiers across remotes)"
-},
-"license": {
-"type": "string",
-"description": "License is the license identifier (from general.license if present)"
-},
-"ggufVersion": {
-"type": "integer",
-"description": "GGUFVersion is the GGUF format version (e.g., 3)"
-},
-"architecture": {
-"type": "string",
-"description": "Architecture is the model architecture (from general.architecture, e.g., \"qwen3moe\", \"llama\")"
-},
-"quantization": {
-"type": "string",
-"description": "Quantization is the quantization type (e.g., \"IQ4_NL\", \"Q4_K_M\")"
-},
-"parameters": {
-"type": "integer",
-"description": "Parameters is the number of model parameters (if present in header)"
-},
-"tensorCount": {
-"type": "integer",
-"description": "TensorCount is the number of tensors in the model"
-},
-"header": {
-"type": "object",
-"description": "Header contains the remaining key-value pairs from the GGUF header that are not already\nrepresented as typed fields above. This preserves additional metadata fields for reference\n(namespaced with general.*, llama.*, etc.) while avoiding duplication."
-},
-"truncatedHeader": {
-"type": "boolean",
-"description": "TruncatedHeader indicates if the header was truncated during parsing (for very large headers)"
-}
-},
-"type": "object",
-"required": [
-"modelFormat",
-"modelName",
-"ggufVersion",
-"tensorCount"
-],
-"description": "GGUFFileHeader represents metadata extracted from a GGUF (GPT-Generated Unified Format) model file."
+],
+"description": "FileMetadataEntry contains filesystem-level metadata attributes such as permissions, ownership, type, and size for a cataloged file."
},
"GithubActionsUseStatement": {
"properties": {
@@ -1609,7 +1579,8 @@
"items": {
"type": "string"
},
-"type": "array"
+"type": "array",
+"description": "IDLikes represents a list of distribution IDs that this Linux distribution is similar to or derived from, as defined in os-release ID_LIKE field."
},
"JavaArchive": {
"properties": {
@@ -1940,15 +1911,48 @@
"integrity": {
"type": "string",
"description": "Integrity is Subresource Integrity hash for verification using standard SRI format (sha512-... or sha1-...). npm changed from SHA-1 to SHA-512 in newer versions. For registry sources this is the integrity from registry, for remote tarballs it's SHA-512 of the file. npm verifies tarball matches this hash before unpacking, throwing EINTEGRITY error if mismatch detected."
+},
+"dependencies": {
+"patternProperties": {
+".*": {
+"type": "string"
+}
+},
+"type": "object",
+"description": "Dependencies is a map of dependencies and their version markers, i.e. \"lodash\": \"^1.0.0\""
}
},
"type": "object",
"required": [
"resolved",
-"integrity"
+"integrity",
+"dependencies"
],
"description": "NpmPackageLockEntry represents a single entry within the \"packages\" section of a package-lock.json file."
},
+"JavascriptPnpmLockEntry": {
+"properties": {
+"resolution": {
+"$ref": "#/$defs/PnpmLockResolution",
+"description": "Resolution is the resolution information for the package"
+},
+"dependencies": {
+"patternProperties": {
+".*": {
+"type": "string"
+}
+},
+"type": "object",
+"description": "Dependencies is a map of dependencies and their versions"
+}
+},
+"type": "object",
+"required": [
+"resolution",
+"dependencies"
+],
+"description": "PnpmLockEntry represents a single entry in the \"packages\" section of a pnpm-lock.yaml file."
+},
"JavascriptYarnLockEntry": {
"properties": {
"resolved": {
@@ -1958,12 +1962,22 @@
"integrity": {
"type": "string",
"description": "Integrity is Subresource Integrity hash for verification (SRI format)"
+},
+"dependencies": {
+"patternProperties": {
+".*": {
+"type": "string"
+}
+},
+"type": "object",
+"description": "Dependencies is a map of dependencies and their versions"
}
},
"type": "object",
"required": [
"resolved",
-"integrity"
+"integrity",
+"dependencies"
],
"description": "YarnLockEntry represents a single entry section of a yarn.lock file."
},
@@ -1995,28 +2009,34 @@
"License": {
"properties": {
"value": {
-"type": "string"
+"type": "string",
+"description": "Value is the raw license identifier or expression as found."
},
"spdxExpression": {
-"type": "string"
+"type": "string",
+"description": "SPDXExpression is the parsed SPDX license expression."
},
"type": {
-"type": "string"
+"type": "string",
+"description": "Type is the license type classification (e.g., declared, concluded, discovered)."
},
"urls": {
"items": {
"type": "string"
},
-"type": "array"
+"type": "array",
+"description": "URLs are URLs where license text or information can be found."
},
"locations": {
"items": {
"$ref": "#/$defs/Location"
},
-"type": "array"
+"type": "array",
+"description": "Locations are file locations where this license was discovered."
},
"contents": {
-"type": "string"
+"type": "string",
+"description": "Contents is the full license text content."
}
},
"type": "object",
@@ -2026,7 +2046,8 @@
"type",
"urls",
"locations"
-]
+],
+"description": "License represents software license information discovered for a package, including SPDX expressions and supporting evidence locations."
},
"LinuxKernelArchive": {
"properties": {
@@ -2151,64 +2172,84 @@
"LinuxRelease": {
"properties": {
"prettyName": {
-"type": "string"
+"type": "string",
+"description": "PrettyName is a human-readable operating system name with version."
},
"name": {
-"type": "string"
+"type": "string",
+"description": "Name is the operating system name without version information."
},
"id": {
-"type": "string"
+"type": "string",
+"description": "ID is the lower-case operating system identifier (e.g., \"ubuntu\", \"rhel\")."
},
"idLike": {
-"$ref": "#/$defs/IDLikes"
+"$ref": "#/$defs/IDLikes",
+"description": "IDLike is a list of operating system IDs this distribution is similar to or derived from."
},
"version": {
-"type": "string"
+"type": "string",
+"description": "Version is the operating system version including codename if available."
},
"versionID": {
-"type": "string"
+"type": "string",
+"description": "VersionID is the operating system version number or identifier."
},
"versionCodename": {
-"type": "string"
+"type": "string",
+"description": "VersionCodename is the operating system release codename (e.g., \"jammy\", \"bullseye\")."
},
"buildID": {
-"type": "string"
+"type": "string",
+"description": "BuildID is a build identifier for the operating system."
},
"imageID": {
-"type": "string"
+"type": "string",
+"description": "ImageID is an identifier for container or cloud images."
},
"imageVersion": {
-"type": "string"
+"type": "string",
+"description": "ImageVersion is the version for container or cloud images."
},
"variant": {
-"type": "string"
+"type": "string",
+"description": "Variant is the operating system variant name (e.g., \"Server\", \"Workstation\")."
},
"variantID": {
-"type": "string"
+"type": "string",
+"description": "VariantID is the lower-case operating system variant identifier."
},
"homeURL": {
-"type": "string"
+"type": "string",
+"description": "HomeURL is the homepage URL for the operating system."
},
"supportURL": {
-"type": "string"
+"type": "string",
+"description": "SupportURL is the support or help URL for the operating system."
},
"bugReportURL": {
-"type": "string"
+"type": "string",
+"description": "BugReportURL is the bug reporting URL for the operating system."
},
"privacyPolicyURL": {
-"type": "string"
+"type": "string",
+"description": "PrivacyPolicyURL is the privacy policy URL for the operating system."
},
"cpeName": {
-"type": "string"
+"type": "string",
+"description": "CPEName is the Common Platform Enumeration name for the operating system."
},
"supportEnd": {
-"type": "string"
+"type": "string",
+"description": "SupportEnd is the end of support date or version identifier."
},
"extendedSupport": {
-"type": "boolean"
+"type": "boolean",
+"description": "ExtendedSupport indicates whether extended security or support is available."
}
},
-"type": "object"
+"type": "object",
+"description": "LinuxRelease contains Linux distribution identification and version information extracted from /etc/os-release or similar system files."
},
"Location": {
"properties": {
@@ -2304,7 +2345,7 @@
"product_id",
"kb"
],
-"description": "MicrosoftKbPatch is slightly odd in how it is expected to map onto data."
+"description": "MicrosoftKbPatch represents a Windows Knowledge Base patch identifier associated with a specific Microsoft product from the MSRC (Microsoft Security Response Center)."
},
"NixDerivation": {
"properties": {
@@ -2538,9 +2579,6 @@
{
"$ref": "#/$defs/ErlangRebarLockEntry"
},
-{
-"$ref": "#/$defs/GgufFileMetadata"
-},
{
"$ref": "#/$defs/GithubActionsUseStatement"
},
@@ -2574,6 +2612,9 @@
{
"$ref": "#/$defs/JavascriptNpmPackageLockEntry"
},
+{
+"$ref": "#/$defs/JavascriptPnpmLockEntry"
+},
{
"$ref": "#/$defs/JavascriptYarnLockEntry"
},
@@ -3025,6 +3066,19 @@
],
"description": "PhpPeclEntry represents a single package entry found within php pecl metadata files."
},
+"PnpmLockResolution": {
+"properties": {
+"integrity": {
+"type": "string",
+"description": "Integrity is Subresource Integrity hash for verification (SRI format)"
+}
+},
+"type": "object",
+"required": [
+"integrity"
+],
+"description": "PnpmLockResolution contains package resolution metadata from pnpm lockfiles, including the integrity hash used for verification."
+},
"PortageDbEntry": {
"properties": {
"installedSize": {
@@ -3510,22 +3564,28 @@
"Relationship": {
"properties": {
"parent": {
-"type": "string"
+"type": "string",
+"description": "Parent is the ID of the parent artifact in this relationship."
},
"child": {
-"type": "string"
+"type": "string",
+"description": "Child is the ID of the child artifact in this relationship."
},
"type": {
-"type": "string"
+"type": "string",
+"description": "Type is the relationship type (e.g., \"contains\", \"dependency-of\", \"ancestor-of\")."
},
-"metadata": true
+"metadata": {
+"description": "Metadata contains additional relationship-specific metadata."
+}
},
"type": "object",
"required": [
"parent",
"child",
"type"
-]
+],
+"description": "Relationship represents a directed relationship between two artifacts in the SBOM, such as package-contains-file or package-depends-on-package."
},
"RpmArchive": {
"properties": {
@@ -3872,17 +3932,20 @@
"Schema": {
"properties": {
"version": {
-"type": "string"
+"type": "string",
+"description": "Version is the JSON schema version for this document format."
},
"url": {
-"type": "string"
+"type": "string",
+"description": "URL is the URL to the JSON schema definition document."
}
},
"type": "object",
"required": [
"version",
"url"
-]
+],
+"description": "Schema specifies the JSON schema version and URL reference that defines the structure and validation rules for this document format."
},
"SnapEntry": {
"properties": {
@@ -3920,21 +3983,28 @@
"Source": {
"properties": {
"id": {
-"type": "string"
+"type": "string",
+"description": "ID is a unique identifier for the analyzed source artifact."
},
"name": {
-"type": "string"
+"type": "string",
+"description": "Name is the name of the analyzed artifact (e.g., image name, directory path)."
},
"version": {
-"type": "string"
+"type": "string",
+"description": "Version is the version of the analyzed artifact (e.g., image tag)."
},
"supplier": {
-"type": "string"
+"type": "string",
+"description": "Supplier is supplier information, which can be user-provided for NTIA minimum elements compliance."
},
"type": {
-"type": "string"
+"type": "string",
+"description": "Type is the source type (e.g., \"image\", \"directory\", \"file\")."
},
-"metadata": true
+"metadata": {
+"description": "Metadata contains additional source-specific metadata."
+}
},
"type": "object",
"required": [
@@ -3944,7 +4014,7 @@
"type",
"metadata"
],
-"description": "Instead, the Supplier can be determined by the user of syft and passed as a config or flag to help fulfill the NTIA minimum elements."
+"description": "Source represents the artifact that was analyzed to generate this SBOM, such as a container image, directory, or file archive."
},
"SwiftPackageManagerLockEntry": {
"properties": {

File diff suppressed because it is too large


@@ -1,6 +1,6 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
-"$id": "anchore.io/schema/syft/json/16.0.42/document",
+"$id": "anchore.io/schema/syft/json/16.0.43/document",
"$ref": "#/$defs/Document",
"$defs": {
"AlpmDbEntry": {
@@ -130,7 +130,8 @@
"description": "Digests contains file content hashes for integrity verification"
}
},
-"type": "object"
+"type": "object",
+"description": "AlpmFileRecord represents a single file entry within an Arch Linux package with its associated metadata tracked by pacman."
},
"ApkDbEntry": {
"properties": {
@@ -433,16 +434,19 @@
"CPE": {
"properties": {
"cpe": {
-"type": "string"
+"type": "string",
+"description": "Value is the CPE string identifier."
},
"source": {
-"type": "string"
+"type": "string",
+"description": "Source is the source where this CPE was obtained or generated from."
}
},
"type": "object",
"required": [
"cpe"
-]
+],
+"description": "CPE represents a Common Platform Enumeration identifier used for matching packages to known vulnerabilities in security databases."
},
"ClassifierMatch": {
"properties": {
@@ -747,19 +751,23 @@
"Descriptor": {
"properties": {
"name": {
-"type": "string"
+"type": "string",
+"description": "Name is the name of the tool that generated this SBOM (e.g., \"syft\")."
},
"version": {
-"type": "string"
+"type": "string",
+"description": "Version is the version of the tool that generated this SBOM."
},
-"configuration": true
+"configuration": {
+"description": "Configuration contains the tool configuration used during SBOM generation."
+}
},
"type": "object",
"required": [
"name",
"version"
],
-"description": "Descriptor describes what created the document as well as surrounding metadata"
+"description": "Descriptor identifies the tool that generated this SBOM document, including its name, version, and configuration used during catalog generation."
},
"Digest": {
"properties": {
@@ -1285,58 +1293,71 @@
"File": {
"properties": {
"id": {
-"type": "string"
+"type": "string",
+"description": "ID is a unique identifier for this file within the SBOM."
},
"location": {
-"$ref": "#/$defs/Coordinates"
+"$ref": "#/$defs/Coordinates",
+"description": "Location is the file path and layer information where this file was found."
},
"metadata": {
-"$ref": "#/$defs/FileMetadataEntry"
+"$ref": "#/$defs/FileMetadataEntry",
+"description": "Metadata contains filesystem metadata such as permissions, ownership, and file type."
},
"contents": {
-"type": "string"
+"type": "string",
+"description": "Contents is the file contents for small files."
},
"digests": {
"items": {
"$ref": "#/$defs/Digest"
},
-"type": "array"
+"type": "array",
+"description": "Digests contains cryptographic hashes of the file contents."
},
"licenses": {
"items": {
"$ref": "#/$defs/FileLicense"
},
-"type": "array"
+"type": "array",
+"description": "Licenses contains license information discovered within this file."
},
"executable": {
-"$ref": "#/$defs/Executable"
+"$ref": "#/$defs/Executable",
+"description": "Executable contains executable metadata if this file is a binary."
},
"unknowns": {
"items": {
"type": "string"
},
-"type": "array"
+"type": "array",
+"description": "Unknowns contains unknown fields for forward compatibility."
}
},
"type": "object",
"required": [
"id",
"location"
-]
+],
+"description": "File represents a file discovered during cataloging with its metadata, content digests, licenses, and relationships to packages."
},
"FileLicense": {
"properties": {
"value": {
-"type": "string"
+"type": "string",
+"description": "Value is the raw license identifier or text as found in the file."
},
"spdxExpression": {
-"type": "string"
+"type": "string",
+"description": "SPDXExpression is the parsed SPDX license expression."
},
"type": {
-"type": "string"
+"type": "string",
+"description": "Type is the license type classification (e.g., declared, concluded, discovered)."
},
"evidence": {
-"$ref": "#/$defs/FileLicenseEvidence"
+"$ref": "#/$defs/FileLicenseEvidence",
+"description": "Evidence contains supporting evidence for this license detection."
}
},
"type": "object",
@@ -1344,18 +1365,22 @@
"value",
"spdxExpression",
"type"
-]
+],
+"description": "FileLicense represents license information discovered within a file's contents or metadata, including the matched license text and SPDX expression."
},
"FileLicenseEvidence": {
"properties": {
"confidence": {
-"type": "integer"
+"type": "integer",
+"description": "Confidence is the confidence score for this license detection (0-100)."
}, },
"offset": { "offset": {
"type": "integer" "type": "integer",
"description": "Offset is the byte offset where the license text starts in the file."
}, },
"extent": { "extent": {
"type": "integer" "type": "integer",
"description": "Extent is the length of the license text in bytes."
} }
}, },
"type": "object", "type": "object",
@ -1363,30 +1388,38 @@
"confidence", "confidence",
"offset", "offset",
"extent" "extent"
] ],
"description": "FileLicenseEvidence contains supporting evidence for a license detection in a file, including the byte offset, extent, and confidence level."
}, },
"FileMetadataEntry": { "FileMetadataEntry": {
"properties": { "properties": {
"mode": { "mode": {
"type": "integer" "type": "integer",
"description": "Mode is the Unix file permission mode in octal format."
}, },
"type": { "type": {
"type": "string" "type": "string",
"description": "Type is the file type (e.g., \"RegularFile\", \"Directory\", \"SymbolicLink\")."
}, },
"linkDestination": { "linkDestination": {
"type": "string" "type": "string",
"description": "LinkDestination is the target path for symbolic links."
}, },
"userID": { "userID": {
"type": "integer" "type": "integer",
"description": "UserID is the file owner user ID."
}, },
"groupID": { "groupID": {
"type": "integer" "type": "integer",
"description": "GroupID is the file owner group ID."
}, },
"mimeType": { "mimeType": {
"type": "string" "type": "string",
"description": "MIMEType is the MIME type of the file contents."
}, },
"size": { "size": {
"type": "integer" "type": "integer",
"description": "Size is the file size in bytes."
} }
}, },
"type": "object", "type": "object",
@ -1397,38 +1430,27 @@
"groupID", "groupID",
"mimeType", "mimeType",
"size" "size"
] ],
"description": "FileMetadataEntry contains filesystem-level metadata attributes such as permissions, ownership, type, and size for a cataloged file."
}, },
"GgufFileMetadata": { "GgufFileMetadata": {
"properties": { "properties": {
"modelFormat": { "ggufVersion": {
"type": "string", "type": "integer",
"description": "ModelFormat is always \"gguf\"" "description": "GGUFVersion is the GGUF format version (e.g., 3)"
}, },
"modelName": { "modelName": {
"type": "string", "type": "string",
"description": "ModelName is the name of the model (from general.name or filename)" "description": "ModelName is the name of the model (from general.name or filename)"
}, },
"modelVersion": {
"type": "string",
"description": "ModelVersion is the version of the model (if available in header, else \"unknown\")"
},
"fileSize": { "fileSize": {
"type": "integer", "type": "integer",
"description": "FileSize is the size of the GGUF file in bytes (best-effort if available from resolver)" "description": "FileSize is the size of the GGUF file in bytes (best-effort if available from resolver)"
}, },
"hash": {
"type": "string",
"description": "Hash is a content hash of the metadata (for stable global identifiers across remotes)"
},
"license": { "license": {
"type": "string", "type": "string",
"description": "License is the license identifier (from general.license if present)" "description": "License is the license identifier (from general.license if present)"
}, },
"ggufVersion": {
"type": "integer",
"description": "GGUFVersion is the GGUF format version (e.g., 3)"
},
"architecture": { "architecture": {
"type": "string", "type": "string",
"description": "Architecture is the model architecture (from general.architecture, e.g., \"qwen3moe\", \"llama\")" "description": "Architecture is the model architecture (from general.architecture, e.g., \"qwen3moe\", \"llama\")"
@ -1449,16 +1471,15 @@
"type": "object", "type": "object",
"description": "Header contains the remaining key-value pairs from the GGUF header that are not already\nrepresented as typed fields above. This preserves additional metadata fields for reference\n(namespaced with general.*, llama.*, etc.) while avoiding duplication." "description": "Header contains the remaining key-value pairs from the GGUF header that are not already\nrepresented as typed fields above. This preserves additional metadata fields for reference\n(namespaced with general.*, llama.*, etc.) while avoiding duplication."
}, },
"truncatedHeader": { "metadataHash": {
"type": "boolean", "type": "string",
"description": "TruncatedHeader indicates if the header was truncated during parsing (for very large headers)" "description": "MetadataHash is a xx64 hash of all key-value pairs from the GGUF header metadata.\nThis hash is computed over the complete header metadata (including the fields extracted\ninto typed fields above) and provides a stable identifier for the model configuration\nacross different file locations or remotes. It allows matching identical models even\nwhen stored in different repositories or with different filenames."
} }
}, },
"type": "object", "type": "object",
"required": [ "required": [
"modelFormat",
"modelName",
"ggufVersion", "ggufVersion",
"modelName",
"tensorCount" "tensorCount"
], ],
"description": "GGUFFileHeader represents metadata extracted from a GGUF (GPT-Generated Unified Format) model file." "description": "GGUFFileHeader represents metadata extracted from a GGUF (GPT-Generated Unified Format) model file."
@ -1609,7 +1630,8 @@
"items": { "items": {
"type": "string" "type": "string"
}, },
"type": "array" "type": "array",
"description": "IDLikes represents a list of distribution IDs that this Linux distribution is similar to or derived from, as defined in os-release ID_LIKE field."
}, },
"JavaArchive": { "JavaArchive": {
"properties": { "properties": {
@ -1940,15 +1962,48 @@
"integrity": { "integrity": {
"type": "string", "type": "string",
"description": "Integrity is Subresource Integrity hash for verification using standard SRI format (sha512-... or sha1-...). npm changed from SHA-1 to SHA-512 in newer versions. For registry sources this is the integrity from registry, for remote tarballs it's SHA-512 of the file. npm verifies tarball matches this hash before unpacking, throwing EINTEGRITY error if mismatch detected." "description": "Integrity is Subresource Integrity hash for verification using standard SRI format (sha512-... or sha1-...). npm changed from SHA-1 to SHA-512 in newer versions. For registry sources this is the integrity from registry, for remote tarballs it's SHA-512 of the file. npm verifies tarball matches this hash before unpacking, throwing EINTEGRITY error if mismatch detected."
},
"dependencies": {
"patternProperties": {
".*": {
"type": "string"
}
},
"type": "object",
"description": "Dependencies is a map of dependencies and their version markers, i.e. \"lodash\": \"^1.0.0\""
} }
}, },
"type": "object", "type": "object",
"required": [ "required": [
"resolved", "resolved",
"integrity" "integrity",
"dependencies"
], ],
"description": "NpmPackageLockEntry represents a single entry within the \"packages\" section of a package-lock.json file." "description": "NpmPackageLockEntry represents a single entry within the \"packages\" section of a package-lock.json file."
}, },
"JavascriptPnpmLockEntry": {
"properties": {
"resolution": {
"$ref": "#/$defs/PnpmLockResolution",
"description": "Resolution is the resolution information for the package"
},
"dependencies": {
"patternProperties": {
".*": {
"type": "string"
}
},
"type": "object",
"description": "Dependencies is a map of dependencies and their versions"
}
},
"type": "object",
"required": [
"resolution",
"dependencies"
],
"description": "PnpmLockEntry represents a single entry in the \"packages\" section of a pnpm-lock.yaml file."
},
"JavascriptYarnLockEntry": { "JavascriptYarnLockEntry": {
"properties": { "properties": {
"resolved": { "resolved": {
@ -1958,12 +2013,22 @@
"integrity": { "integrity": {
"type": "string", "type": "string",
"description": "Integrity is Subresource Integrity hash for verification (SRI format)" "description": "Integrity is Subresource Integrity hash for verification (SRI format)"
},
"dependencies": {
"patternProperties": {
".*": {
"type": "string"
}
},
"type": "object",
"description": "Dependencies is a map of dependencies and their versions"
} }
}, },
"type": "object", "type": "object",
"required": [ "required": [
"resolved", "resolved",
"integrity" "integrity",
"dependencies"
], ],
"description": "YarnLockEntry represents a single entry section of a yarn.lock file." "description": "YarnLockEntry represents a single entry section of a yarn.lock file."
}, },
@ -1995,28 +2060,34 @@
"License": { "License": {
"properties": { "properties": {
"value": { "value": {
"type": "string" "type": "string",
"description": "Value is the raw license identifier or expression as found."
}, },
"spdxExpression": { "spdxExpression": {
"type": "string" "type": "string",
"description": "SPDXExpression is the parsed SPDX license expression."
}, },
"type": { "type": {
"type": "string" "type": "string",
"description": "Type is the license type classification (e.g., declared, concluded, discovered)."
}, },
"urls": { "urls": {
"items": { "items": {
"type": "string" "type": "string"
}, },
"type": "array" "type": "array",
"description": "URLs are URLs where license text or information can be found."
}, },
"locations": { "locations": {
"items": { "items": {
"$ref": "#/$defs/Location" "$ref": "#/$defs/Location"
}, },
"type": "array" "type": "array",
"description": "Locations are file locations where this license was discovered."
}, },
"contents": { "contents": {
"type": "string" "type": "string",
"description": "Contents is the full license text content."
} }
}, },
"type": "object", "type": "object",
@ -2026,7 +2097,8 @@
"type", "type",
"urls", "urls",
"locations" "locations"
] ],
"description": "License represents software license information discovered for a package, including SPDX expressions and supporting evidence locations."
}, },
"LinuxKernelArchive": { "LinuxKernelArchive": {
"properties": { "properties": {
@ -2151,64 +2223,84 @@
"LinuxRelease": { "LinuxRelease": {
"properties": { "properties": {
"prettyName": { "prettyName": {
"type": "string" "type": "string",
"description": "PrettyName is a human-readable operating system name with version."
}, },
"name": { "name": {
"type": "string" "type": "string",
"description": "Name is the operating system name without version information."
}, },
"id": { "id": {
"type": "string" "type": "string",
"description": "ID is the lower-case operating system identifier (e.g., \"ubuntu\", \"rhel\")."
}, },
"idLike": { "idLike": {
"$ref": "#/$defs/IDLikes" "$ref": "#/$defs/IDLikes",
"description": "IDLike is a list of operating system IDs this distribution is similar to or derived from."
}, },
"version": { "version": {
"type": "string" "type": "string",
"description": "Version is the operating system version including codename if available."
}, },
"versionID": { "versionID": {
"type": "string" "type": "string",
"description": "VersionID is the operating system version number or identifier."
}, },
"versionCodename": { "versionCodename": {
"type": "string" "type": "string",
"description": "VersionCodename is the operating system release codename (e.g., \"jammy\", \"bullseye\")."
}, },
"buildID": { "buildID": {
"type": "string" "type": "string",
"description": "BuildID is a build identifier for the operating system."
}, },
"imageID": { "imageID": {
"type": "string" "type": "string",
"description": "ImageID is an identifier for container or cloud images."
}, },
"imageVersion": { "imageVersion": {
"type": "string" "type": "string",
"description": "ImageVersion is the version for container or cloud images."
}, },
"variant": { "variant": {
"type": "string" "type": "string",
"description": "Variant is the operating system variant name (e.g., \"Server\", \"Workstation\")."
}, },
"variantID": { "variantID": {
"type": "string" "type": "string",
"description": "VariantID is the lower-case operating system variant identifier."
}, },
"homeURL": { "homeURL": {
"type": "string" "type": "string",
"description": "HomeURL is the homepage URL for the operating system."
}, },
"supportURL": { "supportURL": {
"type": "string" "type": "string",
"description": "SupportURL is the support or help URL for the operating system."
}, },
"bugReportURL": { "bugReportURL": {
"type": "string" "type": "string",
"description": "BugReportURL is the bug reporting URL for the operating system."
}, },
"privacyPolicyURL": { "privacyPolicyURL": {
"type": "string" "type": "string",
"description": "PrivacyPolicyURL is the privacy policy URL for the operating system."
}, },
"cpeName": { "cpeName": {
"type": "string" "type": "string",
"description": "CPEName is the Common Platform Enumeration name for the operating system."
}, },
"supportEnd": { "supportEnd": {
"type": "string" "type": "string",
"description": "SupportEnd is the end of support date or version identifier."
}, },
"extendedSupport": { "extendedSupport": {
"type": "boolean" "type": "boolean",
"description": "ExtendedSupport indicates whether extended security or support is available."
} }
}, },
"type": "object" "type": "object",
"description": "LinuxRelease contains Linux distribution identification and version information extracted from /etc/os-release or similar system files."
}, },
"Location": { "Location": {
"properties": { "properties": {
@ -2304,7 +2396,7 @@
"product_id", "product_id",
"kb" "kb"
], ],
"description": "MicrosoftKbPatch is slightly odd in how it is expected to map onto data." "description": "MicrosoftKbPatch represents a Windows Knowledge Base patch identifier associated with a specific Microsoft product from the MSRC (Microsoft Security Response Center)."
}, },
"NixDerivation": { "NixDerivation": {
"properties": { "properties": {
@ -2574,6 +2666,9 @@
{ {
"$ref": "#/$defs/JavascriptNpmPackageLockEntry" "$ref": "#/$defs/JavascriptNpmPackageLockEntry"
}, },
{
"$ref": "#/$defs/JavascriptPnpmLockEntry"
},
{ {
"$ref": "#/$defs/JavascriptYarnLockEntry" "$ref": "#/$defs/JavascriptYarnLockEntry"
}, },
@ -3025,6 +3120,19 @@
], ],
"description": "PhpPeclEntry represents a single package entry found within php pecl metadata files." "description": "PhpPeclEntry represents a single package entry found within php pecl metadata files."
}, },
"PnpmLockResolution": {
"properties": {
"integrity": {
"type": "string",
"description": "Integrity is Subresource Integrity hash for verification (SRI format)"
}
},
"type": "object",
"required": [
"integrity"
],
"description": "PnpmLockResolution contains package resolution metadata from pnpm lockfiles, including the integrity hash used for verification."
},
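The npm, yarn, and pnpm lockfile entries above share one shape: resolution or integrity information plus a name-to-version dependency map. A rough sketch of equivalent Go types, with json tags mirroring the schema property names (illustrative declarations, not syft's exact ones):

// Illustrative structs mirroring the lockfile entry schemas above.
type PnpmLockResolution struct {
	Integrity string `json:"integrity"` // SRI hash used for verification
}

type PnpmLockEntry struct {
	Resolution   PnpmLockResolution `json:"resolution"`
	Dependencies map[string]string  `json:"dependencies"` // name -> version
}

type YarnLockEntry struct {
	Resolved     string            `json:"resolved"`
	Integrity    string            `json:"integrity"`
	Dependencies map[string]string `json:"dependencies"`
}

type NpmPackageLockEntry struct {
	Resolved     string            `json:"resolved"`
	Integrity    string            `json:"integrity"`
	Dependencies map[string]string `json:"dependencies"` // e.g. "lodash": "^1.0.0"
}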
"PortageDbEntry": { "PortageDbEntry": {
"properties": { "properties": {
"installedSize": { "installedSize": {
@ -3510,22 +3618,28 @@
"Relationship": { "Relationship": {
"properties": { "properties": {
"parent": { "parent": {
"type": "string" "type": "string",
"description": "Parent is the ID of the parent artifact in this relationship."
}, },
"child": { "child": {
"type": "string" "type": "string",
"description": "Child is the ID of the child artifact in this relationship."
}, },
"type": { "type": {
"type": "string" "type": "string",
"description": "Type is the relationship type (e.g., \"contains\", \"dependency-of\", \"ancestor-of\")."
}, },
"metadata": true "metadata": {
"description": "Metadata contains additional relationship-specific metadata."
}
}, },
"type": "object", "type": "object",
"required": [ "required": [
"parent", "parent",
"child", "child",
"type" "type"
] ],
"description": "Relationship represents a directed relationship between two artifacts in the SBOM, such as package-contains-file or package-depends-on-package."
}, },
"RpmArchive": { "RpmArchive": {
"properties": { "properties": {
@ -3872,17 +3986,20 @@
"Schema": { "Schema": {
"properties": { "properties": {
"version": { "version": {
"type": "string" "type": "string",
"description": "Version is the JSON schema version for this document format."
}, },
"url": { "url": {
"type": "string" "type": "string",
"description": "URL is the URL to the JSON schema definition document."
} }
}, },
"type": "object", "type": "object",
"required": [ "required": [
"version", "version",
"url" "url"
] ],
"description": "Schema specifies the JSON schema version and URL reference that defines the structure and validation rules for this document format."
}, },
"SnapEntry": { "SnapEntry": {
"properties": { "properties": {
@ -3920,21 +4037,28 @@
"Source": { "Source": {
"properties": { "properties": {
"id": { "id": {
"type": "string" "type": "string",
"description": "ID is a unique identifier for the analyzed source artifact."
}, },
"name": { "name": {
"type": "string" "type": "string",
"description": "Name is the name of the analyzed artifact (e.g., image name, directory path)."
}, },
"version": { "version": {
"type": "string" "type": "string",
"description": "Version is the version of the analyzed artifact (e.g., image tag)."
}, },
"supplier": { "supplier": {
"type": "string" "type": "string",
"description": "Supplier is supplier information, which can be user-provided for NTIA minimum elements compliance."
}, },
"type": { "type": {
"type": "string" "type": "string",
"description": "Type is the source type (e.g., \"image\", \"directory\", \"file\")."
}, },
"metadata": true "metadata": {
"description": "Metadata contains additional source-specific metadata."
}
}, },
"type": "object", "type": "object",
"required": [ "required": [
@ -3944,7 +4068,7 @@
"type", "type",
"metadata" "metadata"
], ],
"description": "Instead, the Supplier can be determined by the user of syft and passed as a config or flag to help fulfill the NTIA minimum elements." "description": "Source represents the artifact that was analyzed to generate this SBOM, such as a container image, directory, or file archive."
}, },
"SwiftPackageManagerLockEntry": { "SwiftPackageManagerLockEntry": {
"properties": { "properties": {


@ -0,0 +1,95 @@
package cpes
import (
"bufio"
"errors"
"fmt"
"io"
"strings"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/cpe"
"github.com/anchore/syft/syft/format/internal"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/sbom"
)
const ID sbom.FormatID = "cpes"
const version = "1"
var _ sbom.FormatDecoder = (*decoder)(nil)
type decoder struct{}
func NewFormatDecoder() sbom.FormatDecoder {
return decoder{}
}
func (d decoder) Decode(r io.Reader) (*sbom.SBOM, sbom.FormatID, string, error) {
if r == nil {
return nil, "", "", fmt.Errorf("no reader provided")
}
s, err := toSyftModel(r)
return s, ID, version, err
}
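// Identify scans the stream line by line: blank lines are skipped, and the
// first non-blank line decides the outcome. If that line validates as a CPE
// the input is identified as the cpes format; otherwise identification fails.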
func (d decoder) Identify(r io.Reader) (sbom.FormatID, string) {
if r == nil {
return "", ""
}
scanner := bufio.NewScanner(r)
for scanner.Scan() {
line := strings.TrimSpace(scanner.Text())
if line == "" {
// skip whitespace only lines
continue
}
err := cpe.ValidateString(line)
if err != nil {
return "", ""
}
return ID, version
}
return "", ""
}
func toSyftModel(r io.Reader) (*sbom.SBOM, error) {
var errs []error
pkgs := pkg.NewCollection()
scanner := bufio.NewScanner(r)
for scanner.Scan() {
line := strings.TrimSpace(scanner.Text())
if line == "" {
continue
}
// skip invalid CPEs
c, err := cpe.New(line, "")
if err != nil {
log.WithFields("error", err, "line", line).Debug("unable to parse cpe")
continue
}
p := pkg.Package{
Name: c.Attributes.Product,
Version: c.Attributes.Version,
CPEs: []cpe.CPE{c},
}
internal.Backfill(&p)
p.SetID()
pkgs.Add(p)
}
return &sbom.SBOM{
Artifacts: sbom.Artifacts{
Packages: pkgs,
},
}, errors.Join(errs...)
}
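The decoder turns each valid CPE line into a minimal package, backfilling name, version, and type from the CPE itself. A hypothetical caller, with an invented input string:

package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/anchore/syft/syft/format/cpes"
)

func main() {
	dec := cpes.NewFormatDecoder()
	s, id, version, err := dec.Decode(strings.NewReader("cpe:2.3:a:apache:log4j:2.14.1:*:*:*:*:*:*:*\n"))
	if err != nil {
		log.Fatal(err)
	}
	// expect: cpes 1 1 (one package decoded from one CPE line)
	fmt.Println(id, version, len(s.Artifacts.Packages.Sorted()))
}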


@ -0,0 +1,171 @@
package cpes
import (
"strings"
"testing"
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
"github.com/stretchr/testify/require"
"github.com/anchore/syft/syft/cpe"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/sbom"
)
func Test_CPEProvider(t *testing.T) {
tests := []struct {
name string
userInput string
sbom *sbom.SBOM
}{
{
name: "takes a single cpe",
userInput: "cpe:/a:apache:log4j:2.14.1",
sbom: &sbom.SBOM{
Artifacts: sbom.Artifacts{
Packages: pkg.NewCollection(pkg.Package{
Name: "log4j",
Version: "2.14.1",
CPEs: []cpe.CPE{
cpe.Must("cpe:/a:apache:log4j:2.14.1", ""),
},
}),
},
},
},
{
name: "takes multiple cpes",
userInput: `cpe:/a:apache:log4j:2.14.1
cpe:2.3:a:f5:nginx:*:*:*:*:*:*:*:*;
cpe:2.3:a:f5:nginx:0.5.2:*:*:*:*:*:*:*;
cpe:2.3:a:f5:nginx:0.5.3:*:*:*:*:*:*:*;`,
sbom: &sbom.SBOM{
Artifacts: sbom.Artifacts{
Packages: pkg.NewCollection(
pkg.Package{
Name: "log4j",
Version: "2.14.1",
CPEs: []cpe.CPE{
cpe.Must("cpe:/a:apache:log4j:2.14.1", ""),
},
},
pkg.Package{
Name: "nginx",
Version: "",
CPEs: []cpe.CPE{
cpe.Must("cpe:2.3:a:f5:nginx:*:*:*:*:*:*:*:*;", ""),
},
},
pkg.Package{
Name: "nginx",
Version: "0.5.2",
CPEs: []cpe.CPE{
cpe.Must("cpe:2.3:a:f5:nginx:0.5.2:*:*:*:*:*:*:*;", ""),
},
},
pkg.Package{
Name: "nginx",
Version: "0.5.3",
CPEs: []cpe.CPE{
cpe.Must("cpe:2.3:a:f5:nginx:0.5.3:*:*:*:*:*:*:*;", ""),
},
},
),
},
},
},
{
name: "takes cpe with no version",
userInput: "cpe:/a:apache:log4j",
sbom: &sbom.SBOM{
Artifacts: sbom.Artifacts{
Packages: pkg.NewCollection(pkg.Package{
Name: "log4j",
CPEs: []cpe.CPE{
cpe.Must("cpe:/a:apache:log4j", ""),
},
}),
},
},
},
{
name: "takes CPE 2.3 format",
userInput: "cpe:2.3:a:apache:log4j:2.14.1:*:*:*:*:*:*:*",
sbom: &sbom.SBOM{
Artifacts: sbom.Artifacts{
Packages: pkg.NewCollection(pkg.Package{
Name: "log4j",
Version: "2.14.1",
CPEs: []cpe.CPE{
cpe.Must("cpe:2.3:a:apache:log4j:2.14.1:*:*:*:*:*:*:*", ""),
},
}),
},
},
},
{
name: "deduces target SW from CPE - known target_sw",
userInput: "cpe:2.3:a:amazon:opensearch:*:*:*:*:*:ruby:*:*",
sbom: &sbom.SBOM{
Artifacts: sbom.Artifacts{
Packages: pkg.NewCollection(pkg.Package{
Name: "opensearch",
Type: pkg.GemPkg,
CPEs: []cpe.CPE{
cpe.Must("cpe:2.3:a:amazon:opensearch:*:*:*:*:*:ruby:*:*", ""),
},
}),
},
},
},
{
name: "handles unknown target_sw CPE field",
userInput: "cpe:2.3:a:amazon:opensearch:*:*:*:*:*:loremipsum:*:*",
sbom: &sbom.SBOM{
Artifacts: sbom.Artifacts{
Packages: pkg.NewCollection(pkg.Package{
Name: "opensearch",
Type: "",
CPEs: []cpe.CPE{
cpe.Must("cpe:2.3:a:amazon:opensearch:*:*:*:*:*:loremipsum:*:*", ""),
},
}),
},
},
},
{
name: "invalid prefix",
userInput: "dir:test-fixtures/cpe",
sbom: &sbom.SBOM{
Artifacts: sbom.Artifacts{
Packages: pkg.NewCollection(),
},
},
},
}
syftPkgOpts := []cmp.Option{
cmpopts.IgnoreFields(pkg.Package{}, "id", "Language"),
cmpopts.IgnoreUnexported(pkg.Package{}, file.LocationSet{}, pkg.LicenseSet{}),
}
for _, tc := range tests {
t.Run(tc.name, func(t *testing.T) {
dec := NewFormatDecoder()
decodedSBOM, _, _, err := dec.Decode(strings.NewReader(tc.userInput))
require.NoError(t, err)
gotSyftPkgs := decodedSBOM.Artifacts.Packages.Sorted()
wantSyftPkgs := tc.sbom.Artifacts.Packages.Sorted()
require.Equal(t, len(gotSyftPkgs), len(wantSyftPkgs))
for idx, wantPkg := range wantSyftPkgs {
if d := cmp.Diff(wantPkg, gotSyftPkgs[idx], syftPkgOpts...); d != "" {
t.Errorf("unexpected Syft Package (-want +got):\n%s", d)
}
}
})
}
}


@ -3,6 +3,7 @@ package format
import ( import (
"io" "io"
"github.com/anchore/syft/syft/format/cpes"
"github.com/anchore/syft/syft/format/cyclonedxjson" "github.com/anchore/syft/syft/format/cyclonedxjson"
"github.com/anchore/syft/syft/format/cyclonedxxml" "github.com/anchore/syft/syft/format/cyclonedxxml"
"github.com/anchore/syft/syft/format/purls" "github.com/anchore/syft/syft/format/purls"
@ -26,6 +27,7 @@ func Decoders() []sbom.FormatDecoder {
spdxtagvalue.NewFormatDecoder(), spdxtagvalue.NewFormatDecoder(),
spdxjson.NewFormatDecoder(), spdxjson.NewFormatDecoder(),
purls.NewFormatDecoder(), purls.NewFormatDecoder(),
cpes.NewFormatDecoder(),
} }
} }


@ -87,6 +87,9 @@ func toGithubManifests(s *sbom.SBOM) Manifests {
} }
name := dependencyName(p) name := dependencyName(p)
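	// skip packages that would produce an empty key or an empty package_url
	// in the snapshot's "resolved" map; such entries carry no usable information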
if name == "" || p.PURL == "" {
continue
}
manifest.Resolved[name] = DependencyNode{ manifest.Resolved[name] = DependencyNode{
PackageURL: p.PURL, PackageURL: p.PURL,
Metadata: toDependencyMetadata(p), Metadata: toDependencyMetadata(p),


@ -16,11 +16,6 @@
"source_location": "redacted/some/path/some/path/pkg1" "source_location": "redacted/some/path/some/path/pkg1"
}, },
"resolved": { "resolved": {
"": {
"package_url": "a-purl-2",
"relationship": "direct",
"scope": "runtime"
},
"pkg:deb/debian/package-2@2.0.1": { "pkg:deb/debian/package-2@2.0.1": {
"package_url": "pkg:deb/debian/package-2@2.0.1", "package_url": "pkg:deb/debian/package-2@2.0.1",
"relationship": "direct", "relationship": "direct",


@ -17,13 +17,6 @@
}, },
"metadata": { "metadata": {
"syft:filesystem":"redacted" "syft:filesystem":"redacted"
},
"resolved": {
"": {
"package_url": "a-purl-1",
"relationship": "direct",
"scope": "runtime"
}
} }
}, },
"user-image-input:/somefile-2.txt": { "user-image-input:/somefile-2.txt": {


@ -10,13 +10,31 @@ import (
"github.com/anchore/syft/internal/log" "github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/cpe" "github.com/anchore/syft/syft/cpe"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
cataloger "github.com/anchore/syft/syft/pkg/cataloger/common/cpe"
) )
// Backfill takes all information present in the package and attempts to fill in any missing information // Backfill takes all information present in the package and attempts to fill in any missing information
// from any available sources, such as the Metadata and PURL. // from any available sources, such as the Metadata, PURL, or CPEs.
// //
// Backfill does not call p.SetID(), but this needs to be called later to ensure it's up to date // Backfill does not call p.SetID(), but this needs to be called later to ensure it's up to date
func Backfill(p *pkg.Package) { func Backfill(p *pkg.Package) {
backfillFromPurl(p)
backfillFromCPE(p)
}
func backfillFromCPE(p *pkg.Package) {
if len(p.CPEs) == 0 {
return
}
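	// only the first CPE is consulted; when the package type is not already
	// set, it is derived from that CPE's target software attribute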
c := p.CPEs[0]
if p.Type == "" {
p.Type = cataloger.TargetSoftwareToPackageType(c.Attributes.TargetSW)
}
}
func backfillFromPurl(p *pkg.Package) {
if p.PURL == "" { if p.PURL == "" {
return return
} }
@ -29,6 +47,7 @@ func Backfill(p *pkg.Package) {
var cpes []cpe.CPE var cpes []cpe.CPE
epoch := "" epoch := ""
rpmmod := ""
for _, qualifier := range purl.Qualifiers { for _, qualifier := range purl.Qualifiers {
switch qualifier.Key { switch qualifier.Key {
@ -44,6 +63,8 @@ func Backfill(p *pkg.Package) {
} }
case pkg.PURLQualifierEpoch: case pkg.PURLQualifierEpoch:
epoch = qualifier.Value epoch = qualifier.Value
case pkg.PURLQualifierRpmModularity:
rpmmod = qualifier.Value
} }
} }
@ -63,6 +84,10 @@ func Backfill(p *pkg.Package) {
setJavaMetadataFromPurl(p, purl) setJavaMetadataFromPurl(p, purl)
} }
if p.Type == pkg.RpmPkg {
setRpmMetadataFromPurl(p, rpmmod)
}
for _, c := range cpes { for _, c := range cpes {
if slices.Contains(p.CPEs, c) { if slices.Contains(p.CPEs, c) {
continue continue
@ -82,6 +107,35 @@ func setJavaMetadataFromPurl(p *pkg.Package, _ packageurl.PackageURL) {
} }
} }
func setRpmMetadataFromPurl(p *pkg.Package, rpmmod string) {
if p.Type != pkg.RpmPkg {
return
}
if rpmmod == "" {
return
}
if p.Metadata == nil {
p.Metadata = pkg.RpmDBEntry{
ModularityLabel: &rpmmod,
}
return
}
switch m := p.Metadata.(type) {
case pkg.RpmDBEntry:
if m.ModularityLabel == nil {
m.ModularityLabel = &rpmmod
p.Metadata = m
}
case pkg.RpmArchive:
if m.ModularityLabel == nil {
m.ModularityLabel = &rpmmod
p.Metadata = m
}
}
}
func setVersionFromPurl(p *pkg.Package, purl packageurl.PackageURL, epoch string) { func setVersionFromPurl(p *pkg.Package, purl packageurl.PackageURL, epoch string) {
if p.Version == "" { if p.Version == "" {
p.Version = purl.Version p.Version = purl.Version
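The rpmmod handling above reads the modularity label from a purl qualifier before folding it into RPM metadata. A small sketch of extracting that qualifier with github.com/package-url/packageurl-go (illustrative, not syft's exact code path):

package main

import (
	"fmt"
	"log"

	"github.com/package-url/packageurl-go"
)

func main() {
	purl, err := packageurl.FromString("pkg:rpm/redhat/httpd@2.4.37-51?arch=x86_64&rpmmod=httpd:2.4")
	if err != nil {
		log.Fatal(err)
	}
	for _, q := range purl.Qualifiers { // Qualifiers is an ordered list of key/value pairs
		if q.Key == "rpmmod" {
			fmt.Println("modularity label:", q.Value) // httpd:2.4
		}
	}
}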


@ -53,6 +53,21 @@ func Test_Backfill(t *testing.T) {
Version: "1:1.12.8-26.el8", Version: "1:1.12.8-26.el8",
}, },
}, },
{
name: "rpm with rpmmod",
in: pkg.Package{
PURL: "pkg:rpm/redhat/httpd@2.4.37-51?arch=x86_64&distro=rhel-8.7&rpmmod=httpd:2.4",
},
expected: pkg.Package{
PURL: "pkg:rpm/redhat/httpd@2.4.37-51?arch=x86_64&distro=rhel-8.7&rpmmod=httpd:2.4",
Type: pkg.RpmPkg,
Name: "httpd",
Version: "2.4.37-51",
Metadata: pkg.RpmDBEntry{
ModularityLabel: strRef("httpd:2.4"),
},
},
},
{ {
name: "bad cpe", name: "bad cpe",
in: pkg.Package{ in: pkg.Package{
@ -106,6 +121,20 @@ func Test_Backfill(t *testing.T) {
Metadata: pkg.JavaArchive{}, Metadata: pkg.JavaArchive{},
}, },
}, },
{
name: "target-sw from CPE",
in: pkg.Package{
CPEs: []cpe.CPE{
cpe.Must("cpe:2.3:a:amazon:opensearch:*:*:*:*:*:ruby:*:*", ""),
},
},
expected: pkg.Package{
CPEs: []cpe.CPE{
cpe.Must("cpe:2.3:a:amazon:opensearch:*:*:*:*:*:ruby:*:*", ""),
},
Type: pkg.GemPkg,
},
},
} }
for _, tt := range tests { for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) { t.Run(tt.name, func(t *testing.T) {
@ -171,3 +200,7 @@ func Test_nameFromPurl(t *testing.T) {
}) })
} }
} }
func strRef(s string) *string {
return &s
}


@ -40,6 +40,7 @@ func Test_OriginatorSupplier(t *testing.T) {
pkg.PhpComposerInstalledEntry{}, pkg.PhpComposerInstalledEntry{},
pkg.PhpPearEntry{}, pkg.PhpPearEntry{},
pkg.PhpPeclEntry{}, pkg.PhpPeclEntry{},
pkg.PnpmLockEntry{},
pkg.PortageEntry{}, pkg.PortageEntry{},
pkg.PythonPipfileLockEntry{}, pkg.PythonPipfileLockEntry{},
pkg.PythonPdmLockEntry{}, pkg.PythonPdmLockEntry{},


@ -83,7 +83,7 @@ func SourceInfo(p pkg.Package) string {
case pkg.TerraformPkg: case pkg.TerraformPkg:
answer = "acquired package info from Terraform dependency lock file" answer = "acquired package info from Terraform dependency lock file"
case pkg.ModelPkg: case pkg.ModelPkg:
answer = "acquired package info from AI artifact (e.g. GGUF File" answer = "acquired package info from AI artifact (e.g. GGUF File)"
default: default:
answer = "acquired package info from the following paths" answer = "acquired package info from the following paths"
} }


@ -35,14 +35,23 @@ func (d *Document) UnmarshalJSON(data []byte) error {
return nil return nil
} }
// Descriptor describes what created the document as well as surrounding metadata // Descriptor identifies the tool that generated this SBOM document, including its name, version, and configuration used during catalog generation.
type Descriptor struct { type Descriptor struct {
// Name is the name of the tool that generated this SBOM (e.g., "syft").
Name string `json:"name"` Name string `json:"name"`
// Version is the version of the tool that generated this SBOM.
Version string `json:"version"` Version string `json:"version"`
// Configuration contains the tool configuration used during SBOM generation.
Configuration interface{} `json:"configuration,omitempty"` Configuration interface{} `json:"configuration,omitempty"`
} }
// Schema specifies the JSON schema version and URL reference that defines the structure and validation rules for this document format.
type Schema struct { type Schema struct {
// Version is the JSON schema version for this document format.
Version string `json:"version"` Version string `json:"version"`
// URL is the URL to the JSON schema definition document.
URL string `json:"url"` URL string `json:"url"`
} }


@ -10,24 +10,54 @@ import (
"github.com/anchore/syft/syft/license" "github.com/anchore/syft/syft/license"
) )
// File represents a file discovered during cataloging with its metadata, content digests, licenses, and relationships to packages.
type File struct { type File struct {
// ID is a unique identifier for this file within the SBOM.
ID string `json:"id"` ID string `json:"id"`
// Location is the file path and layer information where this file was found.
Location file.Coordinates `json:"location"` Location file.Coordinates `json:"location"`
// Metadata contains filesystem metadata such as permissions, ownership, and file type.
Metadata *FileMetadataEntry `json:"metadata,omitempty"` Metadata *FileMetadataEntry `json:"metadata,omitempty"`
// Contents is the file contents for small files.
Contents string `json:"contents,omitempty"` Contents string `json:"contents,omitempty"`
// Digests contains cryptographic hashes of the file contents.
Digests []file.Digest `json:"digests,omitempty"` Digests []file.Digest `json:"digests,omitempty"`
// Licenses contains license information discovered within this file.
Licenses []FileLicense `json:"licenses,omitempty"` Licenses []FileLicense `json:"licenses,omitempty"`
// Executable contains executable metadata if this file is a binary.
Executable *file.Executable `json:"executable,omitempty"` Executable *file.Executable `json:"executable,omitempty"`
// Unknowns contains unknown fields for forward compatibility.
Unknowns []string `json:"unknowns,omitempty"` Unknowns []string `json:"unknowns,omitempty"`
} }
// FileMetadataEntry contains filesystem-level metadata attributes such as permissions, ownership, type, and size for a cataloged file.
type FileMetadataEntry struct { type FileMetadataEntry struct {
// Mode is the Unix file permission mode in octal format.
Mode int `json:"mode"` Mode int `json:"mode"`
// Type is the file type (e.g., "RegularFile", "Directory", "SymbolicLink").
Type string `json:"type"` Type string `json:"type"`
// LinkDestination is the target path for symbolic links.
LinkDestination string `json:"linkDestination,omitempty"` LinkDestination string `json:"linkDestination,omitempty"`
// UserID is the file owner user ID.
UserID int `json:"userID"` UserID int `json:"userID"`
// GroupID is the file owner group ID.
GroupID int `json:"groupID"` GroupID int `json:"groupID"`
// MIMEType is the MIME type of the file contents.
MIMEType string `json:"mimeType"` MIMEType string `json:"mimeType"`
// Size is the file size in bytes.
Size int64 `json:"size"` Size int64 `json:"size"`
} }
@ -82,16 +112,30 @@ type sbomImportLegacyFileMetadataEntry struct {
Size int64 `json:"Size"` Size int64 `json:"Size"`
} }
// FileLicense represents license information discovered within a file's contents or metadata, including the matched license text and SPDX expression.
type FileLicense struct { type FileLicense struct {
// Value is the raw license identifier or text as found in the file.
Value string `json:"value"` Value string `json:"value"`
// SPDXExpression is the parsed SPDX license expression.
SPDXExpression string `json:"spdxExpression"` SPDXExpression string `json:"spdxExpression"`
// Type is the license type classification (e.g., declared, concluded, discovered).
Type license.Type `json:"type"` Type license.Type `json:"type"`
// Evidence contains supporting evidence for this license detection.
Evidence *FileLicenseEvidence `json:"evidence,omitempty"` Evidence *FileLicenseEvidence `json:"evidence,omitempty"`
} }
// FileLicenseEvidence contains supporting evidence for a license detection in a file, including the byte offset, extent, and confidence level.
type FileLicenseEvidence struct { type FileLicenseEvidence struct {
// Confidence is the confidence score for this license detection (0-100).
Confidence int `json:"confidence"` Confidence int `json:"confidence"`
// Offset is the byte offset where the license text starts in the file.
Offset int `json:"offset"` Offset int `json:"offset"`
// Extent is the length of the license text in bytes.
Extent int `json:"extent"` Extent int `json:"extent"`
} }


@ -4,27 +4,66 @@ import (
"encoding/json" "encoding/json"
) )
// IDLikes represents a list of distribution IDs that this Linux distribution is similar to or derived from, as defined in os-release ID_LIKE field.
type IDLikes []string type IDLikes []string
// LinuxRelease contains Linux distribution identification and version information extracted from /etc/os-release or similar system files.
type LinuxRelease struct { type LinuxRelease struct {
// PrettyName is a human-readable operating system name with version.
PrettyName string `json:"prettyName,omitempty"` PrettyName string `json:"prettyName,omitempty"`
// Name is the operating system name without version information.
Name string `json:"name,omitempty"` Name string `json:"name,omitempty"`
// ID is the lower-case operating system identifier (e.g., "ubuntu", "rhel").
ID string `json:"id,omitempty"` ID string `json:"id,omitempty"`
// IDLike is a list of operating system IDs this distribution is similar to or derived from.
IDLike IDLikes `json:"idLike,omitempty"` IDLike IDLikes `json:"idLike,omitempty"`
// Version is the operating system version including codename if available.
Version string `json:"version,omitempty"` Version string `json:"version,omitempty"`
// VersionID is the operating system version number or identifier.
VersionID string `json:"versionID,omitempty"` VersionID string `json:"versionID,omitempty"`
// VersionCodename is the operating system release codename (e.g., "jammy", "bullseye").
VersionCodename string `json:"versionCodename,omitempty"` VersionCodename string `json:"versionCodename,omitempty"`
// BuildID is a build identifier for the operating system.
BuildID string `json:"buildID,omitempty"` BuildID string `json:"buildID,omitempty"`
// ImageID is an identifier for container or cloud images.
ImageID string `json:"imageID,omitempty"` ImageID string `json:"imageID,omitempty"`
// ImageVersion is the version for container or cloud images.
ImageVersion string `json:"imageVersion,omitempty"` ImageVersion string `json:"imageVersion,omitempty"`
// Variant is the operating system variant name (e.g., "Server", "Workstation").
Variant string `json:"variant,omitempty"` Variant string `json:"variant,omitempty"`
// VariantID is the lower-case operating system variant identifier.
VariantID string `json:"variantID,omitempty"` VariantID string `json:"variantID,omitempty"`
// HomeURL is the homepage URL for the operating system.
HomeURL string `json:"homeURL,omitempty"` HomeURL string `json:"homeURL,omitempty"`
// SupportURL is the support or help URL for the operating system.
SupportURL string `json:"supportURL,omitempty"` SupportURL string `json:"supportURL,omitempty"`
// BugReportURL is the bug reporting URL for the operating system.
BugReportURL string `json:"bugReportURL,omitempty"` BugReportURL string `json:"bugReportURL,omitempty"`
// PrivacyPolicyURL is the privacy policy URL for the operating system.
PrivacyPolicyURL string `json:"privacyPolicyURL,omitempty"` PrivacyPolicyURL string `json:"privacyPolicyURL,omitempty"`
// CPEName is the Common Platform Enumeration name for the operating system.
CPEName string `json:"cpeName,omitempty"` CPEName string `json:"cpeName,omitempty"`
// SupportEnd is the end of support date or version identifier.
SupportEnd string `json:"supportEnd,omitempty"` SupportEnd string `json:"supportEnd,omitempty"`
// ExtendedSupport indicates whether extended security or support is available.
ExtendedSupport bool `json:"extendedSupport,omitempty"` ExtendedSupport bool `json:"extendedSupport,omitempty"`
} }


@ -36,21 +36,39 @@ type PackageBasicData struct {
PURL string `json:"purl"` PURL string `json:"purl"`
} }
// cpes is a collection of Common Platform Enumeration identifiers for a package.
type cpes []CPE type cpes []CPE
// CPE represents a Common Platform Enumeration identifier used for matching packages to known vulnerabilities in security databases.
type CPE struct { type CPE struct {
// Value is the CPE string identifier.
Value string `json:"cpe"` Value string `json:"cpe"`
// Source is the source where this CPE was obtained or generated from.
Source string `json:"source,omitempty"` Source string `json:"source,omitempty"`
} }
// licenses is a collection of license findings associated with a package.
type licenses []License type licenses []License
// License represents software license information discovered for a package, including SPDX expressions and supporting evidence locations.
type License struct { type License struct {
// Value is the raw license identifier or expression as found.
Value string `json:"value"` Value string `json:"value"`
// SPDXExpression is the parsed SPDX license expression.
SPDXExpression string `json:"spdxExpression"` SPDXExpression string `json:"spdxExpression"`
// Type is the license type classification (e.g., declared, concluded, discovered).
Type license.Type `json:"type"` Type license.Type `json:"type"`
// URLs are URLs where license text or information can be found.
URLs []string `json:"urls"` URLs []string `json:"urls"`
// Locations are file locations where this license was discovered.
Locations []file.Location `json:"locations"` Locations []file.Location `json:"locations"`
// Contents is the full license text content.
Contents string `json:"contents,omitempty"` Contents string `json:"contents,omitempty"`
} }


@ -1,8 +1,16 @@
package model package model
// Relationship represents a directed relationship between two artifacts in the SBOM, such as package-contains-file or package-depends-on-package.
type Relationship struct { type Relationship struct {
// Parent is the ID of the parent artifact in this relationship.
Parent string `json:"parent"` Parent string `json:"parent"`
// Child is the ID of the child artifact in this relationship.
Child string `json:"child"` Child string `json:"child"`
// Type is the relationship type (e.g., "contains", "dependency-of", "ancestor-of").
Type string `json:"type"` Type string `json:"type"`
// Metadata contains additional relationship-specific metadata.
Metadata interface{} `json:"metadata,omitempty"` Metadata interface{} `json:"metadata,omitempty"`
} }


@ -11,18 +11,25 @@ import (
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
) )
// Source object represents the thing that was cataloged // Source represents the artifact that was analyzed to generate this SBOM, such as a container image, directory, or file archive.
// Note: syft currently makes no claims or runs any logic to determine the Supplier field below // The Supplier field can be provided by users to fulfill NTIA minimum elements requirements.
// Instead, the Supplier can be determined by the user of syft and passed as a config or flag to help fulfill
// the NTIA minimum elements. For mor information see the NTIA framing document below
// https://www.ntia.gov/files/ntia/publications/framingsbom_20191112.pdf
type Source struct { type Source struct {
// ID is a unique identifier for the analyzed source artifact.
ID string `json:"id"` ID string `json:"id"`
// Name is the name of the analyzed artifact (e.g., image name, directory path).
Name string `json:"name"` Name string `json:"name"`
// Version is the version of the analyzed artifact (e.g., image tag).
Version string `json:"version"` Version string `json:"version"`
// Supplier is supplier information, which can be user-provided for NTIA minimum elements compliance.
Supplier string `json:"supplier,omitempty"` Supplier string `json:"supplier,omitempty"`
// Type is the source type (e.g., "image", "directory", "file").
Type string `json:"type"` Type string `json:"type"`
// Metadata contains additional source-specific metadata.
Metadata interface{} `json:"metadata"` Metadata interface{} `json:"metadata"`
} }


@ -19,16 +19,16 @@ type Directory struct {
indexer *directoryIndexer indexer *directoryIndexer
} }
func NewFromDirectory(root string, base string, pathFilters ...PathIndexVisitor) (*Directory, error) { func NewFromDirectory(root, base string, pathFilters ...PathIndexVisitor) (*Directory, error) {
r, err := newFromDirectoryWithoutIndex(root, base, pathFilters...) resolver, err := newFromDirectoryWithoutIndex(root, base, pathFilters...)
if err != nil { if err != nil {
return nil, err return nil, err
} }
return r, r.buildIndex() return resolver, resolver.buildIndex()
} }
func newFromDirectoryWithoutIndex(root string, base string, pathFilters ...PathIndexVisitor) (*Directory, error) { func newFromDirectoryWithoutIndex(root, base string, pathFilters ...PathIndexVisitor) (*Directory, error) {
chroot, err := NewChrootContextFromCWD(root, base) chroot, err := NewChrootContextFromCWD(root, base)
if err != nil { if err != nil {
return nil, fmt.Errorf("unable to interpret chroot context: %w", err) return nil, fmt.Errorf("unable to interpret chroot context: %w", err)
@ -66,6 +66,6 @@ func (r *Directory) buildIndex() error {
} }
// Stringer to represent a directory path data source // Stringer to represent a directory path data source
func (r Directory) String() string { func (r *Directory) String() string {
return fmt.Sprintf("dir:%s", r.path) return fmt.Sprintf("dir:%s", r.path)
} }


@ -17,17 +17,31 @@ type File struct {
indexer *fileIndexer indexer *fileIndexer
} }
// parent should be the symlink free absolute path to the parent directory // NewFromFile creates a resolver for a single file
// path is the filepath of the file we're creating content access for // path is the filepath of the file we're creating content access for
func NewFromFile(parent, path string, pathFilters ...PathIndexVisitor) (*File, error) { func NewFromFile(path string, pathFilters ...PathIndexVisitor) (*File, error) {
chroot, err := NewChrootContextFromCWD(parent, parent) resolver, err := newFromFileWithoutIndex(path, pathFilters...)
if err != nil {
return nil, err
}
return resolver, resolver.buildIndex()
}
func newFromFileWithoutIndex(path string, pathFilters ...PathIndexVisitor) (*File, error) {
absParentDir, err := absoluteSymlinkFreePathToParent(path)
if err != nil {
return nil, err
}
chroot, err := NewChrootContextFromCWD(absParentDir, absParentDir)
if err != nil { if err != nil {
return nil, fmt.Errorf("unable to interpret chroot context: %w", err) return nil, fmt.Errorf("unable to interpret chroot context: %w", err)
} }
cleanBase := chroot.Base() cleanBase := chroot.Base()
file := &File{ return &File{
path: path, path: path,
FiletreeResolver: FiletreeResolver{ FiletreeResolver: FiletreeResolver{
Chroot: *chroot, Chroot: *chroot,
@ -36,9 +50,7 @@ func NewFromFile(parent, path string, pathFilters ...PathIndexVisitor) (*File, e
Opener: nativeOSFileOpener, Opener: nativeOSFileOpener,
}, },
indexer: newFileIndexer(path, cleanBase, pathFilters...), indexer: newFileIndexer(path, cleanBase, pathFilters...),
} }, nil
return file, file.buildIndex()
} }
func (r *File) buildIndex() error { func (r *File) buildIndex() error {
@ -58,6 +70,6 @@ func (r *File) buildIndex() error {
} }
// Stringer to represent a file path data source // Stringer to represent a file path data source
func (r File) String() string { func (r *File) String() string {
return fmt.Sprintf("file:%s", r.path) return fmt.Sprintf("file:%s", r.path)
} }
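With this refactor NewFromFile derives the symlink-free parent directory itself, so callers pass only the file path. A hedged sketch of a caller inside the fileresolver package (the package is internal to syft, so external code cannot import it; the path is invented):

// exampleSingleFile is a hypothetical helper showing the new one-argument API.
func exampleSingleFile() error {
	resolver, err := NewFromFile("/tmp/some-archive.tar") // parent dir is derived internally
	if err != nil {
		return err
	}
	locs, err := resolver.FilesByGlob("**/*")
	if err != nil {
		return err
	}
	fmt.Printf("indexed %d locations under %s\n", len(locs), resolver.Chroot.Base())
	return nil
}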


@ -1384,9 +1384,10 @@ func TestFileResolver_FilesByPath(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
require.NotNil(t, parentPath) require.NotNil(t, parentPath)
resolver, err := NewFromFile(parentPath, tt.filePath) resolver, err := NewFromFile(tt.filePath)
require.NoError(t, err) require.NoError(t, err)
require.NotNil(t, resolver) require.NotNil(t, resolver)
assert.Equal(t, resolver.Chroot.Base(), parentPath)
refs, err := resolver.FilesByPath(tt.fileByPathInput) refs, err := resolver.FilesByPath(tt.fileByPathInput)
require.NoError(t, err) require.NoError(t, err)
@ -1431,8 +1432,11 @@ func TestFileResolver_MultipleFilesByPath(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
require.NotNil(t, parentPath) require.NotNil(t, parentPath)
resolver, err := NewFromFile(parentPath, filePath) resolver, err := NewFromFile(filePath)
assert.NoError(t, err) assert.NoError(t, err)
require.NotNil(t, resolver)
assert.Equal(t, resolver.Chroot.Base(), parentPath)
refs, err := resolver.FilesByPath(tt.input...) refs, err := resolver.FilesByPath(tt.input...)
assert.NoError(t, err) assert.NoError(t, err)
@ -1449,8 +1453,11 @@ func TestFileResolver_FilesByGlob(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
require.NotNil(t, parentPath) require.NotNil(t, parentPath)
resolver, err := NewFromFile(parentPath, filePath) resolver, err := NewFromFile(filePath)
assert.NoError(t, err) assert.NoError(t, err)
require.NotNil(t, resolver)
assert.Equal(t, resolver.Chroot.Base(), parentPath)
refs, err := resolver.FilesByGlob("**/*.txt") refs, err := resolver.FilesByGlob("**/*.txt")
assert.NoError(t, err) assert.NoError(t, err)
@ -1476,8 +1483,11 @@ func Test_fileResolver_FilesByMIMEType(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
require.NotNil(t, parentPath) require.NotNil(t, parentPath)
resolver, err := NewFromFile(parentPath, filePath) resolver, err := NewFromFile(filePath)
assert.NoError(t, err) assert.NoError(t, err)
require.NotNil(t, resolver)
assert.Equal(t, resolver.Chroot.Base(), parentPath)
locations, err := resolver.FilesByMIMEType(test.mimeType) locations, err := resolver.FilesByMIMEType(test.mimeType)
assert.NoError(t, err) assert.NoError(t, err)
assert.Equal(t, test.expectedPaths.Size(), len(locations)) assert.Equal(t, test.expectedPaths.Size(), len(locations))
@ -1497,10 +1507,12 @@ func Test_fileResolver_FileContentsByLocation(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
require.NotNil(t, parentPath) require.NotNil(t, parentPath)
r, err := NewFromFile(parentPath, filePath) resolver, err := NewFromFile(filePath)
require.NoError(t, err) require.NoError(t, err)
require.NotNil(t, resolver)
assert.Equal(t, resolver.Chroot.Base(), parentPath)
exists, existingPath, err := r.Tree.File(stereoscopeFile.Path(filepath.Join(cwd, "test-fixtures/image-simple/file-1.txt"))) exists, existingPath, err := resolver.Tree.File(stereoscopeFile.Path(filepath.Join(cwd, "test-fixtures/image-simple/file-1.txt")))
require.True(t, exists) require.True(t, exists)
require.NoError(t, err) require.NoError(t, err)
require.True(t, existingPath.HasReference()) require.True(t, existingPath.HasReference())
@ -1525,7 +1537,7 @@ func Test_fileResolver_FileContentsByLocation(t *testing.T) {
for _, test := range tests { for _, test := range tests {
t.Run(test.name, func(t *testing.T) { t.Run(test.name, func(t *testing.T) {
actual, err := r.FileContentsByLocation(test.location) actual, err := resolver.FileContentsByLocation(test.location)
if test.err { if test.err {
require.Error(t, err) require.Error(t, err)
return return
@ -1546,8 +1558,11 @@ func TestFileResolver_AllLocations_errorOnDirRequest(t *testing.T) {
parentPath, err := absoluteSymlinkFreePathToParent(filePath) parentPath, err := absoluteSymlinkFreePathToParent(filePath)
require.NoError(t, err) require.NoError(t, err)
require.NotNil(t, parentPath) require.NotNil(t, parentPath)
resolver, err := NewFromFile(parentPath, filePath)
resolver, err := NewFromFile(filePath)
require.NoError(t, err) require.NoError(t, err)
require.NotNil(t, resolver)
assert.Equal(t, resolver.Chroot.Base(), parentPath)
var dirLoc *file.Location var dirLoc *file.Location
ctx, cancel := context.WithCancel(context.Background()) ctx, cancel := context.WithCancel(context.Background())
@ -1575,8 +1590,11 @@ func TestFileResolver_AllLocations(t *testing.T) {
parentPath, err := absoluteSymlinkFreePathToParent(filePath) parentPath, err := absoluteSymlinkFreePathToParent(filePath)
require.NoError(t, err) require.NoError(t, err)
require.NotNil(t, parentPath) require.NotNil(t, parentPath)
resolver, err := NewFromFile(parentPath, filePath)
resolver, err := NewFromFile(filePath)
require.NoError(t, err) require.NoError(t, err)
require.NotNil(t, resolver)
assert.Equal(t, resolver.Chroot.Base(), parentPath)
paths := strset.New() paths := strset.New()
for loc := range resolver.AllLocations(context.Background()) { for loc := range resolver.AllLocations(context.Background()) {
@ -1600,8 +1618,11 @@ func Test_FileResolver_AllLocationsDoesNotLeakGoRoutine(t *testing.T) {
parentPath, err := absoluteSymlinkFreePathToParent(filePath) parentPath, err := absoluteSymlinkFreePathToParent(filePath)
require.NoError(t, err) require.NoError(t, err)
require.NotNil(t, parentPath) require.NotNil(t, parentPath)
resolver, err := NewFromFile(parentPath, filePath)
resolver, err := NewFromFile(filePath)
require.NoError(t, err) require.NoError(t, err)
require.NotNil(t, resolver)
assert.Equal(t, resolver.Chroot.Base(), parentPath)
require.NoError(t, err) require.NoError(t, err)
ctx, cancel := context.WithCancel(context.Background()) ctx, cancel := context.WithCancel(context.Background())


@ -4,9 +4,10 @@ import (
"os" "os"
"testing" "testing"
"github.com/anchore/stereoscope/pkg/file"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/anchore/stereoscope/pkg/file"
) )
func TestFileMetadataFromPath(t *testing.T) { func TestFileMetadataFromPath(t *testing.T) {


@ -58,6 +58,7 @@ type AlpmDBEntry struct {
Depends []string `mapstructure:"depends" json:"depends,omitempty"` Depends []string `mapstructure:"depends" json:"depends,omitempty"`
} }
// AlpmFileRecord represents a single file entry within an Arch Linux package with its associated metadata tracked by pacman.
type AlpmFileRecord struct { type AlpmFileRecord struct {
// Path is the file path relative to the filesystem root // Path is the file path relative to the filesystem root
Path string `mapstruture:"path" json:"path,omitempty"` Path string `mapstruture:"path" json:"path,omitempty"`

syft/pkg/cataloger/.gitignore

@ -0,0 +1,2 @@
# these are generated by pkgtest helpers, no need to check them in
**/test-fixtures/test-observations.json

View File

@@ -6,8 +6,6 @@ import (
"testing"
"github.com/google/go-cmp/cmp/cmpopts"
-"github.com/stretchr/testify/assert"
-"github.com/stretchr/testify/require"
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg"
@@ -17,67 +15,23 @@ import (
func TestGGUFCataloger_Globs(t *testing.T) {
tests := []struct {
name string
-setup func(t *testing.T) string // returns fixture directory
+fixture string
expected []string
}{
{
-name: "finds GGUF files in root",
+name: "obtain gguf files",
-setup: func(t *testing.T) string {
+fixture: "test-fixtures/glob-paths",
-dir := t.TempDir()
-createTestGGUFInDir(t, dir, "model1.gguf")
-createTestGGUFInDir(t, dir, "model2.gguf")
-return dir
-},
expected: []string{
-"model1.gguf",
+"models/model.gguf",
-"model2.gguf",
-},
-},
-{
-name: "finds GGUF files in subdirectories",
-setup: func(t *testing.T) string {
-dir := t.TempDir()
-modelsDir := filepath.Join(dir, "models")
-os.MkdirAll(modelsDir, 0755)
-createTestGGUFInDir(t, modelsDir, "llama.gguf")
-deepDir := filepath.Join(dir, "deep", "nested", "path")
-os.MkdirAll(deepDir, 0755)
-createTestGGUFInDir(t, deepDir, "mistral.gguf")
-return dir
-},
-expected: []string{
-"models/llama.gguf",
-"deep/nested/path/mistral.gguf",
-},
-},
-{
-name: "ignores non-GGUF files",
-setup: func(t *testing.T) string {
-dir := t.TempDir()
-createTestGGUFInDir(t, dir, "model.gguf")
-// Create non-GGUF files
-os.WriteFile(filepath.Join(dir, "readme.txt"), []byte("readme"), 0644)
-os.WriteFile(filepath.Join(dir, "model.bin"), []byte("binary"), 0644)
-os.WriteFile(filepath.Join(dir, "config.json"), []byte("{}"), 0644)
-return dir
-},
-expected: []string{
-"model.gguf",
},
},
}
-for _, tt := range tests {
+for _, test := range tests {
-t.Run(tt.name, func(t *testing.T) {
+t.Run(test.name, func(t *testing.T) {
-fixtureDir := tt.setup(t)
pkgtest.NewCatalogTester().
-FromDirectory(t, fixtureDir).
+FromDirectory(t, test.fixture).
-ExpectsResolverContentQueries(tt.expected).
+ExpectsResolverContentQueries(test.expected).
TestCataloger(t, NewGGUFCataloger())
})
}
@@ -117,9 +71,7 @@ func TestGGUFCataloger_Integration(t *testing.T) {
pkg.NewLicenseFromFields("Apache-2.0", "", nil),
),
Metadata: pkg.GGUFFileHeader{
-ModelFormat: "gguf",
ModelName: "llama3-8b",
-ModelVersion: "3.0",
License: "Apache-2.0",
Architecture: "llama",
Quantization: "Unknown",
@@ -127,104 +79,6 @@ func TestGGUFCataloger_Integration(t *testing.T) {
GGUFVersion: 3,
TensorCount: 0,
Header: map[string]interface{}{},
-TruncatedHeader: false,
-},
-},
-},
-expectedRelationships: nil,
-},
-{
-name: "catalog multiple GGUF files",
-setup: func(t *testing.T) string {
-dir := t.TempDir()
-// Create first model
-data1 := newTestGGUFBuilder().
-withVersion(3).
-withStringKV("general.architecture", "llama").
-withStringKV("general.name", "model1").
-withStringKV("general.version", "1.0").
-build()
-os.WriteFile(filepath.Join(dir, "model1.gguf"), data1, 0644)
-// Create second model
-data2 := newTestGGUFBuilder().
-withVersion(3).
-withStringKV("general.architecture", "mistral").
-withStringKV("general.name", "model2").
-withStringKV("general.version", "2.0").
-build()
-os.WriteFile(filepath.Join(dir, "model2.gguf"), data2, 0644)
-return dir
-},
-expectedPackages: []pkg.Package{
-{
-Name: "model1",
-Version: "1.0",
-Type: pkg.ModelPkg,
-Metadata: pkg.GGUFFileHeader{
-ModelFormat: "gguf",
-ModelName: "model1",
-ModelVersion: "1.0",
-Architecture: "llama",
-Quantization: "Unknown",
-GGUFVersion: 3,
-TensorCount: 0,
-Header: map[string]interface{}{},
-TruncatedHeader: false,
-},
-},
-{
-Name: "model2",
-Version: "2.0",
-Type: pkg.ModelPkg,
-Metadata: pkg.GGUFFileHeader{
-ModelFormat: "gguf",
-ModelName: "model2",
-ModelVersion: "2.0",
-Architecture: "mistral",
-Quantization: "Unknown",
-GGUFVersion: 3,
-TensorCount: 0,
-Header: map[string]interface{}{},
-TruncatedHeader: false,
-},
-},
-},
-expectedRelationships: nil,
-},
-{
-name: "catalog GGUF in nested directories",
-setup: func(t *testing.T) string {
-dir := t.TempDir()
-nestedDir := filepath.Join(dir, "models", "quantized")
-os.MkdirAll(nestedDir, 0755)
-data := newTestGGUFBuilder().
-withVersion(3).
-withStringKV("general.architecture", "qwen").
-withStringKV("general.name", "qwen-nested").
-build()
-os.WriteFile(filepath.Join(nestedDir, "qwen.gguf"), data, 0644)
-return dir
-},
-expectedPackages: []pkg.Package{
-{
-Name: "qwen-nested",
-Version: unknownGGUFData,
-Type: pkg.ModelPkg,
-Metadata: pkg.GGUFFileHeader{
-ModelFormat: "gguf",
-ModelName: "qwen-nested",
-ModelVersion: unknownGGUFData,
-Architecture: "qwen",
-Quantization: "Unknown",
-GGUFVersion: 3,
-TensorCount: 0,
-Header: map[string]interface{}{},
-TruncatedHeader: false,
},
},
},
@@ -243,131 +97,11 @@ func TestGGUFCataloger_Integration(t *testing.T) {
IgnoreLocationLayer().
IgnorePackageFields("FoundBy", "Locations"). // These are set by the cataloger
WithCompareOptions(
-// Ignore Hash as it's computed dynamically
+// Ignore MetadataHash as it's computed dynamically
-cmpopts.IgnoreFields(pkg.GGUFFileHeader{}, "Hash"),
+cmpopts.IgnoreFields(pkg.GGUFFileHeader{}, "MetadataHash"),
)
tester.TestCataloger(t, NewGGUFCataloger())
})
}
}
-func TestGGUFCataloger_SkipsInvalidFiles(t *testing.T) {
-dir := t.TempDir()
-// Create a valid GGUF
-validData := newTestGGUFBuilder().
-withVersion(3).
-withStringKV("general.architecture", "llama").
-withStringKV("general.name", "valid-model").
-build()
-os.WriteFile(filepath.Join(dir, "valid.gguf"), validData, 0644)
-// Create an invalid GGUF (wrong magic)
-invalidData := newTestGGUFBuilder().buildInvalidMagic()
-os.WriteFile(filepath.Join(dir, "invalid.gguf"), invalidData, 0644)
-// Create a truncated GGUF
-os.WriteFile(filepath.Join(dir, "truncated.gguf"), []byte{0x47}, 0644)
-// Catalog should succeed and only return the valid package
-tester := pkgtest.NewCatalogTester().
-FromDirectory(t, dir).
-ExpectsAssertion(func(t *testing.T, pkgs []pkg.Package, _ []artifact.Relationship) {
-// Should only find the valid model
-require.Len(t, pkgs, 1)
-assert.Equal(t, "valid-model", pkgs[0].Name)
-})
-tester.TestCataloger(t, NewGGUFCataloger())
-}
-func TestGGUFCataloger_Name(t *testing.T) {
-cataloger := NewGGUFCataloger()
-assert.Equal(t, "gguf-cataloger", cataloger.Name())
-}
-func TestGGUFCataloger_EmptyDirectory(t *testing.T) {
-dir := t.TempDir()
-// Create a subdirectory to ensure glob still runs
-os.MkdirAll(filepath.Join(dir, "models"), 0755)
-tester := pkgtest.NewCatalogTester().
-FromDirectory(t, dir).
-ExpectsAssertion(func(t *testing.T, pkgs []pkg.Package, rels []artifact.Relationship) {
-assert.Empty(t, pkgs)
-assert.Empty(t, rels)
-})
-tester.TestCataloger(t, NewGGUFCataloger())
-}
-func TestGGUFCataloger_MixedFiles(t *testing.T) {
-dir := t.TempDir()
-// Create GGUF file
-ggufData := newTestGGUFBuilder().
-withVersion(3).
-withStringKV("general.architecture", "llama").
-withStringKV("general.name", "test-model").
-build()
-os.WriteFile(filepath.Join(dir, "model.gguf"), ggufData, 0644)
-// Create other file types
-os.WriteFile(filepath.Join(dir, "README.md"), []byte("# Models"), 0644)
-os.WriteFile(filepath.Join(dir, "config.json"), []byte("{}"), 0644)
-os.WriteFile(filepath.Join(dir, "weights.bin"), []byte("weights"), 0644)
-os.MkdirAll(filepath.Join(dir, "subdir"), 0755)
-tester := pkgtest.NewCatalogTester().
-FromDirectory(t, dir).
-ExpectsAssertion(func(t *testing.T, pkgs []pkg.Package, _ []artifact.Relationship) {
-// Should only find the GGUF model
-require.Len(t, pkgs, 1)
-assert.Equal(t, "test-model", pkgs[0].Name)
-assert.Equal(t, pkg.ModelPkg, pkgs[0].Type)
-})
-tester.TestCataloger(t, NewGGUFCataloger())
-}
-func TestGGUFCataloger_CaseInsensitiveGlob(t *testing.T) {
-// Test that the glob pattern is case-sensitive (as expected for **/*.gguf)
-dir := t.TempDir()
-// Create lowercase .gguf
-data := newTestGGUFBuilder().
-withVersion(3).
-withStringKV("general.architecture", "llama").
-withStringKV("general.name", "lowercase").
-build()
-os.WriteFile(filepath.Join(dir, "model.gguf"), data, 0644)
-// Create uppercase .GGUF (should not match the glob)
-os.WriteFile(filepath.Join(dir, "MODEL.GGUF"), data, 0644)
-tester := pkgtest.NewCatalogTester().
-FromDirectory(t, dir).
-ExpectsAssertion(func(t *testing.T, pkgs []pkg.Package, _ []artifact.Relationship) {
-// Depending on filesystem case-sensitivity, we may get 1 or 2 packages
-// On case-insensitive filesystems (macOS), both might match
-// On case-sensitive filesystems (Linux), only lowercase matches
-assert.GreaterOrEqual(t, len(pkgs), 1, "should find at least the lowercase file")
-})
-tester.TestCataloger(t, NewGGUFCataloger())
-}
-// createTestGGUFInDir creates a minimal test GGUF file in the specified directory
-func createTestGGUFInDir(t *testing.T, dir, filename string) {
-t.Helper()
-data := newTestGGUFBuilder().
-withVersion(3).
-withStringKV("general.architecture", "llama").
-withStringKV("general.name", "test-model").
-build()
-path := filepath.Join(dir, filename)
-err := os.WriteFile(path, data, 0644)
-require.NoError(t, err)
-}

View File

@@ -1,24 +1,14 @@
package ai
import (
-"crypto/sha256"
-"encoding/json"
-"fmt"
-"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/pkg"
)
-func newGGUFPackage(metadata *pkg.GGUFFileHeader, locations ...file.Location) pkg.Package {
+func newGGUFPackage(metadata *pkg.GGUFFileHeader, version string, locations ...file.Location) pkg.Package {
-// Compute hash if not already set
-if metadata.Hash == "" {
-metadata.Hash = computeMetadataHash(metadata)
-}
p := pkg.Package{
Name: metadata.ModelName,
-Version: metadata.ModelVersion,
+Version: version,
Locations: file.NewLocationSet(locations...),
Type: pkg.ModelPkg,
Licenses: pkg.NewLicenseSet(),
@@ -36,34 +26,3 @@ func newGGUFPackage(metadata *pkg.GGUFFileHeader, locations ...file.Location) pk
return p
}
-// computeMetadataHash computes a stable hash of the metadata for use as a global identifier
-func computeMetadataHash(metadata *pkg.GGUFFileHeader) string {
-// Create a stable representation of the metadata
-hashData := struct {
-Format string
-Name string
-Version string
-Architecture string
-GGUFVersion uint32
-TensorCount uint64
-}{
-Format: metadata.ModelFormat,
-Name: metadata.ModelName,
-Version: metadata.ModelVersion,
-Architecture: metadata.Architecture,
-GGUFVersion: metadata.GGUFVersion,
-TensorCount: metadata.TensorCount,
-}
-// Marshal to JSON for stable hashing
-jsonBytes, err := json.Marshal(hashData)
-if err != nil {
-log.Debugf("failed to marshal metadata for hashing: %v", err)
-return ""
-}
-// Compute SHA256 hash
-hash := sha256.Sum256(jsonBytes)
-return fmt.Sprintf("%x", hash[:8]) // Use first 8 bytes (16 hex chars)
-}

View File

@@ -15,15 +15,15 @@ func TestNewGGUFPackage(t *testing.T) {
tests := []struct {
name string
metadata *pkg.GGUFFileHeader
+version string
locations []file.Location
checkFunc func(t *testing.T, p pkg.Package)
}{
{
name: "complete GGUF package with all fields",
+version: "3.0",
metadata: &pkg.GGUFFileHeader{
-ModelFormat: "gguf",
ModelName: "llama3-8b-instruct",
-ModelVersion: "3.0",
License: "Apache-2.0",
Architecture: "llama",
Quantization: "Q4_K_M",
@@ -31,7 +31,6 @@ func TestNewGGUFPackage(t *testing.T) {
GGUFVersion: 3,
TensorCount: 291,
Header: map[string]any{},
-TruncatedHeader: false,
},
locations: []file.Location{file.NewLocation("/models/llama3-8b.gguf")},
checkFunc: func(t *testing.T, p pkg.Package) {
@@ -54,10 +53,9 @@ func TestNewGGUFPackage(t *testing.T) {
},
{
name: "minimal GGUF package",
+version: "1.0",
metadata: &pkg.GGUFFileHeader{
-ModelFormat: "gguf",
ModelName: "simple-model",
-ModelVersion: "1.0",
Architecture: "gpt2",
GGUFVersion: 3,
TensorCount: 50,
@@ -79,10 +77,9 @@ func TestNewGGUFPackage(t *testing.T) {
},
{
name: "GGUF package with multiple locations",
+version: "1.5",
metadata: &pkg.GGUFFileHeader{
-ModelFormat: "gguf",
ModelName: "multi-location-model",
-ModelVersion: "1.5",
Architecture: "llama",
GGUFVersion: 3,
TensorCount: 150,
@@ -99,12 +96,12 @@ func TestNewGGUFPackage(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
-p := newGGUFPackage(tt.metadata, tt.locations...)
+p := newGGUFPackage(tt.metadata, tt.version, tt.locations...)
if d := cmp.Diff(tt.metadata.ModelName, p.Name); d != "" {
t.Errorf("Name mismatch (-want +got):\n%s", d)
}
-if d := cmp.Diff(tt.metadata.ModelVersion, p.Version); d != "" {
+if d := cmp.Diff(tt.version, p.Version); d != "" {
t.Errorf("Version mismatch (-want +got):\n%s", d)
}
if d := cmp.Diff(pkg.ModelPkg, p.Type); d != "" {

View File

@@ -14,19 +14,14 @@ const (
maxHeaderSize = 50 * 1024 * 1024 // 50MB for large tokenizer vocabularies
)
-// ggufHeaderReader reads just the header portion of a GGUF file efficiently
-type ggufHeaderReader struct {
-reader io.Reader
-}
// readHeader reads only the GGUF header (metadata) without reading tensor data
// This is much more efficient than reading the entire file
// The reader should be wrapped with io.LimitedReader to prevent OOM issues
-func (r *ggufHeaderReader) readHeader() ([]byte, error) {
+func readHeader(r io.Reader) ([]byte, error) {
// Read initial chunk to determine header size
// GGUF format: magic(4) + version(4) + tensor_count(8) + metadata_kv_count(8) + metadata_kvs + tensors_info
initialBuf := make([]byte, 24) // Enough for magic, version, tensor count, and kv count
-if _, err := io.ReadFull(r.reader, initialBuf); err != nil {
+if _, err := io.ReadFull(r, initialBuf); err != nil {
return nil, fmt.Errorf("failed to read GGUF header prefix: %w", err)
}
@@ -45,7 +40,7 @@ func (r *ggufHeaderReader) readHeader() ([]byte, error) {
// The LimitedReader will return EOF once maxHeaderSize is reached
buf := make([]byte, 64*1024) // 64KB chunks
for {
-n, err := r.reader.Read(buf)
+n, err := r.Read(buf)
if n > 0 {
headerData = append(headerData, buf[:n]...)
}
@@ -65,24 +60,14 @@ func (r *ggufHeaderReader) readHeader() ([]byte, error) {
func convertGGUFMetadataKVs(kvs gguf_parser.GGUFMetadataKVs) map[string]interface{} {
result := make(map[string]interface{})
-// Limit KV pairs to avoid bloat
-const maxKVPairs = 200
-count := 0
for _, kv := range kvs {
-if count >= maxKVPairs {
-break
-}
// Skip standard fields that are extracted separately
switch kv.Key {
case "general.architecture", "general.name", "general.license",
"general.version", "general.parameter_count", "general.quantization":
continue
}
result[kv.Key] = kv.Value
-count++
}
return result
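As a usage note, here is a minimal sketch of how the refactored free function is meant to be driven, assuming `readHeader` and `maxHeaderSize` from the hunk above are in scope in the same package; the helper name and path are hypothetical:

```go
// headerBytes is a hypothetical in-package helper showing the call pattern.
func headerBytes(path string) ([]byte, error) {
	f, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer f.Close()

	// The LimitedReader is the OOM guard: readHeader may buffer freely,
	// since Read reports io.EOF once maxHeaderSize bytes are consumed.
	limited := &io.LimitedReader{R: f, N: maxHeaderSize}
	return readHeader(limited)
}
```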

View File

@@ -2,15 +2,19 @@ package ai
import (
"context"
+"encoding/json"
"fmt"
"io"
"os"
"path/filepath"
+"sort"
"strings"
+"github.com/cespare/xxhash/v2"
gguf_parser "github.com/gpustack/gguf-parser-go"
"github.com/anchore/syft/internal"
+"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/internal/unknown"
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/file"
@@ -18,8 +22,6 @@ import (
"github.com/anchore/syft/syft/pkg/cataloger/generic"
)
-const unknownGGUFData = "unknown"
// parseGGUFModel parses a GGUF model file and returns the discovered package.
// This implementation only reads the header portion of the file, not the entire model.
func parseGGUFModel(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
@@ -28,8 +30,7 @@ func parseGGUFModel(_ context.Context, _ file.Resolver, _ *generic.Environment,
// Read and validate the GGUF file header using LimitedReader to prevent OOM
// We use LimitedReader to cap reads at maxHeaderSize (50MB)
limitedReader := &io.LimitedReader{R: reader, N: maxHeaderSize}
-headerReader := &ggufHeaderReader{reader: limitedReader}
-headerData, err := headerReader.readHeader()
+headerData, err := readHeader(limitedReader)
if err != nil {
return nil, nil, fmt.Errorf("failed to read GGUF header: %w", err)
}
@@ -61,11 +62,12 @@ func parseGGUFModel(_ context.Context, _ file.Resolver, _ *generic.Environment,
// Extract metadata
metadata := ggufFile.Metadata()
+// Extract version separately (will be set on Package.Version)
+modelVersion := extractVersion(ggufFile.Header.MetadataKV)
// Convert to syft metadata structure
syftMetadata := &pkg.GGUFFileHeader{
-ModelFormat: "gguf",
ModelName: metadata.Name,
-ModelVersion: extractVersion(ggufFile.Header.MetadataKV),
License: metadata.License,
Architecture: metadata.Architecture,
Quantization: metadata.FileTypeDescriptor,
@@ -73,8 +75,7 @@ func parseGGUFModel(_ context.Context, _ file.Resolver, _ *generic.Environment,
GGUFVersion: uint32(ggufFile.Header.Version),
TensorCount: ggufFile.Header.TensorCount,
Header: convertGGUFMetadataKVs(ggufFile.Header.MetadataKV),
-TruncatedHeader: false, // We read the full header
-Hash: "", // Will be computed in newGGUFPackage
+MetadataHash: computeKVMetadataHash(ggufFile.Header.MetadataKV),
}
// If model name is not in metadata, use filename
@@ -82,20 +83,37 @@ func parseGGUFModel(_ context.Context, _ file.Resolver, _ *generic.Environment,
syftMetadata.ModelName = extractModelNameFromPath(reader.Path())
}
-// If version is still unknown, try to infer from name
-if syftMetadata.ModelVersion == unknownGGUFData {
-syftMetadata.ModelVersion = extractVersionFromName(syftMetadata.ModelName)
-}
// Create package from metadata
p := newGGUFPackage(
syftMetadata,
+modelVersion,
reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
)
return []pkg.Package{p}, nil, unknown.IfEmptyf([]pkg.Package{p}, "unable to parse GGUF file")
}
+// computeKVMetadataHash computes a stable hash of the KV metadata for use as a global identifier
+func computeKVMetadataHash(metadata gguf_parser.GGUFMetadataKVs) string {
+// Sort the KV pairs by key for stable hashing
+sortedKVs := make([]gguf_parser.GGUFMetadataKV, len(metadata))
+copy(sortedKVs, metadata)
+sort.Slice(sortedKVs, func(i, j int) bool {
+return sortedKVs[i].Key < sortedKVs[j].Key
+})
+// Marshal sorted KVs to JSON for stable hashing
+jsonBytes, err := json.Marshal(sortedKVs)
+if err != nil {
+log.Debugf("failed to marshal metadata for hashing: %v", err)
+return ""
+}
+// Compute xxhash
+hash := xxhash.Sum64(jsonBytes)
+return fmt.Sprintf("%016x", hash) // 16 hex chars (64 bits)
+}
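The sort-before-marshal step is what makes the hash order-independent. A self-contained sketch of that property, using a stand-in KV type rather than the real gguf_parser.GGUFMetadataKV:

```go
package main

import (
	"encoding/json"
	"fmt"
	"sort"

	"github.com/cespare/xxhash/v2"
)

// kv stands in for gguf_parser.GGUFMetadataKV for illustration only.
type kv struct {
	Key   string `json:"key"`
	Value any    `json:"value"`
}

func stableHash(kvs []kv) string {
	sorted := make([]kv, len(kvs))
	copy(sorted, kvs)
	sort.Slice(sorted, func(i, j int) bool { return sorted[i].Key < sorted[j].Key })
	b, _ := json.Marshal(sorted) // key order is now deterministic
	return fmt.Sprintf("%016x", xxhash.Sum64(b))
}

func main() {
	a := []kv{{"general.architecture", "llama"}, {"llama.context_length", 8192}}
	b := []kv{{"llama.context_length", 8192}, {"general.architecture", "llama"}}
	fmt.Println(stableHash(a) == stableHash(b)) // true: pair order does not matter
}
```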
// extractVersion attempts to extract version from metadata KV pairs
func extractVersion(kvs gguf_parser.GGUFMetadataKVs) string {
for _, kv := range kvs {
@@ -105,14 +123,7 @@ func extractVersion(kvs gguf_parser.GGUFMetadataKVs) string {
}
}
}
-return unknownGGUFData
+return ""
}
-// extractVersionFromName tries to extract version from model name
-func extractVersionFromName(_ string) string {
-// Look for version patterns like "v1.0", "1.5b", "3.0", etc.
-// For now, return unknown - this could be enhanced with regex
-return unknownGGUFData
-}
// extractModelNameFromPath extracts the model name from the file path
View File

@@ -1403,6 +1403,22 @@ func Test_Cataloger_PositiveCases(t *testing.T) {
Metadata: metadata("ffmpeg-library"),
},
},
+{
+logicalFixture: "elixir/1.19.1/linux-amd64",
+expected: pkg.Package{
+Name: "elixir",
+Version: "1.19.1",
+Type: "binary",
+PURL: "pkg:generic/elixir@1.19.1",
+Locations: locations("elixir", "lib/elixir/ebin/elixir.app"),
+Metadata: pkg.BinarySignature{
+Matches: []pkg.ClassifierMatch{
+match("elixir-binary", "elixir"),
+match("elixir-library", "lib/elixir/ebin/elixir.app"),
+},
+},
+},
+},
}
for _, test := range tests {

View File

@@ -663,6 +663,26 @@ func DefaultClassifiers() []binutils.Classifier {
PURL: mustPURL("pkg:generic/ffmpeg@version"),
CPEs: singleCPE("cpe:2.3:a:ffmpeg:ffmpeg:*:*:*:*:*:*:*:*", cpe.NVDDictionaryLookupSource),
},
+{
+Class: "elixir-binary",
+FileGlob: "**/elixir",
+EvidenceMatcher: m.FileContentsVersionMatcher(
+`(?m)ELIXIR_VERSION=(?P<version>[0-9]+\.[0-9]+\.[0-9]+)`),
+Package: "elixir",
+PURL: mustPURL("pkg:generic/elixir@version"),
+CPEs: []cpe.CPE{
+cpe.Must("cpe:2.3:a:elixir-lang:elixir:*:*:*:*:*:*:*:*", cpe.NVDDictionaryLookupSource),
+},
+},
+{
+Class: "elixir-library",
+FileGlob: "**/elixir/ebin/elixir.app",
+EvidenceMatcher: m.FileContentsVersionMatcher(
+`(?m)\{vsn,"(?P<version>[0-9]+\.[0-9]+\.[0-9]+(-[a-z0-9]+)?)"\}`),
+Package: "elixir",
+PURL: mustPURL("pkg:generic/elixir@version"),
+CPEs: singleCPE("cpe:2.3:a:elixir-lang:elixir:*:*:*:*:*:*:*:*", cpe.NVDDictionaryLookupSource),
+},
}
return append(classifiers, defaultJavaClassifiers()...)
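Both classifiers hinge on a named `version` capture group. A quick standalone check of the elixir-binary pattern against content shaped like the fixture script added below; note the real matching is driven through FileContentsVersionMatcher, so this is only an illustration of the regex itself:

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	re := regexp.MustCompile(`(?m)ELIXIR_VERSION=(?P<version>[0-9]+\.[0-9]+\.[0-9]+)`)
	contents := "#!/bin/sh\nset -e\nELIXIR_VERSION=1.19.1\n"
	if m := re.FindStringSubmatch(contents); m != nil {
		fmt.Println(m[re.SubexpIndex("version")]) // 1.19.1
	}
}
```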

View File

@@ -6,6 +6,7 @@ import (
"sort"
"strings"
+packageurl "github.com/anchore/packageurl-go"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/pkg"
)
@@ -32,6 +33,19 @@ func newPEPackage(versionResources map[string]string, f file.Location) pkg.Packa
Metadata: newPEBinaryVersionResourcesFromMap(versionResources),
}
+// If this appears to be Ghostscript, emit a canonical generic purl
+// Example expected: pkg:generic/ghostscript@<version>
+prod := strings.ToLower(spaceNormalize(versionResources["ProductName"]))
+if prod == "" {
+// fall back to FileDescription if ProductName is missing
+prod = strings.ToLower(spaceNormalize(versionResources["FileDescription"]))
+}
+if p.Version != "" && strings.Contains(prod, "ghostscript") {
+// build a generic PURL for ghostscript
+purl := packageurl.NewPackageURL(packageurl.TypeGeneric, "", "ghostscript", p.Version, nil, "").ToString()
+p.PURL = purl
+}
p.SetID()
return p
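For reference, the packageurl-go construction above renders the expected canonical form; a standalone check (the main package and version literal are illustrative only):

```go
package main

import (
	"fmt"

	packageurl "github.com/anchore/packageurl-go"
)

func main() {
	// Same construction as the Ghostscript branch in the hunk above.
	purl := packageurl.NewPackageURL(packageurl.TypeGeneric, "", "ghostscript", "9.54.0", nil, "").ToString()
	fmt.Println(purl) // pkg:generic/ghostscript@9.54.0
}
```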

View File

@@ -0,0 +1,24 @@
package binary
import (
"testing"
"github.com/anchore/syft/syft/file"
)
func TestGhostscriptPEGeneratesGenericPURL(t *testing.T) {
vr := map[string]string{
"CompanyName": "Artifex Software, Inc.",
"ProductName": "GPL Ghostscript",
"FileDescription": "Ghostscript Interpreter",
"ProductVersion": "9.54.0",
}
loc := file.NewLocation("/usr/bin/gswin64c.exe")
p := newPEPackage(vr, loc)
expected := "pkg:generic/ghostscript@9.54.0"
if p.PURL != expected {
t.Fatalf("expected purl %q, got %q", expected, p.PURL)
}
}

View File

@@ -0,0 +1,20 @@
#!/bin/sh
# SPDX-License-Identifier: Apache-2.0
# SPDX-FileCopyrightText: 2021 The Elixir Team
# SPDX-FileCopyrightText: 2012 Plataformatec
set -e
ELIXIR_VERSION=1.19.1
if [ $# -eq 0 ] || { [ $# -eq 1 ] && { [ "$1" = "--help" ] || [ "$1" = "-h" ]; }; }; then
cat <<USAGE >&2
Usage: $(basename "$0") [options] [.exs file] [data]
## General options
-e "COMMAND" Evaluates the given command (*)
-h, --help Prints this message (standalone)
-r "FILE" Requires the given files/patterns (*)
-S SCRIPT Finds and executes the given script in \$PATH

View File

@@ -0,0 +1,19 @@
{application,elixir,
[{description,"elixir"},
{vsn,"1.19.1"},
{modules,
['Elixir.Access','Elixir.Agent.Server','Elixir.Agent',
'Elixir.Application','Elixir.ArgumentError',
elixir_overridable,elixir_parser,elixir_quote,elixir_rewrite,
elixir_sup,elixir_tokenizer,elixir_utils,iex]},
{registered,[elixir_sup,elixir_config,elixir_code_server]},
{applications,[kernel,stdlib,compiler]},
{mod,{elixir,[]}},
{env,
[{ansi_syntax_colors,
[{atom,cyan},
{binary,default_color},
{operator,default_color}]},
{check_endianness,true},
{dbg_callback,{'Elixir.Macro',dbg,[]}},
{time_zone_database,'Elixir.Calendar.UTCOnlyTimeZoneDatabase'}]}]}.

View File

@@ -0,0 +1,58 @@
package cpe
import (
"strings"
"github.com/anchore/syft/syft/pkg"
)
// TargetSoftwareToPackageType is derived from looking at target_software attributes in the NVD dataset
// TODO: ideally this would be driven from the store, where we can resolve ecosystem aliases directly
func TargetSoftwareToPackageType(tsw string) pkg.Type {
tsw = strings.NewReplacer("-", "_", " ", "_").Replace(strings.ToLower(tsw))
switch tsw {
case "alpine", "apk":
return pkg.ApkPkg
case "debian", "dpkg":
return pkg.DebPkg
case "java", "maven", "ant", "gradle", "jenkins", "jenkins_ci", "kafka", "logstash", "mule", "nifi", "solr", "spark", "storm", "struts", "tomcat", "zookeeper", "log4j":
return pkg.JavaPkg
case "javascript", "node", "nodejs", "node.js", "npm", "yarn", "apache", "jquery", "next.js", "prismjs":
return pkg.NpmPkg
case "c", "c++", "c/c++", "conan", "gnu_c++", "qt":
return pkg.ConanPkg
case "dart":
return pkg.DartPubPkg
case "redhat", "rpm", "redhat_enterprise_linux", "rhel", "suse", "suse_linux", "opensuse", "opensuse_linux", "fedora", "centos", "oracle_linux", "ol":
return pkg.RpmPkg
case "elixir", "hex":
return pkg.HexPkg
case "erlang":
return pkg.ErlangOTPPkg
case ".net", ".net_framework", "asp", "asp.net", "dotnet", "dotnet_framework", "c#", "csharp", "nuget":
return pkg.DotnetPkg
case "ruby", "gem", "nokogiri", "ruby_on_rails":
return pkg.GemPkg
case "rust", "cargo", "crates":
return pkg.RustPkg
case "python", "pip", "pypi", "flask":
return pkg.PythonPkg
case "kb", "knowledgebase", "msrc", "mskb", "microsoft":
return pkg.KbPkg
case "portage", "gentoo":
return pkg.PortagePkg
case "go", "golang", "gomodule":
return pkg.GoModulePkg
case "linux_kernel", "linux", "z/linux":
return pkg.LinuxKernelPkg
case "php":
return pkg.PhpComposerPkg
case "swift":
return pkg.SwiftPkg
case "wordpress", "wordpress_plugin", "wordpress_":
return pkg.WordpressPluginPkg
case "lua", "luarocks":
return pkg.LuaRocksPkg
}
return ""
}
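A sketch of an in-package test exercising the normalization step (lowercasing, with "-" and spaces folded to "_") ahead of the switch; the test itself is hypothetical and not part of this change:

```go
package cpe

import (
	"testing"

	"github.com/stretchr/testify/assert"

	"github.com/anchore/syft/syft/pkg"
)

func TestTargetSoftwareToPackageType_sketch(t *testing.T) {
	assert.Equal(t, pkg.NpmPkg, TargetSoftwareToPackageType("Node.js"))     // lowercased to "node.js"
	assert.Equal(t, pkg.JavaPkg, TargetSoftwareToPackageType("jenkins ci")) // space folded to "jenkins_ci"
	assert.Equal(t, pkg.RpmPkg, TargetSoftwareToPackageType("RedHat"))
	assert.Equal(t, pkg.Type(""), TargetSoftwareToPackageType("unmapped-ecosystem"))
}
```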

View File

@@ -1,9 +1,12 @@
package dart
import (
+"context"
"github.com/anchore/packageurl-go"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/pkg"
+"github.com/anchore/syft/syft/pkg/cataloger/internal/licenses"
)
func newPubspecLockPackage(name string, raw pubspecLockPackage, locations ...file.Location) pkg.Package {
@@ -29,7 +32,7 @@ func newPubspecLockPackage(name string, raw pubspecLockPackage, locations ...fil
return p
}
-func newPubspecPackage(raw pubspecPackage, locations ...file.Location) pkg.Package {
+func newPubspecPackage(ctx context.Context, resolver file.Resolver, raw pubspecPackage, locations ...file.Location) pkg.Package {
var env *pkg.DartPubspecEnvironment
if raw.Environment.SDK != "" || raw.Environment.Flutter != "" {
// this is required only after pubspec v2, but might have been optional before this
@@ -58,6 +61,8 @@ func newPubspecPackage(raw pubspecPackage, locations ...file.Location) pkg.Packa
p.SetID()
+p = licenses.RelativeToPackage(ctx, resolver, p)
return p
}

View File

@@ -29,7 +29,7 @@ type dartPubspecEnvironment struct {
Flutter string `mapstructure:"flutter" yaml:"flutter"`
}
-func parsePubspec(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
+func parsePubspec(ctx context.Context, resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
var pkgs []pkg.Package
dec := yaml.NewDecoder(reader)
@@ -41,6 +41,8 @@ func parsePubspec(_ context.Context, _ file.Resolver, _ *generic.Environment, re
pkgs = append(pkgs,
newPubspecPackage(
+ctx,
+resolver,
p,
reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
),

View File

@@ -24,6 +24,10 @@ import (
"github.com/anchore/syft/syft/pkg/cataloger/generic"
)
+const (
+deinstallStatus string = "deinstall"
+)
var (
errEndOfPackages = fmt.Errorf("no more packages to read")
sourceRegexp = regexp.MustCompile(`(?P<name>\S+)( \((?P<version>.*)\))?`)
@@ -112,6 +116,7 @@ type dpkgExtractedMetadata struct {
Provides string `mapstructure:"Provides"`
Depends string `mapstructure:"Depends"`
PreDepends string `mapstructure:"PreDepends"` // note: original doc is Pre-Depends
+Status string `mapstructure:"Status"`
}
// parseDpkgStatusEntry returns an individual Dpkg entry, or returns errEndOfPackages if there are no more packages to parse from the reader.
@@ -134,6 +139,11 @@ func parseDpkgStatusEntry(reader *bufio.Reader) (*pkg.DpkgDBEntry, error) {
return nil, err
}
+// Skip entries which have been removed but not purged, e.g. "rc" status in dpkg -l
+if strings.Contains(raw.Status, deinstallStatus) {
+return nil, nil
+}
sourceName, sourceVersion := extractSourceVersion(raw.Source)
if sourceVersion != "" {
raw.SourceVersion = sourceVersion
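The gate is a plain substring check against dpkg's three-part Status field; a standalone sketch using the two statuses from the fixture added below (the helper name is illustrative):

```go
package main

import (
	"fmt"
	"strings"
)

const deinstallStatus = "deinstall" // mirrors the constant in the hunk above

// shouldSkip applies the same test parseDpkgStatusEntry performs on the
// extracted Status field.
func shouldSkip(status string) bool {
	return strings.Contains(status, deinstallStatus)
}

func main() {
	fmt.Println(shouldSkip("install ok installed"))      // false: entry is kept
	fmt.Println(shouldSkip("deinstall ok config-files")) // true: removed but not purged, skipped
}
```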

View File

@@ -237,6 +237,37 @@ func Test_parseDpkgStatus(t *testing.T) {
},
},
},
+{
+name: "deinstall status packages are ignored",
+fixturePath: "test-fixtures/var/lib/dpkg/status.d/deinstall",
+expected: []pkg.DpkgDBEntry{
+{
+Package: "linux-image-6.14.0-1012-aws",
+Source: "linux-signed-aws-6.14",
+Version: "6.14.0-1012.12~24.04.1",
+Architecture: "amd64",
+InstalledSize: 15221,
+Maintainer: "Canonical Kernel Team <kernel-team@lists.ubuntu.com>",
+Description: `Signed kernel image aws
+A kernel image for aws. This version of it is signed with
+Canonical's signing key.`,
+Provides: []string{"fuse-module",
+"linux-image",
+"spl-dkms",
+"spl-modules",
+"v4l2loopback-dkms",
+"v4l2loopback-modules",
+"zfs-dkms",
+"zfs-modules"},
+Depends: []string{
+"kmod",
+"linux-base (>= 4.5ubuntu1~16.04.1)",
+"linux-modules-6.14.0-1012-aws",
+},
+Files: []pkg.DpkgFileRecord{},
+},
+},
+},
}
for _, test := range tests {

View File

@@ -0,0 +1,38 @@
Package: linux-image-6.14.0-1012-aws
Status: install ok installed
Priority: optional
Section: kernel
Installed-Size: 15221
Maintainer: Canonical Kernel Team <kernel-team@lists.ubuntu.com>
Architecture: amd64
Source: linux-signed-aws-6.14
Version: 6.14.0-1012.12~24.04.1
Provides: fuse-module, linux-image, spl-dkms, spl-modules, v4l2loopback-dkms, v4l2loopback-modules, zfs-dkms, zfs-modules
Depends: kmod, linux-base (>= 4.5ubuntu1~16.04.1), linux-modules-6.14.0-1012-aws
Recommends: grub-pc | grub-efi-amd64 | grub-efi-ia32 | grub | lilo, initramfs-tools | linux-initramfs-tool
Suggests: bpftool, linux-perf, linux-aws-6.14-doc-6.14.0 | linux-aws-6.14-source-6.14.0, linux-aws-6.14-tools, linux-headers-6.14.0-1012-aws
Conflicts: linux-image-unsigned-6.14.0-1012-aws
Description: Signed kernel image aws
A kernel image for aws. This version of it is signed with
Canonical's signing key.
Built-Using: linux-aws-6.14 (= 6.14.0-1012.12~24.04.1)
Package: linux-image-6.8.0-1029-aws
Status: deinstall ok config-files
Priority: optional
Section: kernel
Installed-Size: 14591
Maintainer: Canonical Kernel Team <kernel-team@lists.ubuntu.com>
Architecture: amd64
Source: linux-signed-aws
Version: 6.8.0-1029.31
Config-Version: 6.8.0-1029.31
Provides: fuse-module, linux-image, spl-dkms, spl-modules, v4l2loopback-dkms, v4l2loopback-modules, zfs-dkms, zfs-modules
Depends: kmod, linux-base (>= 4.5ubuntu1~16.04.1), linux-modules-6.8.0-1029-aws
Recommends: grub-pc | grub-efi-amd64 | grub-efi-ia32 | grub | lilo, initramfs-tools | linux-initramfs-tool
Suggests: fdutils, linux-aws-doc-6.8.0 | linux-aws-source-6.8.0, linux-aws-tools, linux-headers-6.8.0-1029-aws
Conflicts: linux-image-unsigned-6.8.0-1029-aws
Description: Signed kernel image aws
A kernel image for aws. This version of it is signed with
Canonical's signing key.
Built-Using: linux-aws (= 6.8.0-1029.31)

View File

@@ -12,6 +12,7 @@ import (
// binary cataloger will search for .dll and .exe files and create packages based off of the version resources embedded
// as a resource directory within the executable. If there is no evidence of a .NET runtime (a CLR header) then no
// package will be created.
+//
// Deprecated: use depsBinaryCataloger instead which combines the PE and deps.json data which yields more accurate results (will be removed in syft v2.0).
type binaryCataloger struct {
}

View File

@@ -13,12 +13,14 @@ func NewDotnetDepsBinaryCataloger(config CatalogerConfig) pkg.Cataloger {
}
// NewDotnetDepsCataloger returns a cataloger based on deps.json file contents.
+//
// Deprecated: use NewDotnetDepsBinaryCataloger instead which combines the PE and deps.json data which yields more accurate results (will be removed in syft v2.0).
func NewDotnetDepsCataloger() pkg.Cataloger {
return &depsCataloger{}
}
// NewDotnetPortableExecutableCataloger returns a cataloger based on PE file contents.
+//
// Deprecated: use NewDotnetDepsBinaryCataloger instead which combines the PE and deps.json data which yields more accurate results (will be removed in syft v2.0).
func NewDotnetPortableExecutableCataloger() pkg.Cataloger {
return &binaryCataloger{}

View File

@@ -9,6 +9,7 @@ import (
)
// depsCataloger will search for deps.json file contents.
+//
// Deprecated: use depsBinaryCataloger instead which combines the PE and deps.json data which yields more accurate results (will be removed in syft v2.0).
type depsCataloger struct {
}

View File

@@ -4,18 +4,20 @@ import (
"context"
"fmt"
"path/filepath"
-"regexp"
"strings"
"github.com/spf13/afero"
+"github.com/anchore/syft/internal"
+"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/pkg"
+"github.com/anchore/syft/syft/pkg/cataloger/internal/licenses"
)
// resolveModuleLicenses finds and parses license files for Go modules
-func resolveModuleLicenses(ctx context.Context, pkgInfos []pkgInfo, fs afero.Fs) pkg.LicenseSet {
+func resolveModuleLicenses(ctx context.Context, scanRoot string, pkgInfos []pkgInfo, fs afero.Fs) pkg.LicenseSet {
-licenses := pkg.NewLicenseSet()
+out := pkg.NewLicenseSet()
for _, info := range pkgInfos {
modDir, pkgDir, err := getAbsolutePkgPaths(info)
@@ -23,22 +25,32 @@ func resolveModuleLicenses(ctx context.Context, pkgInfos []pkgInfo, fs afero.Fs)
continue
}
-licenseFiles, err := findAllLicenseCandidatesUpwards(pkgDir, licenseRegexp, modDir, fs)
+licenseFiles, err := findAllLicenseCandidatesUpwards(pkgDir, modDir, fs)
if err != nil {
continue
}
for _, f := range licenseFiles {
-contents, err := fs.Open(f)
-if err != nil {
-continue
-}
-licenses.Add(pkg.NewLicensesFromReadCloserWithContext(ctx, file.NewLocationReadCloser(file.Location{}, contents))...)
-_ = contents.Close()
+out.Add(readLicenses(ctx, scanRoot, fs, f)...)
}
}
-return licenses
+return out
+}
+func readLicenses(ctx context.Context, scanRoot string, fs afero.Fs, f string) []pkg.License {
+contents, err := fs.Open(f)
+if err != nil {
+log.WithFields("file", f, "error", err).Debug("unable to read license file")
+return nil
+}
+defer internal.CloseAndLogError(contents, f)
+location := file.Location{}
+if scanRoot != "" && strings.HasPrefix(f, scanRoot) {
+// include location when licenses are found within the scan target
+location = file.NewLocation(strings.TrimPrefix(f, scanRoot))
+}
+return pkg.NewLicensesFromReadCloserWithContext(ctx, file.NewLocationReadCloser(location, contents))
}
/*
@@ -60,7 +72,7 @@ When we should consider redesign tip to stem:
- We need to consider the case here where nested modules are visited by accident and licenses
are erroneously associated to a 'parent module'; bubble up currently prevents this
*/
-func findAllLicenseCandidatesUpwards(dir string, r *regexp.Regexp, stopAt string, fs afero.Fs) ([]string, error) {
+func findAllLicenseCandidatesUpwards(dir string, stopAt string, fs afero.Fs) ([]string, error) {
// Validate that both paths are absolute
if !filepath.IsAbs(dir) {
return nil, fmt.Errorf("dir must be an absolute path, got: %s", dir)
@@ -69,25 +81,16 @@ func findAllLicenseCandidatesUpwards(dir string, r *regexp.Regexp, stopAt string
return nil, fmt.Errorf("stopAt must be an absolute path, got: %s", stopAt)
}
-licenses, err := findLicenseCandidates(dir, r, stopAt, fs)
-if err != nil {
-return nil, err
-}
-// Ensure we return an empty slice rather than nil for consistency
-if licenses == nil {
-return []string{}, nil
-}
-return licenses, nil
+return findLicenseCandidates(dir, stopAt, fs)
}
-func findLicenseCandidates(dir string, r *regexp.Regexp, stopAt string, fs afero.Fs) ([]string, error) {
+func findLicenseCandidates(dir string, stopAt string, fs afero.Fs) ([]string, error) {
// stop if we've gone outside the stopAt directory
if !strings.HasPrefix(dir, stopAt) {
return []string{}, nil
}
-licenses, err := findLicensesInDir(dir, r, fs)
+out, err := findLicensesInDir(dir, fs)
if err != nil {
return nil, err
}
@@ -95,17 +98,17 @@ func findLicenseCandidates(dir string, r *regexp.Regexp, stopAt string, fs afero
parent := filepath.Dir(dir)
// can't go any higher up the directory tree: "/" case
if parent == dir {
-return licenses, nil
+return out, nil
}
// search parent directory and combine results
-parentLicenses, err := findLicenseCandidates(parent, r, stopAt, fs)
+parentLicenses, err := findLicenseCandidates(parent, stopAt, fs)
if err != nil {
return nil, err
}
// Combine current directory licenses with parent directory licenses
-return append(licenses, parentLicenses...), nil
+return append(out, parentLicenses...), nil
}
func getAbsolutePkgPaths(info pkgInfo) (modDir string, pkgDir string, err error) {
@@ -126,8 +129,8 @@ func getAbsolutePkgPaths(info pkgInfo) (modDir string, pkgDir string, err error)
return modDir, pkgDir, nil
}
-func findLicensesInDir(dir string, r *regexp.Regexp, fs afero.Fs) ([]string, error) {
+func findLicensesInDir(dir string, fs afero.Fs) ([]string, error) {
-var licenses []string
+var out []string
dirContents, err := afero.ReadDir(fs, dir)
if err != nil {
@@ -139,11 +142,11 @@ func findLicensesInDir(dir string, r *regexp.Regexp, fs afero.Fs) ([]string, err
continue
}
-if r.MatchString(f.Name()) {
+if licenses.IsLicenseFile(f.Name()) {
path := filepath.Join(dir, f.Name())
-licenses = append(licenses, path)
+out = append(out, path)
}
}
-return licenses, nil
+return out, nil
}
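The recursion above boils down to an upward directory walk bounded by stopAt; a toy standalone version of just the walk (names are illustrative, and like the real code it bounds the climb with a simple string prefix check):

```go
package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// walkUp visits dir and each parent until the path no longer sits under stopAt.
func walkUp(dir, stopAt string) []string {
	var visited []string
	for strings.HasPrefix(dir, stopAt) {
		visited = append(visited, dir)
		parent := filepath.Dir(dir)
		if parent == dir { // reached "/", cannot go higher
			break
		}
		dir = parent
	}
	return visited
}

func main() {
	fmt.Println(walkUp("/mod/pkg/internal/util", "/mod"))
	// [/mod/pkg/internal/util /mod/pkg/internal /mod/pkg /mod]
}
```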

View File

@@ -70,8 +70,8 @@ func TestFindAllLicenseCandidatesUpwards(t *testing.T) {
fs.MkdirAll("/empty/dir/tree", 0755)
// No license files
},
-expectedFiles: []string{},
+expectedFiles: nil,
-description: "Should return empty slice when no license files found",
+description: "Should return nil when no license files found",
},
{
name: "handles directory at filesystem root",
@@ -205,7 +205,7 @@ func TestFindAllLicenseCandidatesUpwards(t *testing.T) {
tt.setupFS(fs)
// Run the function
-result, err := findAllLicenseCandidatesUpwards(tt.startDir, licenseRegexp, tt.stopAt, fs)
+result, err := findAllLicenseCandidatesUpwards(tt.startDir, tt.stopAt, fs)
// Check error expectation
if tt.expectedError {

View File

@@ -19,14 +19,13 @@ import (
"github.com/go-git/go-git/v5"
"github.com/go-git/go-git/v5/plumbing"
"github.com/go-git/go-git/v5/storage/memory"
-"github.com/scylladb/go-set/strset"
"github.com/anchore/syft/internal"
"github.com/anchore/syft/internal/cache"
-"github.com/anchore/syft/internal/licenses"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/pkg"
+"github.com/anchore/syft/syft/pkg/cataloger/internal/licenses"
)
type goLicenseResolver struct {
@@ -35,7 +34,6 @@ type goLicenseResolver struct {
localModCacheDir fs.FS
localVendorDir fs.FS
licenseCache cache.Resolver[[]pkg.License]
-lowerLicenseFileNames *strset.Set
}
func newGoLicenseResolver(catalogerName string, opts CatalogerConfig) goLicenseResolver {
@@ -64,18 +62,9 @@ func newGoLicenseResolver(catalogerName string, opts CatalogerConfig) goLicenseR
localModCacheDir: localModCacheDir,
localVendorDir: localVendorDir,
licenseCache: cache.GetResolverCachingErrors[[]pkg.License]("golang", "v2"),
-lowerLicenseFileNames: strset.New(lowercaseLicenseFiles()...),
}
}
-func lowercaseLicenseFiles() []string {
-fileNames := licenses.FileNames()
-for i := range fileNames {
-fileNames[i] = strings.ToLower(fileNames[i])
-}
-return fileNames
-}
func remotesForModule(proxies []string, noProxy []string, module string) []string {
for _, pattern := range noProxy {
if matched, err := path.Match(pattern, module); err == nil && matched {
@@ -194,7 +183,7 @@ func (c *goLicenseResolver) findLicensesInFS(ctx context.Context, urlPrefix stri
log.Debugf("nil entry for %s#%s", urlPrefix, filePath)
return nil
}
-if !c.lowerLicenseFileNames.Has(strings.ToLower(d.Name())) {
+if !licenses.IsLicenseFile(d.Name()) {
return nil
}
rdr, err := fsys.Open(filePath)
@@ -203,11 +192,11 @@ func (c *goLicenseResolver) findLicensesInFS(ctx context.Context, urlPrefix stri
return nil
}
defer internal.CloseAndLogError(rdr, filePath)
-licenses := pkg.NewLicensesFromReadCloserWithContext(ctx, file.NewLocationReadCloser(file.NewLocation(filePath), rdr))
+foundLicenses := pkg.NewLicensesFromReadCloserWithContext(ctx, file.NewLocationReadCloser(file.NewLocation(filePath), rdr))
// since these licenses are found in an external fs.FS, not in the scanned source,
// get rid of the locations but keep information about the where the license was found
// by prepending the urlPrefix to the internal path for an accurate representation
-for _, l := range licenses {
+for _, l := range foundLicenses {
l.URLs = []string{urlPrefix + filePath}
l.Locations = file.NewLocationSet()
out = append(out, l)
@@ -246,7 +235,7 @@ func (c *goLicenseResolver) findLicensesInSource(ctx context.Context, resolver f
func (c *goLicenseResolver) parseLicenseFromLocation(ctx context.Context, l file.Location, resolver file.Resolver) ([]pkg.License, error) {
var out []pkg.License
fileName := path.Base(l.RealPath)
-if c.lowerLicenseFileNames.Has(strings.ToLower(fileName)) {
+if licenses.IsLicenseFile(fileName) {
contents, err := resolver.FileContentsByLocation(l)
if err != nil {
return nil, err

View File

@@ -10,7 +10,14 @@ import (
)
func (c *goBinaryCataloger) newGoBinaryPackage(dep *debug.Module, m pkg.GolangBinaryBuildinfoEntry, licenses []pkg.License, locations ...file.Location) pkg.Package {
+// Similar to syft/pkg/cataloger/golang/parse_go_mod.go logic - use original path for relative replacements
+finalPath := dep.Path
if dep.Replace != nil {
+if strings.HasPrefix(dep.Replace.Path, ".") || strings.HasPrefix(dep.Replace.Path, "/") {
+finalPath = dep.Path
+} else {
+finalPath = dep.Replace.Path
+}
dep = dep.Replace
}
@@ -23,10 +30,10 @@ func (c *goBinaryCataloger) newGoBinaryPackage(dep *debug.Module, m pkg.GolangBi
}
p := pkg.Package{
-Name: dep.Path,
+Name: finalPath,
Version: version,
Licenses: pkg.NewLicenseSet(licenses...),
-PURL: packageURL(dep.Path, version),
+PURL: packageURL(finalPath, version),
Language: pkg.Go,
Type: pkg.GoModulePkg,
Locations: file.NewLocationSet(locations...),

View File

@@ -1,6 +1,7 @@
package golang
import (
+"runtime/debug"
"testing"
"github.com/stretchr/testify/assert"
@@ -54,3 +55,67 @@ func Test_packageURL(t *testing.T) {
})
}
}
+func Test_newGoBinaryPackage_relativeReplace(t *testing.T) {
+tests := []struct {
+name string
+dep *debug.Module
+expectedName string
+}{
+{
+name: "relative replace with ../",
+dep: &debug.Module{
+Path: "github.com/aws/aws-sdk-go-v2",
+Version: "(devel)",
+Replace: &debug.Module{
+Path: "../../",
+Version: "(devel)",
+},
+},
+expectedName: "github.com/aws/aws-sdk-go-v2", // should use original path, not relative
+},
+{
+name: "relative replace with ./",
+dep: &debug.Module{
+Path: "github.com/example/module",
+Version: "v1.0.0",
+Replace: &debug.Module{
+Path: "./local",
+Version: "v0.0.0",
+},
+},
+expectedName: "github.com/example/module", // should use original path
+},
+{
+name: "absolute replace",
+dep: &debug.Module{
+Path: "github.com/old/module",
+Version: "v1.0.0",
+Replace: &debug.Module{
+Path: "github.com/new/module",
+Version: "v2.0.0",
+},
+},
+expectedName: "github.com/new/module", // should use replacement path
+},
+{
+name: "no replace",
+dep: &debug.Module{
+Path: "github.com/normal/module",
+Version: "v1.0.0",
+},
+expectedName: "github.com/normal/module", // should use original path
+},
+}
+for _, test := range tests {
+t.Run(test.name, func(t *testing.T) {
+cataloger := &goBinaryCataloger{}
+result := cataloger.newGoBinaryPackage(test.dep, pkg.GolangBinaryBuildinfoEntry{}, nil)
+assert.Equal(t, test.expectedName, result.Name)
+assert.Equal(t, pkg.Go, result.Language)
+assert.Equal(t, pkg.GoModulePkg, result.Type)
+})
+}
+}

View File

@@ -7,7 +7,7 @@ import (
"go/build"
"io"
"path/filepath"
-"regexp"
+"slices"
"sort"
"strings"
@@ -20,14 +20,11 @@ import (
"github.com/anchore/syft/internal/unknown"
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/file"
+"github.com/anchore/syft/syft/internal/fileresolver"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
)
-var (
-licenseRegexp = regexp.MustCompile(`^(?i)((UN)?LICEN(S|C)E|COPYING|NOTICE).*$`)
-)
type goModCataloger struct {
licenseResolver goLicenseResolver
}
@@ -46,9 +43,14 @@ func (c *goModCataloger) parseGoModFile(ctx context.Context, resolver file.Resol
log.Debugf("unable to get go.sum: %v", err)
}
+scanRoot := ""
+if dir, ok := resolver.(*fileresolver.Directory); ok && dir != nil {
+scanRoot = dir.Chroot.Base()
+}
// source analysis using go toolchain if available
syftSourcePackages, sourceModules, sourceDependencies, unknownErr := c.loadPackages(modDir, reader.Location)
-catalogedModules, sourceModuleToPkg := c.catalogModules(ctx, syftSourcePackages, sourceModules, reader, digests)
+catalogedModules, sourceModuleToPkg := c.catalogModules(ctx, scanRoot, syftSourcePackages, sourceModules, reader, digests)
relationships := buildModuleRelationships(catalogedModules, sourceDependencies, sourceModuleToPkg)
// base case go.mod file parsing
@@ -208,12 +210,16 @@ func (c *goModCataloger) visitPackages(
}
}
}
-pkgs[module.Path] = append(pkgs[module.Path], pkgInfo{
+info := pkgInfo{
pkgPath: p.PkgPath,
modulePath: module.Path,
pkgDir: pkgDir,
moduleDir: module.Dir,
-})
+}
+if !slices.Contains(pkgs[module.Path], info) { // avoid duplicates
+pkgs[module.Path] = append(pkgs[module.Path], info)
+}
modules[p.Module.Path] = module
return true
@@ -224,6 +230,7 @@ func (c *goModCataloger) visitPackages(
// create syft packages from Go modules found by the go toolchain
func (c *goModCataloger) catalogModules(
ctx context.Context,
+scanRoot string,
pkgs map[string][]pkgInfo,
modules map[string]*packages.Module,
reader file.LocationReadCloser,
@@ -243,7 +250,7 @@ func (c *goModCataloger) catalogModules(
}
pkgInfos := pkgs[m.Path]
-moduleLicenses := resolveModuleLicenses(ctx, pkgInfos, afero.NewOsFs())
+moduleLicenses := resolveModuleLicenses(ctx, scanRoot, pkgInfos, afero.NewOsFs())
// we do out of source lookups for module parsing
// locations are NOT included in the SBOM because of this
goModulePkg := pkg.Package{

View File

@ -1,15 +1,22 @@
package homebrew package homebrew
import ( import (
"context"
"path"
"github.com/anchore/packageurl-go" "github.com/anchore/packageurl-go"
"github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/internal/licenses"
) )
func newHomebrewPackage(pd parsedHomebrewData, formulaLocation file.Location) pkg.Package { func newHomebrewPackage(ctx context.Context, resolver file.Resolver, pd parsedHomebrewData, formulaLocation file.Location) pkg.Package {
var licenses []string var lics []pkg.License
if pd.License != "" { if pd.License != "" {
licenses = append(licenses, pd.License) lics = append(lics, pkg.NewLicensesFromValues(pd.License)...)
} else {
// sometimes licenses are included in the parent directory
lics = licenses.FindInDirs(ctx, resolver, path.Dir(formulaLocation.Path()))
} }
p := pkg.Package{ p := pkg.Package{
@ -17,7 +24,7 @@ func newHomebrewPackage(pd parsedHomebrewData, formulaLocation file.Location) pk
Version: pd.Version, Version: pd.Version,
Type: pkg.HomebrewPkg, Type: pkg.HomebrewPkg,
Locations: file.NewLocationSet(formulaLocation.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), Locations: file.NewLocationSet(formulaLocation.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)),
Licenses: pkg.NewLicenseSet(pkg.NewLicensesFromValues(licenses...)...), Licenses: pkg.NewLicenseSet(lics...),
FoundBy: "homebrew-cataloger", FoundBy: "homebrew-cataloger",
PURL: packageURL(pd.Name, pd.Version), PURL: packageURL(pd.Name, pd.Version),
Metadata: pkg.HomebrewFormula{ Metadata: pkg.HomebrewFormula{

View File

@ -22,7 +22,7 @@ type parsedHomebrewData struct {
License string License string
} }
func parseHomebrewFormula(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { func parseHomebrewFormula(ctx context.Context, resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
pd, err := parseFormulaFile(reader) pd, err := parseFormulaFile(reader)
if err != nil { if err != nil {
log.WithFields("path", reader.RealPath).Trace("failed to parse formula") log.WithFields("path", reader.RealPath).Trace("failed to parse formula")
@ -35,6 +35,8 @@ func parseHomebrewFormula(_ context.Context, _ file.Resolver, _ *generic.Environ
return []pkg.Package{ return []pkg.Package{
newHomebrewPackage( newHomebrewPackage(
ctx,
resolver,
*pd, *pd,
reader.Location, reader.Location,
), ),

View File

@ -0,0 +1,167 @@
# CPE Generation
This package generates Common Platform Enumeration (CPE) identifiers for software packages discovered by Syft.
CPEs are standardized identifiers that enable vulnerability matching by linking packages to known vulnerabilities in databases like the National Vulnerability Database (NVD).
## Overview
CPE generation in Syft uses a **two-tier approach** to balance accuracy and coverage:
1. **Dictionary Lookups** (Authoritative): Pre-validated CPEs from the official NIST CPE dictionary
2. **Heuristic Generation** (Fallback): Intelligent generation based on package metadata and ecosystem-specific patterns
This dual approach ensures:
- **High accuracy** for packages in the NIST dictionary (no false positives)
- **Broad coverage** for packages not yet in the dictionary (maximizes vulnerability detection)
- **Fast performance** with an embedded, indexed CPE dictionary (~814KB)
## Why It Matters
CPEs link discovered packages to known vulnerabilities (CVEs) in tools like Grype. Without accurate CPE generation, vulnerability scanning can miss known CVEs or match the wrong product.
## How It Works
### Architecture
```
┌────────────────────────────────────────┐
│         Syft Package Discovery         │
└───────────────────┬────────────────────┘
                    │
                    ▼
          ┌─────────────────────┐
          │   CPE Generation    │
          │   (this package)    │
          └──────────┬──────────┘
                     │
         ┌───────────┴────────────┐
         │                        │
         ▼                        ▼
┌──────────────────┐   ┌─────────────────────┐
│   Dictionary     │   │     Heuristic       │
│     Lookup       │   │    Generation       │
│                  │   │                     │
│ • Embedded index │   │ • Ecosystem rules   │
│ • ~22K entries   │   │ • Vendor/product    │
│ • 11 ecosystems  │   │   candidates        │
└──────────────────┘   │ • Curated mappings  │
                       │ • Smart filters     │
                       └─────────────────────┘
```
### Dictionary Generation Process
The dictionary is generated offline and embedded into the Syft binary for fast, offline lookups.
**Location**: `dictionary/index-generator/`
**Process**:
1. **Fetch**: Retrieves CPE data from the NVD Products API using incremental updates
2. **Cache**: Stores raw API responses in ORAS registry for reuse (`.cpe-cache/`)
3. **Filter**:
- Removes CPEs without reference URLs
- Excludes hardware (`h`) and OS (`o`) CPEs (keeps only applications `a`)
4. **Index by Ecosystem**:
- Extracts package names from reference URLs (npm, pypi, rubygems, etc.)
- Creates index: `ecosystem → package_name → [CPE strings]`
5. **Embed**: Generates `data/cpe-index.json`, which is embedded into the binary via a `go:embed` directive (see the sketch below)
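For orientation, here is a minimal sketch of how such an embedded index can be wired up in Go. The type and function names are assumptions for illustration, not the package's actual API; only the `data/cpe-index.json` path and the `ecosystems` key come from this document:

```go
// Illustrative only: assumes the embedded file exists at build time.
package dictionary

import (
	_ "embed" // required for the go:embed directive
	"encoding/json"
)

//go:embed data/cpe-index.json
var indexJSON []byte

// indexed mirrors the ecosystem → package_name → [CPE strings] layout.
type indexed struct {
	Ecosystems map[string]map[string][]string `json:"ecosystems"`
}

// loadIndex parses the embedded dictionary; callers would cache the
// result so the JSON is decoded only once (the singleton noted below).
func loadIndex() (*indexed, error) {
	var idx indexed
	if err := json.Unmarshal(indexJSON, &idx); err != nil {
		return nil, err
	}
	return &idx, nil
}
```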
### Runtime CPE Lookup/Generation
**Entry Point**: `generate.go`
When Syft discovers a package (the full decision flow is sketched in code after these steps):
1. **Check for Declared CPEs**: If package metadata already contains CPEs (from SBOM imports), skip generation
2. **Try Dictionary Lookup** (`FromDictionaryFind`):
- Loads embedded CPE index (singleton, loaded once)
- Looks up by ecosystem + package name
- Returns pre-validated CPEs if found
- Marks source as `NVDDictionaryLookupSource`
3. **Fallback to Heuristic Generation** (`FromPackageAttributes`):
- Generates vendor/product/targetSW candidates using ecosystem-specific logic
- Creates CPE permutations from candidates
- Applies filters to remove known false positives
- Marks source as `GeneratedSource`
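A condensed sketch of that decision flow follows. The `FromDictionaryFind` and `FromPackageAttributes` names appear in this package, but the signatures and the `CPEs` field shown here are simplifying assumptions:

```go
package cpegenerate

import (
	"github.com/anchore/syft/syft/cpe"
	"github.com/anchore/syft/syft/pkg"
)

// candidateCPEs sketches the two-tier flow; assumed signatures:
//   FromDictionaryFind(pkg.Package) ([]cpe.CPE, bool)
//   FromPackageAttributes(pkg.Package) []cpe.CPE
func candidateCPEs(p pkg.Package) []cpe.CPE {
	// 1. declared CPEs (e.g., from an imported SBOM) skip generation entirely
	if len(p.CPEs) > 0 {
		return p.CPEs
	}
	// 2. authoritative dictionary lookup by ecosystem + package name
	if found, ok := FromDictionaryFind(p); ok {
		return found // marked NVDDictionaryLookupSource
	}
	// 3. heuristic fallback: candidates → permutations → filters
	return FromPackageAttributes(p) // marked GeneratedSource
}
```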
### Supported Ecosystems
**Dictionary Lookups** (11 ecosystems):
npm, RubyGems, PyPI, Jenkins Plugins, crates.io, PHP, Go Modules, and WordPress Plugins/Themes
**Heuristic Generation** (all package types):
All dictionary ecosystems plus Java, .NET/NuGet, Alpine APK, Debian/RPM, and any other package type Syft discovers
### Ecosystem-Specific Intelligence
The heuristic generator uses per-ecosystem strategies (the Go case is sketched after this list):
- **Java**: Extracts vendor from groupId, product from artifactId
- **Python**: Parses author fields, adds `_project` suffix variants
- **Go**: Extracts org/repo from module paths (`github.com/org/repo`)
- **JavaScript**: Handles npm scope patterns (`@scope/package`)
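As a concrete illustration of the Go-module strategy, a simplified, self-contained candidate extractor might look like this (assumed logic, not the actual implementation):

```go
package main

import (
	"fmt"
	"strings"
)

// goModuleCandidates derives vendor/product candidates from a Go module
// path of the form host/org/repo, mirroring the strategy described above.
// This is an assumed simplification, not Syft's actual implementation.
func goModuleCandidates(modulePath string) (vendor, product string, ok bool) {
	parts := strings.Split(modulePath, "/")
	if len(parts) >= 3 && strings.Contains(parts[0], ".") {
		return parts[1], parts[2], true
	}
	return "", "", false
}

func main() {
	vendor, product, _ := goModuleCandidates("github.com/aws/aws-sdk-go-v2")
	fmt.Println(vendor, product) // aws aws-sdk-go-v2
}
```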
### Curated Mappings & Filters
- **500+ curated mappings**: `curl` → `haxx`, `spring-boot` → `pivotal`, etc. (shape sketched below)
- **Filters**: Prevent false positives (Jenkins plugins vs. core, Jira client vs. server)
- **Validation**: Ensures CPE syntax correctness before returning
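Conceptually, a curated mapping is just a lookup table keyed by package name. A minimal sketch of the assumed shape (the two entries come from the examples above):

```go
package cpegenerate

// Assumed shape for illustration; the real tables live in
// candidate_by_package_type.go and cover 500+ packages.
var curatedVendors = map[string][]string{
	"curl":        {"haxx"},
	"spring-boot": {"pivotal"},
}
```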
## Implementation Details
### Embedded Index Format
```json
{
"ecosystems": {
"npm": {
"lodash": ["cpe:2.3:a:lodash:lodash:*:*:*:*:*:node.js:*:*"]
},
"pypi": {
"Django": ["cpe:2.3:a:djangoproject:django:*:*:*:*:*:python:*:*"]
}
}
}
```
The dictionary generator maps packages to ecosystems using reference URL patterns (npmjs.com, pypi.org, rubygems.org, etc.).
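A sketch of that URL-based classification follows; the hostnames come from the list above, while the function itself is illustrative (the actual patterns live in `index-generator/generate.go`):

```go
package main

import (
	"fmt"
	"strings"
)

// ecosystemForReference classifies a CPE reference URL by hostname,
// mirroring the approach described above (patterns are illustrative).
func ecosystemForReference(url string) (string, bool) {
	switch {
	case strings.Contains(url, "npmjs.com"):
		return "npm", true
	case strings.Contains(url, "pypi.org"):
		return "pypi", true
	case strings.Contains(url, "rubygems.org"):
		return "rubygems", true
	}
	return "", false
}

func main() {
	eco, _ := ecosystemForReference("https://pypi.org/project/Django/")
	fmt.Println(eco) // pypi
}
```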
## Maintenance
### Updating the CPE Dictionary
The CPE dictionary should be updated periodically to include new packages:
```bash
# Full workflow: pull cache → update from NVD → build index
make generate:cpe-index
# Or run individual steps:
make generate:cpe-index:cache:pull # Pull cached CPE data from ORAS
make generate:cpe-index:cache:update # Fetch updates from NVD Products API
make generate:cpe-index:build # Generate cpe-index.json from cache
```
**Optional**: Set `NVD_API_KEY` for faster updates (50 req/30s vs 5 req/30s)
This workflow:
1. Pulls the existing cache from the ORAS registry (avoids re-fetching all ~1.5M CPEs)
2. Fetches only products modified since the last update from the NVD Products API
3. Builds the indexed dictionary (~814KB, ~22K entries)
4. Pushes updated cache for team reuse
### Extending CPE Generation
**Add dictionary support for a new ecosystem:**
1. Add URL pattern in `index-generator/generate.go`
2. Regenerate index with `make generate:cpe-index`
**Improve heuristic generation:**
1. Modify ecosystem-specific file (e.g., `java.go`, `python.go`)
2. Add curated mappings to `candidate_by_package_type.go`
**Key files:**
- `generate.go` - Main generation logic
- `dictionary/` - Dictionary generator and embedded index
- `candidate_by_package_type.go` - Ecosystem-specific candidates
- `filter.go` - Filtering rules

View File

@ -0,0 +1,63 @@
package cpegenerate
import (
"strings"
"github.com/anchore/syft/syft/pkg"
)
// candidateVendorsForPE returns vendor candidates for PE (BinaryPkg) packages based on common metadata hints.
// Specifically, it normalizes Ghostscript binaries to the vendor "artifex" when detected.
func candidateVendorsForPE(p pkg.Package) fieldCandidateSet {
candidates := newFieldCandidateSet()
meta, ok := p.Metadata.(pkg.PEBinary)
if !ok {
return candidates
}
var company, product, fileDesc string
for _, kv := range meta.VersionResources {
switch strings.ToLower(kv.Key) {
case "companyname":
company = strings.ToLower(kv.Value)
case "productname":
product = strings.ToLower(kv.Value)
case "filedescription":
fileDesc = strings.ToLower(kv.Value)
}
}
if strings.Contains(product, "ghostscript") || strings.Contains(fileDesc, "ghostscript") || strings.Contains(company, "artifex") {
candidates.addValue("artifex")
}
return candidates
}
// candidateProductsForPE returns product candidates for PE (BinaryPkg) packages based on common metadata hints.
// Specifically, it normalizes Ghostscript binaries to the product "ghostscript" when detected.
func candidateProductsForPE(p pkg.Package) fieldCandidateSet {
candidates := newFieldCandidateSet()
meta, ok := p.Metadata.(pkg.PEBinary)
if !ok {
return candidates
}
var product, fileDesc string
for _, kv := range meta.VersionResources {
switch strings.ToLower(kv.Key) {
case "productname":
product = strings.ToLower(kv.Value)
case "filedescription":
fileDesc = strings.ToLower(kv.Value)
}
}
if strings.Contains(product, "ghostscript") || strings.Contains(fileDesc, "ghostscript") {
candidates.addValue("ghostscript")
}
return candidates
}

View File

@ -653,6 +653,9 @@
"dbCharts": [ "dbCharts": [
"cpe:2.3:a:jenkins:dbcharts:*:*:*:*:*:jenkins:*:*" "cpe:2.3:a:jenkins:dbcharts:*:*:*:*:*:jenkins:*:*"
], ],
"deadmanssnitch": [
"cpe:2.3:a:jenkins:dead_man\\'s_snitch:*:*:*:*:*:jenkins:*:*"
],
"debian-package-builder": [ "debian-package-builder": [
"cpe:2.3:a:jenkins:debian_package_builder:*:*:*:*:*:jenkins:*:*" "cpe:2.3:a:jenkins:debian_package_builder:*:*:*:*:*:jenkins:*:*"
], ],
@ -1360,6 +1363,9 @@
"oic-auth": [ "oic-auth": [
"cpe:2.3:a:jenkins:openid_connect_authentication:*:*:*:*:*:jenkins:*:*" "cpe:2.3:a:jenkins:openid_connect_authentication:*:*:*:*:*:jenkins:*:*"
], ],
"oidc-provider": [
"cpe:2.3:a:jenkins:openid_connect_provider:*:*:*:*:*:jenkins:*:*"
],
"ontrack": [ "ontrack": [
"cpe:2.3:a:jenkins:ontrack:*:*:*:*:*:jenkins:*:*" "cpe:2.3:a:jenkins:ontrack:*:*:*:*:*:jenkins:*:*"
], ],
@ -1531,6 +1537,9 @@
"qualys-pc": [ "qualys-pc": [
"cpe:2.3:a:qualys:policy_compliance:*:*:*:*:*:jenkins:*:*" "cpe:2.3:a:qualys:policy_compliance:*:*:*:*:*:jenkins:*:*"
], ],
"qualys-was": [
"cpe:2.3:a:qualys:web_application_screening:*:*:*:*:*:jenkins:*:*"
],
"quayio-trigger": [ "quayio-trigger": [
"cpe:2.3:a:jenkins:quay.io_trigger:*:*:*:*:*:jenkins:*:*" "cpe:2.3:a:jenkins:quay.io_trigger:*:*:*:*:*:jenkins:*:*"
], ],
@ -2164,6 +2173,9 @@
"@azure/ms-rest-nodeauth": [ "@azure/ms-rest-nodeauth": [
"cpe:2.3:a:microsoft:ms-rest-nodeauth:*:*:*:*:*:node.js:*:*" "cpe:2.3:a:microsoft:ms-rest-nodeauth:*:*:*:*:*:node.js:*:*"
], ],
"@backstage/backend-common": [
"cpe:2.3:a:linuxfoundation:backstage_backend-common:*:*:*:*:*:node.js:*:*"
],
"@backstage/plugin-auth-backend": [ "@backstage/plugin-auth-backend": [
"cpe:2.3:a:linuxfoundation:auth_backend:*:*:*:*:*:node.js:*:*" "cpe:2.3:a:linuxfoundation:auth_backend:*:*:*:*:*:node.js:*:*"
], ],
@ -3035,6 +3047,9 @@
"electron-packager": [ "electron-packager": [
"cpe:2.3:a:electron-packager_project:electron-packager:*:*:*:*:*:node.js:*:*" "cpe:2.3:a:electron-packager_project:electron-packager:*:*:*:*:*:node.js:*:*"
], ],
"electron-pdf": [
"cpe:2.3:a:fraserxu:electron-pdf:*:*:*:*:*:node.js:*:*"
],
"elliptic": [ "elliptic": [
"cpe:2.3:a:indutny:elliptic:*:*:*:*:*:node.js:*:*" "cpe:2.3:a:indutny:elliptic:*:*:*:*:*:node.js:*:*"
], ],
@ -5284,6 +5299,9 @@
"ts-process-promises": [ "ts-process-promises": [
"cpe:2.3:a:ts-process-promises_project:ts-process-promises:*:*:*:*:*:node.js:*:*" "cpe:2.3:a:ts-process-promises_project:ts-process-promises:*:*:*:*:*:node.js:*:*"
], ],
"tsup": [
"cpe:2.3:a:egoist:tsup:*:*:*:*:*:node.js:*:*"
],
"ua-parser": [ "ua-parser": [
"cpe:2.3:a:ua-parser_project:ua-parser:*:*:*:*:*:node.js:*:*" "cpe:2.3:a:ua-parser_project:ua-parser:*:*:*:*:*:node.js:*:*"
], ],
@ -5552,6 +5570,9 @@
"alfnru/password_recovery": [ "alfnru/password_recovery": [
"cpe:2.3:a:password_recovery_project:password_recovery:*:*:*:*:*:roundcube:*:*" "cpe:2.3:a:password_recovery_project:password_recovery:*:*:*:*:*:roundcube:*:*"
], ],
"couleurcitron/tarteaucitron-wp": [
"cpe:2.3:a:couleurcitron:tarteaucitron-wp:*:*:*:*:*:wordpress:*:*"
],
"dev-lancer/minecraft-motd-parser": [ "dev-lancer/minecraft-motd-parser": [
"cpe:2.3:a:jgniecki:minecraft_motd_parser:*:*:*:*:*:*:*:*" "cpe:2.3:a:jgniecki:minecraft_motd_parser:*:*:*:*:*:*:*:*"
], ],
@ -7259,6 +7280,9 @@
"ab-press-optimizer-lite": [ "ab-press-optimizer-lite": [
"cpe:2.3:a:abpressoptimizer:ab_press_optimizer:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:abpressoptimizer:ab_press_optimizer:*:*:*:*:*:wordpress:*:*"
], ],
"abitgone-commentsafe": [
"cpe:2.3:a:abitgone:abitgone_commentsafe:*:*:*:*:*:wordpress:*:*"
],
"about-me": [ "about-me": [
"cpe:2.3:a:about-me_project:about-me:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:about-me_project:about-me:*:*:*:*:*:wordpress:*:*"
], ],
@ -7605,6 +7629,9 @@
"advanced-backgrounds": [ "advanced-backgrounds": [
"cpe:2.3:a:wpbackgrounds:advanced_wordpress_backgrounds:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:wpbackgrounds:advanced_wordpress_backgrounds:*:*:*:*:*:wordpress:*:*"
], ],
"advanced-blocks-pro": [
"cpe:2.3:a:essamamdani:advanced_blocks_pro:*:*:*:*:*:wordpress:*:*"
],
"advanced-booking-calendar": [ "advanced-booking-calendar": [
"cpe:2.3:a:elbtide:advanced_booking_calendar:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:elbtide:advanced_booking_calendar:*:*:*:*:*:wordpress:*:*"
], ],
@ -7702,6 +7729,9 @@
"affiliatebooster-blocks": [ "affiliatebooster-blocks": [
"cpe:2.3:a:affiliatebooster:affiliate_booster:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:affiliatebooster:affiliate_booster:*:*:*:*:*:wordpress:*:*"
], ],
"affiliateimportereb": [
"cpe:2.3:a:cr1000:affiliateimportereb:*:*:*:*:*:wordpress:*:*"
],
"affiliates-manager": [ "affiliates-manager": [
"cpe:2.3:a:wpaffiliatemanager:affiliates_manager:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:wpaffiliatemanager:affiliates_manager:*:*:*:*:*:wordpress:*:*"
], ],
@ -8408,6 +8438,9 @@
"cpe:2.3:a:dotstore:woocommerce_category_banner_management:*:*:*:*:*:wordpress:*:*", "cpe:2.3:a:dotstore:woocommerce_category_banner_management:*:*:*:*:*:wordpress:*:*",
"cpe:2.3:a:multidots:banner_management_for_woocommerce:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:multidots:banner_management_for_woocommerce:*:*:*:*:*:wordpress:*:*"
], ],
"bannerlid": [
"cpe:2.3:a:web_lid:bannerlid:*:*:*:*:*:wordpress:*:*"
],
"barcode-scanner-lite-pos-to-manage-products-inventory-and-orders": [ "barcode-scanner-lite-pos-to-manage-products-inventory-and-orders": [
"cpe:2.3:a:ukrsolution:barcode_scanner_and_inventory_manager:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:ukrsolution:barcode_scanner_and_inventory_manager:*:*:*:*:*:wordpress:*:*"
], ],
@ -8516,6 +8549,9 @@
"better-elementor-addons": [ "better-elementor-addons": [
"cpe:2.3:a:kitforest:better_elementor_addons:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:kitforest:better_elementor_addons:*:*:*:*:*:wordpress:*:*"
], ],
"better-follow-button-for-jetpack": [
"cpe:2.3:a:antonpug:better_flow_button_for_jetpack:*:*:*:*:*:wordpress:*:*"
],
"better-font-awesome": [ "better-font-awesome": [
"cpe:2.3:a:better_font_awesome_project:better_font_awesome:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:better_font_awesome_project:better_font_awesome:*:*:*:*:*:wordpress:*:*"
], ],
@ -8770,6 +8806,9 @@
"bp-cover": [ "bp-cover": [
"cpe:2.3:a:buddypress_cover_project:buddypress_cover:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:buddypress_cover_project:buddypress_cover:*:*:*:*:*:wordpress:*:*"
], ],
"bp-email-assign-templates": [
"cpe:2.3:a:shanebp:bp_email_assign_templates:*:*:*:*:*:wordpress:*:*"
],
"bp-profile-search": [ "bp-profile-search": [
"cpe:2.3:a:dontdream:bp_profile_search:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:dontdream:bp_profile_search:*:*:*:*:*:wordpress:*:*"
], ],
@ -9240,6 +9279,9 @@
"chained-quiz": [ "chained-quiz": [
"cpe:2.3:a:kibokolabs:chained_quiz:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:kibokolabs:chained_quiz:*:*:*:*:*:wordpress:*:*"
], ],
"chalet-montagne-com-tools": [
"cpe:2.3:a:alpium:chalet-montagne.com_tools:*:*:*:*:*:wordpress:*:*"
],
"chamber-dashboard-business-directory": [ "chamber-dashboard-business-directory": [
"cpe:2.3:a:chamber_dashboard_business_directory_project:chamber_dashboard_business_directory:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:chamber_dashboard_business_directory_project:chamber_dashboard_business_directory:*:*:*:*:*:wordpress:*:*"
], ],
@ -9252,6 +9294,9 @@
"change-memory-limit": [ "change-memory-limit": [
"cpe:2.3:a:simon99:change_memory_limit:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:simon99:change_memory_limit:*:*:*:*:*:wordpress:*:*"
], ],
"change-table-prefix": [
"cpe:2.3:a:youngtechleads:change_table_prefix:*:*:*:*:*:wordpress:*:*"
],
"change-uploaded-file-permissions": [ "change-uploaded-file-permissions": [
"cpe:2.3:a:change_uploaded_file_permissions_project:change_uploaded_file_permissions:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:change_uploaded_file_permissions_project:change_uploaded_file_permissions:*:*:*:*:*:wordpress:*:*"
], ],
@ -9550,6 +9595,9 @@
"commenttweets": [ "commenttweets": [
"cpe:2.3:a:theresehansen:commenttweets:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:theresehansen:commenttweets:*:*:*:*:*:wordpress:*:*"
], ],
"common-tools-for-site": [
"cpe:2.3:a:chetanvaghela:common_tools_for_site:*:*:*:*:*:wordpress:*:*"
],
"commonsbooking": [ "commonsbooking": [
"cpe:2.3:a:wielebenwir:commonsbooking:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:wielebenwir:commonsbooking:*:*:*:*:*:wordpress:*:*"
], ],
@ -10041,6 +10089,9 @@
"csv-importer": [ "csv-importer": [
"cpe:2.3:a:deniskobozev:csv_importer:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:deniskobozev:csv_importer:*:*:*:*:*:wordpress:*:*"
], ],
"csv-mass-importer": [
"cpe:2.3:a:aleapp:csv_mass_importer:*:*:*:*:*:wordpress:*:*"
],
"ct-commerce": [ "ct-commerce": [
"cpe:2.3:a:ujwolbastakoti:ct_commerce:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:ujwolbastakoti:ct_commerce:*:*:*:*:*:wordpress:*:*"
], ],
@ -10798,6 +10849,9 @@
"easy-svg": [ "easy-svg": [
"cpe:2.3:a:benjaminzekavica:easy_svg_support:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:benjaminzekavica:easy_svg_support:*:*:*:*:*:wordpress:*:*"
], ],
"easy-svg-upload": [
"cpe:2.3:a:delowerhossain:easy_svg_upload:*:*:*:*:*:wordpress:*:*"
],
"easy-table": [ "easy-table": [
"cpe:2.3:a:easy_table_project:easy_table:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:easy_table_project:easy_table:*:*:*:*:*:wordpress:*:*"
], ],
@ -11286,6 +11340,9 @@
"exit-intent-popups-by-optimonk": [ "exit-intent-popups-by-optimonk": [
"cpe:2.3:a:optimonk:optimonk\\:popups\\,_personalization_\\\u0026_a\\/b_testing:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:optimonk:optimonk\\:popups\\,_personalization_\\\u0026_a\\/b_testing:*:*:*:*:*:wordpress:*:*"
], ],
"exit-notifier": [
"cpe:2.3:a:cvstech:exit_notifier:*:*:*:*:*:wordpress:*:*"
],
"exmage-wp-image-links": [ "exmage-wp-image-links": [
"cpe:2.3:a:villatheme:exmage:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:villatheme:exmage:*:*:*:*:*:wordpress:*:*"
], ],
@ -11325,6 +11382,9 @@
"exquisite-paypal-donation": [ "exquisite-paypal-donation": [
"cpe:2.3:a:exquisite_paypal_donation_project:exquisite_paypal_donation:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:exquisite_paypal_donation_project:exquisite_paypal_donation:*:*:*:*:*:wordpress:*:*"
], ],
"extended-search-plugin": [
"cpe:2.3:a:jakesnyder:enhanced_search_box:*:*:*:*:*:wordpress:*:*"
],
"extensions-for-cf7": [ "extensions-for-cf7": [
"cpe:2.3:a:hasthemes:extensions_for_cf7:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:hasthemes:extensions_for_cf7:*:*:*:*:*:wordpress:*:*"
], ],
@ -11571,6 +11631,7 @@
"cpe:2.3:a:five_minute_webshop_project:five_minute_webshop:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:five_minute_webshop_project:five_minute_webshop:*:*:*:*:*:wordpress:*:*"
], ],
"fl3r-feelbox": [ "fl3r-feelbox": [
"cpe:2.3:a:armandofiore:fl3r_feelbox:*:*:*:*:*:wordpress:*:*",
"cpe:2.3:a:fl3r-feelbox_project:fl3r-feelbox:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:fl3r-feelbox_project:fl3r-feelbox:*:*:*:*:*:wordpress:*:*"
], ],
"flash-album-gallery": [ "flash-album-gallery": [
@ -12235,6 +12296,9 @@
"google-sitemap-plugin": [ "google-sitemap-plugin": [
"cpe:2.3:a:bestwebsoft:google_sitemap:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:bestwebsoft:google_sitemap:*:*:*:*:*:wordpress:*:*"
], ],
"google-website-translator": [
"cpe:2.3:a:prisna:google_website_translator:*:*:*:*:*:wordpress:*:*"
],
"googleanalytics": [ "googleanalytics": [
"cpe:2.3:a:sharethis:dashboard_for_google_analytics:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:sharethis:dashboard_for_google_analytics:*:*:*:*:*:wordpress:*:*"
], ],
@ -12634,6 +12698,9 @@
"hunk-companion": [ "hunk-companion": [
"cpe:2.3:a:themehunk:hunk_companion:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:themehunk:hunk_companion:*:*:*:*:*:wordpress:*:*"
], ],
"hurrytimer": [
"cpe:2.3:a:nabillemsieh:hurrytimer:*:*:*:*:*:wordpress:*:*"
],
"hyphenator": [ "hyphenator": [
"cpe:2.3:a:benedictb\\/maciejgryniuk:hyphenator:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:benedictb\\/maciejgryniuk:hyphenator:*:*:*:*:*:wordpress:*:*"
], ],
@ -12907,6 +12974,9 @@
"cpe:2.3:a:cm-wp:woody_code_snippets:*:*:*:*:*:wordpress:*:*", "cpe:2.3:a:cm-wp:woody_code_snippets:*:*:*:*:*:wordpress:*:*",
"cpe:2.3:a:webcraftic:woody_ad_snippets:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:webcraftic:woody_ad_snippets:*:*:*:*:*:wordpress:*:*"
], ],
"insert-php-code-snippet": [
"cpe:2.3:a:f1logic:insert_php_code_snippet:*:*:*:*:*:wordpress:*:*"
],
"insight-core": [ "insight-core": [
"cpe:2.3:a:thememove:insight_core:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:thememove:insight_core:*:*:*:*:*:wordpress:*:*"
], ],
@ -13011,6 +13081,9 @@
"ip-blacklist-cloud": [ "ip-blacklist-cloud": [
"cpe:2.3:a:ip_blacklist_cloud_project:ip_blacklist_cloud:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:ip_blacklist_cloud_project:ip_blacklist_cloud:*:*:*:*:*:wordpress:*:*"
], ],
"ip-vault-wp-firewall": [
"cpe:2.3:a:youtag:two-factor_authentication:*:*:*:*:*:wordpress:*:*"
],
"ip2location-country-blocker": [ "ip2location-country-blocker": [
"cpe:2.3:a:ip2location:country_blocker:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:ip2location:country_blocker:*:*:*:*:*:wordpress:*:*"
], ],
@ -13557,6 +13630,9 @@
"list-category-posts": [ "list-category-posts": [
"cpe:2.3:a:fernandobriano:list_category_posts:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:fernandobriano:list_category_posts:*:*:*:*:*:wordpress:*:*"
], ],
"list-children": [
"cpe:2.3:a:sizeable:list_children:*:*:*:*:*:wordpress:*:*"
],
"list-last-changes": [ "list-last-changes": [
"cpe:2.3:a:rolandbaer:list_last_changes:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:rolandbaer:list_last_changes:*:*:*:*:*:wordpress:*:*"
], ],
@ -13854,6 +13930,9 @@
"manual-image-crop": [ "manual-image-crop": [
"cpe:2.3:a:manual_image_crop_project:manual_image_crop:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:manual_image_crop_project:manual_image_crop:*:*:*:*:*:wordpress:*:*"
], ],
"mapfig-studio": [
"cpe:2.3:a:acugis:mapfig_studio:*:*:*:*:*:wordpress:*:*"
],
"mapping-multiple-urls-redirect-same-page": [ "mapping-multiple-urls-redirect-same-page": [
"cpe:2.3:a:mapping_multiple_urls_redirect_same_page_project:mapping_multiple_urls_redirect_same_page:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:mapping_multiple_urls_redirect_same_page_project:mapping_multiple_urls_redirect_same_page:*:*:*:*:*:wordpress:*:*"
], ],
@ -14237,6 +14316,9 @@
"monetize": [ "monetize": [
"cpe:2.3:a:monetize_project:monetize:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:monetize_project:monetize:*:*:*:*:*:wordpress:*:*"
], ],
"monitor-chat": [
"cpe:2.3:a:edwardstoever:monitor.chat:*:*:*:*:*:wordpress:*:*"
],
"month-name-translation-benaceur": [ "month-name-translation-benaceur": [
"cpe:2.3:a:benaceur-php:month_name_translation_benaceur:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:benaceur-php:month_name_translation_benaceur:*:*:*:*:*:wordpress:*:*"
], ],
@ -14306,6 +14388,9 @@
"mq-woocommerce-products-price-bulk-edit": [ "mq-woocommerce-products-price-bulk-edit": [
"cpe:2.3:a:mq-woocommerce-products-price-bulk-edit_project:mq-woocommerce-products-price-bulk-edit:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:mq-woocommerce-products-price-bulk-edit_project:mq-woocommerce-products-price-bulk-edit:*:*:*:*:*:wordpress:*:*"
], ],
"ms-registration": [
"cpe:2.3:a:alphaefficiencyteam:custom_login_and_registration:*:*:*:*:*:wordpress:*:*"
],
"ms-reviews": [ "ms-reviews": [
"cpe:2.3:a:ms-reviews_project:ms-reviews:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:ms-reviews_project:ms-reviews:*:*:*:*:*:wordpress:*:*"
], ],
@ -14438,7 +14523,7 @@
"cpe:2.3:a:stormhillmedia:mybook_table_bookstore:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:stormhillmedia:mybook_table_bookstore:*:*:*:*:*:wordpress:*:*"
], ],
"mycred": [ "mycred": [
"cpe:2.3:a:mycred:mycred:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:wpexperts:mycred:*:*:*:*:*:wordpress:*:*"
], ],
"mycryptocheckout": [ "mycryptocheckout": [
"cpe:2.3:a:plainviewplugins:mycryptocheckout:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:plainviewplugins:mycryptocheckout:*:*:*:*:*:wordpress:*:*"
@ -14625,12 +14710,18 @@
"ninjafirewall": [ "ninjafirewall": [
"cpe:2.3:a:nintechnet:ninjafirewall:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:nintechnet:ninjafirewall:*:*:*:*:*:wordpress:*:*"
], ],
"ninjateam-telegram": [
"cpe:2.3:a:ninjateam:chat_for_telegram:*:*:*:*:*:wordpress:*:*"
],
"nirweb-support": [ "nirweb-support": [
"cpe:2.3:a:nirweb:nirweb_support:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:nirweb:nirweb_support:*:*:*:*:*:wordpress:*:*"
], ],
"nitropack": [ "nitropack": [
"cpe:2.3:a:nitropack:nitropack:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:nitropack:nitropack:*:*:*:*:*:wordpress:*:*"
], ],
"nix-anti-spam-light": [
"cpe:2.3:a:nixsolutions:nix_anti-spam_light:*:*:*:*:*:wordpress:*:*"
],
"nktagcloud": [ "nktagcloud": [
"cpe:2.3:a:better_tag_cloud_project:better_tag_cloud:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:better_tag_cloud_project:better_tag_cloud:*:*:*:*:*:wordpress:*:*"
], ],
@ -15186,6 +15277,9 @@
"cpe:2.3:a:greentreelabs:gallery_photoblocks:*:*:*:*:*:wordpress:*:*", "cpe:2.3:a:greentreelabs:gallery_photoblocks:*:*:*:*:*:wordpress:*:*",
"cpe:2.3:a:wpchill:gallery_photoblocks:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:wpchill:gallery_photoblocks:*:*:*:*:*:wordpress:*:*"
], ],
"photokit": [
"cpe:2.3:a:jackzhu:photokit:*:*:*:*:*:wordpress:*:*"
],
"photoshow": [ "photoshow": [
"cpe:2.3:a:codepeople:smart_image_gallery:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:codepeople:smart_image_gallery:*:*:*:*:*:wordpress:*:*"
], ],
@ -15511,6 +15605,9 @@
"postman-smtp": [ "postman-smtp": [
"cpe:2.3:a:postman-smtp_project:postman-smtp:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:postman-smtp_project:postman-smtp:*:*:*:*:*:wordpress:*:*"
], ],
"postmash": [
"cpe:2.3:a:jmash:postmash:*:*:*:*:*:wordpress:*:*"
],
"postmatic": [ "postmatic": [
"cpe:2.3:a:gopostmatic:replyable:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:gopostmatic:replyable:*:*:*:*:*:wordpress:*:*"
], ],
@ -15761,6 +15858,9 @@
"pure-chat": [ "pure-chat": [
"cpe:2.3:a:purechat:pure_chat:*:*:*:*:*:*:*:*" "cpe:2.3:a:purechat:pure_chat:*:*:*:*:*:*:*:*"
], ],
"pure-css-circle-progress-bar": [
"cpe:2.3:a:shafayat:pure_css_circle_progress_bar:*:*:*:*:*:wordpress:*:*"
],
"purple-xmls-google-product-feed-for-woocommerce": [ "purple-xmls-google-product-feed-for-woocommerce": [
"cpe:2.3:a:dpl:product_feed_on_woocommerce_for_google\\,_awin\\,_shareasale\\,_bing\\,_and_more:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:dpl:product_feed_on_woocommerce_for_google\\,_awin\\,_shareasale\\,_bing\\,_and_more:*:*:*:*:*:wordpress:*:*"
], ],
@ -15964,6 +16064,9 @@
"react-webcam": [ "react-webcam": [
"cpe:2.3:a:react_webcam_project:react_webcam:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:react_webcam_project:react_webcam:*:*:*:*:*:wordpress:*:*"
], ],
"reaction-buttons": [
"cpe:2.3:a:jakob42:reaction_buttons:*:*:*:*:*:wordpress:*:*"
],
"read-and-understood": [ "read-and-understood": [
"cpe:2.3:a:read_and_understood_project:read_and_understood:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:read_and_understood_project:read_and_understood:*:*:*:*:*:wordpress:*:*"
], ],
@ -16124,6 +16227,9 @@
"reservation-studio-widget": [ "reservation-studio-widget": [
"cpe:2.3:a:pvmg:reservation.studio:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:pvmg:reservation.studio:*:*:*:*:*:wordpress:*:*"
], ],
"reset": [
"cpe:2.3:a:smartzminds:reset:*:*:*:*:*:wordpress:*:*"
],
"resize-at-upload-plus": [ "resize-at-upload-plus": [
"cpe:2.3:a:resize_at_upload_plus_project:resize_at_upload_plus:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:resize_at_upload_plus_project:resize_at_upload_plus:*:*:*:*:*:wordpress:*:*"
], ],
@ -16527,6 +16633,9 @@
"sellkit": [ "sellkit": [
"cpe:2.3:a:artbees:sellkit:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:artbees:sellkit:*:*:*:*:*:wordpress:*:*"
], ],
"send-email-only-on-reply-to-my-comment": [
"cpe:2.3:a:yasirwazir:send_email_only_on_reply_to_my_comment:*:*:*:*:*:wordpress:*:*"
],
"send-emails-with-mandrill": [ "send-emails-with-mandrill": [
"cpe:2.3:a:millermedia:mandrill:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:millermedia:mandrill:*:*:*:*:*:wordpress:*:*"
], ],
@ -17092,6 +17201,9 @@
"site-editor": [ "site-editor": [
"cpe:2.3:a:siteeditor:site_editor:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:siteeditor:site_editor:*:*:*:*:*:wordpress:*:*"
], ],
"site-mailer": [
"cpe:2.3:a:elementor:site_mailer:*:*:*:*:*:wordpress:*:*"
],
"site-offline": [ "site-offline": [
"cpe:2.3:a:freehtmldesigns:site_offline:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:freehtmldesigns:site_offline:*:*:*:*:*:wordpress:*:*"
], ],
@ -17780,6 +17892,9 @@
"svg-support": [ "svg-support": [
"cpe:2.3:a:benbodhi:svg_support:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:benbodhi:svg_support:*:*:*:*:*:wordpress:*:*"
], ],
"svg-uploads-support": [
"cpe:2.3:a:ablyperu:svg_uploads_support:*:*:*:*:*:wordpress:*:*"
],
"svg-vector-icon-plugin": [ "svg-vector-icon-plugin": [
"cpe:2.3:a:wp_svg_icons_project:wp_svg_icons:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:wp_svg_icons_project:wp_svg_icons:*:*:*:*:*:wordpress:*:*"
], ],
@ -17859,6 +17974,7 @@
"cpe:2.3:a:tainacan:tainacan:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:tainacan:tainacan:*:*:*:*:*:wordpress:*:*"
], ],
"tarteaucitronjs": [ "tarteaucitronjs": [
"cpe:2.3:a:amauri:tarteaucitron.io:*:*:*:*:*:wordpress:*:*",
"cpe:2.3:a:tarteaucitron.js_-_cookies_legislation_\\\u0026_gdpr_project:tarteaucitron.js_-_cookies_legislation_\\\u0026_gdpr:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:tarteaucitron.js_-_cookies_legislation_\\\u0026_gdpr_project:tarteaucitron.js_-_cookies_legislation_\\\u0026_gdpr:*:*:*:*:*:wordpress:*:*"
], ],
"taskbuilder": [ "taskbuilder": [
@ -18106,6 +18222,9 @@
"timeline-widget-addon-for-elementor": [ "timeline-widget-addon-for-elementor": [
"cpe:2.3:a:coolplugins:timeline_widget_for_elementor:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:coolplugins:timeline_widget_for_elementor:*:*:*:*:*:wordpress:*:*"
], ],
"timer-countdown": [
"cpe:2.3:a:yaidier:countdown_timer:*:*:*:*:*:wordpress:*:*"
],
"timesheet": [ "timesheet": [
"cpe:2.3:a:bestwebsoft:timesheet:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:bestwebsoft:timesheet:*:*:*:*:*:wordpress:*:*"
], ],
@ -18249,9 +18368,15 @@
"tripetto": [ "tripetto": [
"cpe:2.3:a:tripetto:tripetto:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:tripetto:tripetto:*:*:*:*:*:wordpress:*:*"
], ],
"tripplan": [
"cpe:2.3:a:checklist:trip_plan:*:*:*:*:*:wordpress:*:*"
],
"truebooker-appointment-booking": [ "truebooker-appointment-booking": [
"cpe:2.3:a:themetechmount:truebooker:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:themetechmount:truebooker:*:*:*:*:*:wordpress:*:*"
], ],
"trx_addons": [
"cpe:2.3:a:themerex:addons:*:*:*:*:*:wordpress:*:*"
],
"ts-webfonts-for-conoha": [ "ts-webfonts-for-conoha": [
"cpe:2.3:a:gmo:typesquare_webfonts_for_conoha:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:gmo:typesquare_webfonts_for_conoha:*:*:*:*:*:wordpress:*:*"
], ],
@ -18457,9 +18582,15 @@
"ultimate-weather-plugin": [ "ultimate-weather-plugin": [
"cpe:2.3:a:ultimate-weather_project:ultimate-weather:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:ultimate-weather_project:ultimate-weather:*:*:*:*:*:wordpress:*:*"
], ],
"ultimate-woocommerce-auction-pro": [
"cpe:2.3:a:auctionplugin:ultimate_wordpress_auction_plugin:*:*:*:*:pro:wordpress:*:*"
],
"ultimate-wp-query-search-filter": [ "ultimate-wp-query-search-filter": [
"cpe:2.3:a:ultimate_wp_query_search_filter_project:ultimate_wp_query_search_filter:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:ultimate_wp_query_search_filter_project:ultimate_wp_query_search_filter:*:*:*:*:*:wordpress:*:*"
], ],
"ultimate-youtube-video-player": [
"cpe:2.3:a:codelizar:ultimate_youtube_video_\\\u0026_shorts_player_with_vimeo:*:*:*:*:*:wordpress:*:*"
],
"ultra-companion": [ "ultra-companion": [
"cpe:2.3:a:wpoperation:ultra_companion:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:wpoperation:ultra_companion:*:*:*:*:*:wordpress:*:*"
], ],
@ -19198,6 +19329,9 @@
"woo-esto": [ "woo-esto": [
"cpe:2.3:a:rebing:woocommerce_esto:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:rebing:woocommerce_esto:*:*:*:*:*:wordpress:*:*"
], ],
"woo-exfood": [
"cpe:2.3:a:exthemes:woocommerce_food:*:*:*:*:*:wordpress:*:*"
],
"woo-floating-cart-lite": [ "woo-floating-cart-lite": [
"cpe:2.3:a:xplodedthemes:xt_floating_cart_for_woocommerce:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:xplodedthemes:xt_floating_cart_for_woocommerce:*:*:*:*:*:wordpress:*:*"
], ],
@ -19267,6 +19401,9 @@
"woo-shipping-dpd-baltic": [ "woo-shipping-dpd-baltic": [
"cpe:2.3:a:dpdgroup:woocommerce_shipping:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:dpdgroup:woocommerce_shipping:*:*:*:*:*:wordpress:*:*"
], ],
"woo-slider-pro-drag-drop-slider-builder-for-woocommerce": [
"cpe:2.3:a:binarycarpenter:woo_slider_pro:*:*:*:*:*:wordpress:*:*"
],
"woo-smart-compare": [ "woo-smart-compare": [
"cpe:2.3:a:wpclever:wpc_smart_compare_for_woocommerce:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:wpclever:wpc_smart_compare_for_woocommerce:*:*:*:*:*:wordpress:*:*"
], ],
@ -19820,6 +19957,9 @@
"cpe:2.3:a:kigurumi:csv_exporter:*:*:*:*:*:wordpress:*:*", "cpe:2.3:a:kigurumi:csv_exporter:*:*:*:*:*:wordpress:*:*",
"cpe:2.3:a:wp_csv_exporter_project:wp_csv_exporter:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:wp_csv_exporter_project:wp_csv_exporter:*:*:*:*:*:wordpress:*:*"
], ],
"wp-curriculo-vitae": [
"cpe:2.3:a:williamluis:wp-curriculo_vitae_free:*:*:*:*:*:wordpress:*:*"
],
"wp-custom-admin-interface": [ "wp-custom-admin-interface": [
"cpe:2.3:a:wp_custom_admin_interface_project:wp_custom_admin_interface:*:*:*:*:*:*:*:*" "cpe:2.3:a:wp_custom_admin_interface_project:wp_custom_admin_interface:*:*:*:*:*:*:*:*"
], ],
@ -19891,7 +20031,8 @@
"cpe:2.3:a:display_users_project:display_users:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:display_users_project:display_users:*:*:*:*:*:wordpress:*:*"
], ],
"wp-docs": [ "wp-docs": [
"cpe:2.3:a:androidbubble:wp_docs:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:androidbubble:wp_docs:*:*:*:*:*:wordpress:*:*",
"cpe:2.3:a:fahadmahmood:wp_docs:*:*:*:*:*:wordpress:*:*"
], ],
"wp-domain-redirect": [ "wp-domain-redirect": [
"cpe:2.3:a:wp_domain_redirect_project:wp_domain_redirect:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:wp_domain_redirect_project:wp_domain_redirect:*:*:*:*:*:wordpress:*:*"
@ -20795,6 +20936,9 @@
"wp-table-builder": [ "wp-table-builder": [
"cpe:2.3:a:dotcamp:wp_table_builder:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:dotcamp:wp_table_builder:*:*:*:*:*:wordpress:*:*"
], ],
"wp-table-manager": [
"cpe:2.3:a:joomunited:wp_table_manager:*:*:*:*:*:wordpress:*:*"
],
"wp-table-reloaded": [ "wp-table-reloaded": [
"cpe:2.3:a:wp-table_reloaded_project:wp-table_reloaded:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:wp-table_reloaded_project:wp-table_reloaded:*:*:*:*:*:wordpress:*:*"
], ],
@ -21139,6 +21283,9 @@
"wppizza": [ "wppizza": [
"cpe:2.3:a:wp-pizza:wppizza:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:wp-pizza:wppizza:*:*:*:*:*:wordpress:*:*"
], ],
"wpquiz": [
"cpe:2.3:a:bauc:wpquiz:*:*:*:*:*:wordpress:*:*"
],
"wprequal": [ "wprequal": [
"cpe:2.3:a:kevinbrent:wprequal:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:kevinbrent:wprequal:*:*:*:*:*:wordpress:*:*"
], ],
@ -21169,6 +21316,9 @@
"wpsolr-search-engine": [ "wpsolr-search-engine": [
"cpe:2.3:a:wpsolr:wpsolr-search-engine:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:wpsolr:wpsolr-search-engine:*:*:*:*:*:wordpress:*:*"
], ],
"wpstickybar-sticky-bar-sticky-header": [
"cpe:2.3:a:a17lab:wpstickybar:*:*:*:*:*:wordpress:*:*"
],
"wpstream": [ "wpstream": [
"cpe:2.3:a:wpstream:wpstream:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:wpstream:wpstream:*:*:*:*:*:wordpress:*:*"
], ],
@ -21276,6 +21426,9 @@
"xtremelocator": [ "xtremelocator": [
"cpe:2.3:a:xtremelocator:xtremelocator:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:xtremelocator:xtremelocator:*:*:*:*:*:wordpress:*:*"
], ],
"xv-random-quotes": [
"cpe:2.3:a:xavivars:xv_random_quotes:*:*:*:*:*:wordpress:*:*"
],
"yabp": [ "yabp": [
"cpe:2.3:a:tromit:yabp:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:tromit:yabp:*:*:*:*:*:wordpress:*:*"
], ],
@ -21362,6 +21515,9 @@
"yotpo-social-reviews-for-woocommerce": [ "yotpo-social-reviews-for-woocommerce": [
"cpe:2.3:a:yotpo:yotpo:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:yotpo:yotpo:*:*:*:*:*:wordpress:*:*"
], ],
"yotuwp-easy-youtube-embed": [
"cpe:2.3:a:yotuwp:video_gallery:*:*:*:*:*:wordpress:*:*"
],
"yourchannel": [ "yourchannel": [
"cpe:2.3:a:plugin:yourchannel:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:plugin:yourchannel:*:*:*:*:*:wordpress:*:*"
], ],
@ -21782,6 +21938,9 @@
"pressmart": [ "pressmart": [
"cpe:2.3:a:presslayouts:pressmart:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:presslayouts:pressmart:*:*:*:*:*:wordpress:*:*"
], ],
"puzzles": [
"cpe:2.3:a:themerex:puzzles:*:*:*:*:*:wordpress:*:*"
],
"regina-lite": [ "regina-lite": [
"cpe:2.3:a:machothemes:regina_lite:*:*:*:*:*:wordpress:*:*" "cpe:2.3:a:machothemes:regina_lite:*:*:*:*:*:wordpress:*:*"
], ],

View File

@ -1,3 +0,0 @@
package dictionary
//go:generate go run ./index-generator/ -o data/cpe-index.json

View File

@ -0,0 +1,6 @@
# ORAS cache directory - raw CPE data from NVD API
.cpe-cache/
# Build artifacts
index-generator
.tmp-*

View File

@ -0,0 +1,370 @@
package main
import (
"encoding/json"
"fmt"
"os"
"path/filepath"
"time"
)
const cacheDir = ".cpe-cache"
// IncrementMetadata tracks a single fetch increment for a monthly batch
type IncrementMetadata struct {
FetchedAt time.Time `json:"fetchedAt"`
LastModStartDate time.Time `json:"lastModStartDate"`
LastModEndDate time.Time `json:"lastModEndDate"`
Products int `json:"products"`
StartIndex int `json:"startIndex"` // API pagination start index
EndIndex int `json:"endIndex"` // API pagination end index (last fetched)
}
// MonthlyBatchMetadata tracks all increments for a specific month
type MonthlyBatchMetadata struct {
Complete bool `json:"complete"`
TotalProducts int `json:"totalProducts"`
Increments []IncrementMetadata `json:"increments"`
}
// CacheMetadata tracks the state of the CPE cache using monthly time-based organization
type CacheMetadata struct {
LastFullRefresh time.Time `json:"lastFullRefresh"`
LastStartIndex int `json:"lastStartIndex"` // last successfully processed startIndex for resume
TotalProducts int `json:"totalProducts"`
MonthlyBatches map[string]*MonthlyBatchMetadata `json:"monthlyBatches"` // key is "YYYY-MM"
}
// CacheManager handles local caching of CPE data
type CacheManager struct {
cacheDir string
}
// NewCacheManager creates a new cache manager
func NewCacheManager() *CacheManager {
return &CacheManager{
cacheDir: cacheDir,
}
}
// EnsureCacheDir ensures the cache directory exists
func (m *CacheManager) EnsureCacheDir() error {
if err := os.MkdirAll(m.cacheDir, 0755); err != nil {
return fmt.Errorf("failed to create cache directory: %w", err)
}
return nil
}
// LoadMetadata loads the cache metadata
func (m *CacheManager) LoadMetadata() (*CacheMetadata, error) {
metadataPath := filepath.Join(m.cacheDir, "metadata.json")
// check if metadata file exists
if _, err := os.Stat(metadataPath); os.IsNotExist(err) {
// return empty metadata for first run
return &CacheMetadata{
LastFullRefresh: time.Time{},
TotalProducts: 0,
MonthlyBatches: make(map[string]*MonthlyBatchMetadata),
}, nil
}
data, err := os.ReadFile(metadataPath)
if err != nil {
return nil, fmt.Errorf("failed to read metadata: %w", err)
}
var metadata CacheMetadata
if err := json.Unmarshal(data, &metadata); err != nil {
return nil, fmt.Errorf("failed to unmarshal metadata: %w", err)
}
// ensure MonthlyBatches map is initialized
if metadata.MonthlyBatches == nil {
metadata.MonthlyBatches = make(map[string]*MonthlyBatchMetadata)
}
return &metadata, nil
}
// SaveMetadata saves the cache metadata
func (m *CacheManager) SaveMetadata(metadata *CacheMetadata) error {
if err := m.EnsureCacheDir(); err != nil {
return err
}
metadataPath := filepath.Join(m.cacheDir, "metadata.json")
data, err := json.MarshalIndent(metadata, "", " ")
if err != nil {
return fmt.Errorf("failed to marshal metadata: %w", err)
}
if err := os.WriteFile(metadataPath, data, 0600); err != nil {
return fmt.Errorf("failed to write metadata: %w", err)
}
return nil
}
// SaveProductsToMonthlyFile saves products to a monthly file (initial.json or YYYY-MM.json)
// uses an atomic write pattern (temp file + rename) for safety
func (m *CacheManager) SaveProductsToMonthlyFile(filename string, products []NVDProduct) error {
if err := m.EnsureCacheDir(); err != nil {
return err
}
filePath := filepath.Join(m.cacheDir, filename)
tempPath := filePath + ".tmp"
// marshal products to JSON
data, err := json.MarshalIndent(products, "", " ")
if err != nil {
return fmt.Errorf("failed to marshal products: %w", err)
}
// write to temp file first
if err := os.WriteFile(tempPath, data, 0600); err != nil {
return fmt.Errorf("failed to write temp file: %w", err)
}
// atomic rename
if err := os.Rename(tempPath, filePath); err != nil {
// cleanup temp file on error
_ = os.Remove(tempPath)
return fmt.Errorf("failed to rename temp file: %w", err)
}
return nil
}
// LoadMonthlyFile loads products from a monthly file
func (m *CacheManager) LoadMonthlyFile(filename string) ([]NVDProduct, error) {
filePath := filepath.Join(m.cacheDir, filename)
data, err := os.ReadFile(filePath)
if err != nil {
if os.IsNotExist(err) {
return []NVDProduct{}, nil
}
return nil, fmt.Errorf("failed to read %s: %w", filename, err)
}
var products []NVDProduct
if err := json.Unmarshal(data, &products); err != nil {
return nil, fmt.Errorf("failed to unmarshal %s: %w", filename, err)
}
return products, nil
}
// GetMonthKey returns the "YYYY-MM" key for a given time
func GetMonthKey(t time.Time) string {
return t.Format("2006-01")
}
// SaveProducts saves products grouped by modification month
// this is called after fetching from the API to organize products into monthly files
func (m *CacheManager) SaveProducts(products []NVDProduct, isFullRefresh bool, metadata *CacheMetadata, increment IncrementMetadata) error {
if len(products) == 0 {
return nil
}
if isFullRefresh {
return m.saveFullRefresh(products, metadata)
}
return m.saveIncrementalUpdate(products, metadata, increment)
}
// saveFullRefresh saves all products to initial.json
func (m *CacheManager) saveFullRefresh(products []NVDProduct, metadata *CacheMetadata) error {
if err := m.SaveProductsToMonthlyFile("initial.json", products); err != nil {
return fmt.Errorf("failed to save initial.json: %w", err)
}
metadata.LastFullRefresh = time.Now()
metadata.TotalProducts = len(products)
metadata.LastStartIndex = 0 // reset on full refresh
metadata.MonthlyBatches = make(map[string]*MonthlyBatchMetadata)
return nil
}
// saveIncrementalUpdate saves products grouped by modification month to monthly files
func (m *CacheManager) saveIncrementalUpdate(products []NVDProduct, metadata *CacheMetadata, increment IncrementMetadata) error {
productsByMonth, err := groupProductsByMonth(products)
if err != nil {
return err
}
for monthKey, monthProducts := range productsByMonth {
if err := m.saveMonthlyBatch(monthKey, monthProducts, metadata, increment); err != nil {
return err
}
}
// update last processed index for resume capability
metadata.LastStartIndex = increment.EndIndex
return nil
}
// groupProductsByMonth groups products by their lastModified month
func groupProductsByMonth(products []NVDProduct) (map[string][]NVDProduct, error) {
productsByMonth := make(map[string][]NVDProduct)
for _, product := range products {
lastMod, err := time.Parse(time.RFC3339, product.CPE.LastModified)
if err != nil {
return nil, fmt.Errorf("failed to parse lastModified for %s: %w", product.CPE.CPENameID, err)
}
monthKey := GetMonthKey(lastMod)
productsByMonth[monthKey] = append(productsByMonth[monthKey], product)
}
return productsByMonth, nil
}
// saveMonthlyBatch saves products for a specific month, merging with existing data
func (m *CacheManager) saveMonthlyBatch(monthKey string, monthProducts []NVDProduct, metadata *CacheMetadata, increment IncrementMetadata) error {
filename := monthKey + ".json"
// load existing products for this month
existing, err := m.LoadMonthlyFile(filename)
if err != nil {
return fmt.Errorf("failed to load existing %s: %w", filename, err)
}
// merge products (newer wins)
merged := mergeProducts(existing, monthProducts)
// atomically save merged products
if err := m.SaveProductsToMonthlyFile(filename, merged); err != nil {
return fmt.Errorf("failed to save %s: %w", filename, err)
}
// update metadata
updateMonthlyBatchMetadata(metadata, monthKey, monthProducts, merged, increment)
return nil
}
// mergeProducts deduplicates products by CPENameID, with newer products overwriting older ones
func mergeProducts(existing, updated []NVDProduct) []NVDProduct {
productMap := make(map[string]NVDProduct)
for _, p := range existing {
productMap[p.CPE.CPENameID] = p
}
for _, p := range updated {
productMap[p.CPE.CPENameID] = p
}
merged := make([]NVDProduct, 0, len(productMap))
for _, p := range productMap {
merged = append(merged, p)
}
return merged
}
// updateMonthlyBatchMetadata updates the metadata for a monthly batch
func updateMonthlyBatchMetadata(metadata *CacheMetadata, monthKey string, newProducts, allProducts []NVDProduct, increment IncrementMetadata) {
if metadata.MonthlyBatches[monthKey] == nil {
metadata.MonthlyBatches[monthKey] = &MonthlyBatchMetadata{
Complete: false,
Increments: []IncrementMetadata{},
}
}
batchMeta := metadata.MonthlyBatches[monthKey]
batchMeta.Increments = append(batchMeta.Increments, IncrementMetadata{
FetchedAt: increment.FetchedAt,
LastModStartDate: increment.LastModStartDate,
LastModEndDate: increment.LastModEndDate,
Products: len(newProducts),
StartIndex: increment.StartIndex,
EndIndex: increment.EndIndex,
})
batchMeta.TotalProducts = len(allProducts)
}
// LoadAllProducts loads and merges all cached products from monthly files
// returns a deduplicated slice of products (newer products override older ones by CPENameID)
func (m *CacheManager) LoadAllProducts() ([]NVDProduct, error) {
// check if cache directory exists
if _, err := os.Stat(m.cacheDir); os.IsNotExist(err) {
return []NVDProduct{}, nil
}
productMap := make(map[string]NVDProduct)
// load initial.json first (if it exists)
initial, err := m.LoadMonthlyFile("initial.json")
if err != nil {
return nil, fmt.Errorf("failed to load initial.json: %w", err)
}
for _, p := range initial {
productMap[p.CPE.CPENameID] = p
}
// load all monthly files (YYYY-MM.json)
entries, err := os.ReadDir(m.cacheDir)
if err != nil {
return nil, fmt.Errorf("failed to read cache directory: %w", err)
}
for _, entry := range entries {
if entry.IsDir() || filepath.Ext(entry.Name()) != ".json" {
continue
}
// skip metadata.json and initial.json
if entry.Name() == "metadata.json" || entry.Name() == "initial.json" {
continue
}
// load monthly file
products, err := m.LoadMonthlyFile(entry.Name())
if err != nil {
return nil, fmt.Errorf("failed to load %s: %w", entry.Name(), err)
}
// merge products (newer wins based on lastModified)
for _, p := range products {
existing, exists := productMap[p.CPE.CPENameID]
if !exists {
productMap[p.CPE.CPENameID] = p
continue
}
// compare lastModified timestamps to keep the newer one
newMod, _ := time.Parse(time.RFC3339, p.CPE.LastModified)
existingMod, _ := time.Parse(time.RFC3339, existing.CPE.LastModified)
if newMod.After(existingMod) {
productMap[p.CPE.CPENameID] = p
}
}
}
// convert map to slice
allProducts := make([]NVDProduct, 0, len(productMap))
for _, p := range productMap {
allProducts = append(allProducts, p)
}
return allProducts, nil
}
// CleanCache removes the local cache directory
func (m *CacheManager) CleanCache() error {
if err := os.RemoveAll(m.cacheDir); err != nil {
return fmt.Errorf("failed to clean cache: %w", err)
}
fmt.Println("Cache cleaned successfully")
return nil
}

View File

@ -0,0 +1,319 @@
package main
import (
"os"
"path/filepath"
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestCacheManager_MonthlyFileOperations(t *testing.T) {
tmpDir := t.TempDir()
cacheManager := &CacheManager{cacheDir: tmpDir}
testProducts := []NVDProduct{
{
CPE: NVDProductDetails{
CPEName: "cpe:2.3:a:vendor:product1:1.0:*:*:*:*:*:*:*",
CPENameID: "product1-id",
LastModified: "2024-11-15T10:00:00.000Z",
Titles: []NVDTitle{{Title: "Test Product 1", Lang: "en"}},
},
},
{
CPE: NVDProductDetails{
CPEName: "cpe:2.3:a:vendor:product2:2.0:*:*:*:*:*:*:*",
CPENameID: "product2-id",
LastModified: "2024-11-20T10:00:00.000Z",
Titles: []NVDTitle{{Title: "Test Product 2", Lang: "en"}},
},
},
}
t.Run("save and load monthly file", func(t *testing.T) {
err := cacheManager.SaveProductsToMonthlyFile("2024-11.json", testProducts)
require.NoError(t, err)
expectedPath := filepath.Join(tmpDir, "2024-11.json")
require.FileExists(t, expectedPath)
loaded, err := cacheManager.LoadMonthlyFile("2024-11.json")
require.NoError(t, err)
require.Len(t, loaded, 2)
assert.Equal(t, testProducts[0].CPE.CPEName, loaded[0].CPE.CPEName)
assert.Equal(t, testProducts[1].CPE.CPEName, loaded[1].CPE.CPEName)
})
t.Run("atomic save with temp file", func(t *testing.T) {
err := cacheManager.SaveProductsToMonthlyFile("2024-12.json", testProducts)
require.NoError(t, err)
// temp file should not exist after successful save
tempPath := filepath.Join(tmpDir, "2024-12.json.tmp")
require.NoFileExists(t, tempPath)
// actual file should exist
finalPath := filepath.Join(tmpDir, "2024-12.json")
require.FileExists(t, finalPath)
})
t.Run("load non-existent file returns empty", func(t *testing.T) {
loaded, err := cacheManager.LoadMonthlyFile("2025-01.json")
require.NoError(t, err)
assert.Empty(t, loaded)
})
}
func TestCacheManager_Metadata(t *testing.T) {
tmpDir := t.TempDir()
cacheManager := &CacheManager{cacheDir: tmpDir}
t.Run("load metadata on first run", func(t *testing.T) {
metadata, err := cacheManager.LoadMetadata()
require.NoError(t, err)
require.NotNil(t, metadata)
assert.NotNil(t, metadata.MonthlyBatches)
assert.True(t, metadata.LastFullRefresh.IsZero())
assert.Equal(t, 0, metadata.LastStartIndex)
assert.Equal(t, 0, metadata.TotalProducts)
})
t.Run("save and load metadata with monthly batches", func(t *testing.T) {
now := time.Now()
metadata := &CacheMetadata{
LastFullRefresh: now,
LastStartIndex: 4000,
TotalProducts: 1500,
MonthlyBatches: map[string]*MonthlyBatchMetadata{
"2024-11": {
Complete: true,
TotalProducts: 1000,
Increments: []IncrementMetadata{
{
FetchedAt: now,
LastModStartDate: now.Add(-24 * time.Hour),
LastModEndDate: now,
Products: 1000,
StartIndex: 0,
EndIndex: 2000,
},
},
},
"2024-12": {
Complete: false,
TotalProducts: 500,
Increments: []IncrementMetadata{
{
FetchedAt: now,
LastModStartDate: now.Add(-12 * time.Hour),
LastModEndDate: now,
Products: 500,
StartIndex: 0,
EndIndex: 1000,
},
},
},
},
}
err := cacheManager.SaveMetadata(metadata)
require.NoError(t, err)
loadedMetadata, err := cacheManager.LoadMetadata()
require.NoError(t, err)
assert.Equal(t, metadata.TotalProducts, loadedMetadata.TotalProducts)
assert.Equal(t, metadata.LastStartIndex, loadedMetadata.LastStartIndex)
assert.Equal(t, 2, len(loadedMetadata.MonthlyBatches))
assert.True(t, loadedMetadata.MonthlyBatches["2024-11"].Complete)
assert.False(t, loadedMetadata.MonthlyBatches["2024-12"].Complete)
assert.Equal(t, 1000, loadedMetadata.MonthlyBatches["2024-11"].TotalProducts)
assert.Len(t, loadedMetadata.MonthlyBatches["2024-11"].Increments, 1)
})
}
func TestCacheManager_LoadAllProducts(t *testing.T) {
tmpDir := t.TempDir()
cacheManager := &CacheManager{cacheDir: tmpDir}
t.Run("load and merge monthly files", func(t *testing.T) {
// save initial.json with base products
initialProducts := []NVDProduct{
{CPE: NVDProductDetails{
CPEName: "cpe:2.3:a:vendor:product1:*:*:*:*:*:*:*:*",
CPENameID: "product1-id",
LastModified: "2024-10-01T10:00:00.000Z",
}},
{CPE: NVDProductDetails{
CPEName: "cpe:2.3:a:vendor:product2:*:*:*:*:*:*:*:*",
CPENameID: "product2-id",
LastModified: "2024-10-15T10:00:00.000Z",
}},
}
err := cacheManager.SaveProductsToMonthlyFile("initial.json", initialProducts)
require.NoError(t, err)
// save 2024-11.json with updated product2 and new product3
novemberProducts := []NVDProduct{
{CPE: NVDProductDetails{
CPEName: "cpe:2.3:a:vendor:product2:*:*:*:*:*:*:*:*",
CPENameID: "product2-id",
LastModified: "2024-11-05T10:00:00.000Z", // newer version
}},
{CPE: NVDProductDetails{
CPEName: "cpe:2.3:a:vendor:product3:*:*:*:*:*:*:*:*",
CPENameID: "product3-id",
LastModified: "2024-11-10T10:00:00.000Z",
}},
}
err = cacheManager.SaveProductsToMonthlyFile("2024-11.json", novemberProducts)
require.NoError(t, err)
// load all products
allProducts, err := cacheManager.LoadAllProducts()
require.NoError(t, err)
// should have 3 unique products (product2 from Nov overwrites Oct version)
require.Len(t, allProducts, 3)
// verify we got all products
cpeNames := make(map[string]string) // CPENameID -> LastModified
for _, product := range allProducts {
cpeNames[product.CPE.CPENameID] = product.CPE.LastModified
}
assert.Contains(t, cpeNames, "product1-id")
assert.Contains(t, cpeNames, "product2-id")
assert.Contains(t, cpeNames, "product3-id")
// product2 should be the newer version from November
assert.Equal(t, "2024-11-05T10:00:00.000Z", cpeNames["product2-id"])
})
t.Run("empty directory", func(t *testing.T) {
emptyDir := t.TempDir()
emptyCache := &CacheManager{cacheDir: emptyDir}
allProducts, err := emptyCache.LoadAllProducts()
require.NoError(t, err)
assert.Empty(t, allProducts)
})
}
func TestCacheManager_CleanCache(t *testing.T) {
tmpDir := t.TempDir()
cacheManager := &CacheManager{cacheDir: tmpDir}
// create some cache files
testProducts := []NVDProduct{
{CPE: NVDProductDetails{
CPEName: "cpe:2.3:a:vendor:product:*:*:*:*:*:*:*:*",
CPENameID: "test-id",
LastModified: "2024-11-01T10:00:00.000Z",
}},
}
err := cacheManager.SaveProductsToMonthlyFile("initial.json", testProducts)
require.NoError(t, err)
// verify cache exists
require.DirExists(t, tmpDir)
// clean cache
err = cacheManager.CleanCache()
require.NoError(t, err)
// verify cache is removed
_, err = os.Stat(tmpDir)
assert.True(t, os.IsNotExist(err))
}
func TestCacheManager_SaveProducts(t *testing.T) {
tmpDir := t.TempDir()
cacheManager := &CacheManager{cacheDir: tmpDir}
t.Run("full refresh saves to initial.json", func(t *testing.T) {
metadata := &CacheMetadata{
MonthlyBatches: make(map[string]*MonthlyBatchMetadata),
}
products := []NVDProduct{
{CPE: NVDProductDetails{
CPEName: "cpe:2.3:a:vendor:product1:*:*:*:*:*:*:*:*",
CPENameID: "p1",
LastModified: "2024-10-01T10:00:00.000Z",
}},
}
increment := IncrementMetadata{
FetchedAt: time.Now(),
Products: 1,
}
err := cacheManager.SaveProducts(products, true, metadata, increment)
require.NoError(t, err)
// verify initial.json exists
initialPath := filepath.Join(tmpDir, "initial.json")
require.FileExists(t, initialPath)
// verify metadata updated
assert.NotZero(t, metadata.LastFullRefresh)
assert.Equal(t, 1, metadata.TotalProducts)
assert.Empty(t, metadata.MonthlyBatches)
})
t.Run("incremental update groups by month", func(t *testing.T) {
metadata := &CacheMetadata{
LastFullRefresh: time.Now().Add(-30 * 24 * time.Hour),
MonthlyBatches: make(map[string]*MonthlyBatchMetadata),
}
products := []NVDProduct{
{CPE: NVDProductDetails{
CPEName: "cpe:2.3:a:vendor:product1:*:*:*:*:*:*:*:*",
CPENameID: "p1",
LastModified: "2024-11-05T10:00:00.000Z",
}},
{CPE: NVDProductDetails{
CPEName: "cpe:2.3:a:vendor:product2:*:*:*:*:*:*:*:*",
CPENameID: "p2",
LastModified: "2024-11-15T10:00:00.000Z",
}},
{CPE: NVDProductDetails{
CPEName: "cpe:2.3:a:vendor:product3:*:*:*:*:*:*:*:*",
CPENameID: "p3",
LastModified: "2024-12-01T10:00:00.000Z",
}},
}
increment := IncrementMetadata{
FetchedAt: time.Now(),
Products: 3,
}
err := cacheManager.SaveProducts(products, false, metadata, increment)
require.NoError(t, err)
// verify monthly files exist
nov2024Path := filepath.Join(tmpDir, "2024-11.json")
dec2024Path := filepath.Join(tmpDir, "2024-12.json")
require.FileExists(t, nov2024Path)
require.FileExists(t, dec2024Path)
// verify metadata has monthly batches
assert.Len(t, metadata.MonthlyBatches, 2)
assert.Contains(t, metadata.MonthlyBatches, "2024-11")
assert.Contains(t, metadata.MonthlyBatches, "2024-12")
// verify 2024-11 has 2 products
assert.Equal(t, 2, metadata.MonthlyBatches["2024-11"].TotalProducts)
assert.Len(t, metadata.MonthlyBatches["2024-11"].Increments, 1)
// verify 2024-12 has 1 product
assert.Equal(t, 1, metadata.MonthlyBatches["2024-12"].TotalProducts)
})
}
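// groupByMonth is a minimal sketch of the grouping the incremental test above
// expects from SaveProducts; the helper is hypothetical and assumes each
// product is bucketed into a "YYYY-MM" file derived from its LastModified timestamp.
func groupByMonth(products []NVDProduct) map[string][]NVDProduct {
byMonth := make(map[string][]NVDProduct)
for _, p := range products {
t, err := time.Parse(time.RFC3339, p.CPE.LastModified)
if err != nil {
continue // skip products with unparseable timestamps
}
key := t.Format("2006-01") // e.g. "2024-11" -> 2024-11.json
byMonth[key] = append(byMonth[key], p)
}
return byMonth
}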

View File

@ -1,11 +1,6 @@
package main
import (
-"compress/gzip"
-"encoding/json"
-"encoding/xml"
-"fmt"
-"io"
"log"
"slices"
"strings"
@ -15,39 +10,6 @@ import (
"github.com/anchore/syft/syft/pkg/cataloger/internal/cpegenerate/dictionary"
)
-func generateIndexedDictionaryJSON(rawGzipData io.Reader) ([]byte, error) {
-gzipReader, err := gzip.NewReader(rawGzipData)
-if err != nil {
-return nil, fmt.Errorf("unable to decompress CPE dictionary: %w", err)
-}
-defer gzipReader.Close()
-// Read XML data
-data, err := io.ReadAll(gzipReader)
-if err != nil {
-return nil, fmt.Errorf("unable to read CPE dictionary: %w", err)
-}
-// Unmarshal XML
-var cpeList CpeList
-if err := xml.Unmarshal(data, &cpeList); err != nil {
-return nil, fmt.Errorf("unable to unmarshal CPE dictionary XML: %w", err)
-}
-// Filter out data that's not applicable here
-cpeList = filterCpeList(cpeList)
-// Create indexed dictionary to help with looking up CPEs
-indexedDictionary := indexCPEList(cpeList)
-// Convert to JSON
-jsonData, err := json.MarshalIndent(indexedDictionary, "", " ")
-if err != nil {
-return nil, fmt.Errorf("unable to marshal CPE dictionary to JSON: %w", err)
-}
-return jsonData, nil
-}
// filterCpeList removes CPE items that are not applicable to software packages.
func filterCpeList(cpeList CpeList) CpeList {
var processedCpeList CpeList

View File

@ -3,6 +3,7 @@ package main
import (
"bytes"
"compress/gzip"
+"encoding/xml"
"io"
"os"
"testing"
@ -15,22 +16,37 @@ import (
"github.com/anchore/syft/syft/pkg/cataloger/internal/cpegenerate/dictionary"
)
-func Test_generateIndexedDictionaryJSON(t *testing.T) {
+func Test_processCPEList(t *testing.T) {
+// load test data from XML file (legacy format for testing backward compatibility)
f, err := os.Open("testdata/official-cpe-dictionary_v2.3.xml")
require.NoError(t, err)
+defer f.Close()
-// Create a buffer to store the gzipped data in memory
+// create a buffer to store the gzipped data in memory
buf := new(bytes.Buffer)
w := gzip.NewWriter(buf)
_, err = io.Copy(w, f)
require.NoError(t, err)
-// (finalize the gzip stream)
+// finalize the gzip stream
err = w.Close()
require.NoError(t, err)
-dictionaryJSON, err := generateIndexedDictionaryJSON(buf)
+// decompress and parse XML to get CpeList
+gzipReader, err := gzip.NewReader(buf)
+require.NoError(t, err)
+defer gzipReader.Close()
+data, err := io.ReadAll(gzipReader)
+require.NoError(t, err)
+var cpeList CpeList
+err = xml.Unmarshal(data, &cpeList)
+require.NoError(t, err)
+// process the CPE list
+dictionaryJSON, err := processCPEList(cpeList)
assert.NoError(t, err)
expected, err := os.ReadFile("./testdata/expected-cpe-index.json")
@ -40,7 +56,7 @@ func Test_processCPEList(t *testing.T) {
dictionaryJSONString := string(dictionaryJSON)
if diff := cmp.Diff(expectedDictionaryJSONString, dictionaryJSONString); diff != "" {
-t.Errorf("generateIndexedDictionaryJSON() mismatch (-want +got):\n%s", diff)
+t.Errorf("processCPEList() mismatch (-want +got):\n%s", diff)
}
}

View File

@ -1,49 +1,217 @@
-// This program downloads the latest CPE dictionary from NIST and processes it into a JSON file that can be embedded into Syft for more accurate CPE results.
+// This program fetches CPE data from the NVD Products API and processes it into a JSON file that can be embedded into Syft for more accurate CPE results.
+// ORAS caching is managed by Taskfile tasks - this program only works with local cache.
package main
import (
+"context"
+"encoding/json"
"errors"
"flag"
"fmt"
"log"
-"net/http"
"os"
+"path/filepath"
+"time"
)
func mainE() error {
var outputFilename string
-flag.StringVar(&outputFilename, "o", "", "file location to save CPE index")
+var forceFullRefresh bool
+var cacheOnly bool
+flag.StringVar(&outputFilename, "o", "", "file location to save CPE index (required for build mode)")
+flag.BoolVar(&forceFullRefresh, "full", false, "force full refresh instead of incremental update")
+flag.BoolVar(&cacheOnly, "cache-only", false, "only update cache from NVD API, don't generate index")
flag.Parse()
-if outputFilename == "" {
-return errors.New("-o is required")
+// validate flags
+if !cacheOnly && outputFilename == "" {
+return errors.New("-o is required (unless using -cache-only)")
}
-// Download and decompress file
-fmt.Println("Fetching CPE dictionary...")
-resp, err := http.Get(cpeDictionaryURL)
-if err != nil {
-return fmt.Errorf("unable to get CPE dictionary: %w", err)
+if cacheOnly && outputFilename != "" {
+return errors.New("-cache-only and -o cannot be used together")
}
-defer resp.Body.Close()
+ctx := context.Background()
+cacheManager := NewCacheManager()
+// MODE 1: Update cache only (called by task generate:cpe-index:update-cache)
+if cacheOnly {
+return updateCache(ctx, cacheManager, forceFullRefresh)
+}
+// MODE 2: Generate index from existing cache (called by task generate:cpe-index:build)
+return generateIndexFromCache(cacheManager, outputFilename)
+}
+// updateCache fetches new/updated CPE data from NVD API and saves to local cache
+func updateCache(ctx context.Context, cacheManager *CacheManager, forceFullRefresh bool) error {
+metadata, err := cacheManager.LoadMetadata()
+if err != nil {
+return fmt.Errorf("failed to load metadata: %w", err)
+}
+lastModStartDate, isFullRefresh := determineUpdateMode(metadata, forceFullRefresh)
+// use resume index if available
+resumeFromIndex := 0
+if !isFullRefresh && metadata.LastStartIndex > 0 {
+resumeFromIndex = metadata.LastStartIndex
+fmt.Printf("Resuming from index %d...\n", resumeFromIndex)
+}
+allProducts, increment, err := fetchProducts(ctx, lastModStartDate, resumeFromIndex)
+if err != nil {
+// if we have partial products, save them before returning error
+if len(allProducts) > 0 {
+fmt.Printf("\nError occurred but saving %d products fetched so far...\n", len(allProducts))
+if saveErr := saveAndReportResults(cacheManager, allProducts, isFullRefresh, metadata, increment); saveErr != nil {
+fmt.Printf("WARNING: Failed to save partial progress: %v\n", saveErr)
+} else {
+fmt.Println("Partial progress saved successfully. Run again to resume from this point.")
+}
+}
+return err
+}
+if len(allProducts) == 0 {
+fmt.Println("No products fetched (already up to date)")
+return nil
+}
+return saveAndReportResults(cacheManager, allProducts, isFullRefresh, metadata, increment)
+}
+// determineUpdateMode decides whether to do a full refresh or incremental update
+func determineUpdateMode(metadata *CacheMetadata, forceFullRefresh bool) (time.Time, bool) {
+if forceFullRefresh || metadata.LastFullRefresh.IsZero() {
+fmt.Println("Performing full refresh of CPE data")
+return time.Time{}, true
+}
+fmt.Printf("Performing incremental update since %s\n", metadata.LastFullRefresh.Format("2006-01-02"))
+return metadata.LastFullRefresh, false
+}
+// fetchProducts fetches products from the NVD API
+func fetchProducts(ctx context.Context, lastModStartDate time.Time, resumeFromIndex int) ([]NVDProduct, IncrementMetadata, error) {
+apiClient := NewNVDAPIClient()
+fmt.Println("Fetching CPE data from NVD Products API...")
+var allProducts []NVDProduct
+var totalResults int
+var firstStartIndex, lastEndIndex int
+onPageFetched := func(startIndex int, response NVDProductsResponse) error {
+if totalResults == 0 {
+totalResults = response.TotalResults
+firstStartIndex = startIndex
+}
+lastEndIndex = startIndex + response.ResultsPerPage
+allProducts = append(allProducts, response.Products...)
+fmt.Printf("Fetched %d/%d products...\n", len(allProducts), totalResults)
+return nil
+}
+if err := apiClient.FetchProductsSince(ctx, lastModStartDate, resumeFromIndex, onPageFetched); err != nil {
+// return partial products with increment metadata so they can be saved
+increment := IncrementMetadata{
+FetchedAt: time.Now(),
+LastModStartDate: lastModStartDate,
+LastModEndDate: time.Now(),
+Products: len(allProducts),
+StartIndex: firstStartIndex,
+EndIndex: lastEndIndex,
+}
+return allProducts, increment, fmt.Errorf("failed to fetch products from NVD API: %w", err)
+}
+increment := IncrementMetadata{
+FetchedAt: time.Now(),
+LastModStartDate: lastModStartDate,
+LastModEndDate: time.Now(),
+Products: len(allProducts),
+StartIndex: firstStartIndex,
+EndIndex: lastEndIndex,
+}
+return allProducts, increment, nil
+}
+// saveAndReportResults saves products and metadata, then reports success
+func saveAndReportResults(cacheManager *CacheManager, allProducts []NVDProduct, isFullRefresh bool, metadata *CacheMetadata, increment IncrementMetadata) error {
+fmt.Println("Saving products to cache...")
+if err := cacheManager.SaveProducts(allProducts, isFullRefresh, metadata, increment); err != nil {
+return fmt.Errorf("failed to save products: %w", err)
+}
+if err := cacheManager.SaveMetadata(metadata); err != nil {
+return fmt.Errorf("failed to save metadata: %w", err)
+}
+fmt.Println("Cache updated successfully!")
+if isFullRefresh {
+fmt.Printf("Total products in cache: %d\n", len(allProducts))
+} else {
+fmt.Printf("Added/updated %d products\n", len(allProducts))
+fmt.Printf("Grouped into %d monthly files\n", len(metadata.MonthlyBatches))
+}
+return nil
+}
+// generateIndexFromCache generates the CPE index from cached data only
+func generateIndexFromCache(cacheManager *CacheManager, outputFilename string) error {
+fmt.Println("Loading cached products...")
+allProducts, err := cacheManager.LoadAllProducts()
+if err != nil {
+return fmt.Errorf("failed to load cached products: %w", err)
+}
+if len(allProducts) == 0 {
+return fmt.Errorf("no cached data available - run 'task generate:cpe-index:cache:pull' and 'task generate:cpe-index:cache:update' first")
+}
+fmt.Printf("Loaded %d products from cache\n", len(allProducts))
+fmt.Println("Converting products to CPE list...")
+cpeList := ProductsToCpeList(allProducts)
fmt.Println("Generating index...")
-dictionaryJSON, err := generateIndexedDictionaryJSON(resp.Body)
+dictionaryJSON, err := processCPEList(cpeList)
if err != nil {
return err
}
-// Write CPE index (JSON data) to disk
-err = os.WriteFile(outputFilename, dictionaryJSON, 0600)
-if err != nil {
+// ensure parent directory exists
+outputDir := filepath.Dir(outputFilename)
+if err := os.MkdirAll(outputDir, 0755); err != nil {
+return fmt.Errorf("failed to create output directory: %w", err)
+}
+if err := os.WriteFile(outputFilename, dictionaryJSON, 0600); err != nil {
return fmt.Errorf("unable to write processed CPE dictionary to file: %w", err)
}
-fmt.Println("Done!")
+fmt.Println("CPE index generated successfully!")
return nil
}
+// processCPEList filters and indexes a CPE list, returning JSON bytes
+func processCPEList(cpeList CpeList) ([]byte, error) {
+// filter out data that's not applicable
+cpeList = filterCpeList(cpeList)
+// create indexed dictionary to help with looking up CPEs
+indexedDictionary := indexCPEList(cpeList)
+// convert to JSON
+jsonData, err := json.MarshalIndent(indexedDictionary, "", " ")
+if err != nil {
+return nil, fmt.Errorf("unable to marshal CPE dictionary to JSON: %w", err)
+}
+return jsonData, nil
+}
// errExit prints an error and exits with a non-zero exit code.
func errExit(err error) {
log.Printf("command failed: %s", err)

View File

@ -23,4 +23,5 @@ type CpeList struct {
CpeItems []CpeItem `xml:"cpe-item"`
}
-const cpeDictionaryURL = "https://nvd.nist.gov/feeds/xml/cpe/dictionary/official-cpe-dictionary_v2.3.xml.gz"
+// cpeDictionaryURL is deprecated - we now use the NVD Products API
+// const cpeDictionaryURL = "https://nvd.nist.gov/feeds/xml/cpe/dictionary/official-cpe-dictionary_v2.3.xml.gz"

View File

@ -0,0 +1,66 @@
package main
// nvd_adapter.go converts NVD API responses to the existing CpeList/CpeItem structures
// this allows the existing filtering and indexing logic to work without modification
// ProductsToCpeList converts NVD API products to the legacy CpeList format
func ProductsToCpeList(products []NVDProduct) CpeList {
var cpeItems []CpeItem
for _, product := range products {
item := productToCpeItem(product)
cpeItems = append(cpeItems, item)
}
return CpeList{
CpeItems: cpeItems,
}
}
// productToCpeItem converts a single NVD API product to a CpeItem
func productToCpeItem(product NVDProduct) CpeItem {
details := product.CPE
item := CpeItem{
// the Name field historically held the CPE 2.2 URI (legacy compatibility);
// the old XML feed carried both 2.2 and 2.3 formats. the NVD API only
// provides 2.3 names, so we use the 2.3 format in both places.
Name: details.CPEName,
}
// extract title (prefer English)
for _, title := range details.Titles {
if title.Lang == "en" {
item.Title = title.Title
break
}
}
// fallback to first title if no English title found
if item.Title == "" && len(details.Titles) > 0 {
item.Title = details.Titles[0].Title
}
// convert references
if len(details.Refs) > 0 {
item.References.Reference = make([]struct {
Href string `xml:"href,attr"`
Body string `xml:",chardata"`
}, len(details.Refs))
for i, ref := range details.Refs {
item.References.Reference[i].Href = ref.Ref
item.References.Reference[i].Body = ref.Type
}
}
// set CPE 2.3 information
item.Cpe23Item.Name = details.CPEName
// handle deprecation
if details.Deprecated && len(details.DeprecatedBy) > 0 {
// use the first deprecated-by CPE (the old format only supported one)
item.Cpe23Item.Deprecation.DeprecatedBy.Name = details.DeprecatedBy[0].CPEName
}
return item
}

View File

@ -0,0 +1,235 @@
package main
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestProductToCpeItem(t *testing.T) {
tests := []struct {
name string
product NVDProduct
expected CpeItem
}{
{
name: "basic product conversion",
product: NVDProduct{
CPE: NVDProductDetails{
CPEName: "cpe:2.3:a:vendor:product:1.0:*:*:*:*:*:*:*",
Deprecated: false,
Titles: []NVDTitle{
{Title: "Test Product", Lang: "en"},
},
Refs: []NVDRef{
{Ref: "https://example.com/product", Type: "Vendor"},
},
},
},
expected: CpeItem{
Name: "cpe:2.3:a:vendor:product:1.0:*:*:*:*:*:*:*",
Title: "Test Product",
References: struct {
Reference []struct {
Href string `xml:"href,attr"`
Body string `xml:",chardata"`
} `xml:"reference"`
}{
Reference: []struct {
Href string `xml:"href,attr"`
Body string `xml:",chardata"`
}{
{Href: "https://example.com/product", Body: "Vendor"},
},
},
Cpe23Item: struct {
Name string `xml:"name,attr"`
Deprecation struct {
DeprecatedBy struct {
Name string `xml:"name,attr"`
} `xml:"deprecated-by"`
} `xml:"deprecation"`
}{
Name: "cpe:2.3:a:vendor:product:1.0:*:*:*:*:*:*:*",
},
},
},
{
name: "deprecated product",
product: NVDProduct{
CPE: NVDProductDetails{
CPEName: "cpe:2.3:a:vendor:old:1.0:*:*:*:*:*:*:*",
Deprecated: true,
DeprecatedBy: []NVDDeprecatedBy{
{CPEName: "cpe:2.3:a:vendor:new:1.0:*:*:*:*:*:*:*", CPENameID: "test-uuid-123"},
},
Titles: []NVDTitle{
{Title: "Old Product", Lang: "en"},
},
Refs: []NVDRef{
{Ref: "https://example.com/old", Type: "Vendor"},
},
},
},
expected: CpeItem{
Name: "cpe:2.3:a:vendor:old:1.0:*:*:*:*:*:*:*",
Title: "Old Product",
References: struct {
Reference []struct {
Href string `xml:"href,attr"`
Body string `xml:",chardata"`
} `xml:"reference"`
}{
Reference: []struct {
Href string `xml:"href,attr"`
Body string `xml:",chardata"`
}{
{Href: "https://example.com/old", Body: "Vendor"},
},
},
Cpe23Item: struct {
Name string `xml:"name,attr"`
Deprecation struct {
DeprecatedBy struct {
Name string `xml:"name,attr"`
} `xml:"deprecated-by"`
} `xml:"deprecation"`
}{
Name: "cpe:2.3:a:vendor:old:1.0:*:*:*:*:*:*:*",
Deprecation: struct {
DeprecatedBy struct {
Name string `xml:"name,attr"`
} `xml:"deprecated-by"`
}{
DeprecatedBy: struct {
Name string `xml:"name,attr"`
}{
Name: "cpe:2.3:a:vendor:new:1.0:*:*:*:*:*:*:*",
},
},
},
},
},
{
name: "product with multiple titles prefers English",
product: NVDProduct{
CPE: NVDProductDetails{
CPEName: "cpe:2.3:a:vendor:product:1.0:*:*:*:*:*:*:*",
Titles: []NVDTitle{
{Title: "Produit", Lang: "fr"},
{Title: "Product", Lang: "en"},
{Title: "Producto", Lang: "es"},
},
Refs: []NVDRef{
{Ref: "https://example.com", Type: "Vendor"},
},
},
},
expected: CpeItem{
Name: "cpe:2.3:a:vendor:product:1.0:*:*:*:*:*:*:*",
Title: "Product",
References: struct {
Reference []struct {
Href string `xml:"href,attr"`
Body string `xml:",chardata"`
} `xml:"reference"`
}{
Reference: []struct {
Href string `xml:"href,attr"`
Body string `xml:",chardata"`
}{
{Href: "https://example.com", Body: "Vendor"},
},
},
Cpe23Item: struct {
Name string `xml:"name,attr"`
Deprecation struct {
DeprecatedBy struct {
Name string `xml:"name,attr"`
} `xml:"deprecated-by"`
} `xml:"deprecation"`
}{
Name: "cpe:2.3:a:vendor:product:1.0:*:*:*:*:*:*:*",
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result := productToCpeItem(tt.product)
assert.Equal(t, tt.expected.Name, result.Name)
assert.Equal(t, tt.expected.Title, result.Title)
assert.Equal(t, tt.expected.Cpe23Item.Name, result.Cpe23Item.Name)
assert.Equal(t, tt.expected.Cpe23Item.Deprecation.DeprecatedBy.Name, result.Cpe23Item.Deprecation.DeprecatedBy.Name)
require.Equal(t, len(tt.expected.References.Reference), len(result.References.Reference))
for i := range tt.expected.References.Reference {
assert.Equal(t, tt.expected.References.Reference[i].Href, result.References.Reference[i].Href)
assert.Equal(t, tt.expected.References.Reference[i].Body, result.References.Reference[i].Body)
}
})
}
}
func TestProductsToCpeList(t *testing.T) {
products := []NVDProduct{
{
CPE: NVDProductDetails{
CPEName: "cpe:2.3:a:vendor:product1:1.0:*:*:*:*:*:*:*",
Titles: []NVDTitle{
{Title: "Product 1", Lang: "en"},
},
Refs: []NVDRef{
{Ref: "https://npmjs.com/package/product1", Type: "Vendor"},
},
},
},
{
CPE: NVDProductDetails{
CPEName: "cpe:2.3:a:vendor:product2:2.0:*:*:*:*:*:*:*",
Titles: []NVDTitle{
{Title: "Product 2", Lang: "en"},
},
Refs: []NVDRef{
{Ref: "https://pypi.org/project/product2", Type: "Vendor"},
},
},
},
}
result := ProductsToCpeList(products)
require.Len(t, result.CpeItems, 2)
assert.Equal(t, "cpe:2.3:a:vendor:product1:1.0:*:*:*:*:*:*:*", result.CpeItems[0].Name)
assert.Equal(t, "Product 1", result.CpeItems[0].Title)
assert.Equal(t, "cpe:2.3:a:vendor:product2:2.0:*:*:*:*:*:*:*", result.CpeItems[1].Name)
assert.Equal(t, "Product 2", result.CpeItems[1].Title)
}
func TestProductsToCpeList_MultipleProducts(t *testing.T) {
products := []NVDProduct{
{
CPE: NVDProductDetails{
CPEName: "cpe:2.3:a:vendor:product1:*:*:*:*:*:*:*:*",
Titles: []NVDTitle{{Title: "Product 1", Lang: "en"}},
Refs: []NVDRef{{Ref: "https://example.com/1", Type: "Vendor"}},
},
},
{
CPE: NVDProductDetails{
CPEName: "cpe:2.3:a:vendor:product2:*:*:*:*:*:*:*:*",
Titles: []NVDTitle{{Title: "Product 2", Lang: "en"}},
Refs: []NVDRef{{Ref: "https://example.com/2", Type: "Vendor"}},
},
},
}
result := ProductsToCpeList(products)
require.Len(t, result.CpeItems, 2)
assert.Equal(t, "cpe:2.3:a:vendor:product1:*:*:*:*:*:*:*:*", result.CpeItems[0].Cpe23Item.Name)
assert.Equal(t, "cpe:2.3:a:vendor:product2:*:*:*:*:*:*:*:*", result.CpeItems[1].Cpe23Item.Name)
}

View File

@ -0,0 +1,286 @@
package main
import (
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"os"
"strconv"
"time"
"golang.org/x/time/rate"
)
const (
nvdProductsAPIURL = "https://services.nvd.nist.gov/rest/json/cpes/2.0"
resultsPerPage = 2000 // maximum allowed by NVD API
// rate limits per NVD API documentation
unauthenticatedRequestsPer30Seconds = 5
authenticatedRequestsPer30Seconds = 50
// retry configuration for rate limiting
maxRetries = 5
baseRetryDelay = 30 * time.Second // NVD uses 30-second rolling windows
)
// NVDAPIClient handles communication with the NVD Products API
type NVDAPIClient struct {
httpClient *http.Client
rateLimiter *rate.Limiter
apiKey string
}
// NVDProductsResponse represents the JSON response from the NVD Products API
type NVDProductsResponse struct {
ResultsPerPage int `json:"resultsPerPage"`
StartIndex int `json:"startIndex"`
TotalResults int `json:"totalResults"`
Format string `json:"format"`
Version string `json:"version"`
Timestamp string `json:"timestamp"`
Products []NVDProduct `json:"products"`
}
// NVDProduct represents a single product entry from the API
type NVDProduct struct {
CPE NVDProductDetails `json:"cpe"`
}
// NVDProductDetails contains the CPE and reference information
type NVDProductDetails struct {
CPEName string `json:"cpeName"`
Deprecated bool `json:"deprecated"`
DeprecatedBy []NVDDeprecatedBy `json:"deprecatedBy,omitempty"`
CPENameID string `json:"cpeNameId"`
Created string `json:"created"`
LastModified string `json:"lastModified"`
Titles []NVDTitle `json:"titles"`
Refs []NVDRef `json:"refs"`
}
// NVDTitle represents a title in a specific language
type NVDTitle struct {
Title string `json:"title"`
Lang string `json:"lang"`
}
// NVDRef represents a reference URL
type NVDRef struct {
Ref string `json:"ref"`
Type string `json:"type,omitempty"`
}
// NVDDeprecatedBy represents a CPE that replaces a deprecated one
type NVDDeprecatedBy struct {
CPEName string `json:"cpeName"`
CPENameID string `json:"cpeNameId"`
}
// NewNVDAPIClient creates a new NVD API client
// it reads the NVD_API_KEY environment variable for authenticated requests
func NewNVDAPIClient() *NVDAPIClient {
apiKey := os.Getenv("NVD_API_KEY")
// determine rate limit based on authentication
requestsPer30Seconds := unauthenticatedRequestsPer30Seconds
if apiKey != "" {
requestsPer30Seconds = authenticatedRequestsPer30Seconds
fmt.Printf("Using authenticated NVD API access (%d requests per 30 seconds)\n", requestsPer30Seconds)
} else {
fmt.Printf("Using unauthenticated NVD API access (%d requests per 30 seconds)\n", requestsPer30Seconds)
fmt.Println("Set NVD_API_KEY environment variable for higher rate limits")
}
// create rate limiter with 10% safety margin to avoid hitting limits
// X requests per 30 seconds * 0.9 = (X * 0.9) / 30 requests per second
effectiveRate := float64(requestsPer30Seconds) * 0.9 / 30.0
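// worked example: unauthenticated, 5 req/30s -> 5 * 0.9 / 30 = 0.15 req/s;
// authenticated, 50 req/30s -> 50 * 0.9 / 30 = 1.5 req/s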
limiter := rate.NewLimiter(rate.Limit(effectiveRate), 1)
fmt.Printf("Rate limiter configured: %.2f requests/second (with 10%% safety margin)\n", effectiveRate)
return &NVDAPIClient{
httpClient: &http.Client{
Timeout: 60 * time.Second,
},
rateLimiter: limiter,
apiKey: apiKey,
}
}
// PageCallback is called after each page is successfully fetched
// it receives the startIndex and the response for that page
type PageCallback func(startIndex int, response NVDProductsResponse) error
// FetchProductsSince fetches all products modified since the given date
// if lastModStartDate is zero, fetches all products
// calls onPageFetched callback after each successful page fetch for incremental saving
// if resumeFromIndex > 0, starts fetching from that index
func (c *NVDAPIClient) FetchProductsSince(ctx context.Context, lastModStartDate time.Time, resumeFromIndex int, onPageFetched PageCallback) error {
startIndex := resumeFromIndex
for {
resp, err := c.fetchPage(ctx, startIndex, lastModStartDate)
if err != nil {
return fmt.Errorf("failed to fetch page at index %d: %w", startIndex, err)
}
// call callback to save progress immediately
if onPageFetched != nil {
if err := onPageFetched(startIndex, resp); err != nil {
return fmt.Errorf("callback failed at index %d: %w", startIndex, err)
}
}
// check if we've fetched all results
if startIndex+resp.ResultsPerPage >= resp.TotalResults {
fmt.Printf("Fetched %d/%d products (complete)\n", resp.TotalResults, resp.TotalResults)
break
}
startIndex += resp.ResultsPerPage
fmt.Printf("Fetched %d/%d products...\n", startIndex, resp.TotalResults)
}
return nil
}
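// paging example: with resultsPerPage = 2000 and totalResults = 4500, pages
// are fetched at startIndex 0, 2000, and 4000; after the third page
// startIndex + ResultsPerPage >= TotalResults, so the loop ends.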
// fetchPage fetches a single page of results from the NVD API with retry logic for rate limiting
func (c *NVDAPIClient) fetchPage(ctx context.Context, startIndex int, lastModStartDate time.Time) (NVDProductsResponse, error) {
var lastErr error
for attempt := 0; attempt < maxRetries; attempt++ {
// wait for rate limiter
if err := c.rateLimiter.Wait(ctx); err != nil {
return NVDProductsResponse{}, fmt.Errorf("rate limiter error: %w", err)
}
// build request URL
url := fmt.Sprintf("%s?resultsPerPage=%d&startIndex=%d", nvdProductsAPIURL, resultsPerPage, startIndex)
// add date range if specified (incremental update)
if !lastModStartDate.IsZero() {
// NVD API expects an extended ISO-8601 date/time (no timezone suffix): 2024-01-01T00:00:00.000
lastModStartStr := lastModStartDate.Format("2006-01-02T15:04:05.000")
url += fmt.Sprintf("&lastModStartDate=%s", lastModStartStr)
}
// create request
req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
if err != nil {
return NVDProductsResponse{}, fmt.Errorf("failed to create request: %w", err)
}
// add API key header if available
if c.apiKey != "" {
req.Header.Set("apiKey", c.apiKey)
}
req.Header.Set("User-Agent", "syft-cpe-dictionary-generator")
// execute request
httpResp, err := c.httpClient.Do(req)
if err != nil {
return NVDProductsResponse{}, fmt.Errorf("failed to execute request: %w", err)
}
// handle rate limiting
if httpResp.StatusCode == http.StatusTooManyRequests {
lastErr = c.handleRateLimit(ctx, httpResp, attempt)
continue // retry
}
// handle HTTP status codes
statusResponse, handled, err := c.handleHTTPStatus(httpResp, startIndex)
if handled {
// either error or special case (404 with empty results)
return statusResponse, err
}
// success - parse response
var response NVDProductsResponse
if err := json.NewDecoder(httpResp.Body).Decode(&response); err != nil {
httpResp.Body.Close()
return NVDProductsResponse{}, fmt.Errorf("failed to decode response: %w", err)
}
httpResp.Body.Close()
return response, nil
}
return NVDProductsResponse{}, fmt.Errorf("max retries (%d) exceeded: %w", maxRetries, lastErr)
}
// handleRateLimit handles HTTP 429 responses by parsing Retry-After and waiting
func (c *NVDAPIClient) handleRateLimit(ctx context.Context, httpResp *http.Response, attempt int) error {
body, _ := io.ReadAll(httpResp.Body)
httpResp.Body.Close()
// parse Retry-After header
retryAfter := parseRetryAfter(httpResp.Header.Get("Retry-After"))
if retryAfter == 0 {
// use exponential backoff if no Retry-After header
retryAfter = baseRetryDelay * time.Duration(1<<uint(attempt))
}
err := fmt.Errorf("rate limited (429): %s", string(body))
fmt.Printf("Rate limited (429), retrying in %v (attempt %d/%d)...\n", retryAfter, attempt+1, maxRetries)
select {
case <-time.After(retryAfter):
return err // return to retry
case <-ctx.Done():
return ctx.Err()
}
}
// handleHTTPStatus handles non-429 HTTP status codes
// returns (response, handled, error) where:
// - handled=true means the status was processed (either success case like 404 or error)
// - handled=false means continue to normal response parsing
func (c *NVDAPIClient) handleHTTPStatus(httpResp *http.Response, startIndex int) (NVDProductsResponse, bool, error) {
// handle 404 as "no results found" (common when querying recent dates with no updates)
if httpResp.StatusCode == http.StatusNotFound {
httpResp.Body.Close()
return NVDProductsResponse{
ResultsPerPage: 0,
StartIndex: startIndex,
TotalResults: 0,
Products: []NVDProduct{},
}, true, nil
}
// check for other non-200 status codes
if httpResp.StatusCode != http.StatusOK {
body, _ := io.ReadAll(httpResp.Body)
httpResp.Body.Close()
return NVDProductsResponse{}, true, fmt.Errorf("unexpected status code %d: %s", httpResp.StatusCode, string(body))
}
// status OK - let caller parse response
return NVDProductsResponse{}, false, nil
}
// parseRetryAfter parses the Retry-After header from HTTP 429 responses
// returns 0 if the header is missing or invalid
func parseRetryAfter(header string) time.Duration {
if header == "" {
return 0
}
// try parsing as seconds (most common format)
if seconds, err := strconv.Atoi(header); err == nil {
return time.Duration(seconds) * time.Second
}
// try parsing as HTTP date (less common)
if t, err := time.Parse(time.RFC1123, header); err == nil {
duration := time.Until(t)
if duration > 0 {
return duration
}
}
return 0
}
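// usage sketch (not part of this file): fetch every product via the paging
// callback, accumulating pages in memory; all names used here are defined above.
//
//	client := NewNVDAPIClient()
//	var all []NVDProduct
//	err := client.FetchProductsSince(context.Background(), time.Time{}, 0,
//		func(startIndex int, resp NVDProductsResponse) error {
//			all = append(all, resp.Products...) // each page lands here before the next fetch
//			return nil
//		})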

View File

@ -225,6 +225,9 @@ func candidateVendors(p pkg.Package) []string {
vendors.union(candidateVendorsForAPK(p))
case pkg.NpmPackage:
vendors.union(candidateVendorsForJavascript(p))
+case pkg.PEBinary:
+// Add PE-specific vendor hints (e.g. ghostscript -> artifex)
+vendors.union(candidateVendorsForPE(p))
case pkg.WordpressPluginEntry:
vendors.clear()
vendors.union(candidateVendorsForWordpressPlugin(p))
@ -301,6 +304,9 @@ func candidateProductSet(p pkg.Package) fieldCandidateSet {
switch p.Metadata.(type) {
case pkg.ApkDBEntry:
products.union(candidateProductsForAPK(p))
+case pkg.PEBinary:
+// Add PE-specific product hints (e.g. ghostscript)
+products.union(candidateProductsForPE(p))
case pkg.WordpressPluginEntry:
products.clear()
products.union(candidateProductsForWordpressPlugin(p))
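The candidateVendorsForPE helper referenced above is not shown in this diff; a minimal sketch of what such a helper could look like, assuming it reads the CompanyName entry from the PE version resources (the function name and normalization below are illustrative, not the actual implementation):

func candidateVendorsForPESketch(p pkg.Package) []string {
peMeta, ok := p.Metadata.(pkg.PEBinary)
if !ok {
return nil
}
var vendors []string
for _, kv := range peMeta.VersionResources {
if kv.Key == "CompanyName" && kv.Value != "" {
// normalize e.g. "Artifex Software, Inc." -> "artifex"
name := strings.ToLower(strings.Fields(kv.Value)[0])
vendors = append(vendors, strings.Trim(name, ","))
}
}
return vendors
}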

View File

@ -0,0 +1,39 @@
package cpegenerate
import (
"testing"
"github.com/anchore/syft/syft/pkg"
)
func TestGhostscriptPEGeneratesArtifexCPE(t *testing.T) {
// construct a BinaryPkg with PE metadata resembling Ghostscript
p := pkg.Package{
Name: "GPL Ghostscript",
Version: "9.54.0",
Type: pkg.BinaryPkg,
Metadata: pkg.PEBinary{
VersionResources: pkg.KeyValues{
{Key: "CompanyName", Value: "Artifex Software, Inc."},
{Key: "ProductName", Value: "GPL Ghostscript"},
{Key: "FileDescription", Value: "Ghostscript Interpreter"},
},
},
}
cpes := FromPackageAttributes(p)
if len(cpes) == 0 {
t.Fatalf("expected at least one CPE, got none")
}
found := false
for _, c := range cpes {
if c.Attributes.Vendor == "artifex" && c.Attributes.Product == "ghostscript" && c.Attributes.Version == p.Version {
found = true
break
}
}
if !found {
t.Fatalf("expected to find CPE with vendor 'artifex' and product 'ghostscript' for Ghostscript PE binary; got: %+v", cpes)
}
}

Some files were not shown because too many files have changed in this diff.