Compare commits

...

61 Commits

Author SHA1 Message Date
anchore-actions-token-generator[bot]
365325376a
chore(deps): update tools to latest versions (#4370)
Signed-off-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: spiffcs <32073428+spiffcs@users.noreply.github.com>
2025-11-15 06:47:23 -05:00
Alex Goodman
153f2321ce
Fix test-fixture publish (#4369)
* pin python dependencies

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

* pin rust dependencies

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

* pin php deps

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

* update and pin http and curl fixtures

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

---------

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>
2025-11-14 15:41:23 -05:00
Alex Goodman
7bf7bcc461
Support extras statements in Python PDM cataloger (#4352)
* fix pdm

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

* update json schema

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

* fix tests

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

* add test for metadata construction

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

* add missing test fixture

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

* conserve markers

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

* update json schema

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

* add additional tests

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

---------

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>
2025-11-14 15:13:10 -05:00
anchore-actions-token-generator[bot]
6a21b5e5e2
chore(deps): update tools to latest versions (#4365)
Signed-off-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: spiffcs <32073428+spiffcs@users.noreply.github.com>
2025-11-14 09:25:27 -05:00
dependabot[bot]
6480c8a425
chore(deps): bump github/codeql-action from 4.31.2 to 4.31.3 (#4366)
Bumps [github/codeql-action](https://github.com/github/codeql-action) from 4.31.2 to 4.31.3.
- [Release notes](https://github.com/github/codeql-action/releases)
- [Changelog](https://github.com/github/codeql-action/blob/main/CHANGELOG.md)
- [Commits](0499de31b9...014f16e7ab)

---
updated-dependencies:
- dependency-name: github/codeql-action
  dependency-version: 4.31.3
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-14 09:25:08 -05:00
Kudryavcev Nikolay
89842bd2f6
chore: migrate syft to use mholt/archives instead of anchore fork (#4029)
---------
Signed-off-by: Kudryavcev Nikolay <kydry.nikolau@gmail.com>
Signed-off-by: Christopher Phillips <spiffcs@users.noreply.github.com>
Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>
2025-11-13 23:04:43 +00:00
Christopher Angelo Phillips
4a60c41f38
feat: 4184 gguf parser (ai artifact cataloger) part 1 (#4279)
---------
Signed-off-by: Christopher Phillips <32073428+spiffcs@users.noreply.github.com>
2025-11-13 17:43:48 -05:00
anchore-actions-token-generator[bot]
2e100f33f3
chore(deps): update tools to latest versions (#4358)
Signed-off-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: spiffcs <32073428+spiffcs@users.noreply.github.com>
2025-11-12 13:27:47 -05:00
dependabot[bot]
b444f0c2ed
chore(deps): bump golang.org/x/mod from 0.29.0 to 0.30.0 (#4359)
Bumps [golang.org/x/mod](https://github.com/golang/mod) from 0.29.0 to 0.30.0.
- [Commits](https://github.com/golang/mod/compare/v0.29.0...v0.30.0)

---
updated-dependencies:
- dependency-name: golang.org/x/mod
  dependency-version: 0.30.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-12 13:27:33 -05:00
Adam Chovanec
102d362daf
feat: CPEs format decoder (#4207)
Signed-off-by: Adam Chovanec <git@adamchovanec.cz>
2025-11-12 10:45:09 -05:00
Alex Goodman
66c78d44af
Document additional json schema fields (#4356)
* add documentation to key fields

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

* regenerate json schema

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

---------

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>
2025-11-10 16:29:06 -05:00
dependabot[bot]
78a4ab8ced
chore(deps): bump github.com/olekukonko/tablewriter from 1.0.9 to 1.1.1 (#4354)
Bumps [github.com/olekukonko/tablewriter](https://github.com/olekukonko/tablewriter) from 1.0.9 to 1.1.1.
- [Commits](https://github.com/olekukonko/tablewriter/compare/v1.0.9...v1.1.1)

---
updated-dependencies:
- dependency-name: github.com/olekukonko/tablewriter
  dependency-version: 1.1.1
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-10 13:31:15 -05:00
dependabot[bot]
25ca33d20e
chore(deps): bump github.com/jedib0t/go-pretty/v6 from 6.7.0 to 6.7.1 (#4355)
Bumps [github.com/jedib0t/go-pretty/v6](https://github.com/jedib0t/go-pretty) from 6.7.0 to 6.7.1.
- [Release notes](https://github.com/jedib0t/go-pretty/releases)
- [Commits](https://github.com/jedib0t/go-pretty/compare/v6.7.0...v6.7.1)

---
updated-dependencies:
- dependency-name: github.com/jedib0t/go-pretty/v6
  dependency-version: 6.7.1
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-10 13:30:56 -05:00
anchore-actions-token-generator[bot]
60ca241593
chore(deps): update tools to latest versions (#4347)
* chore: new tool checks
---------
Signed-off-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Signed-off-by: Christopher Phillips <32073428+spiffcs@users.noreply.github.com>
Co-authored-by: spiffcs <32073428+spiffcs@users.noreply.github.com>
2025-11-07 20:56:44 +00:00
dependabot[bot]
0f475c8bcd
chore(deps): bump github.com/opencontainers/selinux (#4349)
Bumps [github.com/opencontainers/selinux](https://github.com/opencontainers/selinux) from 1.11.0 to 1.13.0.
- [Release notes](https://github.com/opencontainers/selinux/releases)
- [Commits](https://github.com/opencontainers/selinux/compare/v1.11.0...v1.13.0)

---
updated-dependencies:
- dependency-name: github.com/opencontainers/selinux
  dependency-version: 1.13.0
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-07 15:21:35 -05:00
Alex Goodman
199394934d
preserve --from order (#4350)
Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>
2025-11-07 10:17:10 -05:00
dependabot[bot]
8a22d394ed
chore(deps): bump golang.org/x/time from 0.12.0 to 0.14.0 (#4348)
Bumps [golang.org/x/time](https://github.com/golang/time) from 0.12.0 to 0.14.0.
- [Commits](https://github.com/golang/time/compare/v0.12.0...v0.14.0)

---
updated-dependencies:
- dependency-name: golang.org/x/time
  dependency-version: 0.14.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-07 08:48:20 -05:00
Tim Olshansky
bbef262b8f
feat: Add license enrichment from pypi to python packages (#4295)
* feat: Add license enrichment from pypi to python packages
* Implement license caching and improve test coverage
---------
Signed-off-by: Tim Olshansky <456103+timols@users.noreply.github.com>
2025-11-06 16:05:08 -05:00
Tim Olshansky
4e06a7ab32
feat(javascript): Add dependency parsing (#4304)
* feat: Add dependency parsing to javascript package locks

Signed-off-by: Tim Olshansky <456103+timols@users.noreply.github.com>

* Bump schema version

Signed-off-by: Tim Olshansky <456103+timols@users.noreply.github.com>

* Add support for yarn and pnpm, excl. yarn v1

Signed-off-by: Tim Olshansky <456103+timols@users.noreply.github.com>

* Add support for dependencies for v1 yarn lock files

Signed-off-by: Tim Olshansky <456103+timols@users.noreply.github.com>

* Ensure schema is correctly generated

Signed-off-by: Tim Olshansky <456103+timols@users.noreply.github.com>

* Fix tests

Signed-off-by: Tim Olshansky <456103+timols@users.noreply.github.com>

* PR feedback

Signed-off-by: Tim Olshansky <456103+timols@users.noreply.github.com>

---------

Signed-off-by: Tim Olshansky <456103+timols@users.noreply.github.com>
2025-11-06 16:03:43 -05:00
Alex Goodman
e5711e9b42
Update CPE processing to use NVD API (#4332)
* update NVD CPE dictionary processor to use API

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

* pass linting with exceptions

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

---------

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>
2025-11-06 16:02:26 -05:00
Rez Moss
f69b1db099
feat: detect elixir bin (#4334)
* Elixir detection, fixed #4333
---------
Signed-off-by: Rez Moss <hi@rezmoss.com>
2025-11-06 16:02:02 -05:00
dependabot[bot]
fe1ea443c2
chore(deps): bump github.com/jedib0t/go-pretty/v6 from 6.6.9 to 6.7.0 (#4337)
Bumps [github.com/jedib0t/go-pretty/v6](https://github.com/jedib0t/go-pretty) from 6.6.9 to 6.7.0.
- [Release notes](https://github.com/jedib0t/go-pretty/releases)
- [Commits](https://github.com/jedib0t/go-pretty/compare/v6.6.9...v6.7.0)

---
updated-dependencies:
- dependency-name: github.com/jedib0t/go-pretty/v6
  dependency-version: 6.7.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-06 15:47:49 -05:00
dependabot[bot]
bfcbf266df
chore(deps): bump github.com/containerd/containerd from 1.7.28 to 1.7.29 (#4340)
Bumps [github.com/containerd/containerd](https://github.com/containerd/containerd) from 1.7.28 to 1.7.29.
- [Release notes](https://github.com/containerd/containerd/releases)
- [Changelog](https://github.com/containerd/containerd/blob/main/RELEASES.md)
- [Commits](https://github.com/containerd/containerd/compare/v1.7.28...v1.7.29)

---
updated-dependencies:
- dependency-name: github.com/containerd/containerd
  dependency-version: 1.7.29
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-06 15:46:32 -05:00
Keith Zantow
a400c675fc
feat: license file search (#4327)
Signed-off-by: Keith Zantow <kzantow@gmail.com>
2025-11-03 14:16:05 -05:00
Alex Goodman
7c154e7c37
use official action for token generation (#4331)
Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>
2025-11-03 13:08:42 -05:00
anchore-actions-token-generator[bot]
4c93394bc2
chore(deps): update anchore dependencies (#4330)
Signed-off-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: spiffcs <32073428+spiffcs@users.noreply.github.com>
2025-11-03 12:44:07 -05:00
kdt523
3e4e82f03e
Canonicalize Ghostscript CPE/PURL for ghostscript packages from PE Binaries (#4308)
* binary(pe): canonicalize Ghostscript CPE to artifex:ghostscript and add generic purl for PE (#4275)

- Detect Ghostscript via PE version resources and set purl pkg:generic/ghostscript@<version>
- Add PE-specific CPE candidates: vendor 'artifex', product 'ghostscript'
- Add focused unit tests for purl and CPE generation

Signed-off-by: kdt523 <krushna.datir231@vit.edu>

* fix: gofmt formatting for static analysis pass (pe-ghostscript-cpe-purl-4275)

Signed-off-by: kdt523 <krushna.datir231@vit.edu>

---------

Signed-off-by: kdt523 <krushna.datir231@vit.edu>
2025-11-03 14:54:48 +00:00
dependabot[bot]
793b0a346f
chore(deps): bump github/codeql-action from 4.31.1 to 4.31.2 (#4325)
Bumps [github/codeql-action](https://github.com/github/codeql-action) from 4.31.1 to 4.31.2.
- [Release notes](https://github.com/github/codeql-action/releases)
- [Changelog](https://github.com/github/codeql-action/blob/main/CHANGELOG.md)
- [Commits](5fe9434cd2...0499de31b9)

---
updated-dependencies:
- dependency-name: github/codeql-action
  dependency-version: 4.31.2
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-03 09:11:20 -05:00
dependabot[bot]
a0dac519db
chore(deps): bump github.com/hashicorp/go-getter from 1.8.2 to 1.8.3 (#4326)
Bumps [github.com/hashicorp/go-getter](https://github.com/hashicorp/go-getter) from 1.8.2 to 1.8.3.
- [Release notes](https://github.com/hashicorp/go-getter/releases)
- [Changelog](https://github.com/hashicorp/go-getter/blob/main/.goreleaser.yml)
- [Commits](https://github.com/hashicorp/go-getter/compare/v1.8.2...v1.8.3)

---
updated-dependencies:
- dependency-name: github.com/hashicorp/go-getter
  dependency-version: 1.8.3
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-03 09:11:12 -05:00
dependabot[bot]
34f5e521c1
chore(deps): bump modernc.org/sqlite from 1.39.1 to 1.40.0 (#4329)
Bumps [modernc.org/sqlite](https://gitlab.com/cznic/sqlite) from 1.39.1 to 1.40.0.
- [Commits](https://gitlab.com/cznic/sqlite/compare/v1.39.1...v1.40.0)

---
updated-dependencies:
- dependency-name: modernc.org/sqlite
  dependency-version: 1.40.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-03 09:11:05 -05:00
dependabot[bot]
774b1e97b9
chore(deps): bump github/codeql-action from 4.31.0 to 4.31.1 (#4321)
Bumps [github/codeql-action](https://github.com/github/codeql-action) from 4.31.0 to 4.31.1.
- [Release notes](https://github.com/github/codeql-action/releases)
- [Changelog](https://github.com/github/codeql-action/blob/main/CHANGELOG.md)
- [Commits](4e94bd11f7...5fe9434cd2)

---
updated-dependencies:
- dependency-name: github/codeql-action
  dependency-version: 4.31.1
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-10-30 13:19:57 -04:00
Alex Goodman
538430d65d
describe cataloger capabilities via test observations (#4318)
Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>
2025-10-30 13:19:42 -04:00
Alex Goodman
5db3a9bf55
add workflow to create PR for spdx license list updates (#4319)
Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>
2025-10-30 12:14:13 -04:00
Stepan
efc2f0012c
fix: go binary replace handling in path (#4156)
* Fix issue with relative paths on go binary

Signed-off-by: Stepan <stepworm@yandex.ru>

* Linting

Signed-off-by: Stepan <stepworm@yandex.ru>

---------

Signed-off-by: Stepan <stepworm@yandex.ru>
Co-authored-by: Alex Goodman <wagoodman@users.noreply.github.com>
2025-10-29 15:59:47 +00:00
kyounghoonJang
c5c1454848
feat(java): Add support for .far (Feature Archive) files (#4193)
* feat(java): add support for .far archives

Enables the Java cataloger to recognize and catalog dependencies within .far files, which are used in Apache Sling applications.

Signed-off-by: Kyounghoon Jang <matkimchi_@naver.com>

* feat(java): Add tests for .far (Feature Archive) file support

Signed-off-by: Kyounghoon Jang <matkimchi_@naver.com>

---------

Signed-off-by: Kyounghoon Jang <matkimchi_@naver.com>
Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>
Co-authored-by: Alex Goodman <wagoodman@users.noreply.github.com>
2025-10-29 15:41:27 +00:00
Kudryavcev Nikolay
f5c765192c
Refactor fileresolver to not require base path (#4298)
* ref: close source in test and examples

Signed-off-by: Kudryavcev Nikolay <kydry.nikolau@gmail.com>

* ref: pretty file/directory source resolver (make them more similar)

Signed-off-by: Kudryavcev Nikolay <kydry.nikolau@gmail.com>

* ref: move absoluteSymlinkFreePathToParent to file resolver

Signed-off-by: Kudryavcev Nikolay <kydry.nikolau@gmail.com>

* revert breaking change

Signed-off-by: Kudryavcev Nikolay <kydry.nikolau@gmail.com>

---------

Signed-off-by: Kudryavcev Nikolay <kydry.nikolau@gmail.com>
2025-10-29 10:41:18 -04:00
Will Murphy
728feea620
ci: use apple creds before pushing tags (#4313)
We have had a few releases fail because the Apple credentials needed
some sort of fix. These releases were operationally more interesting
because they failed after pushing a git tag (which effectively releases
the golang package). Therefore, try to use these creds early, before
there's a tag pushed.

Signed-off-by: Will Murphy <willmurphyscode@users.noreply.github.com>
2025-10-29 10:07:47 -04:00
dependabot[bot]
45fb52dca1
chore(deps): bump github.com/jedib0t/go-pretty/v6 from 6.6.8 to 6.6.9 (#4315)
Bumps [github.com/jedib0t/go-pretty/v6](https://github.com/jedib0t/go-pretty) from 6.6.8 to 6.6.9.
- [Release notes](https://github.com/jedib0t/go-pretty/releases)
- [Commits](https://github.com/jedib0t/go-pretty/compare/v6.6.8...v6.6.9)

---
updated-dependencies:
- dependency-name: github.com/jedib0t/go-pretty/v6
  dependency-version: 6.6.9
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-10-29 10:06:37 -04:00
Rez Moss
45bf8b14ab
fix: omit records with empty PURL in GitHub format (#4312)
Signed-off-by: Rez Moss <hi@rezmoss.com>
2025-10-28 18:34:10 -04:00
Brian Muenzenmeyer
9478cd974b
docs: update template link in README.md (#4306)
Signed-off-by: Brian Muenzenmeyer <brian.muenzenmeyer@gmail.com>
2025-10-28 11:29:07 -04:00
Will Murphy
0d9ea69a66
Respect "rpmmod" PURL qualifier (#4314)
Red Hat puts the RPM modularity info in a query param in the PURLs in
their vulnerability data. It would be nice if Syft respected this
qualifier so that Grype can use it when a Red Hat purl is passed.

Signed-off-by: Will Murphy <willmurphyscode@users.noreply.github.com>
2025-10-28 09:35:11 -04:00
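
For context, a minimal sketch of reading such a qualifier with the packageurl-go API (syft vendors the anchore fork, which keeps the same surface); the purl string and module value here are illustrative, not taken from Red Hat data:

package main

import (
	"fmt"

	"github.com/package-url/packageurl-go"
)

func main() {
	// illustrative Red Hat-style purl carrying modularity info in the rpmmod qualifier
	raw := "pkg:rpm/redhat/httpd@2.4.62-4.el9?arch=x86_64&rpmmod=httpd:2.4"

	purl, err := packageurl.FromString(raw)
	if err != nil {
		panic(err)
	}

	// Qualifiers.Map flattens the qualifier key/value pairs for lookup
	if mod, ok := purl.Qualifiers.Map()["rpmmod"]; ok {
		fmt.Printf("package %s uses RPM module %s\n", purl.Name, mod)
	}
}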
dependabot[bot]
bee78c0b16
chore(deps): bump github/codeql-action from 4.30.9 to 4.31.0 (#4310)
Bumps [github/codeql-action](https://github.com/github/codeql-action) from 4.30.9 to 4.31.0.
- [Release notes](https://github.com/github/codeql-action/releases)
- [Changelog](https://github.com/github/codeql-action/blob/main/CHANGELOG.md)
- [Commits](16140ae1a1...4e94bd11f7)

---
updated-dependencies:
- dependency-name: github/codeql-action
  dependency-version: 4.31.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-10-27 10:43:04 -04:00
dependabot[bot]
88bbcbe9c6
chore(deps): bump anchore/sbom-action from 0.20.8 to 0.20.9 (#4305)
2025-10-27 02:03:09 -04:00
anchore-actions-token-generator[bot]
e0680eb704
chore(deps): update tools to latest versions (#4307)
2025-10-27 02:02:47 -04:00
Marc
16f851c5d9
feat: include .rar files as Java archives for Java resource adapters (#4137)
Signed-off-by: Marc Thomas <marc.thomas@t-systems.com>
2025-10-24 11:55:02 -04:00
Ross Kirk
d5ca1ad543
fix: ignore dpkg entries with "deinstall" status (#4231)
Signed-off-by: Ross Kirk <ross.kirk@upwind.io>
2025-10-23 16:23:58 -04:00
anchore-actions-token-generator[bot]
8be463911c
chore(deps): update tools to latest versions (#4302)
Signed-off-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: spiffcs <32073428+spiffcs@users.noreply.github.com>
2025-10-22 09:38:18 -04:00
dependabot[bot]
44b7b0947c
chore(deps): bump github.com/github/go-spdx/v2 from 2.3.3 to 2.3.4 (#4301)
2025-10-21 09:34:26 -04:00
dependabot[bot]
675075e882
chore(deps): bump github/codeql-action from 4.30.8 to 4.30.9 (#4299)
Bumps [github/codeql-action](https://github.com/github/codeql-action) from 4.30.8 to 4.30.9.
- [Release notes](https://github.com/github/codeql-action/releases)
- [Changelog](https://github.com/github/codeql-action/blob/main/CHANGELOG.md)
- [Commits](f443b600d9...16140ae1a1)

---
updated-dependencies:
- dependency-name: github/codeql-action
  dependency-version: 4.30.9
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-10-20 10:08:39 -04:00
JoeyShapiro
31b2c4c090
support universal (fat) mach-o binary files (#4278)
Signed-off-by: Joseph Shapiro <joeyashapiro@gmail.com>
2025-10-17 13:41:59 -04:00
dependabot[bot]
07029ead8a
chore(deps): bump sigstore/cosign-installer from 3.10.0 to 4.0.0 (#4296)
Bumps [sigstore/cosign-installer](https://github.com/sigstore/cosign-installer) from 3.10.0 to 4.0.0.
- [Release notes](https://github.com/sigstore/cosign-installer/releases)
- [Commits](d7543c93d8...faadad0cce)

---
updated-dependencies:
- dependency-name: sigstore/cosign-installer
  dependency-version: 4.0.0
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-10-17 10:22:20 -04:00
dependabot[bot]
f4de1e863c
chore(deps): bump anchore/sbom-action from 0.20.7 to 0.20.8 (#4297)
Bumps [anchore/sbom-action](https://github.com/anchore/sbom-action) from 0.20.7 to 0.20.8.
- [Release notes](https://github.com/anchore/sbom-action/releases)
- [Changelog](https://github.com/anchore/sbom-action/blob/main/RELEASE.md)
- [Commits](d8a2c01300...aa0e114b2e)

---
updated-dependencies:
- dependency-name: anchore/sbom-action
  dependency-version: 0.20.8
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-10-17 10:22:10 -04:00
JoeyShapiro
538b4a2194
convert posix path back to windows (#4285)
Signed-off-by: Joseph Shapiro <joeyashapiro@gmail.com>
2025-10-17 09:29:06 -04:00
Kudryavcev Nikolay
fc74b07369
Remove duplicate image source providers (#4289)
Signed-off-by: Kudryavcev Nikolay <kydry.nikolau@gmail.com>
2025-10-16 16:19:11 -04:00
dependabot[bot]
6627c5214c
chore(deps): bump anchore/sbom-action from 0.20.6 to 0.20.7 (#4293)
Bumps [anchore/sbom-action](https://github.com/anchore/sbom-action) from 0.20.6 to 0.20.7.
- [Release notes](https://github.com/anchore/sbom-action/releases)
- [Changelog](https://github.com/anchore/sbom-action/blob/main/RELEASE.md)
- [Commits](f8bdd1d8ac...d8a2c01300)

---
updated-dependencies:
- dependency-name: anchore/sbom-action
  dependency-version: 0.20.7
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-10-16 13:57:17 -04:00
Tim Olshansky
c0f32e1dba
feat: add option to fetch remote licenses for pnpm-lock.yaml files (#4286)
Signed-off-by: Tim Olshansky <456103+timols@users.noreply.github.com>
2025-10-16 12:23:06 -04:00
Pavel Buchart
e923db2a94
Add PDM parser (#4234)
Signed-off-by: Pavel Buchart <pavel@buchart.cz>
Signed-off-by: Keith Zantow <kzantow@gmail.com>
Co-authored-by: Keith Zantow <kzantow@gmail.com>
2025-10-16 08:50:44 -04:00
anchore-actions-token-generator[bot]
0c98a364d5
chore(deps): update tools to latest versions (#4291)
Signed-off-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: spiffcs <32073428+spiffcs@users.noreply.github.com>
2025-10-16 07:02:32 -04:00
Keith Zantow
4343d04652
fix: panic during java archive maven resolution (#4290)
Signed-off-by: Keith Zantow <kzantow@gmail.com>
2025-10-16 07:00:31 -04:00
Kudryavcev Nikolay
065ac13ab7
Extract zip archive with multiple entries (#4283)
* extract zip archive with multiple entries

Signed-off-by: Kudryavcev Nikolay <kydry.nikolau@gmail.com>

* set OverwriteExisting by type assertion switch case

Signed-off-by: Kudryavcev Nikolay <kydry.nikolau@gmail.com>

---------

Signed-off-by: Kudryavcev Nikolay <kydry.nikolau@gmail.com>
2025-10-15 12:05:05 -04:00
Christopher Angelo Phillips
e9a8bc5ab9
chore: update to use old configuration on new cosign (#4287)
Signed-off-by: Christopher Phillips <32073428+spiffcs@users.noreply.github.com>
2025-10-15 15:12:20 +00:00
231 changed files with 28538 additions and 1610 deletions

View File

@ -26,7 +26,7 @@ tools:
# used for linting
- name: golangci-lint
version:
want: v2.5.0
want: v2.6.2
method: github-release
with:
repo: golangci/golangci-lint
@ -58,7 +58,7 @@ tools:
# used to release all artifacts
- name: goreleaser
version:
want: v2.12.5
want: v2.12.7
method: github-release
with:
repo: goreleaser/goreleaser
@ -90,7 +90,7 @@ tools:
# used for running all local and CI tasks
- name: task
version:
want: v3.45.4
want: v3.45.5
method: github-release
with:
repo: go-task/task
@ -98,7 +98,7 @@ tools:
# used for triggering a release
- name: gh
version:
want: v2.81.0
want: v2.83.1
method: github-release
with:
repo: cli/cli
@ -114,7 +114,7 @@ tools:
# used to upload test fixture cache
- name: yq
version:
want: v4.48.1
want: v4.48.2
method: github-release
with:
repo: mikefarah/yq

View File

@ -47,7 +47,7 @@ jobs:
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@f443b600d91635bebf5b0d9ebc620189c0d6fba5 #v3.29.5
uses: github/codeql-action/init@014f16e7ab1402f30e7c3329d33797e7948572db #v3.29.5
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@ -58,7 +58,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@f443b600d91635bebf5b0d9ebc620189c0d6fba5 #v3.29.5
uses: github/codeql-action/autobuild@014f16e7ab1402f30e7c3329d33797e7948572db #v3.29.5
# Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
@ -72,4 +72,4 @@ jobs:
# make release
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@f443b600d91635bebf5b0d9ebc620189c0d6fba5 #v3.29.5
uses: github/codeql-action/analyze@014f16e7ab1402f30e7c3329d33797e7948572db #v3.29.5

View File

@ -19,6 +19,16 @@ jobs:
with:
persist-credentials: false
- name: Bootstrap environment
uses: ./.github/actions/bootstrap
- name: Validate Apple notarization credentials
run: .tool/quill submission list
env:
QUILL_NOTARY_ISSUER: ${{ secrets.APPLE_NOTARY_ISSUER }}
QUILL_NOTARY_KEY_ID: ${{ secrets.APPLE_NOTARY_KEY_ID }}
QUILL_NOTARY_KEY: ${{ secrets.APPLE_NOTARY_KEY }}
- name: Check if running on main
if: github.ref != 'refs/heads/main'
# we are using the following flag when running `cosign blob-verify` for checksum signature verification:
@ -161,7 +171,7 @@ jobs:
# for updating brew formula in anchore/homebrew-syft
GITHUB_BREW_TOKEN: ${{ secrets.ANCHOREOPS_GITHUB_OSS_WRITE_TOKEN }}
- uses: anchore/sbom-action@f8bdd1d8ac5e901a77a92f111440fdb1b593736b #v0.20.6
- uses: anchore/sbom-action@8e94d75ddd33f69f691467e42275782e4bfefe84 #v0.20.9
continue-on-error: true
with:
file: go.mod

View File

@ -31,11 +31,11 @@ jobs:
with:
repos: ${{ github.event.inputs.repos }}
- uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a #v2.1.0
- uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 #v2.1.4
id: generate-token
with:
app_id: ${{ secrets.TOKEN_APP_ID }}
private_key: ${{ secrets.TOKEN_APP_PRIVATE_KEY }}
app-id: ${{ secrets.TOKEN_APP_ID }}
private-key: ${{ secrets.TOKEN_APP_PRIVATE_KEY }}
- uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e #v7.0.8
with:

View File

@ -45,11 +45,11 @@ jobs:
echo "\`\`\`"
} >> $GITHUB_STEP_SUMMARY
- uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a #v2.1.0
- uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 #v2.1.4
id: generate-token
with:
app_id: ${{ secrets.TOKEN_APP_ID }}
private_key: ${{ secrets.TOKEN_APP_PRIVATE_KEY }}
app-id: ${{ secrets.TOKEN_APP_ID }}
private-key: ${{ secrets.TOKEN_APP_PRIVATE_KEY }}
- uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e #v7.0.8
with:

View File

@ -14,6 +14,9 @@ env:
jobs:
upgrade-cpe-dictionary-index:
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
if: github.repository == 'anchore/syft' # only run for main repo
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
@ -22,18 +25,31 @@ jobs:
- name: Bootstrap environment
uses: ./.github/actions/bootstrap
id: bootstrap
- name: Bootstrap environment
uses: ./.github/actions/bootstrap
- name: Login to GitHub Container Registry
run: |
echo "${{ secrets.GITHUB_TOKEN }}" | ${{ steps.bootstrap.outputs.oras }} login ghcr.io -u ${{ github.actor }} --password-stdin
- run: |
make generate-cpe-dictionary-index
- name: Pull CPE cache from registry
run: make generate:cpe-index:cache:pull
- uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a #v2.1.0
- name: Update CPE cache from NVD API
run: make generate:cpe-index:cache:update
env:
NVD_API_KEY: ${{ secrets.NVD_API_KEY }}
- name: Generate CPE dictionary index
run: make generate:cpe-index:build
- name: Push updated CPE cache to registry
run: make generate:cpe-index:cache:push
- uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 #v2.1.4
id: generate-token
with:
app_id: ${{ secrets.TOKEN_APP_ID }}
private_key: ${{ secrets.TOKEN_APP_PRIVATE_KEY }}
app-id: ${{ secrets.TOKEN_APP_ID }}
private-key: ${{ secrets.TOKEN_APP_PRIVATE_KEY }}
- uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e #v7.0.8
with:

View File

@ -0,0 +1,54 @@
name: PR to update SPDX license list
on:
schedule:
- cron: "0 6 * * 1" # every monday at 6 AM UTC
workflow_dispatch:
permissions:
contents: read
env:
SLACK_NOTIFICATIONS: true
jobs:
upgrade-spdx-license-list:
runs-on: ubuntu-latest
if: github.repository == 'anchore/syft' # only run for main repo
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
with:
persist-credentials: false
- name: Bootstrap environment
uses: ./.github/actions/bootstrap
- run: |
make generate-license-list
- uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a #v2.1.0
id: generate-token
with:
app_id: ${{ secrets.TOKEN_APP_ID }}
private_key: ${{ secrets.TOKEN_APP_PRIVATE_KEY }}
- uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e #v7.0.8
with:
signoff: true
delete-branch: true
branch: auto/latest-spdx-license-list
labels: dependencies
commit-message: "chore(deps): update SPDX license list"
title: "chore(deps): update SPDX license list"
body: |
Update SPDX license list based on the latest available list from spdx.org
token: ${{ steps.generate-token.outputs.token }}
- uses: 8398a7/action-slack@77eaa4f1c608a7d68b38af4e3f739dcd8cba273e #v3.19.0
with:
status: ${{ job.status }}
fields: workflow,eventName,job
text: Syft SPDX license list update failed
env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_TOOLBOX_WEBHOOK_URL }}
if: ${{ failure() && env.SLACK_NOTIFICATIONS == 'true' }}

View File

@ -210,7 +210,7 @@ jobs:
runs-on: macos-latest
steps:
- name: Install Cosign
uses: sigstore/cosign-installer@d7543c93d881b35a8faa02e8e3605f69b7a1ce62 # v3.10.0
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
with:

.gitignore (3 changed lines)
View File

@ -2,6 +2,7 @@
go.work
go.work.sum
.tool-versions
.python-version
# app configuration
/.syft.yaml
@ -72,3 +73,5 @@ cosign.pub
__pycache__/
*.py[cod]
*$py.class

View File

@ -337,6 +337,7 @@ signs:
certificate: "${artifact}.pem"
args:
- "sign-blob"
- "--use-signing-config=false"
- "--oidc-issuer=https://token.actions.githubusercontent.com"
- "--output-certificate=${certificate}"
- "--output-signature=${signature}"

View File

@ -106,8 +106,8 @@ syft <image> -o <format>
Where the `formats` available are:
- `syft-json`: Use this to get as much information out of Syft as possible!
- `syft-text`: A row-oriented, human-and-machine-friendly output.
- `cyclonedx-xml`: A XML report conforming to the [CycloneDX 1.6 specification](https://cyclonedx.org/specification/overview/).
- `cyclonedx-xml@1.5`: A XML report conforming to the [CycloneDX 1.5 specification](https://cyclonedx.org/specification/overview/).
- `cyclonedx-xml`: An XML report conforming to the [CycloneDX 1.6 specification](https://cyclonedx.org/specification/overview/).
- `cyclonedx-xml@1.5`: An XML report conforming to the [CycloneDX 1.5 specification](https://cyclonedx.org/specification/overview/).
- `cyclonedx-json`: A JSON report conforming to the [CycloneDX 1.6 specification](https://cyclonedx.org/specification/overview/).
- `cyclonedx-json@1.5`: A JSON report conforming to the [CycloneDX 1.5 specification](https://cyclonedx.org/specification/overview/).
- `spdx-tag-value`: A tag-value formatted report conforming to the [SPDX 2.3 specification](https://spdx.github.io/spdx-spec/v2.3/).
@ -116,7 +116,7 @@ Where the `formats` available are:
- `spdx-json@2.2`: A JSON report conforming to the [SPDX 2.2 JSON Schema](https://github.com/spdx/spdx-spec/blob/v2.2/schemas/spdx-schema.json).
- `github-json`: A JSON report conforming to GitHub's dependency snapshot format.
- `syft-table`: A columnar summary (default).
- `template`: Lets the user specify the output format. See ["Using templates"](#using-templates) below.
- `template`: Lets the user specify the output format. See ["Using templates"](https://github.com/anchore/syft/wiki/using-templates) below.
Note that flags using the @<version> can be used for earlier versions of each specification as well.
@ -135,7 +135,7 @@ Note that flags using the @<version> can be used for earlier versions of each sp
- Go (go.mod, Go binaries)
- GitHub (workflows, actions)
- Haskell (cabal, stack)
- Java (jar, ear, war, par, sar, nar, native-image)
- Java (jar, ear, war, par, sar, nar, rar, native-image)
- JavaScript (npm, yarn)
- Jenkins Plugins (jpi, hpi)
- Linux kernel archives (vmlinz)

View File

@ -1,5 +1,9 @@
version: "3"
includes:
generate:cpe-index: ./task.d/generate/cpe-index.yaml
vars:
OWNER: anchore
PROJECT: syft
@ -511,10 +515,11 @@ tasks:
- "gofmt -s -w ./internal/spdxlicense"
generate-cpe-dictionary-index:
desc: Generate the CPE index based off of the latest available CPE dictionary
dir: "syft/pkg/cataloger/internal/cpegenerate/dictionary"
desc: Generate the CPE index from local cache
cmds:
- "go generate"
- task: generate:cpe-index:cache:pull
- task: generate:cpe-index:cache:update
- task: generate:cpe-index:build
## Build-related targets #################################

View File

@ -253,7 +253,6 @@ func generateSBOMForAttestation(ctx context.Context, id clio.Identification, opt
}
src, err := getSource(ctx, opts, userInput, stereoscope.RegistryTag)
if err != nil {
return nil, err
}

View File

@ -87,8 +87,8 @@ func runCatalogerList(opts *catalogerListOptions) error {
}
func catalogerListReport(opts *catalogerListOptions, allTaskGroups [][]task.Task) (string, error) {
defaultCatalogers := options.Flatten(opts.DefaultCatalogers)
selectCatalogers := options.Flatten(opts.SelectCatalogers)
defaultCatalogers := options.FlattenAndSort(opts.DefaultCatalogers)
selectCatalogers := options.FlattenAndSort(opts.SelectCatalogers)
selectedTaskGroups, selectionEvidence, err := task.SelectInGroups(
allTaskGroups,
cataloging.NewSelectionRequest().

View File

@ -185,7 +185,6 @@ func runScan(ctx context.Context, id clio.Identification, opts *scanOptions, use
}
src, err := getSource(ctx, &opts.Catalog, userInput, sources...)
if err != nil {
return err
}

View File

@ -198,9 +198,10 @@ func (cfg Catalog) ToPackagesConfig() pkgcataloging.Config {
},
Nix: nix.DefaultConfig().
WithCaptureOwnedFiles(cfg.Nix.CaptureOwnedFiles),
Python: python.CatalogerConfig{
GuessUnpinnedRequirements: cfg.Python.GuessUnpinnedRequirements,
},
Python: python.DefaultCatalogerConfig().
WithSearchRemoteLicenses(*multiLevelOption(false, enrichmentEnabled(cfg.Enrich, task.Python), cfg.Python.SearchRemoteLicenses)).
WithPypiBaseURL(cfg.Python.PypiBaseURL).
WithGuessUnpinnedRequirements(*multiLevelOption(false, enrichmentEnabled(cfg.Enrich, task.Python), cfg.Python.GuessUnpinnedRequirements)),
JavaArchive: java.DefaultArchiveCatalogerConfig().
WithUseMavenLocalRepository(*multiLevelOption(false, enrichmentEnabled(cfg.Enrich, task.Java, task.Maven), cfg.Java.UseMavenLocalRepository)).
WithMavenLocalRepositoryDir(cfg.Java.MavenLocalRepositoryDir).
@ -283,10 +284,10 @@ func (cfg *Catalog) PostLoad() error {
cfg.From = Flatten(cfg.From)
cfg.Catalogers = Flatten(cfg.Catalogers)
cfg.DefaultCatalogers = Flatten(cfg.DefaultCatalogers)
cfg.SelectCatalogers = Flatten(cfg.SelectCatalogers)
cfg.Enrich = Flatten(cfg.Enrich)
cfg.Catalogers = FlattenAndSort(cfg.Catalogers)
cfg.DefaultCatalogers = FlattenAndSort(cfg.DefaultCatalogers)
cfg.SelectCatalogers = FlattenAndSort(cfg.SelectCatalogers)
cfg.Enrich = FlattenAndSort(cfg.Enrich)
// for backwards compatibility
cfg.DefaultCatalogers = append(cfg.DefaultCatalogers, cfg.Catalogers...)
@ -311,6 +312,11 @@ func Flatten(commaSeparatedEntries []string) []string {
out = append(out, strings.TrimSpace(s))
}
}
return out
}
func FlattenAndSort(commaSeparatedEntries []string) []string {
out := Flatten(commaSeparatedEntries)
sort.Strings(out)
return out
}
@ -320,6 +326,7 @@ var publicisedEnrichmentOptions = []string{
task.Golang,
task.Java,
task.JavaScript,
task.Python,
}
func enrichmentEnabled(enrichDirectives []string, features ...string) *bool {

View File

@ -79,6 +79,98 @@ func TestCatalog_PostLoad(t *testing.T) {
}
}
func TestFlatten(t *testing.T) {
tests := []struct {
name string
input []string
expected []string
}{
{
name: "preserves order of comma-separated values",
input: []string{"registry,docker,oci-dir"},
expected: []string{"registry", "docker", "oci-dir"},
},
{
name: "preserves order across multiple entries",
input: []string{"registry,docker", "oci-dir"},
expected: []string{"registry", "docker", "oci-dir"},
},
{
name: "trims whitespace",
input: []string{" registry , docker ", " oci-dir "},
expected: []string{"registry", "docker", "oci-dir"},
},
{
name: "handles single value",
input: []string{"registry"},
expected: []string{"registry"},
},
{
name: "handles empty input",
input: []string{},
expected: nil,
},
{
name: "preserves reverse alphabetical order",
input: []string{"zebra,yankee,xray"},
expected: []string{"zebra", "yankee", "xray"},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := Flatten(tt.input)
assert.Equal(t, tt.expected, got)
})
}
}
func TestFlattenAndSort(t *testing.T) {
tests := []struct {
name string
input []string
expected []string
}{
{
name: "sorts comma-separated values",
input: []string{"registry,docker,oci-dir"},
expected: []string{"docker", "oci-dir", "registry"},
},
{
name: "sorts across multiple entries",
input: []string{"registry,docker", "oci-dir"},
expected: []string{"docker", "oci-dir", "registry"},
},
{
name: "trims whitespace and sorts",
input: []string{" registry , docker ", " oci-dir "},
expected: []string{"docker", "oci-dir", "registry"},
},
{
name: "handles single value",
input: []string{"registry"},
expected: []string{"registry"},
},
{
name: "handles empty input",
input: []string{},
expected: nil,
},
{
name: "sorts reverse alphabetical order",
input: []string{"zebra,yankee,xray"},
expected: []string{"xray", "yankee", "zebra"},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := FlattenAndSort(tt.input)
assert.Equal(t, tt.expected, got)
})
}
}
func Test_enrichmentEnabled(t *testing.T) {
tests := []struct {
directives string
@ -139,7 +231,7 @@ func Test_enrichmentEnabled(t *testing.T) {
for _, test := range tests {
t.Run(test.directives, func(t *testing.T) {
got := enrichmentEnabled(Flatten([]string{test.directives}), test.test)
got := enrichmentEnabled(FlattenAndSort([]string{test.directives}), test.test)
assert.Equal(t, test.expected, got)
})
}

View File

@ -3,7 +3,9 @@ package options
import "github.com/anchore/clio"
type pythonConfig struct {
GuessUnpinnedRequirements bool `json:"guess-unpinned-requirements" yaml:"guess-unpinned-requirements" mapstructure:"guess-unpinned-requirements"`
SearchRemoteLicenses *bool `json:"search-remote-licenses" yaml:"search-remote-licenses" mapstructure:"search-remote-licenses"`
PypiBaseURL string `json:"pypi-base-url" yaml:"pypi-base-url" mapstructure:"pypi-base-url"`
GuessUnpinnedRequirements *bool `json:"guess-unpinned-requirements" yaml:"guess-unpinned-requirements" mapstructure:"guess-unpinned-requirements"`
}
var _ interface {
@ -11,6 +13,8 @@ var _ interface {
} = (*pythonConfig)(nil)
func (o *pythonConfig) DescribeFields(descriptions clio.FieldDescriptionSet) {
descriptions.Add(&o.SearchRemoteLicenses, `enables Syft to use the network to fill in more detailed license information`)
descriptions.Add(&o.PypiBaseURL, `base Pypi url to use`)
descriptions.Add(&o.GuessUnpinnedRequirements, `when running across entries in requirements.txt that do not specify a specific version
(e.g. "sqlalchemy >= 1.0.0, <= 2.0.0, != 3.0.0, <= 3.0.0"), attempt to guess what the version could
be based on the version requirements specified (e.g. "1.0.0"). When enabled the lowest expressible version

View File

@ -25,7 +25,6 @@ func BenchmarkImagePackageCatalogers(b *testing.B) {
// get the source object for the image
theSource, err := syft.GetSource(context.Background(), tarPath, syft.DefaultGetSourceConfig().WithSources("docker-archive"))
require.NoError(b, err)
b.Cleanup(func() {
require.NoError(b, theSource.Close())
})
@ -88,6 +87,7 @@ func TestPkgCoverageImage(t *testing.T) {
definedPkgs.Remove(string(pkg.TerraformPkg))
definedPkgs.Remove(string(pkg.PhpPeclPkg)) // we have coverage for pear instead
definedPkgs.Remove(string(pkg.CondaPkg))
definedPkgs.Remove(string(pkg.ModelPkg))
var cases []testCase
cases = append(cases, commonTestCases...)
@ -162,6 +162,7 @@ func TestPkgCoverageDirectory(t *testing.T) {
definedPkgs.Remove(string(pkg.UnknownPkg))
definedPkgs.Remove(string(pkg.CondaPkg))
definedPkgs.Remove(string(pkg.PhpPeclPkg)) // this is covered as pear packages
definedPkgs.Remove(string(pkg.ModelPkg))
// for directory scans we should not expect to see any of the following package types
definedPkgs.Remove(string(pkg.KbPkg))

View File

@ -30,10 +30,10 @@ func TestPackageDeduplication(t *testing.T) {
locationCount: map[string]int{
"basesystem-11-13.el9": 5, // in all layers
"curl-minimal-7.76.1-26.el9_3.2.0.1": 2, // base + wget layer
"curl-minimal-7.76.1-31.el9": 3, // curl upgrade layer + all above layers
"curl-minimal-7.76.1-31.el9_6.1": 3, // curl upgrade layer + all above layers
"wget-1.21.1-8.el9_4": 4, // wget + all above layers
"vsftpd-3.0.5-6.el9": 2, // vsftpd + all above layers
"httpd-2.4.62-4.el9": 1, // last layer
"httpd-2.4.62-4.el9_6.4": 1, // last layer
},
},
{
@ -47,11 +47,11 @@ func TestPackageDeduplication(t *testing.T) {
"httpd": 1, // rpm, binary is now excluded by overlap
},
locationCount: map[string]int{
"basesystem-11-13.el9": 1,
"curl-minimal-7.76.1-31.el9": 1, // upgrade
"wget-1.21.1-8.el9_4": 1,
"vsftpd-3.0.5-6.el9": 1,
"httpd-2.4.62-4.el9": 1,
"basesystem-11-13.el9": 1,
"curl-minimal-7.76.1-31.el9_6.1": 1, // upgrade
"wget-1.21.1-8.el9_4": 1,
"vsftpd-3.0.5-6.el9": 1,
"httpd-2.4.62-4.el9_6.4": 1,
},
},
}

View File

@ -7,16 +7,16 @@ FROM --platform=linux/amd64 rockylinux:9.3.20231119@sha256:d644d203142cd5b54ad2a
# copying the RPM DB from each stage to a final stage in separate layers. This will result in a much smaller image.
FROM base AS stage1
RUN dnf install -y wget
RUN dnf install -y wget-1.21.1-8.el9_4
FROM stage1 AS stage2
RUN dnf update -y curl-minimal
RUN dnf update -y curl-minimal-7.76.1-31.el9_6.1
FROM stage2 AS stage3
RUN dnf install -y vsftpd
RUN dnf install -y vsftpd-3.0.5-6.el9
FROM stage3 AS stage4
RUN dnf install -y httpd
RUN dnf install -y httpd-2.4.62-4.el9_6.4
FROM scratch

View File

@ -38,11 +38,11 @@ func catalogFixtureImageWithConfig(t *testing.T, fixtureImageName string, cfg *s
// get the source to build an SBOM against
theSource, err := syft.GetSource(context.Background(), tarPath, syft.DefaultGetSourceConfig().WithSources("docker-archive"))
require.NoError(t, err)
t.Cleanup(func() {
require.NoError(t, theSource.Close())
})
// build the SBOM
s, err := syft.CreateSBOM(context.Background(), theSource, cfg)
require.NoError(t, err)
@ -66,7 +66,7 @@ func catalogDirectory(t *testing.T, dir string, catalogerSelection ...string) (s
func catalogDirectoryWithConfig(t *testing.T, dir string, cfg *syft.CreateSBOMConfig) (sbom.SBOM, source.Source) {
cfg.CatalogerSelection = cfg.CatalogerSelection.WithDefaults(pkgcataloging.DirectoryTag)
// get the source to build an sbom against
// get the source to build an SBOM against
theSource, err := syft.GetSource(context.Background(), dir, syft.DefaultGetSourceConfig().WithSources("dir"))
require.NoError(t, err)
t.Cleanup(func() {

View File

@ -23,6 +23,7 @@ const defaultImage = "alpine:3.19"
func main() {
// automagically get a source.Source for arbitrary string input
src := getSource(imageReference())
defer src.Close()
// will catalog the given source and return a SBOM keeping in mind several configurable options
sbom := getSBOM(src)
@ -46,7 +47,6 @@ func getSource(input string) source.Source {
fmt.Println("detecting source type for input:", input, "...")
src, err := syft.GetSource(context.Background(), input, nil)
if err != nil {
panic(err)
}

View File

@ -19,6 +19,7 @@ const defaultImage = "alpine:3.19"
func main() {
// automagically get a source.Source for arbitrary string input
src := getSource(imageReference())
defer src.Close()
// catalog the given source and return a SBOM
sbom := getSBOM(src)
@ -40,7 +41,6 @@ func imageReference() string {
func getSource(input string) source.Source {
src, err := syft.GetSource(context.Background(), input, nil)
if err != nil {
panic(err)
}

View File

@ -19,6 +19,7 @@ const defaultImage = "alpine:3.19"
func main() {
// automagically get a source.Source for arbitrary string input
src := getSource(imageReference())
defer src.Close()
// catalog the given source and return a SBOM
// let's explicitly use catalogers that are:
@ -44,7 +45,6 @@ func imageReference() string {
func getSource(input string) source.Source {
src, err := syft.GetSource(context.Background(), input, nil)
if err != nil {
panic(err)
}

View File

@ -15,6 +15,7 @@ func main() {
image := "alpine:3.19"
src, _ := syft.GetSource(context.Background(), image, syft.DefaultGetSourceConfig().WithSources("registry"))
defer src.Close()
sbom, _ := syft.CreateSBOM(context.Background(), src, syft.DefaultCreateSBOMConfig())
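
Taken together, the example diffs above converge on one pattern: close the source obtained from syft.GetSource once you are done with it. A consolidated sketch using only the calls shown in those examples:

package main

import (
	"context"

	"github.com/anchore/syft/syft"
)

func main() {
	// get a source for the image via the registry provider, as in the example diffs
	src, err := syft.GetSource(context.Background(), "alpine:3.19", syft.DefaultGetSourceConfig().WithSources("registry"))
	if err != nil {
		panic(err)
	}
	// the fix the diffs apply: always release the source when done
	defer src.Close()

	// catalog the source into an SBOM with default configuration
	s, err := syft.CreateSBOM(context.Background(), src, syft.DefaultCreateSBOMConfig())
	if err != nil {
		panic(err)
	}
	_ = s
}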

go.mod (51 changed lines)
View File

@ -11,11 +11,10 @@ require (
github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d
github.com/acobaugh/osrelease v0.1.0
github.com/adrg/xdg v0.5.3
github.com/anchore/archiver/v3 v3.5.3-0.20241210171143-5b1d8d1c7c51
github.com/anchore/bubbly v0.0.0-20231115134915-def0aba654a9
github.com/anchore/clio v0.0.0-20250319180342-2cfe4b0cb716
github.com/anchore/fangs v0.0.0-20250319222917-446a1e748ec2
github.com/anchore/go-collections v0.0.0-20240216171411-9321230ce537
github.com/anchore/go-collections v0.0.0-20251016125210-a3c352120e8c
github.com/anchore/go-homedir v0.0.0-20250319154043-c29668562e4d
github.com/anchore/go-logger v0.0.0-20250318195838-07ae343dd722
github.com/anchore/go-macholibre v0.0.0-20220308212642-53e6d0aaf6fb
@ -24,7 +23,7 @@ require (
github.com/anchore/go-testutils v0.0.0-20200925183923-d5f45b0d3c04
github.com/anchore/go-version v1.2.2-0.20200701162849-18adb9c92b9b
github.com/anchore/packageurl-go v0.1.1-0.20250220190351-d62adb6e1115
github.com/anchore/stereoscope v0.1.11
github.com/anchore/stereoscope v0.1.12
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be
github.com/aquasecurity/go-pep440-version v0.0.1
github.com/bitnami/go-version v0.0.0-20250131085805-b1f57a8634ef
@ -40,7 +39,7 @@ require (
github.com/dustin/go-humanize v1.0.1
github.com/elliotchance/phpserialize v1.4.0
github.com/facebookincubator/nvdtools v0.1.5
github.com/github/go-spdx/v2 v2.3.3
github.com/github/go-spdx/v2 v2.3.4
github.com/gkampitakis/go-snaps v0.5.15
github.com/go-git/go-billy/v5 v5.6.2
github.com/go-git/go-git/v5 v5.16.3
@ -53,19 +52,19 @@ require (
github.com/google/uuid v1.6.0
github.com/gookit/color v1.6.0
github.com/hashicorp/go-cleanhttp v0.5.2
github.com/hashicorp/go-getter v1.8.2
github.com/hashicorp/go-getter v1.8.3
github.com/hashicorp/go-multierror v1.1.1
github.com/hashicorp/hcl/v2 v2.24.0
github.com/iancoleman/strcase v0.3.0
github.com/invopop/jsonschema v0.7.0
github.com/jedib0t/go-pretty/v6 v6.6.8
github.com/jedib0t/go-pretty/v6 v6.7.1
github.com/jinzhu/copier v0.4.0
github.com/kastenhq/goversion v0.0.0-20230811215019-93b2f8823953
github.com/magiconair/properties v1.8.10
github.com/mholt/archives v0.1.5
github.com/moby/sys/mountinfo v0.7.2
github.com/nix-community/go-nix v0.0.0-20250101154619-4bdde671e0a1
github.com/olekukonko/tablewriter v1.0.9
github.com/olekukonko/tablewriter v1.1.1
github.com/opencontainers/go-digest v1.0.0
github.com/pelletier/go-toml v1.9.5
github.com/quasilyte/go-ruleguard/dsl v0.3.23
@ -90,9 +89,9 @@ require (
go.uber.org/goleak v1.3.0
go.yaml.in/yaml/v3 v3.0.4
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b
golang.org/x/mod v0.29.0
golang.org/x/mod v0.30.0
golang.org/x/net v0.46.0
modernc.org/sqlite v1.39.1
modernc.org/sqlite v1.40.0
)
require (
@ -131,7 +130,7 @@ require (
github.com/charmbracelet/x/term v0.2.1 // indirect
github.com/cloudflare/circl v1.6.1 // indirect
github.com/containerd/cgroups v1.1.0 // indirect
github.com/containerd/containerd v1.7.28 // indirect
github.com/containerd/containerd v1.7.29 // indirect
github.com/containerd/containerd/api v1.8.0 // indirect
github.com/containerd/continuity v0.4.4 // indirect
github.com/containerd/errdefs v1.0.0 // indirect
@ -142,7 +141,7 @@ require (
github.com/containerd/stargz-snapshotter/estargz v0.16.3 // indirect
github.com/containerd/ttrpc v1.2.7 // indirect
github.com/containerd/typeurl/v2 v2.2.0 // indirect
github.com/cyphar/filepath-securejoin v0.4.1 // indirect
github.com/cyphar/filepath-securejoin v0.6.0 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/docker/cli v28.5.1+incompatible // indirect
github.com/docker/distribution v2.8.3+incompatible // indirect
@ -168,7 +167,6 @@ require (
github.com/goccy/go-yaml v1.18.0
github.com/gogo/protobuf v1.3.2 // indirect
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect
github.com/golang/snappy v0.0.4 // indirect
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e // indirect
github.com/google/s2a-go v0.1.8 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.3.4 // indirect
@ -191,7 +189,7 @@ require (
github.com/mattn/go-colorable v0.1.14 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-localereader v0.0.2-0.20220822084749-2491eb6c1c75 // indirect
github.com/mattn/go-runewidth v0.0.16 // indirect
github.com/mattn/go-runewidth v0.0.19 // indirect
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect
github.com/mikelolasagasti/xz v1.0.1 // indirect
github.com/minio/minlz v1.0.1 // indirect
@ -209,13 +207,9 @@ require (
github.com/muesli/cancelreader v0.2.2 // indirect
github.com/muesli/termenv v0.16.0 // indirect
github.com/ncruces/go-strftime v0.1.9 // indirect
github.com/nwaples/rardecode v1.1.3 // indirect
github.com/nwaples/rardecode/v2 v2.2.0 // indirect
github.com/olekukonko/errors v1.1.0 // indirect
github.com/olekukonko/ll v0.0.9 // indirect
github.com/opencontainers/image-spec v1.1.1 // indirect
github.com/opencontainers/runtime-spec v1.1.0 // indirect
github.com/opencontainers/selinux v1.11.0 // indirect
github.com/opencontainers/selinux v1.13.0 // indirect
github.com/pborman/indent v1.2.1 // indirect
github.com/pelletier/go-toml/v2 v2.2.3 // indirect
github.com/pierrec/lz4/v4 v4.1.22 // indirect
@ -270,7 +264,7 @@ require (
golang.org/x/sys v0.37.0 // indirect
golang.org/x/term v0.36.0 // indirect
golang.org/x/text v0.30.0 // indirect
golang.org/x/time v0.12.0 // indirect
golang.org/x/time v0.14.0
golang.org/x/tools v0.38.0
golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 // indirect
google.golang.org/api v0.203.0 // indirect
@ -287,6 +281,12 @@ require (
)
require (
github.com/cespare/xxhash/v2 v2.3.0
github.com/gpustack/gguf-parser-go v0.22.1
)
require (
cyphar.com/go-pathrs v0.2.1 // indirect
github.com/aws/aws-sdk-go-v2 v1.36.5 // indirect
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.10 // indirect
github.com/aws/aws-sdk-go-v2/config v1.29.17 // indirect
@ -305,7 +305,20 @@ require (
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.30.3 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.34.0 // indirect
github.com/aws/smithy-go v1.22.4 // indirect
github.com/clipperhouse/displaywidth v0.3.1 // indirect
github.com/clipperhouse/stringish v0.1.1 // indirect
github.com/clipperhouse/uax29/v2 v2.2.0 // indirect
github.com/hashicorp/aws-sdk-go-base/v2 v2.0.0-beta.65 // indirect
github.com/henvic/httpretty v0.1.4 // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/nwaples/rardecode/v2 v2.2.0 // indirect
github.com/olekukonko/cat v0.0.0-20250911104152-50322a0618f6 // indirect
github.com/olekukonko/errors v1.1.0 // indirect
github.com/olekukonko/ll v0.1.2 // indirect
github.com/smallnest/ringbuffer v0.0.0-20241116012123-461381446e3d // indirect
gonum.org/v1/gonum v0.15.1 // indirect
)
retract (

go.sum (85 changed lines)
View File

@ -59,6 +59,8 @@ cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RX
cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
cloud.google.com/go/storage v1.43.0 h1:CcxnSohZwizt4LCzQHWvBf1/kvtHUn7gk9QERXPyXFs=
cloud.google.com/go/storage v1.43.0/go.mod h1:ajvxEa7WmZS1PxvKRq4bq0tFT3vMd502JwstCcYv0Q0=
cyphar.com/go-pathrs v0.2.1 h1:9nx1vOgwVvX1mNBWDu93+vaceedpbsDqo+XuBGL40b8=
cyphar.com/go-pathrs v0.2.1/go.mod h1:y8f1EMG7r+hCuFf/rXsKqMJrJAUoADZGNh5/vZPKcGc=
dario.cat/mergo v1.0.1 h1:Ra4+bf83h2ztPIQYNP99R6m+Y7KfnARDfID+a+vLl4s=
dario.cat/mergo v1.0.1/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
@ -108,16 +110,14 @@ github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuy
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/anchore/archiver/v3 v3.5.3-0.20241210171143-5b1d8d1c7c51 h1:yhk+P8lF3ZiROjmaVRao9WGTRo4b/wYjoKEiAHWrKwc=
github.com/anchore/archiver/v3 v3.5.3-0.20241210171143-5b1d8d1c7c51/go.mod h1:nwuGSd7aZp0rtYt79YggCGafz1RYsclE7pi3fhLwvuw=
github.com/anchore/bubbly v0.0.0-20231115134915-def0aba654a9 h1:p0ZIe0htYOX284Y4axJaGBvXHU0VCCzLN5Wf5XbKStU=
github.com/anchore/bubbly v0.0.0-20231115134915-def0aba654a9/go.mod h1:3ZsFB9tzW3vl4gEiUeuSOMDnwroWxIxJelOOHUp8dSw=
github.com/anchore/clio v0.0.0-20250319180342-2cfe4b0cb716 h1:2sIdYJlQESEnyk3Y0WD2vXWW5eD2iMz9Ev8fj1Z8LNA=
github.com/anchore/clio v0.0.0-20250319180342-2cfe4b0cb716/go.mod h1:Utb9i4kwiCWvqAIxZaJeMIXFO9uOgQXlvH2BfbfO/zI=
github.com/anchore/fangs v0.0.0-20250319222917-446a1e748ec2 h1:GC2QaO0YsmjpsZ4rtVKv9DnproIxqqn+qkskpc+i8MA=
github.com/anchore/fangs v0.0.0-20250319222917-446a1e748ec2/go.mod h1:XUbUECwVKuD3qYRUj+QZIOHjyyXua2gFmVjKA40iHXA=
github.com/anchore/go-collections v0.0.0-20240216171411-9321230ce537 h1:GjNGuwK5jWjJMyVppBjYS54eOiiSNv4Ba869k4wh72Q=
github.com/anchore/go-collections v0.0.0-20240216171411-9321230ce537/go.mod h1:1aiktV46ATCkuVg0O573ZrH56BUawTECPETbZyBcqT8=
github.com/anchore/go-collections v0.0.0-20251016125210-a3c352120e8c h1:eoJXyC0n7DZ4YvySG/ETdYkTar2Due7eH+UmLK6FbrA=
github.com/anchore/go-collections v0.0.0-20251016125210-a3c352120e8c/go.mod h1:1aiktV46ATCkuVg0O573ZrH56BUawTECPETbZyBcqT8=
github.com/anchore/go-homedir v0.0.0-20250319154043-c29668562e4d h1:gT69osH9AsdpOfqxbRwtxcNnSZ1zg4aKy2BevO3ZBdc=
github.com/anchore/go-homedir v0.0.0-20250319154043-c29668562e4d/go.mod h1:PhSnuFYknwPZkOWKB1jXBNToChBA+l0FjwOxtViIc50=
github.com/anchore/go-logger v0.0.0-20250318195838-07ae343dd722 h1:2SqmFgE7h+Ql4VyBzhjLkRF/3gDrcpUBj8LjvvO6OOM=
@ -138,8 +138,8 @@ github.com/anchore/go-version v1.2.2-0.20200701162849-18adb9c92b9b h1:e1bmaoJfZV
github.com/anchore/go-version v1.2.2-0.20200701162849-18adb9c92b9b/go.mod h1:Bkc+JYWjMCF8OyZ340IMSIi2Ebf3uwByOk6ho4wne1E=
github.com/anchore/packageurl-go v0.1.1-0.20250220190351-d62adb6e1115 h1:ZyRCmiEjnoGJZ1+Ah0ZZ/mKKqNhGcUZBl0s7PTTDzvY=
github.com/anchore/packageurl-go v0.1.1-0.20250220190351-d62adb6e1115/go.mod h1:KoYIv7tdP5+CC9VGkeZV4/vGCKsY55VvoG+5dadg4YI=
github.com/anchore/stereoscope v0.1.11 h1:YP/XUNcJyMbOOPAWPkeZNCVlKKTRO2cnBTEeUW6I40Y=
github.com/anchore/stereoscope v0.1.11/go.mod h1:G3PZlzPbxFhylj9pQwtqfVPaahuWmy/UCtv5FTIIMvg=
github.com/anchore/stereoscope v0.1.12 h1:4T/10G7Nb98UoJBKVvAIhsAtrR63lZXxMJb/Qfw5inw=
github.com/anchore/stereoscope v0.1.12/go.mod h1:G3PZlzPbxFhylj9pQwtqfVPaahuWmy/UCtv5FTIIMvg=
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8=
github.com/andybalholm/brotli v1.2.0 h1:ukwgCxwYrmACq68yiUqwIWnGY0cTPox/M94sVwToPjQ=
github.com/andybalholm/brotli v1.2.0/go.mod h1:rzTDkvFWvIrjDXZHkuS16NPggd91W3kUSvPlQ1pLaKY=
@ -227,7 +227,6 @@ github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqy
github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko=
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
@ -263,6 +262,12 @@ github.com/chzyer/test v1.0.0/go.mod h1:2JlltgoNkt4TW/z9V/IzDdFaMTM2JPIi26O1pF38
github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag=
github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/clipperhouse/displaywidth v0.3.1 h1:k07iN9gD32177o1y4O1jQMzbLdCrsGJh+blirVYybsk=
github.com/clipperhouse/displaywidth v0.3.1/go.mod h1:tgLJKKyaDOCadywag3agw4snxS5kYEuYR6Y9+qWDDYM=
github.com/clipperhouse/stringish v0.1.1 h1:+NSqMOr3GR6k1FdRhhnXrLfztGzuG+VuFDfatpWHKCs=
github.com/clipperhouse/stringish v0.1.1/go.mod h1:v/WhFtE1q0ovMta2+m+UbpZ+2/HEXNWYXQgCt4hdOzA=
github.com/clipperhouse/uax29/v2 v2.2.0 h1:ChwIKnQN3kcZteTXMgb1wztSgaU+ZemkgWdohwgs8tY=
github.com/clipperhouse/uax29/v2 v2.2.0/go.mod h1:EFJ2TJMRUaplDxHKj1qAEhCtQPW2tJSwu5BF98AuoVM=
github.com/cloudflare/circl v1.6.1 h1:zqIqSPIndyBh1bjLVVDHMPpVKqp8Su/V+6MeDzzQBQ0=
github.com/cloudflare/circl v1.6.1/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs=
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
@ -277,8 +282,8 @@ github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWH
github.com/cncf/xds/go v0.0.0-20211130200136-a8f946100490/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
github.com/containerd/cgroups v1.1.0 h1:v8rEWFl6EoqHB+swVNjVoCJE8o3jX7e8nqBGPLaDFBM=
github.com/containerd/cgroups v1.1.0/go.mod h1:6ppBcbh/NOOUU+dMKrykgaBnK9lCIBxHqJDGwsa1mIw=
github.com/containerd/containerd v1.7.28 h1:Nsgm1AtcmEh4AHAJ4gGlNSaKgXiNccU270Dnf81FQ3c=
github.com/containerd/containerd v1.7.28/go.mod h1:azUkWcOvHrWvaiUjSQH0fjzuHIwSPg1WL5PshGP4Szs=
github.com/containerd/containerd v1.7.29 h1:90fWABQsaN9mJhGkoVnuzEY+o1XDPbg9BTC9QTAHnuE=
github.com/containerd/containerd v1.7.29/go.mod h1:azUkWcOvHrWvaiUjSQH0fjzuHIwSPg1WL5PshGP4Szs=
github.com/containerd/containerd/api v1.8.0 h1:hVTNJKR8fMc/2Tiw60ZRijntNMd1U+JVMyTRdsD2bS0=
github.com/containerd/containerd/api v1.8.0/go.mod h1:dFv4lt6S20wTu/hMcP4350RL87qPWLVa/OHOwmmdnYc=
github.com/containerd/continuity v0.4.4 h1:/fNVfTJ7wIl/YPMHjf+5H32uFhl63JucB34PlCpMKII=
@ -304,8 +309,8 @@ github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSV
github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s=
github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI=
github.com/cyphar/filepath-securejoin v0.6.0 h1:BtGB77njd6SVO6VztOHfPxKitJvd/VPT+OFBFMOi1Is=
github.com/cyphar/filepath-securejoin v0.6.0/go.mod h1:A8hd4EnAeyujCJRrICiOWqjS1AX0a9kM5XL+NwKoYSc=
github.com/dave/jennifer v1.7.1 h1:B4jJJDHelWcDhlRQxWeo0Npa/pYKBLrirAQoTN45txo=
github.com/dave/jennifer v1.7.1/go.mod h1:nXbxhEmQfOZhWml3D1cDK5M1FLnMSozpbFN/m3RmGZc=
github.com/davecgh/go-spew v0.0.0-20161028175848-04cdfd42973b/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@ -385,8 +390,8 @@ github.com/fsnotify/fsnotify v1.8.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8
github.com/gabriel-vasile/mimetype v1.4.9 h1:5k+WDwEsD9eTLL8Tz3L0VnmVh9QxGjRmjBvAG7U/oYY=
github.com/gabriel-vasile/mimetype v1.4.9/go.mod h1:WnSQhFKJuBlRyLiKohA/2DtIlPFAbguNaG7QCHcyGok=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/github/go-spdx/v2 v2.3.3 h1:QI7evnHWEfWkT54eJwkoV/f3a0xD3gLlnVmT5wQG6LE=
github.com/github/go-spdx/v2 v2.3.3/go.mod h1:2ZxKsOhvBp+OYBDlsGnUMcchLeo2mrpEBn2L1C+U3IQ=
github.com/github/go-spdx/v2 v2.3.4 h1:6VNAsYWvQge+SOeoubTlH81MY21d5uekXNIRGfXMNXo=
github.com/github/go-spdx/v2 v2.3.4/go.mod h1:7LYNCshU2Gj17qZ0heJ5CQUKWWmpd98K7o93K8fJSMk=
github.com/gkampitakis/ciinfo v0.3.2 h1:JcuOPk8ZU7nZQjdUhctuhQofk7BGHuIy0c9Ez8BNhXs=
github.com/gkampitakis/ciinfo v0.3.2/go.mod h1:1NIwaOcFChN4fa/B0hEBdAb6npDlFL8Bwx4dfRLRqAo=
github.com/gkampitakis/go-diff v1.3.2 h1:Qyn0J9XJSDTgnsgHRdz9Zp24RaJeKMUHg2+PDZZdC4M=
@ -472,8 +477,6 @@ github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiu
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
@ -541,6 +544,8 @@ github.com/gookit/assert v0.1.1/go.mod h1:jS5bmIVQZTIwk42uXl4lyj4iaaxx32tqH16CFj
github.com/gookit/color v1.2.5/go.mod h1:AhIE+pS6D4Ql0SQWbBeXPHw7gY0/sjHoA4s/n1KB7xg=
github.com/gookit/color v1.6.0 h1:JjJXBTk1ETNyqyilJhkTXJYYigHG24TM9Xa2M1xAhRA=
github.com/gookit/color v1.6.0/go.mod h1:9ACFc7/1IpHGBW8RwuDm/0YEnhg3dwwXpoMsmtyHfjs=
github.com/gpustack/gguf-parser-go v0.22.1 h1:FRnEDWqT0Rcplr/R9ctCRSN2+3DhVsf6dnR5/i9JA4E=
github.com/gpustack/gguf-parser-go v0.22.1/go.mod h1:y4TwTtDqFWTK+xvprOjRUh+dowgU2TKCX37vRKvGiZ0=
github.com/grpc-ecosystem/grpc-gateway v1.16.0 h1:gmcG1KaJ57LophUzW0Hy8NmPhnMZb4M0+kPpLofRdBo=
github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0 h1:YBftPWNWd4WwGqtY2yeZL2ef8rHAxPBD8KFhJpmcqms=
@ -556,8 +561,8 @@ github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtng
github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ=
github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48=
github.com/hashicorp/go-getter v1.8.2 h1:CGCK+bZQLl44PYiwJweVzfpjg7bBwtuXu3AGcLiod2o=
github.com/hashicorp/go-getter v1.8.2/go.mod h1:CUTt9x2bCtJ/sV8ihgrITL3IUE+0BE1j/e4n5P/GIM4=
github.com/hashicorp/go-getter v1.8.3 h1:gIS+oTNv3kyYAvlUVgMR46MiG0bM0KuSON/KZEvRoRg=
github.com/hashicorp/go-getter v1.8.3/go.mod h1:CUTt9x2bCtJ/sV8ihgrITL3IUE+0BE1j/e4n5P/GIM4=
github.com/hashicorp/go-hclog v0.12.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ=
github.com/hashicorp/go-hclog v1.0.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ=
github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60=
@ -590,6 +595,8 @@ github.com/hashicorp/memberlist v0.2.2/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOn
github.com/hashicorp/memberlist v0.3.0/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE=
github.com/hashicorp/serf v0.9.5/go.mod h1:UWDWwZeL5cuWDJdl0C6wrvrUwEqtQ4ZKBKKENpqIUyk=
github.com/hashicorp/serf v0.9.6/go.mod h1:TXZNMjZQijwlDvp+r0b63xZ45H7JmCmgg4gpTwn9UV4=
github.com/henvic/httpretty v0.1.4 h1:Jo7uwIRWVFxkqOnErcoYfH90o3ddQyVrSANeS4cxYmU=
github.com/henvic/httpretty v0.1.4/go.mod h1:Dn60sQTZfbt2dYsdUSNsCljyF4AfdqnuJFDLJA1I4AM=
github.com/huandu/xstrings v1.5.0 h1:2ag3IFq9ZDANvthTwTiqSSZLjDc+BedvHPAp5tJy2TI=
github.com/huandu/xstrings v1.5.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
github.com/iancoleman/orderedmap v0.0.0-20190318233801-ac98e3ecb4b0/go.mod h1:N0Wam8K1arqPXNWjMo21EXnBPOPp36vB07FNRdD2geA=
@ -609,14 +616,15 @@ github.com/invopop/jsonschema v0.7.0 h1:2vgQcBz1n256N+FpX3Jq7Y17AjYt46Ig3zIWyy77
github.com/invopop/jsonschema v0.7.0/go.mod h1:O9uiLokuu0+MGFlyiaqtWxwqJm41/+8Nj0lD7A36YH0=
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A=
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo=
github.com/jedib0t/go-pretty/v6 v6.6.8 h1:JnnzQeRz2bACBobIaa/r+nqjvws4yEhcmaZ4n1QzsEc=
github.com/jedib0t/go-pretty/v6 v6.6.8/go.mod h1:YwC5CE4fJ1HFUDeivSV1r//AmANFHyqczZk+U6BDALU=
github.com/jedib0t/go-pretty/v6 v6.7.1 h1:bHDSsj93NuJ563hHuM7ohk/wpX7BmRFNIsVv1ssI2/M=
github.com/jedib0t/go-pretty/v6 v6.7.1/go.mod h1:YwC5CE4fJ1HFUDeivSV1r//AmANFHyqczZk+U6BDALU=
github.com/jinzhu/copier v0.4.0 h1:w3ciUoD19shMCRargcpm0cm91ytaBhDvuRpz1ODO/U8=
github.com/jinzhu/copier v0.4.0/go.mod h1:DfbEm0FYsaqBcKcFuvmOZb218JkPGtvSHsKg8S8hyyg=
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
@ -676,8 +684,8 @@ github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWE
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-localereader v0.0.2-0.20220822084749-2491eb6c1c75 h1:P8UmIzZMYDR+NGImiFvErt6VWfIRPuGM+vyjiEdkmIw=
github.com/mattn/go-localereader v0.0.2-0.20220822084749-2491eb6c1c75/go.mod h1:8fBrzywKY7BI3czFoHkuzRoWE9C+EiG4R1k4Cjx5p88=
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/mattn/go-runewidth v0.0.19 h1:v++JhqYnZuu5jSKrk9RbgF5v4CGUjqRfBm05byFGLdw=
github.com/mattn/go-runewidth v0.0.19/go.mod h1:XBkDxAl56ILZc9knddidhrOlY5R/pDhgLpndooCuJAs=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d h1:5PJl274Y63IEHC+7izoQE9x6ikvDFZS2mDVS3drnohI=
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
@ -722,9 +730,11 @@ github.com/moby/sys/userns v0.1.0/go.mod h1:IHUYgu/kao6N8YZlp9Cf444ySSvCmDlmzUcY
github.com/moby/term v0.0.0-20221205130635-1aeaba878587 h1:HfkjXDfhgVaN5rmueG8cL8KKeFNecRCXFhaJ2qZ5SKA=
github.com/moby/term v0.0.0-20221205130635-1aeaba878587/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A=
github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc=
@ -741,16 +751,16 @@ github.com/nix-community/go-nix v0.0.0-20250101154619-4bdde671e0a1 h1:kpt9ZfKcm+
github.com/nix-community/go-nix v0.0.0-20250101154619-4bdde671e0a1/go.mod h1:qgCw4bBKZX8qMgGeEZzGFVT3notl42dBjNqO2jut0M0=
github.com/nsf/jsondiff v0.0.0-20210926074059-1e845ec5d249 h1:NHrXEjTNQY7P0Zfx1aMrNhpgxHmow66XQtm0aQLY0AE=
github.com/nsf/jsondiff v0.0.0-20210926074059-1e845ec5d249/go.mod h1:mpRZBD8SJ55OIICQ3iWH0Yz3cjzA61JdqMLoWXeB2+8=
github.com/nwaples/rardecode v1.1.3 h1:cWCaZwfM5H7nAD6PyEdcVnczzV8i/JtotnyW/dD9lEc=
github.com/nwaples/rardecode v1.1.3/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0=
github.com/nwaples/rardecode/v2 v2.2.0 h1:4ufPGHiNe1rYJxYfehALLjup4Ls3ck42CWwjKiOqu0A=
github.com/nwaples/rardecode/v2 v2.2.0/go.mod h1:7uz379lSxPe6j9nvzxUZ+n7mnJNgjsRNb6IbvGVHRmw=
github.com/olekukonko/cat v0.0.0-20250911104152-50322a0618f6 h1:zrbMGy9YXpIeTnGj4EljqMiZsIcE09mmF8XsD5AYOJc=
github.com/olekukonko/cat v0.0.0-20250911104152-50322a0618f6/go.mod h1:rEKTHC9roVVicUIfZK7DYrdIoM0EOr8mK1Hj5s3JjH0=
github.com/olekukonko/errors v1.1.0 h1:RNuGIh15QdDenh+hNvKrJkmxxjV4hcS50Db478Ou5sM=
github.com/olekukonko/errors v1.1.0/go.mod h1:ppzxA5jBKcO1vIpCXQ9ZqgDh8iwODz6OXIGKU8r5m4Y=
github.com/olekukonko/ll v0.0.9 h1:Y+1YqDfVkqMWuEQMclsF9HUR5+a82+dxJuL1HHSRpxI=
github.com/olekukonko/ll v0.0.9/go.mod h1:En+sEW0JNETl26+K8eZ6/W4UQ7CYSrrgg/EdIYT2H8g=
github.com/olekukonko/tablewriter v1.0.9 h1:XGwRsYLC2bY7bNd93Dk51bcPZksWZmLYuaTHR0FqfL8=
github.com/olekukonko/tablewriter v1.0.9/go.mod h1:5c+EBPeSqvXnLLgkm9isDdzR3wjfBkHR9Nhfp3NWrzo=
github.com/olekukonko/ll v0.1.2 h1:lkg/k/9mlsy0SxO5aC+WEpbdT5K83ddnNhAepz7TQc0=
github.com/olekukonko/ll v0.1.2/go.mod h1:b52bVQRRPObe+yyBl0TxNfhesL0nedD4Cht0/zx55Ew=
github.com/olekukonko/tablewriter v1.1.1 h1:b3reP6GCfrHwmKkYwNRFh2rxidGHcT6cgxj/sHiDDx0=
github.com/olekukonko/tablewriter v1.1.1/go.mod h1:De/bIcTF+gpBDB3Alv3fEsZA+9unTsSzAg/ZGADCtn4=
github.com/onsi/gomega v1.34.1 h1:EUMJIKUjM8sKjYbtxQI9A4z2o+rruxnzNvpknOXie6k=
github.com/onsi/gomega v1.34.1/go.mod h1:kU1QgUvBDLXBJq618Xvm2LUX6rSAfRaFRTcdOeDLwwY=
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
@ -759,8 +769,8 @@ github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJw
github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M=
github.com/opencontainers/runtime-spec v1.1.0 h1:HHUyrt9mwHUjtasSbXSMvs4cyFxh+Bll4AjJ9odEGpg=
github.com/opencontainers/runtime-spec v1.1.0/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0=
github.com/opencontainers/selinux v1.11.0 h1:+5Zbo97w3Lbmb3PeqQtpmTkMwsW5nRI3YaLpt7tQ7oU=
github.com/opencontainers/selinux v1.11.0/go.mod h1:E5dMC3VPuVvVHDYmi78qvhJp8+M586T4DlDRYpFkyec=
github.com/opencontainers/selinux v1.13.0 h1:Zza88GWezyT7RLql12URvoxsbLfjFx988+LGaWfbL84=
github.com/opencontainers/selinux v1.13.0/go.mod h1:XxWTed+A/s5NNq4GmYScVy+9jzXhGBVEOAyucdRUY8s=
github.com/orisano/pixelmatch v0.0.0-20220722002657-fb0b55479cde/go.mod h1:nZgzbfBr3hhjoZnS66nKrHmduYNpc34ny7RK4z5/HM0=
github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
@ -809,7 +819,6 @@ github.com/quasilyte/go-ruleguard/dsl v0.3.23 h1:lxjt5B6ZCiBeeNO8/oQsegE6fLeCzuM
github.com/quasilyte/go-ruleguard/dsl v0.3.23/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
@ -851,6 +860,8 @@ github.com/sirupsen/logrus v1.9.4-0.20230606125235-dd1b4c2e81af h1:Sp5TG9f7K39yf
github.com/sirupsen/logrus v1.9.4-0.20230606125235-dd1b4c2e81af/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/skeema/knownhosts v1.3.1 h1:X2osQ+RAjK76shCbvhHHHVl3ZlgDm8apHEHFqRjnBY8=
github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQNxPwTcfiY=
github.com/smallnest/ringbuffer v0.0.0-20241116012123-461381446e3d h1:3VwvTjiRPA7cqtgOWddEL+JrcijMlXUmj99c/6YyZoY=
github.com/smallnest/ringbuffer v0.0.0-20241116012123-461381446e3d/go.mod h1:tAG61zBM1DYRaGIPloumExGvScf08oHuo0kFoOqdbT0=
github.com/sorairolake/lzip-go v0.3.8 h1:j5Q2313INdTA80ureWYRhX+1K78mUXfMoPZCw/ivWik=
github.com/sorairolake/lzip-go v0.3.8/go.mod h1:JcBqGMV0frlxwrsE9sMWXDjqn3EeVf0/54YPsw66qkU=
github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo=
@ -1061,8 +1072,8 @@ golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA=
golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w=
golang.org/x/mod v0.30.0 h1:fDEXFVZ/fmCKProc/yAXXUijritrDzahmwwefnjoPFk=
golang.org/x/mod v0.30.0/go.mod h1:lAsf5O2EvJeSFMiBxXDki7sCgAxEUcZHXoXMKT4GJKc=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@ -1239,8 +1250,8 @@ golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE=
golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg=
golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
@ -1304,6 +1315,8 @@ golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8T
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 h1:+cNy6SZtPcJQH3LJVLOSmiC7MMxXNOb3PU/VUEz+EhU=
golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90=
gonum.org/v1/gonum v0.15.1 h1:FNy7N6OUZVUaWG9pTiD+jlhdQ3lMP+/LcTpJ6+a8sQ0=
gonum.org/v1/gonum v0.15.1/go.mod h1:eZTZuRFrzu5pcyjN5wJhcIhnUdNijYxX1T2IcrOGY0o=
google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
@ -1509,8 +1522,8 @@ modernc.org/opt v0.1.4 h1:2kNGMRiUjrp4LcaPuLY2PzUfqM/w9N23quVwhKt5Qm8=
modernc.org/opt v0.1.4/go.mod h1:03fq9lsNfvkYSfxrfUhZCWPk1lm4cq4N+Bh//bEtgns=
modernc.org/sortutil v1.2.1 h1:+xyoGf15mM3NMlPDnFqrteY07klSFxLElE2PVuWIJ7w=
modernc.org/sortutil v1.2.1/go.mod h1:7ZI3a3REbai7gzCLcotuw9AC4VZVpYMjDzETGsSMqJE=
modernc.org/sqlite v1.39.1 h1:H+/wGFzuSCIEVCvXYVHX5RQglwhMOvtHSv+VtidL2r4=
modernc.org/sqlite v1.39.1/go.mod h1:9fjQZ0mB1LLP0GYrp39oOJXx/I2sxEnZtzCmEQIKvGE=
modernc.org/sqlite v1.40.0 h1:bNWEDlYhNPAUdUdBzjAvn8icAs/2gaKlj4vM+tQ6KdQ=
modernc.org/sqlite v1.40.0/go.mod h1:9fjQZ0mB1LLP0GYrp39oOJXx/I2sxEnZtzCmEQIKvGE=
modernc.org/strutil v1.2.1 h1:UneZBkQA+DX2Rp35KcM69cSsNES9ly8mQWD71HKlOA0=
modernc.org/strutil v1.2.1/go.mod h1:EHkiggD70koQxjVdSBM3JKM7k6L0FbGE5eymy9i3B9A=
modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y=


@ -0,0 +1,46 @@
package pkgtestobservation
import "time"
// Observations represents capability observations during testing
type Observations struct {
License bool `json:"license"`
Relationships Relationship `json:"relationships"`
FileListing Count `json:"file_listing"`
FileDigests Count `json:"file_digests"`
IntegrityHash Count `json:"integrity_hash"`
}
// Relationship tracks dependency relationship observations
type Relationship struct {
Found bool `json:"found"`
Count int `json:"count"`
}
// Count tracks whether a capability was found and how many times
type Count struct {
Found bool `json:"found"`
Count int `json:"count"`
}
// Test is the root structure for test-observations.json
type Test struct {
Package string `json:"package"`
UpdatedAt time.Time `json:"updated_at"`
Catalogers map[string]*Cataloger `json:"catalogers"`
Parsers map[string]*Parser `json:"parsers"`
}
// Parser captures all observations for a parser
type Parser struct {
MetadataTypes []string `json:"metadata_types"`
PackageTypes []string `json:"package_types"`
Observations Observations `json:"observations"`
}
// Cataloger captures all observations for a cataloger
type Cataloger struct {
MetadataTypes []string `json:"metadata_types"`
PackageTypes []string `json:"package_types"`
Observations Observations `json:"observations"`
}
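As a rough illustration of how these observation types serialize (not part of this change set), the sketch below builds a Test value and marshals it to JSON. The import path and all field values are assumptions for demonstration; internal packages are only importable from within the syft module itself.

package main

import (
	"encoding/json"
	"fmt"
	"time"

	// assumed import path; the diff does not show where this package lives
	"github.com/anchore/syft/internal/pkgtestobservation"
)

func main() {
	obs := pkgtestobservation.Test{
		Package:   "example-package",
		UpdatedAt: time.Now().UTC(),
		Catalogers: map[string]*pkgtestobservation.Cataloger{
			"example-cataloger": {
				MetadataTypes: []string{"ExampleMetadata"},
				PackageTypes:  []string{"python"},
				Observations: pkgtestobservation.Observations{
					License:       true,
					Relationships: pkgtestobservation.Relationship{Found: true, Count: 12},
					FileListing:   pkgtestobservation.Count{Found: true, Count: 3},
				},
			},
		},
		Parsers: map[string]*pkgtestobservation.Parser{},
	}
	out, err := json.MarshalIndent(obs, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // emits the test-observations.json shape defined above
}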


@ -3,5 +3,9 @@ package internal
const (
// JSONSchemaVersion is the current schema version output by the JSON encoder
// This is roughly following the "SchemaVer" guidelines for versioning the JSON schema. Please see schema/json/README.md for details on how to increment.
JSONSchemaVersion = "16.0.40"
JSONSchemaVersion = "16.1.0"
// Changelog
// 16.1.0 - reformulated the python pdm fields (added "URL" and removed the unused "path" field).
)


@ -1,17 +1,40 @@
package file
import (
"context"
"fmt"
"os"
"path/filepath"
"github.com/bmatcuk/doublestar/v4"
"github.com/mholt/archives"
"github.com/anchore/archiver/v3"
"github.com/anchore/syft/internal"
)
// TraverseFilesInTar enumerates all paths stored within a tar archive using the visitor pattern.
func TraverseFilesInTar(ctx context.Context, archivePath string, visitor archives.FileHandler) error {
tarReader, err := os.Open(archivePath)
if err != nil {
return fmt.Errorf("unable to open tar archive (%s): %w", archivePath, err)
}
defer internal.CloseAndLogError(tarReader, archivePath)
format, _, err := archives.Identify(ctx, archivePath, nil)
if err != nil {
return fmt.Errorf("failed to identify tar compression format: %w", err)
}
extractor, ok := format.(archives.Extractor)
if !ok {
return fmt.Errorf("file format does not support extraction: %s", archivePath)
}
return extractor.Extract(ctx, tarReader, visitor)
}
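For orientation, a minimal caller of this new visitor-based API might look like the sketch below (not part of the diff). The fixture filename is a placeholder, and the internal/file import is only resolvable from within the syft module.

package main

import (
	"context"
	"fmt"

	"github.com/mholt/archives"

	"github.com/anchore/syft/internal/file"
)

func main() {
	// the visitor matches archives.FileHandler and is invoked once per entry
	visitor := func(_ context.Context, f archives.FileInfo) error {
		fmt.Println(f.NameInArchive) // print each path stored in the tar
		return nil
	}
	if err := file.TraverseFilesInTar(context.Background(), "fixture.tar", visitor); err != nil {
		panic(err)
	}
}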
// ExtractGlobsFromTarToUniqueTempFile extracts paths matching the given globs within the given archive to a temporary directory, returning file openers for each file extracted.
func ExtractGlobsFromTarToUniqueTempFile(archivePath, dir string, globs ...string) (map[string]Opener, error) {
func ExtractGlobsFromTarToUniqueTempFile(ctx context.Context, archivePath, dir string, globs ...string) (map[string]Opener, error) {
results := make(map[string]Opener)
// don't allow for full traversal, only select traversal from given paths
@ -19,9 +42,7 @@ func ExtractGlobsFromTarToUniqueTempFile(archivePath, dir string, globs ...strin
return results, nil
}
visitor := func(file archiver.File) error {
defer file.Close()
visitor := func(_ context.Context, file archives.FileInfo) error {
// ignore directories
if file.IsDir() {
return nil
@ -43,7 +64,13 @@ func ExtractGlobsFromTarToUniqueTempFile(archivePath, dir string, globs ...strin
// provides a ReadCloser. It is up to the caller to handle closing the file explicitly.
defer tempFile.Close()
if err := safeCopy(tempFile, file.ReadCloser); err != nil {
packedFile, err := file.Open()
if err != nil {
return fmt.Errorf("unable to read file=%q from tar=%q: %w", file.NameInArchive, archivePath, err)
}
defer internal.CloseAndLogError(packedFile, archivePath)
if err := safeCopy(tempFile, packedFile); err != nil {
return fmt.Errorf("unable to copy source=%q for tar=%q: %w", file.Name(), archivePath, err)
}
@ -52,7 +79,7 @@ func ExtractGlobsFromTarToUniqueTempFile(archivePath, dir string, globs ...strin
return nil
}
return results, archiver.Walk(archivePath, visitor)
return results, TraverseFilesInTar(ctx, archivePath, visitor)
}
func matchesAnyGlob(name string, globs ...string) bool {


@ -1,10 +1,12 @@
package file
import (
"context"
"os"
"sort"
"strings"
"github.com/mholt/archives"
"github.com/scylladb/go-set/strset"
"github.com/anchore/syft/internal/log"
@ -14,22 +16,25 @@ import (
type ZipFileManifest map[string]os.FileInfo
// NewZipFileManifest creates and returns a new ZipFileManifest populated with path and metadata from the given zip archive path.
func NewZipFileManifest(archivePath string) (ZipFileManifest, error) {
zipReader, err := OpenZip(archivePath)
func NewZipFileManifest(ctx context.Context, archivePath string) (ZipFileManifest, error) {
zipReader, err := os.Open(archivePath)
manifest := make(ZipFileManifest)
if err != nil {
log.Debugf("unable to open zip archive (%s): %v", archivePath, err)
return manifest, err
}
defer func() {
err = zipReader.Close()
if err != nil {
if err = zipReader.Close(); err != nil {
log.Debugf("unable to close zip archive (%s): %+v", archivePath, err)
}
}()
for _, file := range zipReader.File {
manifest.Add(file.Name, file.FileInfo())
err = archives.Zip{}.Extract(ctx, zipReader, func(_ context.Context, file archives.FileInfo) error {
manifest.Add(file.NameInArchive, file.FileInfo)
return nil
})
if err != nil {
return manifest, err
}
return manifest, nil
}
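A minimal sketch of consuming the manifest (not part of the diff; archive name is a placeholder, and the internal import assumes code living inside the syft module): since ZipFileManifest is a map of archive paths to os.FileInfo, iteration is direct.

package main

import (
	"context"
	"fmt"

	"github.com/anchore/syft/internal/file"
)

func main() {
	manifest, err := file.NewZipFileManifest(context.Background(), "example.zip")
	if err != nil {
		panic(err)
	}
	for name, info := range manifest {
		fmt.Printf("%s (%d bytes)\n", name, info.Size()) // path within archive and its size
	}
}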


@ -4,6 +4,7 @@
package file
import (
"context"
"encoding/json"
"os"
"path"
@ -24,7 +25,7 @@ func TestNewZipFileManifest(t *testing.T) {
archiveFilePath := setupZipFileTest(t, sourceDirPath, false)
actual, err := NewZipFileManifest(archiveFilePath)
actual, err := NewZipFileManifest(context.Background(), archiveFilePath)
if err != nil {
t.Fatalf("unable to extract from unzip archive: %+v", err)
}
@ -59,7 +60,7 @@ func TestNewZip64FileManifest(t *testing.T) {
sourceDirPath := path.Join(cwd, "test-fixtures", "zip-source")
archiveFilePath := setupZipFileTest(t, sourceDirPath, true)
actual, err := NewZipFileManifest(archiveFilePath)
actual, err := NewZipFileManifest(context.Background(), archiveFilePath)
if err != nil {
t.Fatalf("unable to extract from unzip archive: %+v", err)
}
@ -99,7 +100,7 @@ func TestZipFileManifest_GlobMatch(t *testing.T) {
archiveFilePath := setupZipFileTest(t, sourceDirPath, false)
z, err := NewZipFileManifest(archiveFilePath)
z, err := NewZipFileManifest(context.Background(), archiveFilePath)
if err != nil {
t.Fatalf("unable to extract from unzip archive: %+v", err)
}


@ -1,13 +1,15 @@
package file
import (
"archive/zip"
"bytes"
"context"
"fmt"
"os"
"path/filepath"
"strings"
"github.com/mholt/archives"
"github.com/anchore/syft/internal/log"
)
@ -25,7 +27,7 @@ type errZipSlipDetected struct {
}
func (e *errZipSlipDetected) Error() string {
return fmt.Sprintf("paths are not allowed to resolve outside of the root prefix (%q). Destination: %q", e.Prefix, e.JoinArgs)
return fmt.Sprintf("path traversal detected: paths are not allowed to resolve outside of the root prefix (%q). Destination: %q", e.Prefix, e.JoinArgs)
}
type zipTraversalRequest map[string]struct{}
@ -39,38 +41,34 @@ func newZipTraverseRequest(paths ...string) zipTraversalRequest {
}
// TraverseFilesInZip enumerates all paths stored within a zip archive using the visitor pattern.
func TraverseFilesInZip(archivePath string, visitor func(*zip.File) error, paths ...string) error {
func TraverseFilesInZip(ctx context.Context, archivePath string, visitor archives.FileHandler, paths ...string) error {
request := newZipTraverseRequest(paths...)
zipReader, err := OpenZip(archivePath)
zipReader, err := os.Open(archivePath)
if err != nil {
return fmt.Errorf("unable to open zip archive (%s): %w", archivePath, err)
}
defer func() {
err = zipReader.Close()
if err != nil {
if err := zipReader.Close(); err != nil {
log.Errorf("unable to close zip archive (%s): %+v", archivePath, err)
}
}()
for _, file := range zipReader.File {
return archives.Zip{}.Extract(ctx, zipReader, func(ctx context.Context, file archives.FileInfo) error {
// if no paths are given then assume that all files should be traversed
if len(paths) > 0 {
if _, ok := request[file.Name]; !ok {
if _, ok := request[file.NameInArchive]; !ok {
// this file path is not of interest
continue
return nil
}
}
if err = visitor(file); err != nil {
return err
}
}
return nil
return visitor(ctx, file)
})
}
// ExtractFromZipToUniqueTempFile extracts select paths for the given archive to a temporary directory, returning file openers for each file extracted.
func ExtractFromZipToUniqueTempFile(archivePath, dir string, paths ...string) (map[string]Opener, error) {
func ExtractFromZipToUniqueTempFile(ctx context.Context, archivePath, dir string, paths ...string) (map[string]Opener, error) {
results := make(map[string]Opener)
// don't allow for full traversal, only select traversal from given paths
@ -78,9 +76,8 @@ func ExtractFromZipToUniqueTempFile(archivePath, dir string, paths ...string) (m
return results, nil
}
visitor := func(file *zip.File) error {
tempfilePrefix := filepath.Base(filepath.Clean(file.Name)) + "-"
visitor := func(_ context.Context, file archives.FileInfo) error {
tempfilePrefix := filepath.Base(filepath.Clean(file.NameInArchive)) + "-"
tempFile, err := os.CreateTemp(dir, tempfilePrefix)
if err != nil {
return fmt.Errorf("unable to create temp file: %w", err)
@ -92,33 +89,32 @@ func ExtractFromZipToUniqueTempFile(archivePath, dir string, paths ...string) (m
zippedFile, err := file.Open()
if err != nil {
return fmt.Errorf("unable to read file=%q from zip=%q: %w", file.Name, archivePath, err)
return fmt.Errorf("unable to read file=%q from zip=%q: %w", file.NameInArchive, archivePath, err)
}
defer func() {
err := zippedFile.Close()
if err != nil {
log.Errorf("unable to close source file=%q from zip=%q: %+v", file.Name, archivePath, err)
if err := zippedFile.Close(); err != nil {
log.Errorf("unable to close source file=%q from zip=%q: %+v", file.NameInArchive, archivePath, err)
}
}()
if file.FileInfo().IsDir() {
return fmt.Errorf("unable to extract directories, only files: %s", file.Name)
if file.IsDir() {
return fmt.Errorf("unable to extract directories, only files: %s", file.NameInArchive)
}
if err := safeCopy(tempFile, zippedFile); err != nil {
return fmt.Errorf("unable to copy source=%q for zip=%q: %w", file.Name, archivePath, err)
return fmt.Errorf("unable to copy source=%q for zip=%q: %w", file.NameInArchive, archivePath, err)
}
results[file.Name] = Opener{path: tempFile.Name()}
results[file.NameInArchive] = Opener{path: tempFile.Name()}
return nil
}
return results, TraverseFilesInZip(archivePath, visitor, paths...)
return results, TraverseFilesInZip(ctx, archivePath, visitor, paths...)
}
// ContentsFromZip extracts select paths for the given archive and returns a set of string contents for each path.
func ContentsFromZip(archivePath string, paths ...string) (map[string]string, error) {
func ContentsFromZip(ctx context.Context, archivePath string, paths ...string) (map[string]string, error) {
results := make(map[string]string)
// don't allow for full traversal, only select traversal from given paths
@ -126,37 +122,38 @@ func ContentsFromZip(archivePath string, paths ...string) (map[string]string, er
return results, nil
}
visitor := func(file *zip.File) error {
visitor := func(_ context.Context, file archives.FileInfo) error {
zippedFile, err := file.Open()
if err != nil {
return fmt.Errorf("unable to read file=%q from zip=%q: %w", file.Name, archivePath, err)
return fmt.Errorf("unable to read file=%q from zip=%q: %w", file.NameInArchive, archivePath, err)
}
defer func() {
if err := zippedFile.Close(); err != nil {
log.Errorf("unable to close source file=%q from zip=%q: %+v", file.NameInArchive, archivePath, err)
}
}()
if file.FileInfo().IsDir() {
return fmt.Errorf("unable to extract directories, only files: %s", file.Name)
if file.IsDir() {
return fmt.Errorf("unable to extract directories, only files: %s", file.NameInArchive)
}
var buffer bytes.Buffer
if err := safeCopy(&buffer, zippedFile); err != nil {
return fmt.Errorf("unable to copy source=%q for zip=%q: %w", file.Name, archivePath, err)
return fmt.Errorf("unable to copy source=%q for zip=%q: %w", file.NameInArchive, archivePath, err)
}
results[file.Name] = buffer.String()
results[file.NameInArchive] = buffer.String()
err = zippedFile.Close()
if err != nil {
return fmt.Errorf("unable to close source file=%q from zip=%q: %w", file.Name, archivePath, err)
}
return nil
}
return results, TraverseFilesInZip(archivePath, visitor, paths...)
return results, TraverseFilesInZip(ctx, archivePath, visitor, paths...)
}
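A usage sketch for the updated signature (illustrative only; the jar path is a placeholder and the internal import assumes in-module code). Because jars are zips under the hood, this is the typical way a cataloger pulls a single named entry into memory.

package main

import (
	"context"
	"fmt"

	"github.com/anchore/syft/internal/file"
)

func main() {
	// read only the manifest entry out of the archive; unrequested paths are skipped
	contents, err := file.ContentsFromZip(context.Background(), "example.jar", "META-INF/MANIFEST.MF")
	if err != nil {
		panic(err)
	}
	fmt.Print(contents["META-INF/MANIFEST.MF"])
}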
// UnzipToDir extracts a zip archive to a target directory.
func UnzipToDir(archivePath, targetDir string) error {
visitor := func(file *zip.File) error {
joinedPath, err := safeJoin(targetDir, file.Name)
func UnzipToDir(ctx context.Context, archivePath, targetDir string) error {
visitor := func(_ context.Context, file archives.FileInfo) error {
joinedPath, err := SafeJoin(targetDir, file.NameInArchive)
if err != nil {
return err
}
@ -164,11 +161,11 @@ func UnzipToDir(archivePath, targetDir string) error {
return extractSingleFile(file, joinedPath, archivePath)
}
return TraverseFilesInZip(archivePath, visitor)
return TraverseFilesInZip(ctx, archivePath, visitor)
}
// safeJoin ensures that any destinations do not resolve to a path above the prefix path.
func safeJoin(prefix string, dest ...string) (string, error) {
// SafeJoin ensures that any destinations do not resolve to a path above the prefix path.
func SafeJoin(prefix string, dest ...string) (string, error) {
joinResult := filepath.Join(append([]string{prefix}, dest...)...)
cleanJoinResult := filepath.Clean(joinResult)
if !strings.HasPrefix(cleanJoinResult, filepath.Clean(prefix)) {
@ -181,13 +178,18 @@ func safeJoin(prefix string, dest ...string) (string, error) {
return joinResult, nil
}
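To make the zip-slip guarantee concrete, a small sketch of the newly exported SafeJoin (not part of the diff; paths are placeholders and the internal import assumes in-module code): internal ".." segments that still resolve under the prefix are allowed, while anything resolving above it is rejected.

package main

import (
	"fmt"

	"github.com/anchore/syft/internal/file"
)

func main() {
	// contained path: the internal ".." still resolves under the prefix, so it is allowed
	p, err := file.SafeJoin("/tmp/extract", "dir1/../dir2/file.txt")
	fmt.Println(p, err) // /tmp/extract/dir2/file.txt <nil>

	// escaping path: resolves above the prefix, so it is rejected
	_, err = file.SafeJoin("/tmp/extract", "../../etc/passwd")
	fmt.Println(err) // path traversal detected: paths are not allowed to resolve outside of the root prefix ...
}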
func extractSingleFile(file *zip.File, expandedFilePath, archivePath string) error {
func extractSingleFile(file archives.FileInfo, expandedFilePath, archivePath string) error {
zippedFile, err := file.Open()
if err != nil {
return fmt.Errorf("unable to read file=%q from zip=%q: %w", file.Name, archivePath, err)
return fmt.Errorf("unable to read file=%q from zip=%q: %w", file.NameInArchive, archivePath, err)
}
defer func() {
if err := zippedFile.Close(); err != nil {
log.Errorf("unable to close source file=%q from zip=%q: %+v", file.NameInArchive, archivePath, err)
}
}()
if file.FileInfo().IsDir() {
if file.IsDir() {
err = os.MkdirAll(expandedFilePath, file.Mode())
if err != nil {
return fmt.Errorf("unable to create dir=%q from zip=%q: %w", expandedFilePath, archivePath, err)
@ -202,20 +204,16 @@ func extractSingleFile(file *zip.File, expandedFilePath, archivePath string) err
if err != nil {
return fmt.Errorf("unable to create dest file=%q from zip=%q: %w", expandedFilePath, archivePath, err)
}
defer func() {
if err := outputFile.Close(); err != nil {
log.Errorf("unable to close dest file=%q from zip=%q: %+v", outputFile.Name(), archivePath, err)
}
}()
if err := safeCopy(outputFile, zippedFile); err != nil {
return fmt.Errorf("unable to copy source=%q to dest=%q for zip=%q: %w", file.Name, outputFile.Name(), archivePath, err)
}
err = outputFile.Close()
if err != nil {
return fmt.Errorf("unable to close dest file=%q from zip=%q: %w", outputFile.Name(), archivePath, err)
return fmt.Errorf("unable to copy source=%q to dest=%q for zip=%q: %w", file.NameInArchive, outputFile.Name(), archivePath, err)
}
}
err = zippedFile.Close()
if err != nil {
return fmt.Errorf("unable to close source file=%q from zip=%q: %w", file.Name, archivePath, err)
}
return nil
}


@ -4,6 +4,8 @@
package file
import (
"archive/zip"
"context"
"crypto/sha256"
"encoding/json"
"errors"
@ -17,6 +19,7 @@ import (
"github.com/go-test/deep"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func equal(r1, r2 io.Reader) (bool, error) {
@ -55,7 +58,7 @@ func TestUnzipToDir(t *testing.T) {
expectedPaths := len(expectedZipArchiveEntries)
observedPaths := 0
err = UnzipToDir(archiveFilePath, unzipDestinationDir)
err = UnzipToDir(context.Background(), archiveFilePath, unzipDestinationDir)
if err != nil {
t.Fatalf("unable to unzip archive: %+v", err)
}
@ -145,7 +148,7 @@ func TestContentsFromZip(t *testing.T) {
paths = append(paths, p)
}
actual, err := ContentsFromZip(archivePath, paths...)
actual, err := ContentsFromZip(context.Background(), archivePath, paths...)
if err != nil {
t.Fatalf("unable to extract from unzip archive: %+v", err)
}
@ -307,9 +310,528 @@ func TestSafeJoin(t *testing.T) {
for _, test := range tests {
t.Run(fmt.Sprintf("%+v:%+v", test.prefix, test.args), func(t *testing.T) {
actual, err := safeJoin(test.prefix, test.args...)
actual, err := SafeJoin(test.prefix, test.args...)
test.errAssertion(t, err)
assert.Equal(t, test.expected, actual)
})
}
}
// TestSafeJoin_SymlinkProtection demonstrates that SafeJoin protects against symlink-based
// directory traversal attacks by validating that archive entry paths cannot escape
// the extraction directory.
func TestSafeJoin_SymlinkProtection(t *testing.T) {
tests := []struct {
name string
archivePath string // Path as it would appear in the archive
expectError bool
description string
}{
{
name: "path traversal via ../",
archivePath: "../../../outside/file.txt",
expectError: true,
description: "Archive entry with ../ trying to escape extraction dir",
},
{
name: "absolute path symlink target",
archivePath: "../../../sensitive.txt",
expectError: true,
description: "Simulates symlink pointing outside via relative path",
},
{
name: "safe relative path within extraction dir",
archivePath: "subdir/safe.txt",
expectError: false,
description: "Normal file path that stays within extraction directory",
},
{
name: "safe path with internal ../",
archivePath: "dir1/../dir2/file.txt",
expectError: false,
description: "Path with ../ that still resolves within extraction dir",
},
{
name: "deeply nested traversal",
archivePath: "../../../../../../tmp/evil.txt",
expectError: true,
description: "Multiple levels of ../ trying to escape",
},
{
name: "single parent directory escape",
archivePath: "../",
expectError: true,
description: "Simple one-level escape attempt",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
// Create temp directories to simulate extraction scenario
tmpDir := t.TempDir()
extractDir := filepath.Join(tmpDir, "extract")
outsideDir := filepath.Join(tmpDir, "outside")
require.NoError(t, os.MkdirAll(extractDir, 0755))
require.NoError(t, os.MkdirAll(outsideDir, 0755))
// Create a file outside extraction dir that an attacker might target
outsideFile := filepath.Join(outsideDir, "sensitive.txt")
require.NoError(t, os.WriteFile(outsideFile, []byte("sensitive data"), 0644))
// Test SafeJoin - this is what happens when processing archive entries
result, err := SafeJoin(extractDir, tt.archivePath)
if tt.expectError {
// Should block malicious paths
require.Error(t, err, "Expected SafeJoin to reject malicious path")
var zipSlipErr *errZipSlipDetected
assert.ErrorAs(t, err, &zipSlipErr, "Error should be errZipSlipDetected type")
assert.Empty(t, result, "Result should be empty for blocked paths")
} else {
// Should allow safe paths
require.NoError(t, err, "Expected SafeJoin to allow safe path")
assert.NotEmpty(t, result, "Result should not be empty for safe paths")
assert.True(t, strings.HasPrefix(filepath.Clean(result), filepath.Clean(extractDir)),
"Safe path should resolve within extraction directory")
}
})
}
}
// TestUnzipToDir_SymlinkAttacks tests UnzipToDir function with malicious ZIP archives
// containing symlink entries that attempt path traversal attacks.
//
// EXPECTED BEHAVIOR: UnzipToDir should either:
// 1. Detect and reject symlinks explicitly with a security error, OR
// 2. Extract them safely (library converts symlinks to regular files)
func TestUnzipToDir_SymlinkAttacks(t *testing.T) {
tests := []struct {
name string
symlinkName string
fileName string
errContains string
}{
{
name: "direct symlink to outside directory",
symlinkName: "evil_link",
fileName: "evil_link/payload.txt",
errContains: "not a directory", // attempt to write through symlink leaf (which is not a directory)
},
{
name: "directory symlink attack",
symlinkName: "safe_dir/link",
fileName: "safe_dir/link/payload.txt",
errContains: "not a directory", // attempt to write through symlink (which is not a directory)
},
{
name: "symlink without payload file",
symlinkName: "standalone_link",
fileName: "", // no payload file
errContains: "", // no error expected, symlink without payload is safe
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
tempDir := t.TempDir()
// create outside target directory
outsideDir := filepath.Join(tempDir, "outside_target")
require.NoError(t, os.MkdirAll(outsideDir, 0755))
// create extraction directory
extractDir := filepath.Join(tempDir, "extract")
require.NoError(t, os.MkdirAll(extractDir, 0755))
maliciousZip := createMaliciousZipWithSymlink(t, tempDir, tt.symlinkName, outsideDir, tt.fileName)
err := UnzipToDir(context.Background(), maliciousZip, extractDir)
// check error expectations
if tt.errContains != "" {
require.Error(t, err)
require.Contains(t, err.Error(), tt.errContains)
} else {
require.NoError(t, err)
}
analyzeExtractionDirectory(t, extractDir)
// check if payload file escaped extraction directory
if tt.fileName != "" {
maliciousFile := filepath.Join(outsideDir, filepath.Base(tt.fileName))
checkFileOutsideExtraction(t, maliciousFile)
}
// check if symlink was created pointing outside
symlinkPath := filepath.Join(extractDir, tt.symlinkName)
checkSymlinkCreation(t, symlinkPath, extractDir, outsideDir)
})
}
}
// TestContentsFromZip_SymlinkAttacks tests the ContentsFromZip function with malicious
// ZIP archives containing symlink entries.
//
// EXPECTED BEHAVIOR: ContentsFromZip should either:
// 1. Reject symlinks explicitly, OR
// 2. Return empty content for symlinks (library behavior)
//
// ContentsFromZip does not write to disk, but if symlinks were followed it could read sensitive
// files from outside the archive.
func TestContentsFromZip_SymlinkAttacks(t *testing.T) {
tests := []struct {
name string
symlinkName string
symlinkTarget string
requestPath string
errContains string
}{
{
name: "request symlink entry directly",
symlinkName: "evil_link",
symlinkTarget: "/etc/hosts", // attempt to read sensitive file
requestPath: "evil_link",
errContains: "", // no error expected - library returns symlink metadata
},
{
name: "symlink in nested directory",
symlinkName: "nested/link",
symlinkTarget: "/etc/hosts",
requestPath: "nested/link",
errContains: "", // no error expected - library returns symlink metadata
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
tempDir := t.TempDir()
// create malicious ZIP with symlink entry (no payload file needed)
maliciousZip := createMaliciousZipWithSymlink(t, tempDir, tt.symlinkName, tt.symlinkTarget, "")
contents, err := ContentsFromZip(context.Background(), maliciousZip, tt.requestPath)
// check error expectations
if tt.errContains != "" {
require.Error(t, err)
require.Contains(t, err.Error(), tt.errContains)
return
}
require.NoError(t, err)
// verify symlink handling - library should return symlink target as content (metadata)
content, found := contents[tt.requestPath]
require.True(t, found, "symlink entry should be found in results")
// verify symlink was NOT followed (content should be target path or empty)
if content != "" && content != tt.symlinkTarget {
// content is not empty and not the symlink target - check if actual file was read
if _, statErr := os.Stat(tt.symlinkTarget); statErr == nil {
targetContent, readErr := os.ReadFile(tt.symlinkTarget)
if readErr == nil && string(targetContent) == content {
t.Errorf("critical issue!... symlink was FOLLOWED and external file content was read!")
t.Logf(" symlink: %s → %s", tt.requestPath, tt.symlinkTarget)
t.Logf(" content length: %d bytes", len(content))
}
}
}
})
}
}
// TestExtractFromZipToUniqueTempFile_SymlinkAttacks tests the ExtractFromZipToUniqueTempFile
// function with malicious ZIP archives containing symlink entries.
//
// EXPECTED BEHAVIOR: ExtractFromZipToUniqueTempFile should either:
// 1. Reject symlinks explicitly, OR
// 2. Extract them safely (library converts to empty files, filepath.Base sanitizes names)
//
// This function uses filepath.Base() on the archive entry name for temp file prefix and
// os.CreateTemp() which creates files in the specified directory, so it should be protected.
func TestExtractFromZipToUniqueTempFile_SymlinkAttacks(t *testing.T) {
tests := []struct {
name string
symlinkName string
symlinkTarget string
requestPath string
errContains string
}{
{
name: "extract symlink entry to temp file",
symlinkName: "evil_link",
symlinkTarget: "/etc/passwd",
requestPath: "evil_link",
errContains: "", // no error expected - library extracts symlink metadata
},
{
name: "extract nested symlink",
symlinkName: "nested/dir/link",
symlinkTarget: "/tmp/outside",
requestPath: "nested/dir/link",
errContains: "", // no error expected
},
{
name: "extract path traversal symlink name",
symlinkName: "../../escape",
symlinkTarget: "/tmp/outside",
requestPath: "../../escape",
errContains: "", // no error expected - filepath.Base sanitizes name
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
tempDir := t.TempDir()
maliciousZip := createMaliciousZipWithSymlink(t, tempDir, tt.symlinkName, tt.symlinkTarget, "")
// create temp directory for extraction
extractTempDir := filepath.Join(tempDir, "temp_extract")
require.NoError(t, os.MkdirAll(extractTempDir, 0755))
openers, err := ExtractFromZipToUniqueTempFile(context.Background(), maliciousZip, extractTempDir, tt.requestPath)
// check error expectations
if tt.errContains != "" {
require.Error(t, err)
require.Contains(t, err.Error(), tt.errContains)
return
}
require.NoError(t, err)
// verify symlink was extracted
opener, found := openers[tt.requestPath]
require.True(t, found, "symlink entry should be extracted")
// verify temp file is within temp directory
tempFilePath := opener.path
cleanTempDir := filepath.Clean(extractTempDir)
cleanTempFile := filepath.Clean(tempFilePath)
require.True(t, strings.HasPrefix(cleanTempFile, cleanTempDir),
"temp file must be within temp directory: %s not in %s", cleanTempFile, cleanTempDir)
// verify symlink was NOT followed (content should be target path or empty)
f, openErr := opener.Open()
require.NoError(t, openErr)
defer f.Close()
content, readErr := io.ReadAll(f)
require.NoError(t, readErr)
// check if symlink was followed (content matches actual file)
if len(content) > 0 && string(content) != tt.symlinkTarget {
if _, statErr := os.Stat(tt.symlinkTarget); statErr == nil {
targetContent, readErr := os.ReadFile(tt.symlinkTarget)
if readErr == nil && string(targetContent) == string(content) {
t.Errorf("critical issue!... symlink was FOLLOWED and external file content was copied!")
t.Logf(" symlink: %s → %s", tt.requestPath, tt.symlinkTarget)
t.Logf(" content length: %d bytes", len(content))
}
}
}
})
}
}
// forensicFindings contains the results of analyzing an extraction directory
type forensicFindings struct {
symlinksFound []forensicSymlink
regularFiles []string
directories []string
symlinkVulnerabilities []string
}
type forensicSymlink struct {
path string
target string
escapesExtraction bool
resolvedPath string
}
// analyzeExtractionDirectory walks the extraction directory and detects symlinks that point
// outside the extraction directory. It is silent unless vulnerabilities are found.
func analyzeExtractionDirectory(t *testing.T, extractDir string) forensicFindings {
t.Helper()
findings := forensicFindings{}
filepath.Walk(extractDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
// only log if there's an error walking the directory
t.Logf("Error walking %s: %v", path, err)
return nil
}
relPath := strings.TrimPrefix(path, extractDir+"/")
if relPath == "" {
relPath = "."
}
// use Lstat to detect symlinks without following them
linfo, lerr := os.Lstat(path)
if lerr == nil && linfo.Mode()&os.ModeSymlink != 0 {
target, _ := os.Readlink(path)
// resolve to see where it actually points
var resolvedPath string
var escapesExtraction bool
if filepath.IsAbs(target) {
// absolute symlink
resolvedPath = target
cleanExtractDir := filepath.Clean(extractDir)
escapesExtraction = !strings.HasPrefix(filepath.Clean(target), cleanExtractDir)
if escapesExtraction {
t.Errorf("critical issue!... absolute symlink created: %s → %s", relPath, target)
t.Logf(" this symlink points outside the extraction directory")
findings.symlinkVulnerabilities = append(findings.symlinkVulnerabilities,
fmt.Sprintf("absolute symlink: %s → %s", relPath, target))
}
} else {
// relative symlink - resolve it
resolvedPath = filepath.Join(filepath.Dir(path), target)
cleanResolved := filepath.Clean(resolvedPath)
cleanExtractDir := filepath.Clean(extractDir)
escapesExtraction = !strings.HasPrefix(cleanResolved, cleanExtractDir)
if escapesExtraction {
t.Errorf("critical issue!... symlink escapes extraction dir: %s → %s", relPath, target)
t.Logf(" symlink resolves to: %s (outside extraction directory)", cleanResolved)
findings.symlinkVulnerabilities = append(findings.symlinkVulnerabilities,
fmt.Sprintf("relative symlink escape: %s → %s (resolves to %s)", relPath, target, cleanResolved))
}
}
findings.symlinksFound = append(findings.symlinksFound, forensicSymlink{
path: relPath,
target: target,
escapesExtraction: escapesExtraction,
resolvedPath: resolvedPath,
})
} else {
// regular file or directory - collect silently
if info.IsDir() {
findings.directories = append(findings.directories, relPath)
} else {
findings.regularFiles = append(findings.regularFiles, relPath)
}
}
return nil
})
return findings
}
// checkFileOutsideExtraction checks if a file was written outside the extraction directory.
// Returns true if the file exists (vulnerability), false otherwise. Silent on success.
func checkFileOutsideExtraction(t *testing.T, filePath string) bool {
t.Helper()
if stat, err := os.Stat(filePath); err == nil {
content, _ := os.ReadFile(filePath)
t.Errorf("critical issue!... file written OUTSIDE extraction directory!")
t.Logf(" location: %s", filePath)
t.Logf(" size: %d bytes", stat.Size())
t.Logf(" content: %s", string(content))
t.Logf(" ...this means an attacker can write files to arbitrary locations on the filesystem")
return true
}
// no file found outside extraction directory...
return false
}
// checkSymlinkCreation verifies if a symlink was created at the expected path and reports
// whether it points outside the extraction directory. Silent unless a symlink is found.
func checkSymlinkCreation(t *testing.T, symlinkPath, extractDir, expectedTarget string) bool {
t.Helper()
if linfo, err := os.Lstat(symlinkPath); err == nil {
if linfo.Mode()&os.ModeSymlink != 0 {
target, _ := os.Readlink(symlinkPath)
if expectedTarget != "" && target == expectedTarget {
t.Errorf("critical issue!... symlink pointing outside extraction dir was created!")
t.Logf(" Symlink: %s → %s", symlinkPath, target)
return true
}
// Check if it escapes even if target doesn't match expected
if filepath.IsAbs(target) {
cleanExtractDir := filepath.Clean(extractDir)
if !strings.HasPrefix(filepath.Clean(target), cleanExtractDir) {
t.Errorf("critical issue!... absolute symlink escapes extraction dir!")
t.Logf(" symlink: %s → %s", symlinkPath, target)
return true
}
}
}
// if it exists but is not a symlink, that's good (attack was thwarted)...
}
return false
}
// createMaliciousZipWithSymlink creates a ZIP archive containing a symlink entry pointing to an arbitrary target,
// optionally followed by a file entry that attempts to write through that symlink.
// It returns the path to the created ZIP archive.
func createMaliciousZipWithSymlink(t *testing.T, tempDir, symlinkName, symlinkTarget, fileName string) string {
t.Helper()
maliciousZip := filepath.Join(tempDir, "malicious.zip")
zipFile, err := os.Create(maliciousZip)
require.NoError(t, err)
defer zipFile.Close()
zw := zip.NewWriter(zipFile)
// create parent directories if the symlink is nested
if dir := filepath.Dir(symlinkName); dir != "." {
dirHeader := &zip.FileHeader{
Name: dir + "/",
Method: zip.Store,
}
dirHeader.SetMode(os.ModeDir | 0755)
_, err = zw.CreateHeader(dirHeader)
require.NoError(t, err)
}
// create symlink entry pointing outside extraction directory
// note: ZIP format stores symlinks as regular files with the target path as content
symlinkHeader := &zip.FileHeader{
Name: symlinkName,
Method: zip.Store,
}
symlinkHeader.SetMode(os.ModeSymlink | 0755)
symlinkWriter, err := zw.CreateHeader(symlinkHeader)
require.NoError(t, err)
// write the symlink target as the file content (this is how ZIP stores symlinks)
_, err = symlinkWriter.Write([]byte(symlinkTarget))
require.NoError(t, err)
// create file entry that will be written through the symlink
if fileName != "" {
payloadContent := []byte("MALICIOUS PAYLOAD - This should NOT be written outside extraction dir!")
payloadHeader := &zip.FileHeader{
Name: fileName,
Method: zip.Deflate,
}
payloadHeader.SetMode(0644)
payloadWriter, err := zw.CreateHeader(payloadHeader)
require.NoError(t, err)
_, err = payloadWriter.Write(payloadContent)
require.NoError(t, err)
}
require.NoError(t, zw.Close())
require.NoError(t, zipFile.Close())
return maliciousZip
}
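Taken together, these helpers form a zip-slip regression test: build the malicious archive, run the extractor under test, then assert that nothing escaped. A minimal sketch of the wiring (extractZip here is a hypothetical stand-in for the extraction code under test):

func Test_symlinkTraversalIsPrevented(t *testing.T) {
	tempDir := t.TempDir()
	extractDir := filepath.Join(tempDir, "extract")
	require.NoError(t, os.MkdirAll(extractDir, 0o755))
	// symlink "link" points at a directory outside extractDir; the payload then writes through it
	escapeDir := filepath.Join(tempDir, "outside")
	zipPath := createMaliciousZipWithSymlink(t, tempDir, "link", escapeDir, "link/pwned.txt")
	extractZip(t, zipPath, extractDir) // hypothetical function under test
	// neither check should fire if traversal is prevented
	escaped := checkFileOutsideExtraction(t, filepath.Join(escapeDir, "pwned.txt"))
	linked := checkSymlinkCreation(t, filepath.Join(extractDir, "link"), extractDir, escapeDir)
	assert.False(t, escaped || linked)
}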

View File

@ -1,229 +0,0 @@
package file
import (
"archive/zip"
"encoding/binary"
"errors"
"fmt"
"io"
"math"
"os"
"github.com/anchore/syft/internal/log"
)
// directoryEndLen, readBuf, directoryEnd, and findSignatureInBlock were copied from the golang stdlib, specifically:
// - https://github.com/golang/go/blob/go1.16.4/src/archive/zip/struct.go
// - https://github.com/golang/go/blob/go1.16.4/src/archive/zip/reader.go
// findArchiveStartOffset is derived from the same stdlib utils, specifically the readDirectoryEnd function.
const (
directoryEndLen = 22
directory64LocLen = 20
directory64EndLen = 56
directory64LocSignature = 0x07064b50
directory64EndSignature = 0x06064b50
)
// ZipReadCloser is a drop-in replacement for zip.ReadCloser (from zip.OpenReader) that additionally considers zips
// that have bytes prefixed to the front of the archive (common with self-extracting jars).
type ZipReadCloser struct {
*zip.Reader
io.Closer
}
// OpenZip provides a ZipReadCloser for the given filepath.
func OpenZip(filepath string) (*ZipReadCloser, error) {
f, err := os.Open(filepath)
if err != nil {
return nil, err
}
fi, err := f.Stat()
if err != nil {
f.Close()
return nil, err
}
// some archives may have bytes prepended to the front of the archive, such as with self-executing JARs. We first
// need to find the start of the archive and keep track of this offset.
offset, err := findArchiveStartOffset(f, fi.Size())
if err != nil {
log.Debugf("cannot find beginning of zip archive=%q : %v", filepath, err)
return nil, err
}
if _, err := f.Seek(0, io.SeekStart); err != nil {
return nil, fmt.Errorf("unable to seek to beginning of archive: %w", err)
}
if offset > math.MaxInt64 {
return nil, fmt.Errorf("archive start offset too large: %v", offset)
}
offset64 := int64(offset)
size := fi.Size() - offset64
r, err := zip.NewReader(io.NewSectionReader(f, offset64, size), size)
if err != nil {
log.Debugf("unable to open ZipReadCloser @ %q: %v", filepath, err)
return nil, err
}
return &ZipReadCloser{
Reader: r,
Closer: f,
}, nil
}
type readBuf []byte
func (b *readBuf) uint16() uint16 {
v := binary.LittleEndian.Uint16(*b)
*b = (*b)[2:]
return v
}
func (b *readBuf) uint32() uint32 {
v := binary.LittleEndian.Uint32(*b)
*b = (*b)[4:]
return v
}
func (b *readBuf) uint64() uint64 {
v := binary.LittleEndian.Uint64(*b)
*b = (*b)[8:]
return v
}
type directoryEnd struct {
diskNbr uint32 // unused
dirDiskNbr uint32 // unused
dirRecordsThisDisk uint64 // unused
directoryRecords uint64
directorySize uint64
directoryOffset uint64 // relative to file
}
// note: this is derived from readDirectoryEnd within the archive/zip package
func findArchiveStartOffset(r io.ReaderAt, size int64) (startOfArchive uint64, err error) {
// look for directoryEndSignature in the last 1k, then in the last 65k
var buf []byte
var directoryEndOffset int64
for i, bLen := range []int64{1024, 65 * 1024} {
if bLen > size {
bLen = size
}
buf = make([]byte, int(bLen))
if _, err := r.ReadAt(buf, size-bLen); err != nil && !errors.Is(err, io.EOF) {
return 0, err
}
if p := findSignatureInBlock(buf); p >= 0 {
buf = buf[p:]
directoryEndOffset = size - bLen + int64(p)
break
}
if i == 1 || bLen == size {
return 0, zip.ErrFormat
}
}
if buf == nil {
// we were unable to find the directoryEndSignature block
return 0, zip.ErrFormat
}
// read header into struct
b := readBuf(buf[4:]) // skip signature
d := &directoryEnd{
diskNbr: uint32(b.uint16()),
dirDiskNbr: uint32(b.uint16()),
dirRecordsThisDisk: uint64(b.uint16()),
directoryRecords: uint64(b.uint16()),
directorySize: uint64(b.uint32()),
directoryOffset: uint64(b.uint32()),
}
// Calculate where the zip data actually begins
// These sentinel maximums indicate the file may be zip64, with the real values in the zip64 end-of-central-directory record
if d.directoryRecords == 0xffff || d.directorySize == 0xffff || d.directoryOffset == 0xffffffff {
p, err := findDirectory64End(r, directoryEndOffset)
if err == nil && p >= 0 {
directoryEndOffset = p
err = readDirectory64End(r, p, d)
}
if err != nil {
return 0, err
}
}
startOfArchive = uint64(directoryEndOffset) - d.directorySize - d.directoryOffset
// Make sure directoryOffset points to somewhere in our file.
if d.directoryOffset >= uint64(size) {
return 0, zip.ErrFormat
}
return startOfArchive, nil
}
// findDirectory64End tries to read the zip64 locator just before the
// directory end and returns the offset of the zip64 directory end if
// found.
func findDirectory64End(r io.ReaderAt, directoryEndOffset int64) (int64, error) {
locOffset := directoryEndOffset - directory64LocLen
if locOffset < 0 {
return -1, nil // no need to look for a header outside the file
}
buf := make([]byte, directory64LocLen)
if _, err := r.ReadAt(buf, locOffset); err != nil {
return -1, err
}
b := readBuf(buf)
if sig := b.uint32(); sig != directory64LocSignature {
return -1, nil
}
if b.uint32() != 0 { // number of the disk with the start of the zip64 end of central directory
return -1, nil // the file is not a valid zip64-file
}
p := b.uint64() // relative offset of the zip64 end of central directory record
if b.uint32() != 1 { // total number of disks
return -1, nil // the file is not a valid zip64-file
}
return int64(p), nil
}
// readDirectory64End reads the zip64 directory end and updates the
// directory end with the zip64 directory end values.
func readDirectory64End(r io.ReaderAt, offset int64, d *directoryEnd) (err error) {
buf := make([]byte, directory64EndLen)
if _, err := r.ReadAt(buf, offset); err != nil {
return err
}
b := readBuf(buf)
if sig := b.uint32(); sig != directory64EndSignature {
return errors.New("could not read directory64End")
}
b = b[12:] // skip dir size, version and version needed (uint64 + 2x uint16)
d.diskNbr = b.uint32() // number of this disk
d.dirDiskNbr = b.uint32() // number of the disk with the start of the central directory
d.dirRecordsThisDisk = b.uint64() // total number of entries in the central directory on this disk
d.directoryRecords = b.uint64() // total number of entries in the central directory
d.directorySize = b.uint64() // size of the central directory
d.directoryOffset = b.uint64() // offset of start of central directory with respect to the starting disk number
return nil
}
func findSignatureInBlock(b []byte) int {
for i := len(b) - directoryEndLen; i >= 0; i-- {
// defined from directoryEndSignature
if b[i] == 'P' && b[i+1] == 'K' && b[i+2] == 0x05 && b[i+3] == 0x06 {
// n is length of comment
n := int(b[i+directoryEndLen-2]) | int(b[i+directoryEndLen-1])<<8
if n+directoryEndLen+i <= len(b) {
return i
}
}
}
return -1
}
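For context, the PK\x05\x06 bytes scanned for above mark the end-of-central-directory (EOCD) record, whose fixed portion is 22 bytes with the archive comment length in its final two bytes; that is why a candidate match is only accepted when signature offset + 22 + comment length lands at the end of the block. A standalone sketch of the same scan, run against a zip built in memory with junk prepended:

package main

import (
	"archive/zip"
	"bytes"
	"encoding/binary"
	"fmt"
)

func main() {
	// build a zip with bytes prepended, as with a self-extracting jar
	var buf bytes.Buffer
	buf.WriteString("junk at the beginning of the file...")
	zw := zip.NewWriter(&buf)
	w, _ := zw.Create("hello.txt")
	w.Write([]byte("hello"))
	zw.Close()

	b := buf.Bytes()
	const eocdLen = 22 // fixed portion of the EOCD record
	for i := len(b) - eocdLen; i >= 0; i-- {
		if b[i] == 'P' && b[i+1] == 'K' && b[i+2] == 0x05 && b[i+3] == 0x06 {
			// the final two fixed bytes hold the archive comment length
			n := int(binary.LittleEndian.Uint16(b[i+eocdLen-2:]))
			if i+eocdLen+n == len(b) {
				fmt.Printf("found EOCD at offset %d\n", i)
				break
			}
		}
	}
}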

View File

@ -1,50 +0,0 @@
//go:build !windows
// +build !windows
package file
import (
"os"
"testing"
"github.com/stretchr/testify/assert"
)
func TestFindArchiveStartOffset(t *testing.T) {
tests := []struct {
name string
archivePrep func(tb testing.TB) string
expected uint64
}{
{
name: "standard, non-nested zip",
archivePrep: prepZipSourceFixture,
expected: 0,
},
{
name: "zip with prepended bytes",
archivePrep: prependZipSourceFixtureWithString(t, "junk at the beginning of the file..."),
expected: 36,
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
archivePath := test.archivePrep(t)
f, err := os.Open(archivePath)
if err != nil {
t.Fatalf("could not open archive %q: %+v", archivePath, err)
}
fi, err := os.Stat(f.Name())
if err != nil {
t.Fatalf("unable to stat archive: %+v", err)
}
actual, err := findArchiveStartOffset(f, fi.Size())
if err != nil {
t.Fatalf("unable to find offset: %+v", err)
}
assert.Equal(t, test.expected, actual)
})
}
}

View File

@ -81,6 +81,10 @@ func Test_EnvironmentTask(t *testing.T) {
// get the source
theSource, err := syft.GetSource(context.Background(), tarPath, syft.DefaultGetSourceConfig().WithSources("docker-archive"))
require.NoError(t, err)
t.Cleanup(func() {
require.NoError(t, theSource.Close())
})
resolver, err := theSource.FileResolver(source.SquashedScope)
require.NoError(t, err)

View File

@ -27,6 +27,7 @@ func AllTypes() []any {
pkg.ELFBinaryPackageNoteJSONPayload{},
pkg.ElixirMixLockEntry{},
pkg.ErlangRebarLockEntry{},
pkg.GGUFFileHeader{},
pkg.GitHubActionsUseStatement{},
pkg.GolangBinaryBuildinfoEntry{},
pkg.GolangModuleEntry{},
@ -49,8 +50,10 @@ func AllTypes() []any {
pkg.PhpComposerLockEntry{},
pkg.PhpPearEntry{},
pkg.PhpPeclEntry{},
pkg.PnpmLockEntry{},
pkg.PortageEntry{},
pkg.PythonPackage{},
pkg.PythonPdmLockEntry{},
pkg.PythonPipfileLockEntry{},
pkg.PythonPoetryLockEntry{},
pkg.PythonRequirementsEntry{},

View File

@ -95,13 +95,15 @@ var jsonTypes = makeJSONTypes(
jsonNames(pkg.NpmPackage{}, "javascript-npm-package", "NpmPackageJsonMetadata"),
jsonNames(pkg.NpmPackageLockEntry{}, "javascript-npm-package-lock-entry", "NpmPackageLockJsonMetadata"),
jsonNames(pkg.YarnLockEntry{}, "javascript-yarn-lock-entry", "YarnLockJsonMetadata"),
jsonNames(pkg.PnpmLockEntry{}, "javascript-pnpm-lock-entry"),
jsonNames(pkg.PEBinary{}, "pe-binary"),
jsonNames(pkg.PhpComposerLockEntry{}, "php-composer-lock-entry", "PhpComposerJsonMetadata"),
jsonNamesWithoutLookup(pkg.PhpComposerInstalledEntry{}, "php-composer-installed-entry", "PhpComposerJsonMetadata"), // the legacy value is split into two types, where the other is preferred
jsonNames(pkg.PhpPeclEntry{}, "php-pecl-entry", "PhpPeclMetadata"),
jsonNames(pkg.PhpPeclEntry{}, "php-pecl-entry", "PhpPeclMetadata"), //nolint:staticcheck
jsonNames(pkg.PhpPearEntry{}, "php-pear-entry"),
jsonNames(pkg.PortageEntry{}, "portage-db-entry", "PortageMetadata"),
jsonNames(pkg.PythonPackage{}, "python-package", "PythonPackageMetadata"),
jsonNames(pkg.PythonPdmLockEntry{}, "python-pdm-lock-entry"),
jsonNames(pkg.PythonPipfileLockEntry{}, "python-pipfile-lock-entry", "PythonPipfileLockMetadata"),
jsonNames(pkg.PythonPoetryLockEntry{}, "python-poetry-lock-entry", "PythonPoetryLockMetadata"),
jsonNames(pkg.PythonRequirementsEntry{}, "python-pip-requirements-entry", "PythonRequirementsMetadata"),
@ -122,6 +124,7 @@ var jsonTypes = makeJSONTypes(
jsonNames(pkg.TerraformLockProviderEntry{}, "terraform-lock-provider-entry"),
jsonNames(pkg.DotnetPackagesLockEntry{}, "dotnet-packages-lock-entry"),
jsonNames(pkg.CondaMetaPackage{}, "conda-metadata-entry", "CondaPackageMetadata"),
jsonNames(pkg.GGUFFileHeader{}, "gguf-file-header"),
)
func expandLegacyNameVariants(names ...string) []string {

View File

@ -10,7 +10,6 @@ import (
"sort"
"strings"
"text/template"
"time"
)
// This program generates license_list.go.
@ -20,8 +19,7 @@ const (
)
var tmp = template.Must(template.New("").Parse(`// Code generated by go generate; DO NOT EDIT.
// This file was generated by robots at {{ .Timestamp }}
// using data from {{ .URL }}
// This file was generated using data from {{ .URL }}
package spdxlicense
const Version = {{ printf "%q" .Version }}
@ -78,13 +76,11 @@ func run() error {
urlToLicense := buildURLToLicenseMap(result)
err = tmp.Execute(f, struct {
Timestamp time.Time
URL string
Version string
LicenseIDs map[string]string
URLToLicense map[string]string
}{
Timestamp: time.Now(),
URL: url,
Version: result.Version,
LicenseIDs: licenseIDs,

View File

@ -3,6 +3,7 @@ package task
import (
"github.com/anchore/syft/syft/cataloging/pkgcataloging"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/ai"
"github.com/anchore/syft/syft/pkg/cataloger/alpine"
"github.com/anchore/syft/syft/pkg/cataloger/arch"
"github.com/anchore/syft/syft/pkg/cataloger/binary"
@ -52,6 +53,9 @@ const (
JavaScript = "javascript"
Node = "node"
NPM = "npm"
// Python ecosystem labels
Python = "python"
)
//nolint:funlen
@ -109,7 +113,7 @@ func DefaultPackageTaskFactories() Factories {
func(cfg CatalogingFactoryConfig) pkg.Cataloger {
return python.NewPackageCataloger(cfg.PackagesConfig.Python)
},
pkgcataloging.DeclaredTag, pkgcataloging.DirectoryTag, pkgcataloging.LanguageTag, "python",
pkgcataloging.DeclaredTag, pkgcataloging.DirectoryTag, pkgcataloging.LanguageTag, Python,
),
newSimplePackageTaskFactory(ruby.NewGemFileLockCataloger, pkgcataloging.DeclaredTag, pkgcataloging.DirectoryTag, pkgcataloging.LanguageTag, "ruby", "gem"),
newSimplePackageTaskFactory(ruby.NewGemSpecCataloger, pkgcataloging.DeclaredTag, pkgcataloging.DirectoryTag, pkgcataloging.LanguageTag, "ruby", "gem", "gemspec"),
@ -127,7 +131,7 @@ func DefaultPackageTaskFactories() Factories {
pkgcataloging.InstalledTag, pkgcataloging.ImageTag, pkgcataloging.DirectoryTag, pkgcataloging.LanguageTag, "dotnet", "c#",
),
newSimplePackageTaskFactory(dotnet.NewDotnetPackagesLockCataloger, pkgcataloging.DeclaredTag, pkgcataloging.ImageTag, pkgcataloging.DirectoryTag, pkgcataloging.LanguageTag, "dotnet", "c#"),
newSimplePackageTaskFactory(python.NewInstalledPackageCataloger, pkgcataloging.DirectoryTag, pkgcataloging.InstalledTag, pkgcataloging.ImageTag, pkgcataloging.LanguageTag, "python"),
newSimplePackageTaskFactory(python.NewInstalledPackageCataloger, pkgcataloging.DirectoryTag, pkgcataloging.InstalledTag, pkgcataloging.ImageTag, pkgcataloging.LanguageTag, Python),
newPackageTaskFactory(
func(cfg CatalogingFactoryConfig) pkg.Cataloger {
return golang.NewGoModuleBinaryCataloger(cfg.PackagesConfig.Golang)
@ -175,12 +179,13 @@ func DefaultPackageTaskFactories() Factories {
newSimplePackageTaskFactory(homebrew.NewCataloger, pkgcataloging.DirectoryTag, pkgcataloging.InstalledTag, pkgcataloging.ImageTag, "homebrew"),
newSimplePackageTaskFactory(conda.NewCondaMetaCataloger, pkgcataloging.DirectoryTag, pkgcataloging.InstalledTag, pkgcataloging.PackageTag, "conda"),
newSimplePackageTaskFactory(snap.NewCataloger, pkgcataloging.DirectoryTag, pkgcataloging.InstalledTag, pkgcataloging.ImageTag, "snap"),
newSimplePackageTaskFactory(ai.NewGGUFCataloger, pkgcataloging.DirectoryTag, pkgcataloging.ImageTag, "ai", "model", "gguf", "ml"),
// deprecated catalogers ////////////////////////////////////////
// these are catalogers that should not be selectable other than specific inclusion via name or "deprecated" tag (to remain backwards compatible)
newSimplePackageTaskFactory(dotnet.NewDotnetDepsCataloger, pkgcataloging.DeprecatedTag), // TODO: remove in syft v2.0
newSimplePackageTaskFactory(dotnet.NewDotnetPortableExecutableCataloger, pkgcataloging.DeprecatedTag), // TODO: remove in syft v2.0
newSimplePackageTaskFactory(php.NewPeclCataloger, pkgcataloging.DeprecatedTag), // TODO: remove in syft v2.0
newSimplePackageTaskFactory(nix.NewStoreCataloger, pkgcataloging.DeprecatedTag), // TODO: remove in syft v2.0
newSimplePackageTaskFactory(dotnet.NewDotnetDepsCataloger, pkgcataloging.DeprecatedTag), //nolint:staticcheck // TODO: remove in syft v2.0
newSimplePackageTaskFactory(dotnet.NewDotnetPortableExecutableCataloger, pkgcataloging.DeprecatedTag), //nolint:staticcheck // TODO: remove in syft v2.0
newSimplePackageTaskFactory(php.NewPeclCataloger, pkgcataloging.DeprecatedTag), //nolint:staticcheck // TODO: remove in syft v2.0
newSimplePackageTaskFactory(nix.NewStoreCataloger, pkgcataloging.DeprecatedTag), //nolint:staticcheck // TODO: remove in syft v2.0
}
}

View File

@ -4,7 +4,8 @@ import (
"context"
"strings"
"github.com/anchore/archiver/v3"
"github.com/mholt/archives"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/internal/sbomsync"
"github.com/anchore/syft/syft/cataloging"
@ -57,9 +58,10 @@ func (c unknownsLabelerTask) finalize(resolver file.Resolver, s *sbom.SBOM) {
}
if c.IncludeUnexpandedArchives {
ctx := context.Background()
for coords := range s.Artifacts.FileMetadata {
unarchiver, notArchiveErr := archiver.ByExtension(coords.RealPath)
if unarchiver != nil && notArchiveErr == nil && !hasPackageReference(coords) {
format, _, notArchiveErr := archives.Identify(ctx, coords.RealPath, nil)
if format != nil && notArchiveErr == nil && !hasPackageReference(coords) {
s.Artifacts.Unknowns[coords] = append(s.Artifacts.Unknowns[coords], "archive not cataloged")
}
}
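For reference, the replacement call from mholt/archives used above identifies a format from the file name and, when a stream is supplied, from its content; passing a nil stream (as this task does) matches on the path alone. A minimal standalone sketch of that pattern:

package main

import (
	"context"
	"fmt"

	"github.com/mholt/archives"
)

func main() {
	// nil stream: identification falls back to the file name, mirroring unknownsLabelerTask
	format, _, err := archives.Identify(context.Background(), "some/path/app.tar.gz", nil)
	if format != nil && err == nil {
		fmt.Println("recognized as an archive")
	} else {
		fmt.Println("not a recognized archive:", err)
	}
}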

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -1,6 +1,6 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "anchore.io/schema/syft/json/16.0.40/document",
"$id": "anchore.io/schema/syft/json/16.1.0/document",
"$ref": "#/$defs/Document",
"$defs": {
"AlpmDbEntry": {
@ -130,7 +130,8 @@
"description": "Digests contains file content hashes for integrity verification"
}
},
"type": "object"
"type": "object",
"description": "AlpmFileRecord represents a single file entry within an Arch Linux package with its associated metadata tracked by pacman."
},
"ApkDbEntry": {
"properties": {
@ -433,16 +434,19 @@
"CPE": {
"properties": {
"cpe": {
"type": "string"
"type": "string",
"description": "Value is the CPE string identifier."
},
"source": {
"type": "string"
"type": "string",
"description": "Source is the source where this CPE was obtained or generated from."
}
},
"type": "object",
"required": [
"cpe"
]
],
"description": "CPE represents a Common Platform Enumeration identifier used for matching packages to known vulnerabilities in security databases."
},
"ClassifierMatch": {
"properties": {
@ -747,19 +751,23 @@
"Descriptor": {
"properties": {
"name": {
"type": "string"
"type": "string",
"description": "Name is the name of the tool that generated this SBOM (e.g., \"syft\")."
},
"version": {
"type": "string"
"type": "string",
"description": "Version is the version of the tool that generated this SBOM."
},
"configuration": true
"configuration": {
"description": "Configuration contains the tool configuration used during SBOM generation."
}
},
"type": "object",
"required": [
"name",
"version"
],
"description": "Descriptor describes what created the document as well as surrounding metadata"
"description": "Descriptor identifies the tool that generated this SBOM document, including its name, version, and configuration used during catalog generation."
},
"Digest": {
"properties": {
@ -1285,58 +1293,71 @@
"File": {
"properties": {
"id": {
"type": "string"
"type": "string",
"description": "ID is a unique identifier for this file within the SBOM."
},
"location": {
"$ref": "#/$defs/Coordinates"
"$ref": "#/$defs/Coordinates",
"description": "Location is the file path and layer information where this file was found."
},
"metadata": {
"$ref": "#/$defs/FileMetadataEntry"
"$ref": "#/$defs/FileMetadataEntry",
"description": "Metadata contains filesystem metadata such as permissions, ownership, and file type."
},
"contents": {
"type": "string"
"type": "string",
"description": "Contents is the file contents for small files."
},
"digests": {
"items": {
"$ref": "#/$defs/Digest"
},
"type": "array"
"type": "array",
"description": "Digests contains cryptographic hashes of the file contents."
},
"licenses": {
"items": {
"$ref": "#/$defs/FileLicense"
},
"type": "array"
"type": "array",
"description": "Licenses contains license information discovered within this file."
},
"executable": {
"$ref": "#/$defs/Executable"
"$ref": "#/$defs/Executable",
"description": "Executable contains executable metadata if this file is a binary."
},
"unknowns": {
"items": {
"type": "string"
},
"type": "array"
"type": "array",
"description": "Unknowns contains unknown fields for forward compatibility."
}
},
"type": "object",
"required": [
"id",
"location"
]
],
"description": "File represents a file discovered during cataloging with its metadata, content digests, licenses, and relationships to packages."
},
"FileLicense": {
"properties": {
"value": {
"type": "string"
"type": "string",
"description": "Value is the raw license identifier or text as found in the file."
},
"spdxExpression": {
"type": "string"
"type": "string",
"description": "SPDXExpression is the parsed SPDX license expression."
},
"type": {
"type": "string"
"type": "string",
"description": "Type is the license type classification (e.g., declared, concluded, discovered)."
},
"evidence": {
"$ref": "#/$defs/FileLicenseEvidence"
"$ref": "#/$defs/FileLicenseEvidence",
"description": "Evidence contains supporting evidence for this license detection."
}
},
"type": "object",
@ -1344,18 +1365,22 @@
"value",
"spdxExpression",
"type"
]
],
"description": "FileLicense represents license information discovered within a file's contents or metadata, including the matched license text and SPDX expression."
},
"FileLicenseEvidence": {
"properties": {
"confidence": {
"type": "integer"
"type": "integer",
"description": "Confidence is the confidence score for this license detection (0-100)."
},
"offset": {
"type": "integer"
"type": "integer",
"description": "Offset is the byte offset where the license text starts in the file."
},
"extent": {
"type": "integer"
"type": "integer",
"description": "Extent is the length of the license text in bytes."
}
},
"type": "object",
@ -1363,30 +1388,38 @@
"confidence",
"offset",
"extent"
]
],
"description": "FileLicenseEvidence contains supporting evidence for a license detection in a file, including the byte offset, extent, and confidence level."
},
"FileMetadataEntry": {
"properties": {
"mode": {
"type": "integer"
"type": "integer",
"description": "Mode is the Unix file permission mode in octal format."
},
"type": {
"type": "string"
"type": "string",
"description": "Type is the file type (e.g., \"RegularFile\", \"Directory\", \"SymbolicLink\")."
},
"linkDestination": {
"type": "string"
"type": "string",
"description": "LinkDestination is the target path for symbolic links."
},
"userID": {
"type": "integer"
"type": "integer",
"description": "UserID is the file owner user ID."
},
"groupID": {
"type": "integer"
"type": "integer",
"description": "GroupID is the file owner group ID."
},
"mimeType": {
"type": "string"
"type": "string",
"description": "MIMEType is the MIME type of the file contents."
},
"size": {
"type": "integer"
"type": "integer",
"description": "Size is the file size in bytes."
}
},
"type": "object",
@ -1397,7 +1430,50 @@
"groupID",
"mimeType",
"size"
]
],
"description": "FileMetadataEntry contains filesystem-level metadata attributes such as permissions, ownership, type, and size for a cataloged file."
},
"GgufFileHeader": {
"properties": {
"ggufVersion": {
"type": "integer",
"description": "GGUFVersion is the GGUF format version (e.g., 3)"
},
"fileSize": {
"type": "integer",
"description": "FileSize is the size of the GGUF file in bytes (best-effort if available from resolver)"
},
"architecture": {
"type": "string",
"description": "Architecture is the model architecture (from general.architecture, e.g., \"qwen3moe\", \"llama\")"
},
"quantization": {
"type": "string",
"description": "Quantization is the quantization type (e.g., \"IQ4_NL\", \"Q4_K_M\")"
},
"parameters": {
"type": "integer",
"description": "Parameters is the number of model parameters (if present in header)"
},
"tensorCount": {
"type": "integer",
"description": "TensorCount is the number of tensors in the model"
},
"header": {
"type": "object",
"description": "RemainingKeyValues contains the remaining key-value pairs from the GGUF header that are not already\nrepresented as typed fields above. This preserves additional metadata fields for reference\n(namespaced with general.*, llama.*, etc.) while avoiding duplication."
},
"metadataHash": {
"type": "string",
"description": "MetadataKeyValuesHash is a xx64 hash of all key-value pairs from the GGUF header metadata.\nThis hash is computed over the complete header metadata (including the fields extracted\ninto typed fields above) and provides a stable identifier for the model configuration\nacross different file locations or remotes. It allows matching identical models even\nwhen stored in different repositories or with different filenames."
}
},
"type": "object",
"required": [
"ggufVersion",
"tensorCount"
],
"description": "GGUFFileHeader represents metadata extracted from a GGUF (GPT-Generated Unified Format) model file."
},
"GithubActionsUseStatement": {
"properties": {
@ -1545,7 +1621,8 @@
"items": {
"type": "string"
},
"type": "array"
"type": "array",
"description": "IDLikes represents a list of distribution IDs that this Linux distribution is similar to or derived from, as defined in os-release ID_LIKE field."
},
"JavaArchive": {
"properties": {
@ -1876,15 +1953,48 @@
"integrity": {
"type": "string",
"description": "Integrity is Subresource Integrity hash for verification using standard SRI format (sha512-... or sha1-...). npm changed from SHA-1 to SHA-512 in newer versions. For registry sources this is the integrity from registry, for remote tarballs it's SHA-512 of the file. npm verifies tarball matches this hash before unpacking, throwing EINTEGRITY error if mismatch detected."
},
"dependencies": {
"patternProperties": {
".*": {
"type": "string"
}
},
"type": "object",
"description": "Dependencies is a map of dependencies and their version markers, i.e. \"lodash\": \"^1.0.0\""
}
},
"type": "object",
"required": [
"resolved",
"integrity"
"integrity",
"dependencies"
],
"description": "NpmPackageLockEntry represents a single entry within the \"packages\" section of a package-lock.json file."
},
"JavascriptPnpmLockEntry": {
"properties": {
"resolution": {
"$ref": "#/$defs/PnpmLockResolution",
"description": "Resolution is the resolution information for the package"
},
"dependencies": {
"patternProperties": {
".*": {
"type": "string"
}
},
"type": "object",
"description": "Dependencies is a map of dependencies and their versions"
}
},
"type": "object",
"required": [
"resolution",
"dependencies"
],
"description": "PnpmLockEntry represents a single entry in the \"packages\" section of a pnpm-lock.yaml file."
},
"JavascriptYarnLockEntry": {
"properties": {
"resolved": {
@ -1894,12 +2004,22 @@
"integrity": {
"type": "string",
"description": "Integrity is Subresource Integrity hash for verification (SRI format)"
},
"dependencies": {
"patternProperties": {
".*": {
"type": "string"
}
},
"type": "object",
"description": "Dependencies is a map of dependencies and their versions"
}
},
"type": "object",
"required": [
"resolved",
"integrity"
"integrity",
"dependencies"
],
"description": "YarnLockEntry represents a single entry section of a yarn.lock file."
},
@ -1931,28 +2051,34 @@
"License": {
"properties": {
"value": {
"type": "string"
"type": "string",
"description": "Value is the raw license identifier or expression as found."
},
"spdxExpression": {
"type": "string"
"type": "string",
"description": "SPDXExpression is the parsed SPDX license expression."
},
"type": {
"type": "string"
"type": "string",
"description": "Type is the license type classification (e.g., declared, concluded, discovered)."
},
"urls": {
"items": {
"type": "string"
},
"type": "array"
"type": "array",
"description": "URLs are URLs where license text or information can be found."
},
"locations": {
"items": {
"$ref": "#/$defs/Location"
},
"type": "array"
"type": "array",
"description": "Locations are file locations where this license was discovered."
},
"contents": {
"type": "string"
"type": "string",
"description": "Contents is the full license text content."
}
},
"type": "object",
@ -1962,7 +2088,8 @@
"type",
"urls",
"locations"
]
],
"description": "License represents software license information discovered for a package, including SPDX expressions and supporting evidence locations."
},
"LinuxKernelArchive": {
"properties": {
@ -2087,64 +2214,84 @@
"LinuxRelease": {
"properties": {
"prettyName": {
"type": "string"
"type": "string",
"description": "PrettyName is a human-readable operating system name with version."
},
"name": {
"type": "string"
"type": "string",
"description": "Name is the operating system name without version information."
},
"id": {
"type": "string"
"type": "string",
"description": "ID is the lower-case operating system identifier (e.g., \"ubuntu\", \"rhel\")."
},
"idLike": {
"$ref": "#/$defs/IDLikes"
"$ref": "#/$defs/IDLikes",
"description": "IDLike is a list of operating system IDs this distribution is similar to or derived from."
},
"version": {
"type": "string"
"type": "string",
"description": "Version is the operating system version including codename if available."
},
"versionID": {
"type": "string"
"type": "string",
"description": "VersionID is the operating system version number or identifier."
},
"versionCodename": {
"type": "string"
"type": "string",
"description": "VersionCodename is the operating system release codename (e.g., \"jammy\", \"bullseye\")."
},
"buildID": {
"type": "string"
"type": "string",
"description": "BuildID is a build identifier for the operating system."
},
"imageID": {
"type": "string"
"type": "string",
"description": "ImageID is an identifier for container or cloud images."
},
"imageVersion": {
"type": "string"
"type": "string",
"description": "ImageVersion is the version for container or cloud images."
},
"variant": {
"type": "string"
"type": "string",
"description": "Variant is the operating system variant name (e.g., \"Server\", \"Workstation\")."
},
"variantID": {
"type": "string"
"type": "string",
"description": "VariantID is the lower-case operating system variant identifier."
},
"homeURL": {
"type": "string"
"type": "string",
"description": "HomeURL is the homepage URL for the operating system."
},
"supportURL": {
"type": "string"
"type": "string",
"description": "SupportURL is the support or help URL for the operating system."
},
"bugReportURL": {
"type": "string"
"type": "string",
"description": "BugReportURL is the bug reporting URL for the operating system."
},
"privacyPolicyURL": {
"type": "string"
"type": "string",
"description": "PrivacyPolicyURL is the privacy policy URL for the operating system."
},
"cpeName": {
"type": "string"
"type": "string",
"description": "CPEName is the Common Platform Enumeration name for the operating system."
},
"supportEnd": {
"type": "string"
"type": "string",
"description": "SupportEnd is the end of support date or version identifier."
},
"extendedSupport": {
"type": "boolean"
"type": "boolean",
"description": "ExtendedSupport indicates whether extended security or support is available."
}
},
"type": "object"
"type": "object",
"description": "LinuxRelease contains Linux distribution identification and version information extracted from /etc/os-release or similar system files."
},
"Location": {
"properties": {
@ -2240,7 +2387,7 @@
"product_id",
"kb"
],
"description": "MicrosoftKbPatch is slightly odd in how it is expected to map onto data."
"description": "MicrosoftKbPatch represents a Windows Knowledge Base patch identifier associated with a specific Microsoft product from the MSRC (Microsoft Security Response Center)."
},
"NixDerivation": {
"properties": {
@ -2474,6 +2621,9 @@
{
"$ref": "#/$defs/ErlangRebarLockEntry"
},
{
"$ref": "#/$defs/GgufFileHeader"
},
{
"$ref": "#/$defs/GithubActionsUseStatement"
},
@ -2507,6 +2657,9 @@
{
"$ref": "#/$defs/JavascriptNpmPackageLockEntry"
},
{
"$ref": "#/$defs/JavascriptPnpmLockEntry"
},
{
"$ref": "#/$defs/JavascriptYarnLockEntry"
},
@ -2549,6 +2702,9 @@
{
"$ref": "#/$defs/PythonPackage"
},
{
"$ref": "#/$defs/PythonPdmLockEntry"
},
{
"$ref": "#/$defs/PythonPipRequirementsEntry"
},
@ -2955,6 +3111,19 @@
],
"description": "PhpPeclEntry represents a single package entry found within php pecl metadata files."
},
"PnpmLockResolution": {
"properties": {
"integrity": {
"type": "string",
"description": "Integrity is Subresource Integrity hash for verification (SRI format)"
}
},
"type": "object",
"required": [
"integrity"
],
"description": "PnpmLockResolution contains package resolution metadata from pnpm lockfiles, including the integrity hash used for verification."
},
"PortageDbEntry": {
"properties": {
"installedSize": {
@ -3131,6 +3300,100 @@
],
"description": "PythonPackage represents all captured data for a python egg or wheel package (specifically as outlined in the PyPA core metadata specification https://packaging.python.org/en/latest/specifications/core-metadata/)."
},
"PythonPdmFileEntry": {
"properties": {
"url": {
"type": "string",
"description": "URL is the file download URL"
},
"digest": {
"$ref": "#/$defs/PythonFileDigest",
"description": "Digest is the hash digest of the file hosted at the URL"
}
},
"type": "object",
"required": [
"url",
"digest"
]
},
"PythonPdmLockEntry": {
"properties": {
"summary": {
"type": "string",
"description": "Summary provides a description of the package"
},
"files": {
"items": {
"$ref": "#/$defs/PythonPdmFileEntry"
},
"type": "array",
"description": "Files are the package files with their paths and hash digests (for the base package without extras)"
},
"marker": {
"type": "string",
"description": "Marker is the \"environment\" --conditional expressions that determine whether a package should be installed based on the runtime environment"
},
"requiresPython": {
"type": "string",
"description": "RequiresPython specifies the Python version requirement (e.g., \"\u003e=3.6\")."
},
"dependencies": {
"items": {
"type": "string"
},
"type": "array",
"description": "Dependencies are the dependency specifications for the base package (without extras)"
},
"extras": {
"items": {
"$ref": "#/$defs/PythonPdmLockExtraVariant"
},
"type": "array",
"description": "Extras contains variants for different extras combinations (PDM may have multiple entries per package)"
}
},
"type": "object",
"required": [
"summary",
"files"
],
"description": "PythonPdmLockEntry represents a single package entry within a pdm.lock file."
},
"PythonPdmLockExtraVariant": {
"properties": {
"extras": {
"items": {
"type": "string"
},
"type": "array",
"description": "Extras are the optional extras enabled for this variant (e.g., [\"toml\"], [\"dev\"], or [\"toml\", \"dev\"])"
},
"dependencies": {
"items": {
"type": "string"
},
"type": "array",
"description": "Dependencies are the dependencies specific to this extras variant"
},
"files": {
"items": {
"$ref": "#/$defs/PythonPdmFileEntry"
},
"type": "array",
"description": "Files are the package files specific to this variant (only populated if different from base)"
},
"marker": {
"type": "string",
"description": "Marker is the environment conditional expression for this variant (e.g., \"python_version \u003c \\\"3.11\\\"\")"
}
},
"type": "object",
"required": [
"extras"
],
"description": "PythonPdmLockExtraVariant represents a specific extras combination variant within a PDM lock file."
},
"PythonPipRequirementsEntry": {
"properties": {
"name": {
@ -3411,22 +3674,28 @@
"Relationship": {
"properties": {
"parent": {
"type": "string"
"type": "string",
"description": "Parent is the ID of the parent artifact in this relationship."
},
"child": {
"type": "string"
"type": "string",
"description": "Child is the ID of the child artifact in this relationship."
},
"type": {
"type": "string"
"type": "string",
"description": "Type is the relationship type (e.g., \"contains\", \"dependency-of\", \"ancestor-of\")."
},
"metadata": true
"metadata": {
"description": "Metadata contains additional relationship-specific metadata."
}
},
"type": "object",
"required": [
"parent",
"child",
"type"
]
],
"description": "Relationship represents a directed relationship between two artifacts in the SBOM, such as package-contains-file or package-depends-on-package."
},
"RpmArchive": {
"properties": {
@ -3773,17 +4042,20 @@
"Schema": {
"properties": {
"version": {
"type": "string"
"type": "string",
"description": "Version is the JSON schema version for this document format."
},
"url": {
"type": "string"
"type": "string",
"description": "URL is the URL to the JSON schema definition document."
}
},
"type": "object",
"required": [
"version",
"url"
]
],
"description": "Schema specifies the JSON schema version and URL reference that defines the structure and validation rules for this document format."
},
"SnapEntry": {
"properties": {
@ -3821,21 +4093,28 @@
"Source": {
"properties": {
"id": {
"type": "string"
"type": "string",
"description": "ID is a unique identifier for the analyzed source artifact."
},
"name": {
"type": "string"
"type": "string",
"description": "Name is the name of the analyzed artifact (e.g., image name, directory path)."
},
"version": {
"type": "string"
"type": "string",
"description": "Version is the version of the analyzed artifact (e.g., image tag)."
},
"supplier": {
"type": "string"
"type": "string",
"description": "Supplier is supplier information, which can be user-provided for NTIA minimum elements compliance."
},
"type": {
"type": "string"
"type": "string",
"description": "Type is the source type (e.g., \"image\", \"directory\", \"file\")."
},
"metadata": true
"metadata": {
"description": "Metadata contains additional source-specific metadata."
}
},
"type": "object",
"required": [
@ -3845,7 +4124,7 @@
"type",
"metadata"
],
"description": "Instead, the Supplier can be determined by the user of syft and passed as a config or flag to help fulfill the NTIA minimum elements."
"description": "Source represents the artifact that was analyzed to generate this SBOM, such as a container image, directory, or file archive."
},
"SwiftPackageManagerLockEntry": {
"properties": {

View File

@ -3,6 +3,7 @@ package executable
import (
"debug/macho"
"github.com/anchore/syft/internal"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/internal/unionreader"
)
@ -19,20 +20,38 @@ const (
func findMachoFeatures(data *file.Executable, reader unionreader.UnionReader) error {
// TODO: support security features
// TODO: support multi-architecture binaries
f, err := macho.NewFile(reader)
// a universal binary may have multiple architectures, so we need to check each one
readers, err := unionreader.GetReaders(reader)
if err != nil {
return err
}
libs, err := f.ImportedLibraries()
if err != nil {
return err
var libs []string
for _, r := range readers {
f, err := macho.NewFile(r)
if err != nil {
return err
}
rLibs, err := f.ImportedLibraries()
if err != nil {
return err
}
libs = append(libs, rLibs...)
// TODO handle only some having entrypoints/exports? If that is even practical
// only check for entrypoint if we don't already have one
if !data.HasEntrypoint {
data.HasEntrypoint = machoHasEntrypoint(f)
}
// only check for exports if we don't already have them
if !data.HasExports {
data.HasExports = machoHasExports(f)
}
}
data.ImportedLibraries = libs
data.HasEntrypoint = machoHasEntrypoint(f)
data.HasExports = machoHasExports(f)
// de-duplicate libraries
data.ImportedLibraries = internal.NewSet(libs...).ToSlice()
return nil
}
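The standard library can also walk the slices of a universal binary directly; a small standalone sketch using debug/macho's fat-file API (rather than syft's internal unionreader), with the fixture path borrowed from the Makefile changes below:

package main

import (
	"debug/macho"
	"fmt"
	"log"
)

func main() {
	// a universal (fat) binary contains one complete Mach-O slice per architecture
	ff, err := macho.OpenFat("bin/hello_mac_universal")
	if err != nil {
		log.Fatal(err)
	}
	defer ff.Close()
	for _, arch := range ff.Arches {
		libs, _ := arch.ImportedLibraries()
		fmt.Printf("cpu=%v imported libraries=%v\n", arch.Cpu, libs)
	}
}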

View File

@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/internal/unionreader"
)
@ -83,3 +84,39 @@ func Test_machoHasExports(t *testing.T) {
})
}
}
func Test_machoUniversal(t *testing.T) {
readerForFixture := func(t *testing.T, fixture string) unionreader.UnionReader {
t.Helper()
f, err := os.Open(filepath.Join("test-fixtures/shared-info", fixture))
require.NoError(t, err)
return f
}
tests := []struct {
name string
fixture string
want file.Executable
}{
{
name: "universal lib",
fixture: "bin/libhello_universal.dylib",
want: file.Executable{HasExports: true, HasEntrypoint: false},
},
{
name: "universal application",
fixture: "bin/hello_mac_universal",
want: file.Executable{HasExports: false, HasEntrypoint: true},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
var data file.Executable
err := findMachoFeatures(&data, readerForFixture(t, tt.fixture))
require.NoError(t, err)
assert.Equal(t, tt.want.HasEntrypoint, data.HasEntrypoint)
assert.Equal(t, tt.want.HasExports, data.HasExports)
})
}
}

View File

@ -2,13 +2,13 @@
BIN=../../bin
all: $(BIN)/hello_linux $(BIN)/hello.exe $(BIN)/hello_mac
all: $(BIN)/hello_linux $(BIN)/hello.exe $(BIN)/hello_mac $(BIN)/hello_mac_universal
linux: $(BIN)/libhello.so
windows: $(BIN)/libhello.dll
mac: $(BIN)/libhello.dylib
mac: $(BIN)/libhello.dylib $(BIN)/hello_mac_universal
$(BIN)/hello_linux:
gcc hello.c -o $(BIN)/hello_linux
@ -19,5 +19,8 @@ $(BIN)/hello.exe:
$(BIN)/hello_mac:
o64-clang hello.c -o $(BIN)/hello_mac
$(BIN)/hello_mac_universal:
o64-clang -arch arm64 -arch x86_64 hello.c -o $(BIN)/hello_mac_universal
clean:
rm -f $(BIN)/hello_linux $(BIN)/hello.exe $(BIN)/hello_mac
rm -f $(BIN)/hello_linux $(BIN)/hello.exe $(BIN)/hello_mac $(BIN)/hello_mac_universal

View File

@ -2,13 +2,13 @@
BIN=../../bin
all: $(BIN)/libhello.so $(BIN)/libhello.dll $(BIN)/libhello.dylib
all: $(BIN)/libhello.so $(BIN)/libhello.dll $(BIN)/libhello.dylib $(BIN)/libhello_universal.dylib
linux: $(BIN)/libhello.so
windows: $(BIN)/libhello.dll
mac: $(BIN)/libhello.dylib
mac: $(BIN)/libhello.dylib $(BIN)/libhello_universal.dylib
$(BIN)/libhello.so:
gcc -shared -fPIC -o $(BIN)/libhello.so hello.c
@ -19,5 +19,8 @@ $(BIN)/libhello.dll:
$(BIN)/libhello.dylib:
o64-clang -dynamiclib -o $(BIN)/libhello.dylib hello.c
$(BIN)/libhello_universal.dylib:
o64-clang -dynamiclib -arch arm64 -arch x86_64 hello.c -o $(BIN)/libhello_universal.dylib
clean:
rm -f $(BIN)/libhello.so $(BIN)/hello.dll $(BIN)/libhello.dylib $(BIN)/libhello.a
rm -f $(BIN)/libhello.so $(BIN)/hello.dll $(BIN)/libhello.dylib $(BIN)/libhello.a $(BIN)/libhello_universal.dylib

View File

@ -0,0 +1,95 @@
package cpes
import (
"bufio"
"errors"
"fmt"
"io"
"strings"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/cpe"
"github.com/anchore/syft/syft/format/internal"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/sbom"
)
const ID sbom.FormatID = "cpes"
const version = "1"
var _ sbom.FormatDecoder = (*decoder)(nil)
type decoder struct{}
func NewFormatDecoder() sbom.FormatDecoder {
return decoder{}
}
func (d decoder) Decode(r io.Reader) (*sbom.SBOM, sbom.FormatID, string, error) {
if r == nil {
return nil, "", "", fmt.Errorf("no reader provided")
}
s, err := toSyftModel(r)
return s, ID, version, err
}
func (d decoder) Identify(r io.Reader) (sbom.FormatID, string) {
if r == nil {
return "", ""
}
scanner := bufio.NewScanner(r)
for scanner.Scan() {
line := strings.TrimSpace(scanner.Text())
if line == "" {
// skip whitespace only lines
continue
}
err := cpe.ValidateString(line)
if err != nil {
return "", ""
}
return ID, version
}
return "", ""
}
func toSyftModel(r io.Reader) (*sbom.SBOM, error) {
var errs []error
pkgs := pkg.NewCollection()
scanner := bufio.NewScanner(r)
for scanner.Scan() {
line := strings.TrimSpace(scanner.Text())
if line == "" {
continue
}
// skip invalid CPEs
c, err := cpe.New(line, "")
if err != nil {
log.WithFields("error", err, "line", line).Debug("unable to parse cpe")
continue
}
p := pkg.Package{
Name: c.Attributes.Product,
Version: c.Attributes.Version,
CPEs: []cpe.CPE{c},
}
internal.Backfill(&p)
p.SetID()
pkgs.Add(p)
}
return &sbom.SBOM{
Artifacts: sbom.Artifacts{
Packages: pkgs,
},
}, errors.Join(errs...)
}
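A short sketch of driving this decoder through the public package (PackageCount is assumed here to be the usual collection accessor):

package main

import (
	"fmt"
	"strings"

	"github.com/anchore/syft/syft/format/cpes"
)

func main() {
	dec := cpes.NewFormatDecoder()
	// one CPE string per line, as Decode expects
	s, id, version, err := dec.Decode(strings.NewReader("cpe:2.3:a:apache:log4j:2.14.1:*:*:*:*:*:*:*"))
	if err != nil {
		panic(err)
	}
	fmt.Println(id, version, s.Artifacts.Packages.PackageCount())
}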

View File

@ -0,0 +1,171 @@
package cpes
import (
"strings"
"testing"
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
"github.com/stretchr/testify/require"
"github.com/anchore/syft/syft/cpe"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/sbom"
)
func Test_CPEProvider(t *testing.T) {
tests := []struct {
name string
userInput string
sbom *sbom.SBOM
}{
{
name: "takes a single cpe",
userInput: "cpe:/a:apache:log4j:2.14.1",
sbom: &sbom.SBOM{
Artifacts: sbom.Artifacts{
Packages: pkg.NewCollection(pkg.Package{
Name: "log4j",
Version: "2.14.1",
CPEs: []cpe.CPE{
cpe.Must("cpe:/a:apache:log4j:2.14.1", ""),
},
}),
},
},
},
{
name: "takes multiple cpes",
userInput: `cpe:/a:apache:log4j:2.14.1
cpe:2.3:a:f5:nginx:*:*:*:*:*:*:*:*;
cpe:2.3:a:f5:nginx:0.5.2:*:*:*:*:*:*:*;
cpe:2.3:a:f5:nginx:0.5.3:*:*:*:*:*:*:*;`,
sbom: &sbom.SBOM{
Artifacts: sbom.Artifacts{
Packages: pkg.NewCollection(
pkg.Package{
Name: "log4j",
Version: "2.14.1",
CPEs: []cpe.CPE{
cpe.Must("cpe:/a:apache:log4j:2.14.1", ""),
},
},
pkg.Package{
Name: "nginx",
Version: "",
CPEs: []cpe.CPE{
cpe.Must("cpe:2.3:a:f5:nginx:*:*:*:*:*:*:*:*;", ""),
},
},
pkg.Package{
Name: "nginx",
Version: "0.5.2",
CPEs: []cpe.CPE{
cpe.Must("cpe:2.3:a:f5:nginx:0.5.2:*:*:*:*:*:*:*;", ""),
},
},
pkg.Package{
Name: "nginx",
Version: "0.5.3",
CPEs: []cpe.CPE{
cpe.Must("cpe:2.3:a:f5:nginx:0.5.3:*:*:*:*:*:*:*;", ""),
},
},
),
},
},
},
{
name: "takes cpe with no version",
userInput: "cpe:/a:apache:log4j",
sbom: &sbom.SBOM{
Artifacts: sbom.Artifacts{
Packages: pkg.NewCollection(pkg.Package{
Name: "log4j",
CPEs: []cpe.CPE{
cpe.Must("cpe:/a:apache:log4j", ""),
},
}),
},
},
},
{
name: "takes CPE 2.3 format",
userInput: "cpe:2.3:a:apache:log4j:2.14.1:*:*:*:*:*:*:*",
sbom: &sbom.SBOM{
Artifacts: sbom.Artifacts{
Packages: pkg.NewCollection(pkg.Package{
Name: "log4j",
Version: "2.14.1",
CPEs: []cpe.CPE{
cpe.Must("cpe:2.3:a:apache:log4j:2.14.1:*:*:*:*:*:*:*", ""),
},
}),
},
},
},
{
name: "deduces target SW from CPE - known target_sw",
userInput: "cpe:2.3:a:amazon:opensearch:*:*:*:*:*:ruby:*:*",
sbom: &sbom.SBOM{
Artifacts: sbom.Artifacts{
Packages: pkg.NewCollection(pkg.Package{
Name: "opensearch",
Type: pkg.GemPkg,
CPEs: []cpe.CPE{
cpe.Must("cpe:2.3:a:amazon:opensearch:*:*:*:*:*:ruby:*:*", ""),
},
}),
},
},
},
{
name: "handles unknown target_sw CPE field",
userInput: "cpe:2.3:a:amazon:opensearch:*:*:*:*:*:loremipsum:*:*",
sbom: &sbom.SBOM{
Artifacts: sbom.Artifacts{
Packages: pkg.NewCollection(pkg.Package{
Name: "opensearch",
Type: "",
CPEs: []cpe.CPE{
cpe.Must("cpe:2.3:a:amazon:opensearch:*:*:*:*:*:loremipsum:*:*", ""),
},
}),
},
},
},
{
name: "invalid prefix",
userInput: "dir:test-fixtures/cpe",
sbom: &sbom.SBOM{
Artifacts: sbom.Artifacts{
Packages: pkg.NewCollection(),
},
},
},
}
syftPkgOpts := []cmp.Option{
cmpopts.IgnoreFields(pkg.Package{}, "id", "Language"),
cmpopts.IgnoreUnexported(pkg.Package{}, file.LocationSet{}, pkg.LicenseSet{}),
}
for _, tc := range tests {
t.Run(tc.name, func(t *testing.T) {
dec := NewFormatDecoder()
decodedSBOM, _, _, err := dec.Decode(strings.NewReader(tc.userInput))
require.NoError(t, err)
gotSyftPkgs := decodedSBOM.Artifacts.Packages.Sorted()
wantSyftPkgs := tc.sbom.Artifacts.Packages.Sorted()
require.Equal(t, len(gotSyftPkgs), len(wantSyftPkgs))
for idx, wantPkg := range wantSyftPkgs {
if d := cmp.Diff(wantPkg, gotSyftPkgs[idx], syftPkgOpts...); d != "" {
t.Errorf("unexpected Syft Package (-want +got):\n%s", d)
}
}
})
}
}

View File

@ -3,6 +3,7 @@ package format
import (
"io"
"github.com/anchore/syft/syft/format/cpes"
"github.com/anchore/syft/syft/format/cyclonedxjson"
"github.com/anchore/syft/syft/format/cyclonedxxml"
"github.com/anchore/syft/syft/format/purls"
@ -26,6 +27,7 @@ func Decoders() []sbom.FormatDecoder {
spdxtagvalue.NewFormatDecoder(),
spdxjson.NewFormatDecoder(),
purls.NewFormatDecoder(),
cpes.NewFormatDecoder(),
}
}

View File

@ -1,11 +1,13 @@
package model
import (
"context"
"fmt"
"strings"
"time"
"github.com/anchore/archiver/v3"
"github.com/mholt/archives"
"github.com/anchore/packageurl-go"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/pkg"
@ -87,6 +89,9 @@ func toGithubManifests(s *sbom.SBOM) Manifests {
}
name := dependencyName(p)
if name == "" || p.PURL == "" {
continue
}
manifest.Resolved[name] = DependencyNode{
PackageURL: p.PURL,
Metadata: toDependencyMetadata(p),
@ -150,8 +155,8 @@ func trimRelative(s string) string {
// isArchive returns true if the path appears to be an archive
func isArchive(path string) bool {
_, err := archiver.ByExtension(path)
return err == nil
format, _, err := archives.Identify(context.Background(), path, nil)
return err == nil && format != nil
}
func toDependencies(s *sbom.SBOM, p pkg.Package) (out []string) {

View File

@ -16,11 +16,6 @@
"source_location": "redacted/some/path/some/path/pkg1"
},
"resolved": {
"": {
"package_url": "a-purl-2",
"relationship": "direct",
"scope": "runtime"
},
"pkg:deb/debian/package-2@2.0.1": {
"package_url": "pkg:deb/debian/package-2@2.0.1",
"relationship": "direct",

View File

@ -17,13 +17,6 @@
},
"metadata": {
"syft:filesystem":"redacted"
},
"resolved": {
"": {
"package_url": "a-purl-1",
"relationship": "direct",
"scope": "runtime"
}
}
},
"user-image-input:/somefile-2.txt": {

View File

@ -10,13 +10,31 @@ import (
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/cpe"
"github.com/anchore/syft/syft/pkg"
cataloger "github.com/anchore/syft/syft/pkg/cataloger/common/cpe"
)
// Backfill takes all information present in the package and attempts to fill in any missing information
// from any available sources, such as the Metadata and PURL.
// from any available sources, such as the Metadata, PURL, or CPEs.
//
// Backfill does not call p.SetID(), but this needs to be called later to ensure it's up to date
func Backfill(p *pkg.Package) {
backfillFromPurl(p)
backfillFromCPE(p)
}
func backfillFromCPE(p *pkg.Package) {
if len(p.CPEs) == 0 {
return
}
c := p.CPEs[0]
if p.Type == "" {
p.Type = cataloger.TargetSoftwareToPackageType(c.Attributes.TargetSW)
}
}
func backfillFromPurl(p *pkg.Package) {
if p.PURL == "" {
return
}
@ -29,6 +47,7 @@ func Backfill(p *pkg.Package) {
var cpes []cpe.CPE
epoch := ""
rpmmod := ""
for _, qualifier := range purl.Qualifiers {
switch qualifier.Key {
@ -44,6 +63,8 @@ func Backfill(p *pkg.Package) {
}
case pkg.PURLQualifierEpoch:
epoch = qualifier.Value
case pkg.PURLQualifierRpmModularity:
rpmmod = qualifier.Value
}
}
@ -63,6 +84,10 @@ func Backfill(p *pkg.Package) {
setJavaMetadataFromPurl(p, purl)
}
if p.Type == pkg.RpmPkg {
setRpmMetadataFromPurl(p, rpmmod)
}
for _, c := range cpes {
if slices.Contains(p.CPEs, c) {
continue
@ -82,6 +107,35 @@ func setJavaMetadataFromPurl(p *pkg.Package, _ packageurl.PackageURL) {
}
}
func setRpmMetadataFromPurl(p *pkg.Package, rpmmod string) {
if p.Type != pkg.RpmPkg {
return
}
if rpmmod == "" {
return
}
if p.Metadata == nil {
p.Metadata = pkg.RpmDBEntry{
ModularityLabel: &rpmmod,
}
return
}
switch m := p.Metadata.(type) {
case pkg.RpmDBEntry:
if m.ModularityLabel == nil {
m.ModularityLabel = &rpmmod
p.Metadata = m
}
case pkg.RpmArchive:
if m.ModularityLabel == nil {
m.ModularityLabel = &rpmmod
p.Metadata = m
}
}
}
func setVersionFromPurl(p *pkg.Package, purl packageurl.PackageURL, epoch string) {
if p.Version == "" {
p.Version = purl.Version

View File

@ -53,6 +53,21 @@ func Test_Backfill(t *testing.T) {
Version: "1:1.12.8-26.el8",
},
},
{
name: "rpm with rpmmod",
in: pkg.Package{
PURL: "pkg:rpm/redhat/httpd@2.4.37-51?arch=x86_64&distro=rhel-8.7&rpmmod=httpd:2.4",
},
expected: pkg.Package{
PURL: "pkg:rpm/redhat/httpd@2.4.37-51?arch=x86_64&distro=rhel-8.7&rpmmod=httpd:2.4",
Type: pkg.RpmPkg,
Name: "httpd",
Version: "2.4.37-51",
Metadata: pkg.RpmDBEntry{
ModularityLabel: strRef("httpd:2.4"),
},
},
},
{
name: "bad cpe",
in: pkg.Package{
@ -106,6 +121,20 @@ func Test_Backfill(t *testing.T) {
Metadata: pkg.JavaArchive{},
},
},
{
name: "target-sw from CPE",
in: pkg.Package{
CPEs: []cpe.CPE{
cpe.Must("cpe:2.3:a:amazon:opensearch:*:*:*:*:*:ruby:*:*", ""),
},
},
expected: pkg.Package{
CPEs: []cpe.CPE{
cpe.Must("cpe:2.3:a:amazon:opensearch:*:*:*:*:*:ruby:*:*", ""),
},
Type: pkg.GemPkg,
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
@ -171,3 +200,7 @@ func Test_nameFromPurl(t *testing.T) {
})
}
}
func strRef(s string) *string {
return &s
}

View File

@ -40,8 +40,11 @@ func EncodeComponent(p pkg.Package, supplier string, locationSorter func(a, b fi
}
componentType := cyclonedx.ComponentTypeLibrary
if p.Type == pkg.BinaryPkg {
switch p.Type {
case pkg.BinaryPkg:
componentType = cyclonedx.ComponentTypeApplication
case pkg.ModelPkg:
componentType = cyclonedx.ComponentTypeMachineLearningModel
}
return cyclonedx.Component{

View File

@ -62,7 +62,7 @@ func collectPackages(component *cyclonedx.Component, s *sbom.SBOM, idMap map[str
switch component.Type {
case cyclonedx.ComponentTypeOS:
case cyclonedx.ComponentTypeContainer:
case cyclonedx.ComponentTypeApplication, cyclonedx.ComponentTypeFramework, cyclonedx.ComponentTypeLibrary:
case cyclonedx.ComponentTypeApplication, cyclonedx.ComponentTypeFramework, cyclonedx.ComponentTypeLibrary, cyclonedx.ComponentTypeMachineLearningModel:
p := decodeComponent(component)
idMap[component.BOMRef] = p
if component.BOMRef != "" {

View File

@ -40,8 +40,10 @@ func Test_OriginatorSupplier(t *testing.T) {
pkg.PhpComposerInstalledEntry{},
pkg.PhpPearEntry{},
pkg.PhpPeclEntry{},
pkg.PnpmLockEntry{},
pkg.PortageEntry{},
pkg.PythonPipfileLockEntry{},
pkg.PythonPdmLockEntry{},
pkg.PythonRequirementsEntry{},
pkg.PythonPoetryLockEntry{},
pkg.PythonUvLockEntry{},
@ -53,6 +55,7 @@ func Test_OriginatorSupplier(t *testing.T) {
pkg.OpamPackage{},
pkg.YarnLockEntry{},
pkg.TerraformLockProviderEntry{},
pkg.GGUFFileHeader{},
)
tests := []struct {
name string
@ -342,6 +345,25 @@ func Test_OriginatorSupplier(t *testing.T) {
originator: "Person: auth (auth@auth.gov)",
supplier: "Person: auth (auth@auth.gov)",
},
{
name: "from python PDM lock",
input: pkg.Package{
Metadata: pkg.PythonPdmLockEntry{
Files: []pkg.PythonPdmFileEntry{
{
URL: "https://pypi.org/project/testpkg/1.2.3/file1.tar.gz",
Digest: pkg.PythonFileDigest{
Algorithm: "sha256",
Value: "3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651",
},
},
},
Summary: "A test package",
},
},
originator: "",
supplier: "",
},
{
name: "from r -- maintainer > author",
input: pkg.Package{

View File

@ -82,6 +82,8 @@ func SourceInfo(p pkg.Package) string {
answer = "acquired package info from Homebrew formula"
case pkg.TerraformPkg:
answer = "acquired package info from Terraform dependency lock file"
case pkg.ModelPkg:
answer = "acquired package info from AI artifact (e.g. GGUF File"
default:
answer = "acquired package info from the following paths"
}

View File

@ -351,6 +351,14 @@ func Test_SourceInfo(t *testing.T) {
"acquired package info from Terraform dependency lock file",
},
},
{
input: pkg.Package{
Type: pkg.ModelPkg,
},
expected: []string{
"",
},
},
}
var pkgTypes []pkg.Type
for _, test := range tests {

View File

@ -35,14 +35,23 @@ func (d *Document) UnmarshalJSON(data []byte) error {
return nil
}
// Descriptor describes what created the document as well as surrounding metadata
// Descriptor identifies the tool that generated this SBOM document, including its name, version, and configuration used during catalog generation.
type Descriptor struct {
Name string `json:"name"`
Version string `json:"version"`
// Name is the name of the tool that generated this SBOM (e.g., "syft").
Name string `json:"name"`
// Version is the version of the tool that generated this SBOM.
Version string `json:"version"`
// Configuration contains the tool configuration used during SBOM generation.
Configuration interface{} `json:"configuration,omitempty"`
}
// Schema specifies the JSON schema version and URL reference that defines the structure and validation rules for this document format.
type Schema struct {
// Version is the JSON schema version for this document format.
Version string `json:"version"`
URL string `json:"url"`
// URL is the URL to the JSON schema definition document.
URL string `json:"url"`
}

View File

@ -10,25 +10,55 @@ import (
"github.com/anchore/syft/syft/license"
)
// File represents a file discovered during cataloging with its metadata, content digests, licenses, and relationships to packages.
type File struct {
ID string `json:"id"`
Location file.Coordinates `json:"location"`
Metadata *FileMetadataEntry `json:"metadata,omitempty"`
Contents string `json:"contents,omitempty"`
Digests []file.Digest `json:"digests,omitempty"`
Licenses []FileLicense `json:"licenses,omitempty"`
Executable *file.Executable `json:"executable,omitempty"`
Unknowns []string `json:"unknowns,omitempty"`
// ID is a unique identifier for this file within the SBOM.
ID string `json:"id"`
// Location is the file path and layer information where this file was found.
Location file.Coordinates `json:"location"`
// Metadata contains filesystem metadata such as permissions, ownership, and file type.
Metadata *FileMetadataEntry `json:"metadata,omitempty"`
// Contents is the file contents for small files.
Contents string `json:"contents,omitempty"`
// Digests contains cryptographic hashes of the file contents.
Digests []file.Digest `json:"digests,omitempty"`
// Licenses contains license information discovered within this file.
Licenses []FileLicense `json:"licenses,omitempty"`
// Executable contains executable metadata if this file is a binary.
Executable *file.Executable `json:"executable,omitempty"`
// Unknowns contains unknown fields for forward compatibility.
Unknowns []string `json:"unknowns,omitempty"`
}
// FileMetadataEntry contains filesystem-level metadata attributes such as permissions, ownership, type, and size for a cataloged file.
type FileMetadataEntry struct {
Mode int `json:"mode"`
Type string `json:"type"`
// Mode is the Unix file permission mode in octal format.
Mode int `json:"mode"`
// Type is the file type (e.g., "RegularFile", "Directory", "SymbolicLink").
Type string `json:"type"`
// LinkDestination is the target path for symbolic links.
LinkDestination string `json:"linkDestination,omitempty"`
UserID int `json:"userID"`
GroupID int `json:"groupID"`
MIMEType string `json:"mimeType"`
Size int64 `json:"size"`
// UserID is the file owner user ID.
UserID int `json:"userID"`
// GroupID is the file owner group ID.
GroupID int `json:"groupID"`
// MIMEType is the MIME type of the file contents.
MIMEType string `json:"mimeType"`
// Size is the file size in bytes.
Size int64 `json:"size"`
}
type auxFileMetadataEntry FileMetadataEntry
@ -82,17 +112,31 @@ type sbomImportLegacyFileMetadataEntry struct {
Size int64 `json:"Size"`
}
// FileLicense represents license information discovered within a file's contents or metadata, including the matched license text and SPDX expression.
type FileLicense struct {
Value string `json:"value"`
SPDXExpression string `json:"spdxExpression"`
Type license.Type `json:"type"`
Evidence *FileLicenseEvidence `json:"evidence,omitempty"`
// Value is the raw license identifier or text as found in the file.
Value string `json:"value"`
// SPDXExpression is the parsed SPDX license expression.
SPDXExpression string `json:"spdxExpression"`
// Type is the license type classification (e.g., declared, concluded, discovered).
Type license.Type `json:"type"`
// Evidence contains supporting evidence for this license detection.
Evidence *FileLicenseEvidence `json:"evidence,omitempty"`
}
// FileLicenseEvidence contains supporting evidence for a license detection in a file, including the byte offset, extent, and confidence level.
type FileLicenseEvidence struct {
// Confidence is the confidence score for this license detection (0-100).
Confidence int `json:"confidence"`
Offset int `json:"offset"`
Extent int `json:"extent"`
// Offset is the byte offset where the license text starts in the file.
Offset int `json:"offset"`
// Extent is the length of the license text in bytes.
Extent int `json:"extent"`
}
type intOrStringFileType struct {

View File

@ -4,28 +4,67 @@ import (
"encoding/json"
)
// IDLikes represents a list of distribution IDs that this Linux distribution is similar to or derived from, as defined in os-release ID_LIKE field.
type IDLikes []string
// LinuxRelease contains Linux distribution identification and version information extracted from /etc/os-release or similar system files.
type LinuxRelease struct {
PrettyName string `json:"prettyName,omitempty"`
Name string `json:"name,omitempty"`
ID string `json:"id,omitempty"`
IDLike IDLikes `json:"idLike,omitempty"`
Version string `json:"version,omitempty"`
VersionID string `json:"versionID,omitempty"`
VersionCodename string `json:"versionCodename,omitempty"`
BuildID string `json:"buildID,omitempty"`
ImageID string `json:"imageID,omitempty"`
ImageVersion string `json:"imageVersion,omitempty"`
Variant string `json:"variant,omitempty"`
VariantID string `json:"variantID,omitempty"`
HomeURL string `json:"homeURL,omitempty"`
SupportURL string `json:"supportURL,omitempty"`
BugReportURL string `json:"bugReportURL,omitempty"`
PrivacyPolicyURL string `json:"privacyPolicyURL,omitempty"`
CPEName string `json:"cpeName,omitempty"`
SupportEnd string `json:"supportEnd,omitempty"`
ExtendedSupport bool `json:"extendedSupport,omitempty"`
// PrettyName is a human-readable operating system name with version.
PrettyName string `json:"prettyName,omitempty"`
// Name is the operating system name without version information.
Name string `json:"name,omitempty"`
// ID is the lower-case operating system identifier (e.g., "ubuntu", "rhel").
ID string `json:"id,omitempty"`
// IDLike is a list of operating system IDs this distribution is similar to or derived from.
IDLike IDLikes `json:"idLike,omitempty"`
// Version is the operating system version including codename if available.
Version string `json:"version,omitempty"`
// VersionID is the operating system version number or identifier.
VersionID string `json:"versionID,omitempty"`
// VersionCodename is the operating system release codename (e.g., "jammy", "bullseye").
VersionCodename string `json:"versionCodename,omitempty"`
// BuildID is a build identifier for the operating system.
BuildID string `json:"buildID,omitempty"`
// ImageID is an identifier for container or cloud images.
ImageID string `json:"imageID,omitempty"`
// ImageVersion is the version for container or cloud images.
ImageVersion string `json:"imageVersion,omitempty"`
// Variant is the operating system variant name (e.g., "Server", "Workstation").
Variant string `json:"variant,omitempty"`
// VariantID is the lower-case operating system variant identifier.
VariantID string `json:"variantID,omitempty"`
// HomeURL is the homepage URL for the operating system.
HomeURL string `json:"homeURL,omitempty"`
// SupportURL is the support or help URL for the operating system.
SupportURL string `json:"supportURL,omitempty"`
// BugReportURL is the bug reporting URL for the operating system.
BugReportURL string `json:"bugReportURL,omitempty"`
// PrivacyPolicyURL is the privacy policy URL for the operating system.
PrivacyPolicyURL string `json:"privacyPolicyURL,omitempty"`
// CPEName is the Common Platform Enumeration name for the operating system.
CPEName string `json:"cpeName,omitempty"`
// SupportEnd is the end of support date or version identifier.
SupportEnd string `json:"supportEnd,omitempty"`
// ExtendedSupport indicates whether extended security or support is available.
ExtendedSupport bool `json:"extendedSupport,omitempty"`
}
func (s *IDLikes) UnmarshalJSON(data []byte) error {
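For reference, a few representative /etc/os-release entries and the LinuxRelease fields they populate (a hedged mapping based on the field documentation above; the values are examples):

    ID=ubuntu                          -> ID: "ubuntu"
    ID_LIKE=debian                     -> IDLike: ["debian"]
    VERSION_ID="22.04"                 -> VersionID: "22.04"
    VERSION_CODENAME=jammy             -> VersionCodename: "jammy"
    PRETTY_NAME="Ubuntu 22.04.4 LTS"   -> PrettyName: "Ubuntu 22.04.4 LTS"
    SUPPORT_END=2027-04-01             -> SupportEnd: "2027-04-01"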

View File

@ -36,22 +36,40 @@ type PackageBasicData struct {
PURL string `json:"purl"`
}
// cpes is a collection of Common Platform Enumeration identifiers for a package.
type cpes []CPE
// CPE represents a Common Platform Enumeration identifier used for matching packages to known vulnerabilities in security databases.
type CPE struct {
Value string `json:"cpe"`
// Value is the CPE string identifier.
Value string `json:"cpe"`
// Source is the source where this CPE was obtained or generated from.
Source string `json:"source,omitempty"`
}
// licenses is a collection of license findings associated with a package.
type licenses []License
// License represents software license information discovered for a package, including SPDX expressions and supporting evidence locations.
type License struct {
Value string `json:"value"`
SPDXExpression string `json:"spdxExpression"`
Type license.Type `json:"type"`
URLs []string `json:"urls"`
Locations []file.Location `json:"locations"`
Contents string `json:"contents,omitempty"`
// Value is the raw license identifier or expression as found.
Value string `json:"value"`
// SPDXExpression is the parsed SPDX license expression.
SPDXExpression string `json:"spdxExpression"`
// Type is the license type classification (e.g., declared, concluded, discovered).
Type license.Type `json:"type"`
// URLs are URLs where license text or information can be found.
URLs []string `json:"urls"`
// Locations are file locations where this license was discovered.
Locations []file.Location `json:"locations"`
// Contents is the full license text content.
Contents string `json:"contents,omitempty"`
}
func newModelLicensesFromValues(licenses []string) (ml []License) {

View File

@ -1,8 +1,16 @@
package model
// Relationship represents a directed relationship between two artifacts in the SBOM, such as package-contains-file or package-depends-on-package.
type Relationship struct {
Parent string `json:"parent"`
Child string `json:"child"`
Type string `json:"type"`
// Parent is the ID of the parent artifact in this relationship.
Parent string `json:"parent"`
// Child is the ID of the child artifact in this relationship.
Child string `json:"child"`
// Type is the relationship type (e.g., "contains", "dependency-of", "ancestor-of").
Type string `json:"type"`
// Metadata contains additional relationship-specific metadata.
Metadata interface{} `json:"metadata,omitempty"`
}

View File

@ -11,18 +11,25 @@ import (
"github.com/anchore/syft/syft/source"
)
// Source object represents the thing that was cataloged
// Note: syft currently makes no claims or runs any logic to determine the Supplier field below
// Instead, the Supplier can be determined by the user of syft and passed as a config or flag to help fulfill
// the NTIA minimum elements. For more information see the NTIA framing document below
// https://www.ntia.gov/files/ntia/publications/framingsbom_20191112.pdf
// Source represents the artifact that was analyzed to generate this SBOM, such as a container image, directory, or file archive.
// The Supplier field can be provided by users to fulfill NTIA minimum elements requirements.
type Source struct {
ID string `json:"id"`
Name string `json:"name"`
Version string `json:"version"`
Supplier string `json:"supplier,omitempty"`
Type string `json:"type"`
// ID is a unique identifier for the analyzed source artifact.
ID string `json:"id"`
// Name is the name of the analyzed artifact (e.g., image name, directory path).
Name string `json:"name"`
// Version is the version of the analyzed artifact (e.g., image tag).
Version string `json:"version"`
// Supplier is supplier information, which can be user-provided for NTIA minimum elements compliance.
Supplier string `json:"supplier,omitempty"`
// Type is the source type (e.g., "image", "directory", "file").
Type string `json:"type"`
// Metadata contains additional source-specific metadata.
Metadata interface{} `json:"metadata"`
}

View File

@ -0,0 +1,37 @@
package syft
import (
"testing"
"github.com/anchore/stereoscope"
"github.com/anchore/syft/syft/source/sourceproviders"
)
func TestGetProviders_DefaultImagePullSource(t *testing.T) {
userInput := ""
cfg := &GetSourceConfig{DefaultImagePullSource: stereoscope.RegistryTag}
allSourceProviders := sourceproviders.All(userInput, cfg.SourceProviderConfig)
providers, err := cfg.getProviders(userInput)
if err != nil {
t.Errorf("Expected no error for DefaultImagePullSource parameter, got: %v", err)
}
if len(providers) != len(allSourceProviders) {
t.Errorf("Expected %d providers, got %d", len(allSourceProviders), len(providers))
}
}
func TestGetProviders_Sources(t *testing.T) {
userInput := ""
cfg := &GetSourceConfig{Sources: []string{stereoscope.RegistryTag}}
providers, err := cfg.getProviders(userInput)
if err != nil {
t.Errorf("Expected no error for Sources parameter, got: %v", err)
}
if len(providers) != 1 {
t.Errorf("Expected 1 providers, got %d", len(providers))
}
}

View File

@ -19,16 +19,16 @@ type Directory struct {
indexer *directoryIndexer
}
func NewFromDirectory(root string, base string, pathFilters ...PathIndexVisitor) (*Directory, error) {
r, err := newFromDirectoryWithoutIndex(root, base, pathFilters...)
func NewFromDirectory(root, base string, pathFilters ...PathIndexVisitor) (*Directory, error) {
resolver, err := newFromDirectoryWithoutIndex(root, base, pathFilters...)
if err != nil {
return nil, err
}
return r, r.buildIndex()
return resolver, resolver.buildIndex()
}
func newFromDirectoryWithoutIndex(root string, base string, pathFilters ...PathIndexVisitor) (*Directory, error) {
func newFromDirectoryWithoutIndex(root, base string, pathFilters ...PathIndexVisitor) (*Directory, error) {
chroot, err := NewChrootContextFromCWD(root, base)
if err != nil {
return nil, fmt.Errorf("unable to interpret chroot context: %w", err)
@ -66,6 +66,6 @@ func (r *Directory) buildIndex() error {
}
// Stringer to represent a directory path data source
func (r Directory) String() string {
func (r *Directory) String() string {
return fmt.Sprintf("dir:%s", r.path)
}

View File

@ -322,7 +322,7 @@ func (r directoryIndexer) addDirectoryToIndex(p string, info os.FileInfo) error
return err
}
metadata := file.NewMetadataFromPath(p, info)
metadata := NewMetadataFromPath(p, info)
r.index.Add(*ref, metadata)
return nil
@ -334,7 +334,7 @@ func (r directoryIndexer) addFileToIndex(p string, info os.FileInfo) error {
return err
}
metadata := file.NewMetadataFromPath(p, info)
metadata := NewMetadataFromPath(p, info)
r.index.Add(*ref, metadata)
return nil
@ -416,7 +416,7 @@ func (r directoryIndexer) addSymlinkToIndex(p string, info os.FileInfo) (string,
targetAbsPath = filepath.Clean(filepath.Join(path.Dir(p), linkTarget))
}
metadata := file.NewMetadataFromPath(p, info)
metadata := NewMetadataFromPath(p, info)
metadata.LinkDestination = linkTarget
r.index.Add(*ref, metadata)

View File

@ -17,17 +17,31 @@ type File struct {
indexer *fileIndexer
}
// parent should be the symlink free absolute path to the parent directory
// NewFromFile is a single-file analyser
// path is the filepath of the file we're creating content access for
func NewFromFile(parent, path string, pathFilters ...PathIndexVisitor) (*File, error) {
chroot, err := NewChrootContextFromCWD(parent, parent)
func NewFromFile(path string, pathFilters ...PathIndexVisitor) (*File, error) {
resolver, err := newFromFileWithoutIndex(path, pathFilters...)
if err != nil {
return nil, err
}
return resolver, resolver.buildIndex()
}
func newFromFileWithoutIndex(path string, pathFilters ...PathIndexVisitor) (*File, error) {
absParentDir, err := absoluteSymlinkFreePathToParent(path)
if err != nil {
return nil, err
}
chroot, err := NewChrootContextFromCWD(absParentDir, absParentDir)
if err != nil {
return nil, fmt.Errorf("unable to interpret chroot context: %w", err)
}
cleanBase := chroot.Base()
file := &File{
return &File{
path: path,
FiletreeResolver: FiletreeResolver{
Chroot: *chroot,
@ -36,9 +50,7 @@ func NewFromFile(parent, path string, pathFilters ...PathIndexVisitor) (*File, e
Opener: nativeOSFileOpener,
},
indexer: newFileIndexer(path, cleanBase, pathFilters...),
}
return file, file.buildIndex()
}, nil
}
func (r *File) buildIndex() error {
@ -58,6 +70,6 @@ func (r *File) buildIndex() error {
}
// Stringer to represent a file path data source
func (r File) String() string {
func (r *File) String() string {
return fmt.Sprintf("file:%s", r.path)
}

View File

@ -173,7 +173,7 @@ func (r *fileIndexer) addDirectoryToIndex(path string, info os.FileInfo) error {
return err
}
metadata := file.NewMetadataFromPath(path, info)
metadata := NewMetadataFromPath(path, info)
r.index.Add(*ref, metadata)
return nil
@ -185,7 +185,7 @@ func (r *fileIndexer) addFileToIndex(path string, info os.FileInfo) error {
return err
}
metadata := file.NewMetadataFromPath(path, info)
metadata := NewMetadataFromPath(path, info)
r.index.Add(*ref, metadata)
return nil

View File

@ -1384,9 +1384,10 @@ func TestFileResolver_FilesByPath(t *testing.T) {
require.NoError(t, err)
require.NotNil(t, parentPath)
resolver, err := NewFromFile(parentPath, tt.filePath)
resolver, err := NewFromFile(tt.filePath)
require.NoError(t, err)
require.NotNil(t, resolver)
assert.Equal(t, resolver.Chroot.Base(), parentPath)
refs, err := resolver.FilesByPath(tt.fileByPathInput)
require.NoError(t, err)
@ -1431,8 +1432,11 @@ func TestFileResolver_MultipleFilesByPath(t *testing.T) {
require.NoError(t, err)
require.NotNil(t, parentPath)
resolver, err := NewFromFile(parentPath, filePath)
resolver, err := NewFromFile(filePath)
assert.NoError(t, err)
require.NotNil(t, resolver)
assert.Equal(t, resolver.Chroot.Base(), parentPath)
refs, err := resolver.FilesByPath(tt.input...)
assert.NoError(t, err)
@ -1449,8 +1453,11 @@ func TestFileResolver_FilesByGlob(t *testing.T) {
require.NoError(t, err)
require.NotNil(t, parentPath)
resolver, err := NewFromFile(parentPath, filePath)
resolver, err := NewFromFile(filePath)
assert.NoError(t, err)
require.NotNil(t, resolver)
assert.Equal(t, resolver.Chroot.Base(), parentPath)
refs, err := resolver.FilesByGlob("**/*.txt")
assert.NoError(t, err)
@ -1476,8 +1483,11 @@ func Test_fileResolver_FilesByMIMEType(t *testing.T) {
require.NoError(t, err)
require.NotNil(t, parentPath)
resolver, err := NewFromFile(parentPath, filePath)
resolver, err := NewFromFile(filePath)
assert.NoError(t, err)
require.NotNil(t, resolver)
assert.Equal(t, resolver.Chroot.Base(), parentPath)
locations, err := resolver.FilesByMIMEType(test.mimeType)
assert.NoError(t, err)
assert.Equal(t, test.expectedPaths.Size(), len(locations))
@ -1497,10 +1507,12 @@ func Test_fileResolver_FileContentsByLocation(t *testing.T) {
require.NoError(t, err)
require.NotNil(t, parentPath)
r, err := NewFromFile(parentPath, filePath)
resolver, err := NewFromFile(filePath)
require.NoError(t, err)
require.NotNil(t, resolver)
assert.Equal(t, resolver.Chroot.Base(), parentPath)
exists, existingPath, err := r.Tree.File(stereoscopeFile.Path(filepath.Join(cwd, "test-fixtures/image-simple/file-1.txt")))
exists, existingPath, err := resolver.Tree.File(stereoscopeFile.Path(filepath.Join(cwd, "test-fixtures/image-simple/file-1.txt")))
require.True(t, exists)
require.NoError(t, err)
require.True(t, existingPath.HasReference())
@ -1525,7 +1537,7 @@ func Test_fileResolver_FileContentsByLocation(t *testing.T) {
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
actual, err := r.FileContentsByLocation(test.location)
actual, err := resolver.FileContentsByLocation(test.location)
if test.err {
require.Error(t, err)
return
@ -1546,8 +1558,11 @@ func TestFileResolver_AllLocations_errorOnDirRequest(t *testing.T) {
parentPath, err := absoluteSymlinkFreePathToParent(filePath)
require.NoError(t, err)
require.NotNil(t, parentPath)
resolver, err := NewFromFile(parentPath, filePath)
resolver, err := NewFromFile(filePath)
require.NoError(t, err)
require.NotNil(t, resolver)
assert.Equal(t, resolver.Chroot.Base(), parentPath)
var dirLoc *file.Location
ctx, cancel := context.WithCancel(context.Background())
@ -1575,8 +1590,11 @@ func TestFileResolver_AllLocations(t *testing.T) {
parentPath, err := absoluteSymlinkFreePathToParent(filePath)
require.NoError(t, err)
require.NotNil(t, parentPath)
resolver, err := NewFromFile(parentPath, filePath)
resolver, err := NewFromFile(filePath)
require.NoError(t, err)
require.NotNil(t, resolver)
assert.Equal(t, resolver.Chroot.Base(), parentPath)
paths := strset.New()
for loc := range resolver.AllLocations(context.Background()) {
@ -1600,8 +1618,11 @@ func Test_FileResolver_AllLocationsDoesNotLeakGoRoutine(t *testing.T) {
parentPath, err := absoluteSymlinkFreePathToParent(filePath)
require.NoError(t, err)
require.NotNil(t, parentPath)
resolver, err := NewFromFile(parentPath, filePath)
resolver, err := NewFromFile(filePath)
require.NoError(t, err)
require.NotNil(t, resolver)
assert.Equal(t, resolver.Chroot.Base(), parentPath)
require.NoError(t, err)
ctx, cancel := context.WithCancel(context.Background())

View File

@ -0,0 +1,20 @@
//go:build !windows
package fileresolver
import (
"os"
"syscall"
)
// getXid returns the file owner UID and GID on unix systems
func getXid(info os.FileInfo) (uid, gid int) {
uid = -1
gid = -1
if stat, ok := info.Sys().(*syscall.Stat_t); ok {
uid = int(stat.Uid)
gid = int(stat.Gid)
}
return uid, gid
}
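A brief usage sketch, assuming the fileresolver package above (ownerOf is a hypothetical helper; NewMetadataFromPath later in this diff follows the same os.Lstat-then-getXid pattern):
package fileresolver

import "os"

// ownerOf reports the owning UID/GID of a path, or -1/-1 where unavailable.
func ownerOf(path string) (uid, gid int, err error) {
	info, err := os.Lstat(path)
	if err != nil {
		return -1, -1, err
	}
	uid, gid = getXid(info)
	return uid, gid, nil
}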

View File

@ -0,0 +1,12 @@
//go:build windows
package fileresolver
import (
"os"
)
// getXid is a placeholder on windows, where file UID/GID are not available
func getXid(info os.FileInfo) (uid, gid int) {
return -1, -1
}

View File

@ -0,0 +1,44 @@
package fileresolver
import (
"os"
"github.com/anchore/stereoscope/pkg/file"
"github.com/anchore/syft/internal"
"github.com/anchore/syft/syft/internal/windows"
)
func NewMetadataFromPath(path string, info os.FileInfo) file.Metadata {
var mimeType string
uid, gid := getXid(info)
ty := file.TypeFromMode(info.Mode())
if ty == file.TypeRegular {
usablePath := path
// denormalize the path back to windows so we can open the file
if windows.HostRunningOnWindows() {
usablePath = windows.FromPosix(usablePath)
}
f, err := os.Open(usablePath)
if err != nil {
// TODO: it may be that the file is inaccessible, however, this is not an error or a warning. In the future we need to track these as known-unknowns
f = nil
} else {
defer internal.CloseAndLogError(f, usablePath)
}
mimeType = file.MIMEType(f)
}
return file.Metadata{
FileInfo: info,
Path: path,
Type: ty,
// UID/GID are not available on all platforms (see getXid)
UserID: uid,
GroupID: gid,
MIMEType: mimeType,
}
}

View File

@ -0,0 +1,51 @@
package fileresolver
import (
"os"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/anchore/stereoscope/pkg/file"
)
func TestFileMetadataFromPath(t *testing.T) {
tests := []struct {
path string
expectedType file.Type
expectedMIMEType string
}{
{
path: "test-fixtures/symlinks-simple/readme",
expectedType: file.TypeRegular,
expectedMIMEType: "text/plain",
},
{
path: "test-fixtures/symlinks-simple/link_to_new_readme",
expectedType: file.TypeSymLink,
expectedMIMEType: "",
},
{
path: "test-fixtures/symlinks-simple/link_to_link_to_new_readme",
expectedType: file.TypeSymLink,
expectedMIMEType: "",
},
{
path: "test-fixtures/symlinks-simple",
expectedType: file.TypeDirectory,
expectedMIMEType: "",
},
}
for _, test := range tests {
t.Run(test.path, func(t *testing.T) {
info, err := os.Lstat(test.path)
require.NoError(t, err)
actual := NewMetadataFromPath(test.path, info)
assert.Equal(t, test.expectedMIMEType, actual.MIMEType, "unexpected MIME type for %s", test.path)
assert.Equal(t, test.expectedType, actual.Type, "unexpected type for %s", test.path)
})
}
}

View File

@ -58,6 +58,7 @@ type AlpmDBEntry struct {
Depends []string `mapstructure:"depends" json:"depends,omitempty"`
}
// AlpmFileRecord represents a single file entry within an Arch Linux package with its associated metadata tracked by pacman.
type AlpmFileRecord struct {
// Path is the file path relative to the filesystem root
Path string `mapstructure:"path" json:"path,omitempty"`

syft/pkg/cataloger/.gitignore (vendored, new file)
View File

@ -0,0 +1,2 @@
# these are generated by pkgtest helpers, no need to check them in
**/test-fixtures/test-observations.json

View File

@ -0,0 +1,16 @@
/*
Package ai provides concrete Cataloger implementations for AI artifacts and machine learning models,
including support for GGUF (GPT-Generated Unified Format) model files.
*/
package ai
import (
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
)
// NewGGUFCataloger returns a new cataloger instance for GGUF model files.
func NewGGUFCataloger() pkg.Cataloger {
return generic.NewCataloger("gguf-cataloger").
WithParserByGlobs(parseGGUFModel, "**/*.gguf")
}

View File

@ -0,0 +1,140 @@
package ai
import (
"os"
"path/filepath"
"testing"
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest"
)
func TestGGUFCataloger_Globs(t *testing.T) {
tests := []struct {
name string
fixture string
expected []string
}{
{
name: "obtain gguf files",
fixture: "test-fixtures/glob-paths",
expected: []string{
"models/model.gguf",
},
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
pkgtest.NewCatalogTester().
FromDirectory(t, test.fixture).
ExpectsResolverContentQueries(test.expected).
TestCataloger(t, NewGGUFCataloger())
})
}
}
func TestGGUFCataloger(t *testing.T) {
tests := []struct {
name string
setup func(t *testing.T) string
expectedPackages []pkg.Package
expectedRelationships []artifact.Relationship
}{
{
name: "catalog single GGUF file",
setup: func(t *testing.T) string {
dir := t.TempDir()
data := newTestGGUFBuilder().
withVersion(3).
withStringKV("general.architecture", "llama").
withStringKV("general.name", "llama3-8b").
withStringKV("general.version", "3.0").
withStringKV("general.license", "Apache-2.0").
withStringKV("general.quantization", "Q4_K_M").
withUint64KV("general.parameter_count", 8030000000).
withStringKV("general.some_random_kv", "foobar").
build()
path := filepath.Join(dir, "llama3-8b.gguf")
os.WriteFile(path, data, 0644)
return dir
},
expectedPackages: []pkg.Package{
{
Name: "llama3-8b",
Version: "3.0",
Type: pkg.ModelPkg,
Licenses: pkg.NewLicenseSet(
pkg.NewLicenseFromFields("Apache-2.0", "", nil),
),
Metadata: pkg.GGUFFileHeader{
Architecture: "llama",
Quantization: "Unknown",
Parameters: 0,
GGUFVersion: 3,
TensorCount: 0,
MetadataKeyValuesHash: "6e3d368066455ce4",
RemainingKeyValues: map[string]interface{}{
"general.some_random_kv": "foobar",
},
},
},
},
expectedRelationships: nil,
},
{
name: "catalog GGUF file with minimal metadata",
setup: func(t *testing.T) string {
dir := t.TempDir()
data := newTestGGUFBuilder().
withVersion(3).
withStringKV("general.architecture", "gpt2").
withStringKV("general.name", "gpt2-small").
withStringKV("gpt2.context_length", "1024").
withUint32KV("gpt2.embedding_length", 768).
build()
path := filepath.Join(dir, "gpt2-small.gguf")
os.WriteFile(path, data, 0644)
return dir
},
expectedPackages: []pkg.Package{
{
Name: "gpt2-small",
Version: "",
Type: pkg.ModelPkg,
Licenses: pkg.NewLicenseSet(),
Metadata: pkg.GGUFFileHeader{
Architecture: "gpt2",
Quantization: "Unknown",
Parameters: 0,
GGUFVersion: 3,
TensorCount: 0,
MetadataKeyValuesHash: "9dc6f23591062a27",
RemainingKeyValues: map[string]interface{}{
"gpt2.context_length": "1024",
"gpt2.embedding_length": uint32(768),
},
},
},
},
expectedRelationships: nil,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
fixtureDir := tt.setup(t)
// Use pkgtest to catalog and compare
pkgtest.NewCatalogTester().
FromDirectory(t, fixtureDir).
Expects(tt.expectedPackages, tt.expectedRelationships).
IgnoreLocationLayer().
IgnorePackageFields("FoundBy", "Locations").
TestCataloger(t, NewGGUFCataloger())
})
}
}

View File

@ -0,0 +1,22 @@
package ai
import (
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/pkg"
)
func newGGUFPackage(metadata *pkg.GGUFFileHeader, modelName, version, license string, locations ...file.Location) pkg.Package {
p := pkg.Package{
Name: modelName,
Version: version,
Locations: file.NewLocationSet(locations...),
Type: pkg.ModelPkg,
Licenses: pkg.NewLicenseSet(pkg.NewLicensesFromValues(license)...),
Metadata: *metadata,
// NOTE: PURL is intentionally not set as the package-url spec
// has not yet finalized support for ML model packages
}
p.SetID()
return p
}

View File

@ -0,0 +1,121 @@
package ai
import (
"testing"
"github.com/stretchr/testify/require"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest"
)
func TestNewGGUFPackage(t *testing.T) {
tests := []struct {
name string
metadata *pkg.GGUFFileHeader
input struct {
modelName string
version string
license string
locations []file.Location
}
expected pkg.Package
}{
{
name: "complete GGUF package with all fields",
input: struct {
modelName string
version string
license string
locations []file.Location
}{
modelName: "llama3-8b",
version: "3.0",
license: "Apache-2.0",
locations: []file.Location{file.NewLocation("/models/llama3-8b.gguf")},
},
metadata: &pkg.GGUFFileHeader{
Architecture: "llama",
Quantization: "Q4_K_M",
Parameters: 8030000000,
GGUFVersion: 3,
TensorCount: 291,
RemainingKeyValues: map[string]any{
"general.random_kv": "foobar",
},
},
expected: pkg.Package{
Name: "llama3-8b",
Version: "3.0",
Type: pkg.ModelPkg,
Licenses: pkg.NewLicenseSet(
pkg.NewLicenseFromFields("Apache-2.0", "", nil),
),
Metadata: pkg.GGUFFileHeader{
Architecture: "llama",
Quantization: "Q4_K_M",
Parameters: 8030000000,
GGUFVersion: 3,
TensorCount: 291,
RemainingKeyValues: map[string]any{
"general.random_kv": "foobar",
},
},
Locations: file.NewLocationSet(file.NewLocation("/models/llama3-8b.gguf")),
},
},
{
name: "minimal GGUF package",
input: struct {
modelName string
version string
license string
locations []file.Location
}{
modelName: "gpt2-small",
version: "1.0",
license: "MIT",
locations: []file.Location{file.NewLocation("/models/simple.gguf")},
},
metadata: &pkg.GGUFFileHeader{
Architecture: "gpt2",
GGUFVersion: 3,
TensorCount: 50,
},
expected: pkg.Package{
Name: "gpt2-small",
Version: "1.0",
Type: pkg.ModelPkg,
Licenses: pkg.NewLicenseSet(
pkg.NewLicenseFromFields("MIT", "", nil),
),
Metadata: pkg.GGUFFileHeader{
Architecture: "gpt2",
GGUFVersion: 3,
TensorCount: 50,
},
Locations: file.NewLocationSet(file.NewLocation("/models/simple.gguf")),
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
actual := newGGUFPackage(
tt.metadata,
tt.input.modelName,
tt.input.version,
tt.input.license,
tt.input.locations...,
)
// Verify metadata type
_, ok := actual.Metadata.(pkg.GGUFFileHeader)
require.True(t, ok, "metadata should be GGUFFileHeader")
// Use AssertPackagesEqual for comprehensive comparison
pkgtest.AssertPackagesEqual(t, tt.expected, actual)
})
}
}

View File

@ -0,0 +1,63 @@
package ai
import (
"encoding/binary"
"fmt"
"io"
gguf_parser "github.com/gpustack/gguf-parser-go"
)
// GGUF file format constants
const (
ggufMagicNumber = 0x46554747 // "GGUF" in little-endian
maxHeaderSize = 50 * 1024 * 1024 // 50MB for large tokenizer vocabularies
)
// copyHeader copies the GGUF header from the reader to the writer.
// It validates the magic number first, then copies the rest of the data.
// The reader should be wrapped with io.LimitedReader to prevent OOM issues.
func copyHeader(w io.Writer, r io.Reader) error {
// Read initial chunk to validate magic number
// GGUF format: magic(4) + version(4) + tensor_count(8) + metadata_kv_count(8) + metadata_kvs + tensors_info
initialBuf := make([]byte, 24) // Enough for magic, version, tensor count, and kv count
if _, err := io.ReadFull(r, initialBuf); err != nil {
return fmt.Errorf("failed to read GGUF header prefix: %w", err)
}
// Verify magic number
magic := binary.LittleEndian.Uint32(initialBuf[0:4])
if magic != ggufMagicNumber {
return fmt.Errorf("invalid GGUF magic number: 0x%08X", magic)
}
// Write the initial buffer to the writer
if _, err := w.Write(initialBuf); err != nil {
return fmt.Errorf("failed to write GGUF header prefix: %w", err)
}
// Copy the rest of the header from reader to writer
// The LimitedReader will return EOF once maxHeaderSize is reached
if _, err := io.Copy(w, r); err != nil {
return fmt.Errorf("failed to copy GGUF header: %w", err)
}
return nil
}
// convertGGUFMetadataKVs converts gguf_parser metadata KVs to simpler types
func convertGGUFMetadataKVs(kvs gguf_parser.GGUFMetadataKVs) map[string]interface{} {
result := make(map[string]interface{})
for _, kv := range kvs {
// Skip standard fields that are extracted separately
switch kv.Key {
case "general.architecture", "general.name", "general.license",
"general.version", "general.parameter_count", "general.quantization":
continue
}
result[kv.Key] = kv.Value
}
return result
}
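A minimal usage sketch for copyHeader, assuming the ai package above (copyHeaderFromFile is a hypothetical helper; the io.LimitedReader wrapping mirrors what the doc comment prescribes and what the parser below actually does):
package ai

import (
	"bytes"
	"io"
	"os"
)

// copyHeaderFromFile reads at most maxHeaderSize bytes of a GGUF header
// into memory, validating the magic number via copyHeader.
func copyHeaderFromFile(path string) ([]byte, error) {
	f, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer f.Close()

	var buf bytes.Buffer
	limited := &io.LimitedReader{R: f, N: maxHeaderSize}
	if err := copyHeader(&buf, limited); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}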

View File

@ -0,0 +1,135 @@
package ai
import (
"context"
"encoding/json"
"fmt"
"io"
"os"
"path/filepath"
"sort"
"strings"
"github.com/cespare/xxhash/v2"
gguf_parser "github.com/gpustack/gguf-parser-go"
"github.com/anchore/syft/internal"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/internal/unknown"
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
)
// parseGGUFModel parses a GGUF model file and returns the discovered package.
// This implementation only reads the header portion of the file, not the entire model.
func parseGGUFModel(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
defer internal.CloseAndLogError(reader, reader.Path())
// Create a temporary file for the library to parse
// The library requires a file path, so we create a temp file
tempFile, err := os.CreateTemp("", "syft-gguf-*.gguf")
if err != nil {
return nil, nil, fmt.Errorf("failed to create temp file: %w", err)
}
tempPath := tempFile.Name()
defer os.Remove(tempPath)
// Copy and validate the GGUF file header using LimitedReader to prevent OOM
// We use LimitedReader to cap reads at maxHeaderSize (50MB)
limitedReader := &io.LimitedReader{R: reader, N: maxHeaderSize}
if err := copyHeader(tempFile, limitedReader); err != nil {
tempFile.Close()
return nil, nil, fmt.Errorf("failed to copy GGUF header: %w", err)
}
tempFile.Close()
// Parse using gguf-parser-go with options to skip unnecessary data
ggufFile, err := gguf_parser.ParseGGUFFile(tempPath,
gguf_parser.SkipLargeMetadata(),
)
if err != nil {
return nil, nil, fmt.Errorf("failed to parse GGUF file: %w", err)
}
// Extract metadata
metadata := ggufFile.Metadata()
// Extract version separately (will be set on Package.Version)
modelVersion := extractVersion(ggufFile.Header.MetadataKV)
// Convert to syft metadata structure
syftMetadata := &pkg.GGUFFileHeader{
Architecture: metadata.Architecture,
Quantization: metadata.FileTypeDescriptor,
Parameters: uint64(metadata.Parameters),
GGUFVersion: uint32(ggufFile.Header.Version),
TensorCount: ggufFile.Header.TensorCount,
RemainingKeyValues: convertGGUFMetadataKVs(ggufFile.Header.MetadataKV),
MetadataKeyValuesHash: computeKVMetadataHash(ggufFile.Header.MetadataKV),
}
// If model name is not in metadata, use filename
if metadata.Name == "" {
metadata.Name = extractModelNameFromPath(reader.Path())
}
// Create package from metadata
p := newGGUFPackage(
syftMetadata,
metadata.Name,
modelVersion,
metadata.License,
reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
)
return []pkg.Package{p}, nil, unknown.IfEmptyf([]pkg.Package{p}, "unable to parse GGUF file")
}
// computeKVMetadataHash computes a stable hash of the KV metadata for use as a global identifier
func computeKVMetadataHash(metadata gguf_parser.GGUFMetadataKVs) string {
// Sort the KV pairs by key for stable hashing
sortedKVs := make([]gguf_parser.GGUFMetadataKV, len(metadata))
copy(sortedKVs, metadata)
sort.Slice(sortedKVs, func(i, j int) bool {
return sortedKVs[i].Key < sortedKVs[j].Key
})
// Marshal sorted KVs to JSON for stable hashing
jsonBytes, err := json.Marshal(sortedKVs)
if err != nil {
log.Debugf("failed to marshal metadata for hashing: %v", err)
return ""
}
// Compute xxhash
hash := xxhash.Sum64(jsonBytes)
return fmt.Sprintf("%016x", hash) // 16 hex chars (64 bits)
}
// extractVersion attempts to extract version from metadata KV pairs
func extractVersion(kvs gguf_parser.GGUFMetadataKVs) string {
for _, kv := range kvs {
if kv.Key == "general.version" {
if v, ok := kv.Value.(string); ok && v != "" {
return v
}
}
}
return ""
}
// extractModelNameFromPath extracts the model name from the file path
func extractModelNameFromPath(path string) string {
// Get the base filename
base := filepath.Base(path)
// Remove .gguf extension
name := strings.TrimSuffix(base, ".gguf")
return name
}
// integrity check
var _ generic.Parser = parseGGUFModel
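Since the sort-before-marshal step above is what makes the hash stable, a small sketch of that property (demoHashStability is hypothetical; the Key/Value field names are taken from the usage above):
package ai

import (
	"fmt"

	gguf_parser "github.com/gpustack/gguf-parser-go"
)

// demoHashStability shows that reordering KV pairs does not change the hash.
func demoHashStability() {
	a := gguf_parser.GGUFMetadataKVs{
		{Key: "general.architecture", Value: "llama"},
		{Key: "general.name", Value: "demo"},
	}
	b := gguf_parser.GGUFMetadataKVs{
		{Key: "general.name", Value: "demo"},
		{Key: "general.architecture", Value: "llama"},
	}
	fmt.Println(computeKVMetadataHash(a) == computeKVMetadataHash(b)) // true
}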

View File

@ -0,0 +1,128 @@
package ai
import (
"bytes"
"encoding/binary"
)
// GGUF type constants for test builder
// https://github.com/ggml-org/ggml/blob/master/docs/gguf.md
const (
ggufMagic = 0x46554747 // "GGUF" in little-endian
ggufTypeUint8 = 0
ggufTypeInt8 = 1
ggufTypeUint16 = 2
ggufTypeInt16 = 3
ggufTypeUint32 = 4
ggufTypeInt32 = 5
ggufTypeFloat32 = 6
ggufTypeBool = 7
ggufTypeString = 8
ggufTypeArray = 9
ggufTypeUint64 = 10
ggufTypeInt64 = 11
ggufTypeFloat64 = 12
)
// testGGUFBuilder helps build GGUF files for testing
type testGGUFBuilder struct {
buf *bytes.Buffer
version uint32
tensorCount uint64
kvPairs []testKVPair
}
type testKVPair struct {
key string
valueType uint32
value interface{}
}
func newTestGGUFBuilder() *testGGUFBuilder {
return &testGGUFBuilder{
buf: new(bytes.Buffer),
version: 3,
tensorCount: 0,
kvPairs: []testKVPair{},
}
}
func (b *testGGUFBuilder) withVersion(v uint32) *testGGUFBuilder {
b.version = v
return b
}
func (b *testGGUFBuilder) withTensorCount(count uint64) *testGGUFBuilder {
b.tensorCount = count
return b
}
func (b *testGGUFBuilder) withStringKV(key, value string) *testGGUFBuilder {
b.kvPairs = append(b.kvPairs, testKVPair{key: key, valueType: ggufTypeString, value: value})
return b
}
func (b *testGGUFBuilder) withUint64KV(key string, value uint64) *testGGUFBuilder {
b.kvPairs = append(b.kvPairs, testKVPair{key: key, valueType: ggufTypeUint64, value: value})
return b
}
func (b *testGGUFBuilder) withUint32KV(key string, value uint32) *testGGUFBuilder {
b.kvPairs = append(b.kvPairs, testKVPair{key: key, valueType: ggufTypeUint32, value: value})
return b
}
func (b *testGGUFBuilder) writeString(s string) {
binary.Write(b.buf, binary.LittleEndian, uint64(len(s)))
b.buf.WriteString(s)
}
func (b *testGGUFBuilder) build() []byte {
// Write magic number "GGUF"
binary.Write(b.buf, binary.LittleEndian, uint32(ggufMagic))
// Write version
binary.Write(b.buf, binary.LittleEndian, b.version)
// Write tensor count
binary.Write(b.buf, binary.LittleEndian, b.tensorCount)
// Write KV count
binary.Write(b.buf, binary.LittleEndian, uint64(len(b.kvPairs)))
// Write KV pairs
for _, kv := range b.kvPairs {
// Write key
b.writeString(kv.key)
// Write value type
binary.Write(b.buf, binary.LittleEndian, kv.valueType)
// Write value based on type
switch kv.valueType {
case ggufTypeString:
b.writeString(kv.value.(string))
case ggufTypeUint32:
binary.Write(b.buf, binary.LittleEndian, kv.value.(uint32))
case ggufTypeUint64:
binary.Write(b.buf, binary.LittleEndian, kv.value.(uint64))
case ggufTypeUint8:
binary.Write(b.buf, binary.LittleEndian, kv.value.(uint8))
case ggufTypeInt32:
binary.Write(b.buf, binary.LittleEndian, kv.value.(int32))
case ggufTypeBool:
var v uint8
if kv.value.(bool) {
v = 1
}
binary.Write(b.buf, binary.LittleEndian, v)
}
}
return b.buf.Bytes()
}
// buildInvalidMagic creates a file with invalid magic number
func (b *testGGUFBuilder) buildInvalidMagic() []byte {
buf := new(bytes.Buffer)
binary.Write(buf, binary.LittleEndian, uint32(0x12345678))
return buf.Bytes()
}
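For orientation, the first 24 bytes that build() emits for version 3, zero tensors, and two KV pairs (hand-computed from the writes above, little-endian):

    offset 0x00: 47 47 55 46                -- magic "GGUF" (0x46554747)
    offset 0x04: 03 00 00 00                -- version = 3
    offset 0x08: 00 00 00 00 00 00 00 00    -- tensor count = 0
    offset 0x10: 02 00 00 00 00 00 00 00    -- metadata KV count = 2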

View File

@ -1403,6 +1403,22 @@ func Test_Cataloger_PositiveCases(t *testing.T) {
Metadata: metadata("ffmpeg-library"),
},
},
{
logicalFixture: "elixir/1.19.1/linux-amd64",
expected: pkg.Package{
Name: "elixir",
Version: "1.19.1",
Type: "binary",
PURL: "pkg:generic/elixir@1.19.1",
Locations: locations("elixir", "lib/elixir/ebin/elixir.app"),
Metadata: pkg.BinarySignature{
Matches: []pkg.ClassifierMatch{
match("elixir-binary", "elixir"),
match("elixir-library", "lib/elixir/ebin/elixir.app"),
},
},
},
},
}
for _, test := range tests {

View File

@ -663,6 +663,26 @@ func DefaultClassifiers() []binutils.Classifier {
PURL: mustPURL("pkg:generic/ffmpeg@version"),
CPEs: singleCPE("cpe:2.3:a:ffmpeg:ffmpeg:*:*:*:*:*:*:*:*", cpe.NVDDictionaryLookupSource),
},
{
Class: "elixir-binary",
FileGlob: "**/elixir",
EvidenceMatcher: m.FileContentsVersionMatcher(
`(?m)ELIXIR_VERSION=(?P<version>[0-9]+\.[0-9]+\.[0-9]+)`),
Package: "elixir",
PURL: mustPURL("pkg:generic/elixir@version"),
CPEs: []cpe.CPE{
cpe.Must("cpe:2.3:a:elixir-lang:elixir:*:*:*:*:*:*:*:*", cpe.NVDDictionaryLookupSource),
},
},
{
Class: "elixir-library",
FileGlob: "**/elixir/ebin/elixir.app",
EvidenceMatcher: m.FileContentsVersionMatcher(
`(?m)\{vsn,"(?P<version>[0-9]+\.[0-9]+\.[0-9]+(-[a-z0-9]+)?)"\}`),
Package: "elixir",
PURL: mustPURL("pkg:generic/elixir@version"),
CPEs: singleCPE("cpe:2.3:a:elixir-lang:elixir:*:*:*:*:*:*:*:*", cpe.NVDDictionaryLookupSource),
},
}
return append(classifiers, defaultJavaClassifiers()...)
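A standalone sketch of what the two version patterns above extract, using lines drawn from the test fixtures included later in this diff:
package main

import (
	"fmt"
	"regexp"
)

func main() {
	binRe := regexp.MustCompile(`(?m)ELIXIR_VERSION=(?P<version>[0-9]+\.[0-9]+\.[0-9]+)`)
	appRe := regexp.MustCompile(`(?m)\{vsn,"(?P<version>[0-9]+\.[0-9]+\.[0-9]+(-[a-z0-9]+)?)"\}`)

	fmt.Println(binRe.FindStringSubmatch(`ELIXIR_VERSION=1.19.1`)[1]) // 1.19.1
	fmt.Println(appRe.FindStringSubmatch(`{vsn,"1.19.1"}`)[1])        // 1.19.1
}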

View File

@ -6,6 +6,7 @@ import (
"sort"
"strings"
packageurl "github.com/anchore/packageurl-go"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/pkg"
)
@ -32,6 +33,19 @@ func newPEPackage(versionResources map[string]string, f file.Location) pkg.Packa
Metadata: newPEBinaryVersionResourcesFromMap(versionResources),
}
// If this appears to be Ghostscript, emit a canonical generic purl
// Example expected: pkg:generic/ghostscript@<version>
prod := strings.ToLower(spaceNormalize(versionResources["ProductName"]))
if prod == "" {
// fall back to FileDescription if ProductName is missing
prod = strings.ToLower(spaceNormalize(versionResources["FileDescription"]))
}
if p.Version != "" && strings.Contains(prod, "ghostscript") {
// build a generic PURL for ghostscript
purl := packageurl.NewPackageURL(packageurl.TypeGeneric, "", "ghostscript", p.Version, nil, "").ToString()
p.PURL = purl
}
p.SetID()
return p

View File

@ -0,0 +1,24 @@
package binary
import (
"testing"
"github.com/anchore/syft/syft/file"
)
func TestGhostscriptPEGeneratesGenericPURL(t *testing.T) {
vr := map[string]string{
"CompanyName": "Artifex Software, Inc.",
"ProductName": "GPL Ghostscript",
"FileDescription": "Ghostscript Interpreter",
"ProductVersion": "9.54.0",
}
loc := file.NewLocation("/usr/bin/gswin64c.exe")
p := newPEPackage(vr, loc)
expected := "pkg:generic/ghostscript@9.54.0"
if p.PURL != expected {
t.Fatalf("expected purl %q, got %q", expected, p.PURL)
}
}

View File

@ -0,0 +1,20 @@
#!/bin/sh
# SPDX-License-Identifier: Apache-2.0
# SPDX-FileCopyrightText: 2021 The Elixir Team
# SPDX-FileCopyrightText: 2012 Plataformatec
set -e
ELIXIR_VERSION=1.19.1
if [ $# -eq 0 ] || { [ $# -eq 1 ] && { [ "$1" = "--help" ] || [ "$1" = "-h" ]; }; }; then
cat <<USAGE >&2
Usage: $(basename "$0") [options] [.exs file] [data]
## General options
-e "COMMAND" Evaluates the given command (*)
-h, --help Prints this message (standalone)
-r "FILE" Requires the given files/patterns (*)
-S SCRIPT Finds and executes the given script in \$PATH

View File

@ -0,0 +1,19 @@
{application,elixir,
[{description,"elixir"},
{vsn,"1.19.1"},
{modules,
['Elixir.Access','Elixir.Agent.Server','Elixir.Agent',
'Elixir.Application','Elixir.ArgumentError',
elixir_overridable,elixir_parser,elixir_quote,elixir_rewrite,
elixir_sup,elixir_tokenizer,elixir_utils,iex]},
{registered,[elixir_sup,elixir_config,elixir_code_server]},
{applications,[kernel,stdlib,compiler]},
{mod,{elixir,[]}},
{env,
[{ansi_syntax_colors,
[{atom,cyan},
{binary,default_color},
{operator,default_color}]},
{check_endianness,true},
{dbg_callback,{'Elixir.Macro',dbg,[]}},
{time_zone_database,'Elixir.Calendar.UTCOnlyTimeZoneDatabase'}]}]}.

View File

@ -0,0 +1,58 @@
package cpe
import (
"strings"
"github.com/anchore/syft/syft/pkg"
)
// TargetSoftwareToPackageType is derived from looking at target_software attributes in the NVD dataset
// TODO: ideally this would be driven from the store, where we can resolve ecosystem aliases directly
func TargetSoftwareToPackageType(tsw string) pkg.Type {
tsw = strings.NewReplacer("-", "_", " ", "_").Replace(strings.ToLower(tsw))
switch tsw {
case "alpine", "apk":
return pkg.ApkPkg
case "debian", "dpkg":
return pkg.DebPkg
case "java", "maven", "ant", "gradle", "jenkins", "jenkins_ci", "kafka", "logstash", "mule", "nifi", "solr", "spark", "storm", "struts", "tomcat", "zookeeper", "log4j":
return pkg.JavaPkg
case "javascript", "node", "nodejs", "node.js", "npm", "yarn", "apache", "jquery", "next.js", "prismjs":
return pkg.NpmPkg
case "c", "c++", "c/c++", "conan", "gnu_c++", "qt":
return pkg.ConanPkg
case "dart":
return pkg.DartPubPkg
case "redhat", "rpm", "redhat_enterprise_linux", "rhel", "suse", "suse_linux", "opensuse", "opensuse_linux", "fedora", "centos", "oracle_linux", "ol":
return pkg.RpmPkg
case "elixir", "hex":
return pkg.HexPkg
case "erlang":
return pkg.ErlangOTPPkg
case ".net", ".net_framework", "asp", "asp.net", "dotnet", "dotnet_framework", "c#", "csharp", "nuget":
return pkg.DotnetPkg
case "ruby", "gem", "nokogiri", "ruby_on_rails":
return pkg.GemPkg
case "rust", "cargo", "crates":
return pkg.RustPkg
case "python", "pip", "pypi", "flask":
return pkg.PythonPkg
case "kb", "knowledgebase", "msrc", "mskb", "microsoft":
return pkg.KbPkg
case "portage", "gentoo":
return pkg.PortagePkg
case "go", "golang", "gomodule":
return pkg.GoModulePkg
case "linux_kernel", "linux", "z/linux":
return pkg.LinuxKernelPkg
case "php":
return pkg.PhpComposerPkg
case "swift":
return pkg.SwiftPkg
case "wordpress", "wordpress_plugin", "wordpress_":
return pkg.WordpressPluginPkg
case "lua", "luarocks":
return pkg.LuaRocksPkg
}
return ""
}
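A hedged usage sketch (a hypothetical test, not part of this change) exercising the normalization and mapping above:
package cpe

import (
	"testing"

	"github.com/anchore/syft/syft/pkg"
)

// TestTargetSoftwareToPackageType_examples is illustrative: "Node.js"
// lower-cases to "node.js" (npm), and ".NET Framework" normalizes its
// space to an underscore, matching ".net_framework" (dotnet).
func TestTargetSoftwareToPackageType_examples(t *testing.T) {
	if got := TargetSoftwareToPackageType("Node.js"); got != pkg.NpmPkg {
		t.Errorf("expected %v, got %v", pkg.NpmPkg, got)
	}
	if got := TargetSoftwareToPackageType(".NET Framework"); got != pkg.DotnetPkg {
		t.Errorf("expected %v, got %v", pkg.DotnetPkg, got)
	}
}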

View File

@ -1,9 +1,12 @@
package dart
import (
"context"
"github.com/anchore/packageurl-go"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/internal/licenses"
)
func newPubspecLockPackage(name string, raw pubspecLockPackage, locations ...file.Location) pkg.Package {
@ -29,7 +32,7 @@ func newPubspecLockPackage(name string, raw pubspecLockPackage, locations ...fil
return p
}
func newPubspecPackage(raw pubspecPackage, locations ...file.Location) pkg.Package {
func newPubspecPackage(ctx context.Context, resolver file.Resolver, raw pubspecPackage, locations ...file.Location) pkg.Package {
var env *pkg.DartPubspecEnvironment
if raw.Environment.SDK != "" || raw.Environment.Flutter != "" {
// this is required only as of pubspec v2, but might have been optional before
@ -58,6 +61,8 @@ func newPubspecPackage(raw pubspecPackage, locations ...file.Location) pkg.Packa
p.SetID()
p = licenses.RelativeToPackage(ctx, resolver, p)
return p
}

View File

@ -29,7 +29,7 @@ type dartPubspecEnvironment struct {
Flutter string `mapstructure:"flutter" yaml:"flutter"`
}
func parsePubspec(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
func parsePubspec(ctx context.Context, resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
var pkgs []pkg.Package
dec := yaml.NewDecoder(reader)
@ -41,6 +41,8 @@ func parsePubspec(_ context.Context, _ file.Resolver, _ *generic.Environment, re
pkgs = append(pkgs,
newPubspecPackage(
ctx,
resolver,
p,
reader.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
),

Some files were not shown because too many files have changed in this diff.