diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 9d128c8b3..301266c7c 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -47,7 +47,7 @@ jobs: # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@1b168cd39490f61582a9beae412bb7057a6b2c4e #v3.29.5 + uses: github/codeql-action/init@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 #v3.29.5 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. @@ -58,7 +58,7 @@ jobs: # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). # If this step fails, then you should remove it and run the build manually (see below) - name: Autobuild - uses: github/codeql-action/autobuild@1b168cd39490f61582a9beae412bb7057a6b2c4e #v3.29.5 + uses: github/codeql-action/autobuild@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 #v3.29.5 # ℹ️ Command-line programs to run using the OS shell. # 📚 https://git.io/JvXDl @@ -72,4 +72,4 @@ jobs: # make release - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@1b168cd39490f61582a9beae412bb7057a6b2c4e #v3.29.5 + uses: github/codeql-action/analyze@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 #v3.29.5 diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index 5d19a3c26..d86c77c6d 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -1,128 +1,5 @@ -# Contributor Covenant Code of Conduct +# Code of Conduct -## Our Pledge +All contributors for any Anchore project must follow the [Contributor Covenant Code of Conduct](https://oss.anchore.com/docs/contributing/code-of-conduct/). -We as members, contributors, and leaders pledge to make participation in our -community a harassment-free experience for everyone, regardless of age, body -size, visible or invisible disability, ethnicity, sex characteristics, gender -identity and expression, level of experience, education, socio-economic status, -nationality, personal appearance, race, religion, or sexual identity -and orientation. - -We pledge to act and interact in ways that contribute to an open, welcoming, -diverse, inclusive, and healthy community. - -## Our Standards - -Examples of behavior that contributes to a positive environment for our -community include: - -* Demonstrating empathy and kindness toward other people -* Being respectful of differing opinions, viewpoints, and experiences -* Giving and gracefully accepting constructive feedback -* Accepting responsibility and apologizing to those affected by our mistakes, - and learning from the experience -* Focusing on what is best not just for us as individuals, but for the - overall community - -Examples of unacceptable behavior include: - -* The use of sexualized language or imagery, and sexual attention or - advances of any kind -* Trolling, insulting or derogatory comments, and personal or political attacks -* Public or private harassment -* Publishing others' private information, such as a physical or email - address, without their explicit permission -* Other conduct which could reasonably be considered inappropriate in a - professional setting - -## Enforcement Responsibilities - -Community leaders are responsible for clarifying and enforcing our standards of -acceptable behavior and will take appropriate and fair corrective action in -response to any behavior that they deem inappropriate, threatening, offensive, -or harmful.
- -Community leaders have the right and responsibility to remove, edit, or reject -comments, commits, code, wiki edits, issues, and other contributions that are -not aligned to this Code of Conduct, and will communicate reasons for moderation -decisions when appropriate. - -## Scope - -This Code of Conduct applies within all community spaces, and also applies when -an individual is officially representing the community in public spaces. -Examples of representing our community include using an official e-mail address, -posting via an official social media account, or acting as an appointed -representative at an online or offline event. - -## Enforcement - -Instances of abusive, harassing, or otherwise unacceptable behavior may be -reported to the community leaders responsible for enforcement at -[opensource@anchore.com](mailto:opensource@anchore.com). -All complaints will be reviewed and investigated promptly and fairly. - -All community leaders are obligated to respect the privacy and security of the -reporter of any incident. - -## Enforcement Guidelines - -Community leaders will follow these Community Impact Guidelines in determining -the consequences for any action they deem in violation of this Code of Conduct: - -### 1. Correction - -**Community Impact**: Use of inappropriate language or other behavior deemed -unprofessional or unwelcome in the community. - -**Consequence**: A private, written warning from community leaders, providing -clarity around the nature of the violation and an explanation of why the -behavior was inappropriate. A public apology may be requested. - -### 2. Warning - -**Community Impact**: A violation through a single incident or series -of actions. - -**Consequence**: A warning with consequences for continued behavior. No -interaction with the people involved, including unsolicited interaction with -those enforcing the Code of Conduct, for a specified period of time. This -includes avoiding interactions in community spaces as well as external channels -like social media. Violating these terms may lead to a temporary or -permanent ban. - -### 3. Temporary Ban - -**Community Impact**: A serious violation of community standards, including -sustained inappropriate behavior. - -**Consequence**: A temporary ban from any sort of interaction or public -communication with the community for a specified period of time. No public or -private interaction with the people involved, including unsolicited interaction -with those enforcing the Code of Conduct, is allowed during this period. -Violating these terms may lead to a permanent ban. - -### 4. Permanent Ban - -**Community Impact**: Demonstrating a pattern of violation of community -standards, including sustained inappropriate behavior, harassment of an -individual, or aggression toward or disparagement of classes of individuals. - -**Consequence**: A permanent ban from any sort of public interaction within -the community. - -## Attribution - -This Code of Conduct is adapted from the [Contributor Covenant][homepage], -version 2.0, available at -https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. - -Community Impact Guidelines were inspired by [Mozilla's code of conduct -enforcement ladder](https://github.com/mozilla/diversity). - -[homepage]: https://www.contributor-covenant.org - -For answers to common questions about this code of conduct, see the FAQ at -https://www.contributor-covenant.org/faq. Translations are available at -https://www.contributor-covenant.org/translations. 
+**TLDR:** Be kind, be respectful, and assume good intentions. We're all here to build great software together. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index fae74a587..9eb896221 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,154 +1,13 @@ -[#](#) Contributing to Syft +# Contributing -If you are looking to contribute to this project and want to open a GitHub pull request ("PR"), there are a few guidelines of what we are looking for in patches. Make sure you go through this document and ensure that your code proposal is aligned. +Thank you for your interest in contributing to Syft! -## Setting up your environment +Please see the [contribution guide](https://oss.anchore.com/docs/contributing/syft/) for development requirements and helpful tips to get started developing in the repo. For a deeper dive, please see the [architecture docs](https://oss.anchore.com/docs/architecture/syft/). -Before you can contribute to Syft, you need to configure your development environment. +**Have a question or need help?** Check out our [issues and discussions guide](https://oss.anchore.com/docs/contributing/issues-and-discussions/) to find the right place to start a conversation. -### Debian setup +**Ready to submit code?** Our [pull request guide](https://oss.anchore.com/docs/contributing/pull-requests/) covers everything from title conventions to the review process. Don't forget that ***all commits require a [sign-off](https://oss.anchore.com/docs/contributing/sign-off/)***. -You will need to install Go. The version on https://go.dev works best, using the system golang doesn't always work the way you might expect. +**Found a security issue?** Please do **not** open a public issue. Instead, see our [security policy](https://oss.anchore.com/docs/contributing/security/) for how to report vulnerabilities responsibly. -Refer to the go.mod file in the root of this repo for the recommended version of Go to install. - -You will also need Docker. There's no reason the system packages shouldn't work, but we used the official Docker package. You can find instructions for installing Docker in Debian [here](https://docs.docker.com/engine/install/debian/). - -You also need to install some Debian packages - -```sh -sudo apt-get install build-essential zip bc libxml2-utils git -``` - -## Configuring Git - -You will need to configure your git client with your name and email address. This is easily done from the command line. - -```text -$ git config --global user.name "John Doe" -$ git config --global user.email "john.doe@example.com" -``` - -This username and email address will matter later in this guide. - -## Fork the repo - -You should fork the Syft repo using the "Fork" button at the top right of the Syft GitHub [site](https://github.com/anchore/syft/). You will be doing your development in your fork, then submit a pull request to Syft. There are many resources how to use GitHub effectively, we will not cover those here. - -## Adding a feature or fix - -If you look at the Syft [Issue](https://github.com/anchore/syft/issues) there are plenty of bugs and feature requests. Maybe look at the [good first issue](https://github.com/anchore/syft/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22) list if you're not sure where to start. - -## Commit guidelines - -In the Syft project we like commits and pull requests (PR) to be easy to understand and review. Open source thrives best when everything happening is over documented and small enough to be understood. 
- -### Granular commits - -Please try to make every commit as simple as possible, but no simpler. The idea is that each commit should be a logical unit of code. Try not to commit too many tiny changes, for example every line changed in a file as a separate commit. And also try not to make a commit enormous, for example committing all your work at the end of the day. - -Rather than try to follow a strict guide on what is or is not best, we try to be flexible and simple in this space. Do what makes the most sense for the changes you are trying to include. - -### Commit title and description - -Remember that the message you leave for a commit is for the reviewer in the present, and for someone (maybe you) changing something in the future. Please make sure the title and description used is easy to understand and explains what was done. Jokes and clever comments generally don't age well in commit messages. Just the facts please. - -## Sign off your work - -The `sign-off` is an added line at the end of the explanation for the commit, certifying that you wrote it or otherwise have the right to submit it as an open-source patch. By submitting a contribution, you agree to be bound by the terms of the DCO Version 1.1 and Apache License Version 2.0. - -Signing off a commit certifies the below Developer's Certificate of Origin (DCO): - -```text -Developer's Certificate of Origin 1.1 - -By making a contribution to this project, I certify that: - - (a) The contribution was created in whole or in part by me and I - have the right to submit it under the open source license - indicated in the file; or - - (b) The contribution is based upon previous work that, to the best - of my knowledge, is covered under an appropriate open source - license and I have the right under that license to submit that - work with modifications, whether created in whole or in part - by me, under the same open source license (unless I am - permitted to submit under a different license), as indicated - in the file; or - - (c) The contribution was provided directly to me by some other - person who certified (a), (b) or (c) and I have not modified - it. - - (d) I understand and agree that this project and the contribution - are public and that a record of the contribution (including all - personal information I submit with it, including my sign-off) is - maintained indefinitely and may be redistributed consistent with - this project or the open source license(s) involved. -``` - -All contributions to this project are licensed under the [Apache License Version 2.0, January 2004](http://www.apache.org/licenses/). - -When committing your change, you can add the required line manually so that it looks like this: - -```text -Signed-off-by: John Doe -``` - -Creating a signed-off commit is then possible with `-s` or `--signoff`: - -```text -$ git commit -s -m "this is a commit message" -``` - -To double-check that the commit was signed-off, look at the log output: - -```text -$ git log -1 -commit 37ceh170e4hb283bb73d958f2036ee5k07e7fde7 (HEAD -> issue-35, origin/main, main) -Author: John Doe -Date: Mon Aug 1 11:27:13 2020 -0400 - - this is a commit message - - Signed-off-by: John Doe -``` - -## Test your changes - -This project has a `Makefile` which includes many helpers running both unit and integration tests. You can run `make help` to see all the options. Although PRs will have automatic checks for these, it is useful to run them locally, ensuring they pass before submitting changes. 
Ensure you've bootstrapped once before running tests: - -```text -$ make bootstrap -``` - -You only need to bootstrap once. After the bootstrap process, you can run the tests as many times as needed: - -```text -$ make unit -$ make integration -``` - -You can also run `make all` to run a more extensive test suite, but there is additional configuration that will be needed for those tests to run correctly. We will not cover the extra steps here. - -## Pull Request - -If you made it this far and all the tests are passing, it's time to submit a Pull Request (PR) for Syft. Submitting a PR is always a scary moment as what happens next can be an unknown. The Syft project strives to be easy to work with, we appreciate all contributions. Nobody is going to yell at you or try to make you feel bad. We love contributions and know how scary that first PR can be. - -### PR Title and Description - -Just like the commit title and description mentioned above, the PR title and description is very important for letting others know what's happening. Please include any details you think a reviewer will need to more properly review your PR. - -A PR that is very large or poorly described has a higher likelihood of being pushed to the end of the list. Reviewers like PRs they can understand and quickly review. - -### What to expect next - -Please be patient with the project. We try to review PRs in a timely manner, but this is highly dependent on all the other tasks we have going on. It's OK to ask for a status update every week or two, it's not OK to ask for a status update every day. - -It's very likely the reviewer will have questions and suggestions for changes to your PR. If your changes don't match the current style and flow of the other code, expect a request to change what you've done. - -## Document your changes - -And lastly, when proposed changes are modifying user-facing functionality or output, it is expected the PR will include updates to the documentation as well. Syft is not a project that is heavy on documentation. This will mostly be updating the README and help for the tool. - -If nobody knows new features exist, they can't use them! +**Want to help improve the docs?** Check out the [anchore/oss-docs](https://github.com/anchore/oss-docs) repository. diff --git a/DEVELOPING.md b/DEVELOPING.md deleted file mode 100644 index 8fd5b57f3..000000000 --- a/DEVELOPING.md +++ /dev/null @@ -1,392 +0,0 @@ -# Developing - -## Getting started - -In order to test and develop in this repo you will need the following dependencies installed: -- Golang -- docker -- make -- Python (>= 3.9) - -### Docker settings for getting started -Make sure you've updated your docker settings so the default docker socket path is available. - -Go to: - -docker -> settings -> advanced - -Make sure: - -``` -Allow the default Docker socket to be used -``` - -is checked. - -Also double check that the docker context being used is the default context. If it is not, run: - -`docker context use default` - -After cloning, the following steps can help you get setup: -1. run `make bootstrap` to download go mod dependencies, create the `/.tmp` dir, and download helper utilities. -2. run `make` to view the selection of developer commands in the Makefile -3. run `make build` to build the release snapshot binaries and packages -4. for an even quicker start you can run `go run cmd/syft/main.go` to print the syft help. - - this command `go run cmd/syft/main.go alpine:latest` will compile and run syft against `alpine:latest` -5. 
view the README or syft help output for more output options - -The main make tasks for common static analysis and testing are `lint`, `format`, `lint-fix`, `unit`, `integration`, and `cli`. - -See `make help` for all the current make tasks. - -### Internal Artifactory Settings - -**Not always applicable** - -Some companies have Artifactory setup internally as a solution for sourcing secure dependencies. -If you're seeing an issue where the unit tests won't run because of the below error then this section might be relevant for your use case. - -``` -[ERROR] [ERROR] Some problems were encountered while processing the POMs -``` - -If you're dealing with an issue where the unit tests will not pull/build certain java fixtures check some of these settings: - -- a `settings.xml` file should be available to help you communicate with your internal artifactory deployment -- this can be moved to `syft/pkg/cataloger/java/test-fixtures/java-builds/example-jenkins-plugin/` to help build the unit test-fixtures -- you'll also want to modify the `build-example-jenkins-plugin.sh` to use `settings.xml` - -For more information on this setup and troubleshooting see [issue 1895](https://github.com/anchore/syft/issues/1895#issuecomment-1610085319) - - -## Architecture - -At a high level, this is the package structure of syft: -``` -./cmd/syft/ -│ ├── cli/ -│ │ ├── cli.go // where all commands are wired up -│ │ ├── commands/ // all command implementations -│ │ ├── options/ // all command flags and configuration options -│ │ └── ui/ // all handlers for events that are shown on the UI -│ └── main.go // entrypoint for the application -└── syft/ // the "core" syft library - ├── format/ // contains code to encode or decode to and from SBOM formats - ├── pkg/ // contains code to catalog packages from a source - ├── sbom/ // contains the definition of an SBOM - └── source/ // contains code to create a source object for some input type (e.g. container image, directory, etc) -``` - -Syft's core library is implemented in the `syft` package and subpackages, where the major packages are: - -- the `syft/source` package produces a `source.Source` object that can be used to catalog a directory, container, and other source types. 
-- the `syft` package contains a single function that can take a `source.Source` object and catalog it, producing an `sbom.SBOM` object -- the `syft/format` package contains the ability to encode and decode SBOMs to and from different SBOM formats (such as SPDX and CycloneDX) - -The `cmd` package at the highest level execution flow wires up [`spf13/cobra`](https://github.com/spf13/cobra) commands for execution in the main application: -```mermaid -sequenceDiagram - participant main as cmd/syft/main - participant cli as cli.New() - participant root as root.Execute() - participant cmd as .Execute() - - main->>+cli: - - Note right of cli: wire ALL CLI commands - Note right of cli: add flags for ALL commands - - cli-->>-main: root command - - main->>+root: - root->>+cmd: - cmd-->>-root: (error) - - root-->>-main: (error) - - Note right of cmd: Execute SINGLE command from USER -``` - -The `packages` command uses the core library to generate an SBOM for the given user input: -```mermaid -sequenceDiagram - participant source as source.New(ubuntu:latest) - participant sbom as sbom.SBOM - participant catalog as syft.CatalogPackages(src) - participant encoder as syft.Encode(sbom, format) - - Note right of source: use "ubuntu:latest" as SBOM input - - source-->>+sbom: add source to SBOM struct - source-->>+catalog: pass src to generate catalog - catalog-->-sbom: add cataloging results onto SBOM - sbom-->>encoder: pass SBOM and format desired to syft encoder - encoder-->>source: return bytes that are the SBOM of the original input - - Note right of catalog: cataloger configuration is done based on src -``` - -Additionally, here is a [gist of using syft as a library](https://gist.github.com/spiffcs/3027638b7ba904d07e482a712bc00d3d) to generate a SBOM for a docker image. - - -### `pkg.Package` object - -The `pkg.Package` object is a core data structure that represents a software package. Fields like `name` and `version` probably don't need -a detailed explanation, but some of the other fields are worth a quick overview: - -- `FoundBy`: the name of the cataloger that discovered this package (e.g. `python-pip-cataloger`). -- `Locations`: these are the set of paths and layer ids that were parsed to discover this package (e.g. `python-pip-cataloger`). -- `Language`: the language of the package (e.g. `python`). -- `Type`: this is a high-level categorization of the ecosystem the package resides in. For instance, even if the package is a egg, wheel, or requirements.txt reference, it is still logically a "python" package. Not all package types align with a language (e.g. `rpm`) but it is common. -- `Metadata`: specialized data for specific location(s) parsed. We should try and raise up as much raw information that seems useful. As a rule of thumb the object here should be as flat as possible and use the raw names and values from the underlying source material parsed. - -When `pkg.Package` is serialized an additional `MetadataType` is shown. This is a label that helps consumers understand the datashape of the `Metadata` field. - -By convention the `MetadataType` value should follow these rules of thumb: - -- Only use lowercase letters, numbers, and hyphens. Use hyphens to separate words. -- **Try to anchor the name in the ecosystem, language, or packaging tooling it belongs to**. For a package manager for a language ecosystem the language, framework or runtime should be used as a prefix. For instance `pubspec-lock` is an OK name, but `dart-pubspec-lock` is better. 
For an OS package manager this is not necessary (e.g. `apk-db-entry` is a good name, but `alpine-apk-db-entry` is not since `alpine` and the `a` in `apk` is redundant). -- **Be as specific as possible to what the data represents**. For instance `ruby-gem` is NOT a good `MetadataType` value, but `ruby-gemspec` is. Why? Ruby gem information can come from a gemspec file or a Gemfile.lock, which are very different. The latter name provides more context as to what to expect. -- **Should describe WHAT the data is, NOT HOW it's used**. For instance `r-description-installed-file` is NOT a good `MetadataType` value since it's trying to convey that we use the DESCRIPTION file in the R ecosystem to detect installed packages. Instead simply describe what the DESCRIPTION file is itself without context of how it's used: `r-description`. -- **Use the `lock` suffix** to distinct between manifest files that loosely describe package version requirements vs files that strongly specify one and only one version of a package ("lock" files). These should only be used with respect to package managers that have the guide and lock distinction, but would not be appropriate otherwise (e.g. `rpm` does not have a guide vs lock, so `lock` should NOT be used to describe a db entry). -- **Use the `archive` suffix to indicate a package archive** (e.g. rpm file, apk file, etc) that describes the contents of the package. For example an RPM file that was cataloged would have a `rpm-archive` metadata type (not to be confused with an RPM DB record entry which would be `rpm-db-entry`). -- **Use the `entry` suffix** to indicate information about a package that was found as a single entry within file that has multiple package entries. If the entry was found within a DB or a flat-file store for an OS package manager, you should use `db-entry`. -- **Should NOT contain the phrase `package`**, though exceptions are allowed (say if the canonical name literally has the phrase package in it). -- **Should NOT contain have a `file` suffix** unless the canonical name has the term "file", such as a `pipfile` or `gemfile`. An example of a bad name for this rule is`ruby-gemspec-file`; a better name would be `ruby-gemspec`. -- **Should NOT contain the exact filename+extensions**. For instance `pipfile.lock` shouldn't really be in the name, instead try and describe what the file is: `python-pipfile-lock` (but shouldn't this be `python-pip-lock` you might ask? No, since the `pip` package manger is not related to the `pipfile` project). -- **Should NOT contain the phrase `metadata`**, unless the canonical name has this term. -- **Should represent a single use case**. For example, trying to describe Hackage metadata with a single `HackageMetadata` struct (and thus `MetadataType`) is not allowed since it represents 3 mutually exclusive use cases: representing a `stack.yaml`, `stack.lock`, or `cabal.project` file. Instead, each of these should have their own struct types and `MetadataType` values. - -There are other cases that are not covered by these rules... and that's ok! The goal is to provide a consistent naming scheme that is easy to understand and use when it's applicable. If the rules do not exactly apply in your situation then just use your best judgement (or amend these rules as needed whe new common cases come up). - -What if the underlying parsed data represents multiple files? There are two approaches to this: -- use the primary file to represent all the data. 
For instance, though the `dpkg-cataloger` looks at multiple files to get all information about a package, it's the `status` file that gets represented. -- nest each individual file's data under the `Metadata` field. For instance, the `java-archive-cataloger` may find information from on or all of the files: `pom.xml`, `pom.properties`, and `MANIFEST.MF`. However, the metadata is simply `java-metadata' with each possibility as a nested optional field. - -### Syft Catalogers - -Catalogers are the way in which syft is able to identify and construct packages given a set a targeted list of files. -For example, a cataloger can ask syft for all `package-lock.json` files in order to parse and raise up javascript packages -(see [how file globs](https://github.com/anchore/syft/tree/v0.70.0/syft/pkg/cataloger/javascript/cataloger.go#L16-L21) and -[file parser functions](https://github.com/anchore/syft/tree/v0.70.0/syft/pkg/cataloger/javascript/cataloger.go#L16-L21) are used -for a quick example). - -From a high level catalogers have the following properties: - -- _They are independent from one another_. The java cataloger has no idea of the processes, assumptions, or results of the python cataloger, for example. - -- _They do not know what source is being analyzed_. Are we analyzing a local directory? an image? if so, the squashed representation or all layers? The catalogers do not know the answers to these questions. Only that there is an interface to query for file paths and contents from an underlying "source" being scanned. - -- _Packages created by the cataloger should not be mutated after they are created_. There is one exception made for adding CPEs to a package after the cataloging phase, but that will most likely be moved back into the cataloger in the future. - - -Cataloger names should be unique and named with the following rules of thumb in mind: - -- Must end with `-cataloger` -- Use lowercase letters, numbers, and hyphens only -- Use hyphens to separate words -- Catalogers for language ecosystems should start with the language name (e.g. `python-` for a cataloger that raises up python packages) -- Distinguish between when the cataloger is searching for evidence of installed packages vs declared packages. For example, there are currently two different gemspec-based catalogers, the `ruby-gemspec-cataloger` and `ruby-installed-gemspec-cataloger`, where the latter requires that the gemspec is found within a `specifications` directory (which means it was installed, not just at the root of a source repo). - -#### Building a new Cataloger - -Catalogers must fulfill the [`pkg.Cataloger` interface](https://github.com/anchore/syft/tree/v0.70.0/syft/pkg/cataloger.go) in order to add packages to the SBOM. -All catalogers should be added to: -- the [global list of catalogers](https://github.com/anchore/syft/blob/9995950c70e849f9921919faffbfcf46401f71f3/syft/pkg/cataloger/cataloger.go#L92-L125) -- at least one source-specific list, today the two lists are [directory catalogers and image catalogers](https://github.com/anchore/syft/blob/9995950c70e849f9921919faffbfcf46401f71f3/syft/pkg/cataloger/cataloger.go#L39-L89) - -For reference, catalogers are [invoked within syft](https://github.com/anchore/syft/tree/v0.70.0/syft/pkg/cataloger/catalog.go#L41-L100) one after the other, and can be invoked in parallel. 
- -`generic.NewCataloger` is an abstraction syft used to make writing common components easier (see the [apkdb cataloger](https://github.com/anchore/syft/tree/v0.70.0/syft/pkg/cataloger/apkdb/cataloger.go) for example usage). -It takes the following information as input: -- A `catalogerName` to identify the cataloger uniquely among all other catalogers. -- Pairs of file globs as well as parser functions to parse those files. These parser functions return a slice of [`pkg.Package`](https://github.com/anchore/syft/blob/9995950c70e849f9921919faffbfcf46401f71f3/syft/pkg/package.go#L19) as well as a slice of [`artifact.Relationship`](https://github.com/anchore/syft/blob/9995950c70e849f9921919faffbfcf46401f71f3/syft/artifact/relationship.go#L31) to describe how the returned packages are related. See this [the apkdb cataloger parser function](https://github.com/anchore/syft/tree/v0.70.0/syft/pkg/cataloger/apkdb/parse_apk_db.go#L22-L102) as an example. - -Identified packages share a common `pkg.Package` struct so be sure that when the new cataloger is constructing a new package it is using the [`Package` struct](https://github.com/anchore/syft/tree/v0.70.0/syft/pkg/package.go#L16-L31). -If you want to return more information than what is available on the `pkg.Package` struct then you can do so in the `pkg.Package.Metadata` section of the struct, which is unique for each [`pkg.Type`](https://github.com/anchore/syft/blob/v0.70.0/syft/pkg/type.go). -See [the `pkg` package](https://github.com/anchore/syft/tree/v0.70.0/syft/pkg) for examples of the different metadata types that are supported today. -These are plugged into the `MetadataType` and `Metadata` fields in the above struct. `MetadataType` informs which type is being used. `Metadata` is an interface converted to that type. - -Finally, here is an example of where the package construction is done within the apk cataloger: -- [Calling the APK package constructor from the parser function](https://github.com/anchore/syft/blob/v0.70.0/syft/pkg/cataloger/apkdb/parse_apk_db.go#L106) -- [The APK package constructor itself](https://github.com/anchore/syft/tree/v0.70.0/syft/pkg/cataloger/apkdb/package.go#L12-L27) - -Interested in building a new cataloger? Checkout the [list of issues with the `new-cataloger` label](https://github.com/anchore/syft/issues?q=is%3Aopen+is%3Aissue+label%3Anew-cataloger+no%3Aassignee)! -If you have questions about implementing a cataloger feel free to file an issue or reach out to us [on discourse](https://anchore.com/discourse)! - - -#### Searching for files - -All catalogers are provided an instance of the [`file.Resolver`](https://github.com/anchore/syft/blob/v0.70.0/syft/source/file_resolver.go#L8) to interface with the image and search for files. The implementations for these -abstractions leverage [`stereoscope`](https://github.com/anchore/stereoscope) in order to perform searching. Here is a -rough outline how that works: - -1. a stereoscope `file.Index` is searched based on the input given (a path, glob, or MIME type). The index is relatively fast to search, but requires results to be filtered down to the files that exist in the specific layer(s) of interest. This is done automatically by the `filetree.Searcher` abstraction. This abstraction will fallback to searching directly against the raw `filetree.FileTree` if the index does not contain the file(s) of interest. Note: the `filetree.Searcher` is used by the `file.Resolver` abstraction. -2. 
Once the set of files are returned from the `filetree.Searcher` the results are filtered down further to return the most unique file results. For example, you may have requested for files by a glob that returns multiple results. These results are filtered down to deduplicate by real files, so if a result contains two references to the same file, say one accessed via symlink and one accessed via the real path, then the real path reference is returned and the symlink reference is filtered out. If both were accessed by symlink then the first (by lexical order) is returned. This is done automatically by the `file.Resolver` abstraction. -3. By the time results reach the `pkg.Cataloger` you are guaranteed to have a set of unique files that exist in the layer(s) of interest (relative to what the resolver supports). - -## Testing - -### Testing commands - -* `make help` shows a list of available commands -* `make unit`, `make integration`, `make cli`, and `make acceptance` run those test suites (see below) -* `make test` runs all those tests (and is therefore pretty slow) -* `make fixtures` clears and re-fetches all test fixtures. -* `go test ./syft/pkg/` for example can test particular packages, assuming fixtures are already made -* `make clean-cache` cleans all test cache. Note that subsequent test runs will be slower after this - - -### Levels of testing - -- `unit`: The default level of test which is distributed throughout the repo are unit tests. Any `_test.go` file that - does not reside somewhere within the `/test` directory is a unit test. Other forms of testing should be organized in - the `/test` directory. These tests should focus on correctness of functionality in depth. % test coverage metrics - only considers unit tests and no other forms of testing. - -- `integration`: located within `cmd/syft/internal/test/integration`, these tests focus on the behavior surfaced by the common library - entrypoints from the `syft` package and make light assertions about the results surfaced. Additionally, these tests - tend to make diversity assertions for enum-like objects, ensuring that as enum values are added to a definition - that integration tests will automatically fail if no test attempts to use that enum value. For more details see - the "Data diversity and freshness assertions" section below. - -- `cli`: located with in `test/cli`, these are tests that test the correctness of application behavior from a - snapshot build. This should be used in cases where a unit or integration test will not do or if you are looking - for in-depth testing of code in the `cmd/` package (such as testing the proper behavior of application configuration, - CLI switches, and glue code before syft library calls). - -- `acceptance`: located within `test/compare` and `test/install`, these are smoke-like tests that ensure that application - packaging and installation works as expected. For example, during release we provide RPM packages as a download - artifact. We also have an accompanying RPM acceptance test that installs the RPM from a snapshot build and ensures the - output of a syft invocation matches canned expected output. New acceptance tests should be added for each release artifact - and architecture supported (when possible). - -### Data diversity and freshness assertions - -It is important that tests against the codebase are flexible enough to begin failing when they do not cover "enough" -of the objects under test. 
"Cover" in this case does not mean that some percentage of the code has been executed -during testing, but instead that there is enough diversity of data input reflected in testing relative to the -definitions available. - -For instance, consider an enum-like value like so: -```go -type Language string - -const ( - Java Language = "java" - JavaScript Language = "javascript" - Python Language = "python" - Ruby Language = "ruby" - Go Language = "go" -) -``` - -Say we have a test that exercises all the languages defined today: - -```go -func TestCatalogPackages(t *testing.T) { - testTable := []struct { - // ... the set of test cases that test all languages - } - for _, test := range cases { - t.Run(test.name, func (t *testing.T) { - // use inputFixturePath and assert that syft.CatalogPackages() returns the set of expected Package objects - // ... - }) - } -} -``` - -Where each test case has a `inputFixturePath` that would result with packages from each language. This test is -brittle since it does not assert that all languages were exercised directly and future modifications (such as -adding a new language) won't be covered by any test cases. - -To address this the enum-like object should have a definition of all objects that can be used in testing: - -```go -type Language string - -// const( Java Language = ..., ... ) - -var AllLanguages = []Language{ - Java, - JavaScript, - Python, - Ruby, - Go, - Rust, -} -``` - -Allowing testing to automatically fail when adding a new language: - -```go -func TestCatalogPackages(t *testing.T) { - testTable := []struct { - // ... the set of test cases that (hopefully) covers all languages - } - - // new stuff... - observedLanguages := strset.New() - - for _, test := range cases { - t.Run(test.name, func (t *testing.T) { - // use inputFixturePath and assert that syft.CatalogPackages() returns the set of expected Package objects - // ... - - // new stuff... - for _, actualPkg := range actual { - observedLanguages.Add(string(actualPkg.Language)) - } - - }) - } - - // new stuff... - for _, expectedLanguage := range pkg.AllLanguages { - if !observedLanguages.Contains(expectedLanguage) { - t.Errorf("failed to test language=%q", expectedLanguage) - } - } -} -``` - -This is a better test since it will fail when someone adds a new language but fails to write a test case that should -exercise that new language. This method is ideal for integration-level testing, where testing correctness in depth -is not needed (that is what unit tests are for) but instead testing in breadth to ensure that units are well integrated. - -A similar case can be made for data freshness; if the quality of the results will be diminished if the input data -is not kept up to date then a test should be written (when possible) to assert any input data is not stale. - -An example of this is the static list of licenses that is stored in `internal/spdxlicense` for use by the SPDX -presenters. This list is updated and published periodically by an external group and syft can grab and update this -list by running `go generate ./...` from the root of the repo. - -An integration test has been written to grabs the latest license list version externally and compares that version -with the version generated in the codebase. If they differ, the test fails, indicating to someone that there is an -action needed to update it. 
- -**_The key takeaway is to try and write tests that fail when data assumptions change and not just when code changes.**_ - -### Snapshot tests - -The format objects make a lot of use of "snapshot" testing, where you save the expected output bytes from a call into the -git repository and during testing make a comparison of the actual bytes from the subject under test with the golden -copy saved in the repo. The "golden" files are stored in the `test-fixtures/snapshot` directory relative to the go -package under test and should always be updated by invoking `go test` on the specific test file with a specific CLI -update flag provided. - -Many of the `Format` tests make use of this approach, where the raw SBOM report is saved in the repo and the test -compares that SBOM with what is generated from the latest presenter code. The following command can be used to -update the golden files for the various snapshot tests: - -```bash -make update-format-golden-files -``` - -These flags are defined at the top of the test files that have tests that use the snapshot files. - -Snapshot testing is only as good as the manual verification of the golden snapshot file saved to the repo! Be careful -and diligent when updating these files. - - diff --git a/README.md b/README.md index febec6934..b5e57d0ca 100644 --- a/README.md +++ b/README.md @@ -18,171 +18,74 @@ ![syft-demo](https://user-images.githubusercontent.com/590471/90277200-2a253000-de33-11ea-893f-32c219eea11a.gif) -## Introduction - -Syft is a powerful and easy-to-use open-source tool for generating Software Bill of Materials (SBOMs) for container images and filesystems. It provides detailed visibility into the packages and dependencies in your software, helping you manage vulnerabilities, license compliance, and software supply chain security. - -Syft development is sponsored by [Anchore](https://anchore.com/), and is released under the [Apache-2.0 License](https://github.com/anchore/syft?tab=Apache-2.0-1-ov-file). For commercial support options with Syft or Grype, please [contact Anchore](https://get.anchore.com/contact/). - ## Features -- Generates SBOMs for container images, filesystems, archives, and more to discover packages and libraries -- Supports OCI, Docker and [Singularity](https://github.com/sylabs/singularity) image formats -- Linux distribution identification -- Works seamlessly with [Grype](https://github.com/anchore/grype) (a fast, modern vulnerability scanner) -- Able to create signed SBOM attestations using the [in-toto specification](https://github.com/in-toto/attestation/blob/main/spec/README.md) -- Convert between SBOM formats, such as CycloneDX, SPDX, and Syft's own format. + +- Generates SBOMs for **container images**, **filesystems**, **archives** (see the docs for a full list of [supported scan targets](https://oss.anchore.com/docs/guides/sbom/scan-targets/)) +- Supports dozens of packaging ecosystems (e.g. 
Alpine (apk), Debian (dpkg), RPM, Go, Python, Java, JavaScript, Ruby, Rust, PHP, .NET, and [many more](https://oss.anchore.com/docs/capabilities/all-packages/)) +- Supports OCI, Docker, [Singularity](https://github.com/sylabs/singularity), and [more image formats](https://oss.anchore.com/docs/guides/sbom/scan-targets/) +- Works seamlessly with [Grype](https://github.com/anchore/grype) for vulnerability scanning +- Multiple output formats (**CycloneDX**, **SPDX**, **Syft JSON**, and [more](https://oss.anchore.com/docs/guides/sbom/formats/)) including the ability to [convert between SBOM formats](https://oss.anchore.com/docs/guides/sbom/conversion/) +- Create signed SBOM attestations using the [in-toto specification](https://github.com/in-toto/attestation/blob/main/spec/README.md) + +> [!TIP] +> **New to Syft? Check out the [Getting Started guide](https://oss.anchore.com/docs/guides/sbom/getting-started/) for a walkthrough!** ## Installation -Syft binaries are provided for Linux, macOS and Windows. - -### Recommended -> ```bash -> curl -sSfL https://get.anchore.io/syft | sudo sh -s -- -b /usr/local/bin -> ``` - -Install script options: -- `-b`: Specify a custom installation directory (defaults to `./bin`) -- `-d`: More verbose logging levels (`-d` for debug, `-dd` for trace) -- `-v`: Verify the signature of the downloaded artifact before installation (requires [`cosign`](https://github.com/sigstore/cosign) to be installed) - -### Homebrew +The quickest way to get up and going: ```bash -brew install syft +curl -sSfL https://get.anchore.io/syft | sudo sh -s -- -b /usr/local/bin ``` -### Scoop +> [!TIP] +> **See [Installation docs](https://oss.anchore.com/docs/installation/syft/) for more ways to get Syft, including Homebrew, Docker, Scoop, Chocolatey, Nix, and more!** -```powershell -scoop install syft -``` +## The basics -### Chocolatey - -The chocolatey distribution of Syft is community-maintained and not distributed by the Anchore team - -```powershell -choco install syft -y -``` - -### Nix - -**Note**: Nix packaging of Syft is [community maintained](https://github.com/NixOS/nixpkgs/blob/master/pkgs/by-name/sy/syft/package.nix). Syft is available in the [stable channel](https://wiki.nixos.org/wiki/Nix_channels#The_official_channels) since NixOS `22.05`. +See the packages within a container image or directory: ```bash -nix-env -i syft +# container image +syft alpine:latest + +# directory +syft ./my-project ``` -... or, just try it out in an ephemeral nix shell: +To get an SBOM, specify one or more output formats: ```bash -nix-shell -p syft +# SBOM to stdout +syft -o cyclonedx-json + +# Multiple SBOMs to files +syft -o spdx-json=./spdx.json -o cyclonedx-json=./cdx.json ``` -## Getting started -### SBOM +> [!TIP] +> **Check out the [Getting Started guide](https://oss.anchore.com/docs/guides/sbom/getting-started/)** to explore all of the capabilities and features. +> +> **Want to know all of the ins-and-outs of Syft?** Check out the [CLI docs](https://oss.anchore.com/docs/reference/syft/cli/), [configuration docs](https://oss.anchore.com/docs/reference/syft/configuration/), and [JSON schema](https://oss.anchore.com/docs/reference/syft/json/latest/). -To generate an SBOM for a container image: - -```bash -syft -``` - -The above output includes only software that is visible in the container (i.e., the squashed representation of the image). 
To include software from all image layers in the SBOM, regardless of its presence in the final image, provide `--scope all-layers`: - -```bash -syft --scope all-layers -``` - -### Output formats - -The output format for Syft is configurable as well using the `-o` (or `--output`) option: - -``` -syft -o -``` - -Where the `formats` available are: -- `syft-json`: Use this to get as much information out of Syft as possible! -- `syft-text`: A row-oriented, human-and-machine-friendly output. -- `cyclonedx-xml`: An XML report conforming to the [CycloneDX 1.6 specification](https://cyclonedx.org/specification/overview/). -- `cyclonedx-xml@1.5`: An XML report conforming to the [CycloneDX 1.5 specification](https://cyclonedx.org/specification/overview/). -- `cyclonedx-json`: A JSON report conforming to the [CycloneDX 1.6 specification](https://cyclonedx.org/specification/overview/). -- `cyclonedx-json@1.5`: A JSON report conforming to the [CycloneDX 1.5 specification](https://cyclonedx.org/specification/overview/). -- `spdx-tag-value`: A tag-value formatted report conforming to the [SPDX 2.3 specification](https://spdx.github.io/spdx-spec/v2.3/). -- `spdx-tag-value@2.2`: A tag-value formatted report conforming to the [SPDX 2.2 specification](https://spdx.github.io/spdx-spec/v2.2.2/). -- `spdx-json`: A JSON report conforming to the [SPDX 2.3 JSON Schema](https://github.com/spdx/spdx-spec/blob/v2.3/schemas/spdx-schema.json). -- `spdx-json@2.2`: A JSON report conforming to the [SPDX 2.2 JSON Schema](https://github.com/spdx/spdx-spec/blob/v2.2/schemas/spdx-schema.json). -- `github-json`: A JSON report conforming to GitHub's dependency snapshot format. -- `syft-table`: A columnar summary (default). -- `template`: Lets the user specify the output format. See ["Using templates"](https://github.com/anchore/syft/wiki/using-templates) below. - -Note that flags using the @ can be used for earlier versions of each specification as well. 
- -### Supported Ecosystems - -- Alpine (apk) -- Bitnami packages -- C (conan) -- C++ (conan) -- Dart (pubs) -- Debian (dpkg) -- Dotnet (deps.json) -- Objective-C (cocoapods) -- Elixir (mix) -- Erlang (rebar3) -- Go (go.mod, Go binaries) -- GitHub (workflows, actions) -- Haskell (cabal, stack) -- Java (jar, ear, war, par, sar, nar, rar, native-image) -- JavaScript (npm, yarn) -- Jenkins Plugins (jpi, hpi) -- Linux kernel archives (vmlinz) -- Linux kernel modules (ko) -- Nix (outputs in /nix/store) -- PHP (composer, PECL, Pear) -- Python (wheel, egg, poetry, requirements.txt, uv) -- Red Hat (rpm) -- Ruby (gem) -- Rust (cargo.lock, auditable binary) -- Swift (cocoapods, swift-package-manager) -- Wordpress plugins -- Terraform providers (.terraform.lock.hcl) - -## Documentation - -Our [wiki](https://github.com/anchore/syft/wiki) contains further details on the following topics: - -* [Supported Sources](https://github.com/anchore/syft/wiki/supported-sources) -* [File Selection](https://github.com/anchore/syft/wiki/file-selection) -* [Excluding file paths](https://github.com/anchore/syft/wiki/excluding-file-paths) -* [Output formats](https://github.com/anchore/syft/wiki/output-formats) -* [Package Cataloger Selection](https://github.com/anchore/syft/wiki/package-cataloger-selection) - * [Concepts](https://github.com/anchore/syft/wiki/package-cataloger-selection#concepts) - * [Examples](https://github.com/anchore/syft/wiki/package-cataloger-selection#examples) -* [Using templates](https://github.com/anchore/syft/wiki/using-templates) -* [Multiple outputs](https://github.com/anchore/syft/wiki/multiple-outputs) -* [Private Registry Authentication](https://github.com/anchore/syft/wiki/private-registry-authentication) - * [Local Docker Credentials](https://github.com/anchore/syft/wiki/private-registry-authentication#local-docker) - * [Docker Credentials in Kubernetes](https://github.com/anchore/syft/wiki/private-registry-authentication#docker-credentials-in-kubernetes) -* [Attestation (experimental)](https://github.com/anchore/syft/wiki/attestation) - * [Keyless Support](https://github.com/anchore/syft/wiki/attestation#keyless-support) - * [Local private key support](https://github.com/anchore/syft/wiki/attestation#local-private-key-support) - * [Adding an SBOM to an image as an attestation using Syft](https://github.com/anchore/syft/wiki/attestation#adding-an-sbom-to-an-image-as-an-attestation-using-syft) -* [Configuration](https://github.com/anchore/syft/wiki/configuration) ## Contributing -Check out our [contributing](/CONTRIBUTING.md) guide and [developer](/DEVELOPING.md) docs. +We encourage users to help make these tools better by [submitting issues](https://github.com/anchore/syft/issues) when you find a bug or want a new feature. +Check out our [contributing overview](https://oss.anchore.com/docs/contributing/) and [developer-specific documentation](https://oss.anchore.com/docs/contributing/syft/) if you are interested in providing code contributions. -## Syft Team Meetings -The Syft Team hold regular community meetings online. All are welcome to join to bring topics for discussion. + +

+ Syft development is sponsored by Anchore, and is released under the Apache-2.0 License. + The Syft logo by Anchore is licensed under CC BY 4.0 +

+ +For commercial support options with Syft or Grype, please [contact Anchore](https://get.anchore.com/contact/). + +## Come talk to us! + +The Syft Team holds regular community meetings online. All are welcome to join to bring topics for discussion. - Check the [calendar](https://calendar.google.com/calendar/u/0/r?cid=Y182OTM4dGt0MjRtajI0NnNzOThiaGtnM29qNEBncm91cC5jYWxlbmRhci5nb29nbGUuY29t) for the next meeting date. - Add items to the [agenda](https://docs.google.com/document/d/1ZtSAa6fj2a6KRWviTn3WoJm09edvrNUp4Iz_dOjjyY8/edit?usp=sharing) (join [this group](https://groups.google.com/g/anchore-oss-community) for write access to the [agenda](https://docs.google.com/document/d/1ZtSAa6fj2a6KRWviTn3WoJm09edvrNUp4Iz_dOjjyY8/edit?usp=sharing)) - See you there! - -## Syft Logo - -

Syft Logo by Anchore is licensed under CC BY 4.0

diff --git a/SECURITY.md b/SECURITY.md index 9163ece3c..64bf53551 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -2,31 +2,15 @@ ## Supported Versions - - Security updates are applied only to the most recent release, try to always be up to date. ## Reporting a Vulnerability - - To report a security issue, please email [security@anchore.com](mailto:security@anchore.com) with a description of the issue, the steps you took to create the issue, affected versions, and, if known, mitigations for the issue. All support will be made on a best effort basis, so please indicate the "urgency level" of the vulnerability as Critical, High, Medium or Low. + +For more details, see our [security policy documentation](https://oss.anchore.com/docs/contributing/security/). diff --git a/cmd/syft/internal/options/dotnet.go b/cmd/syft/internal/options/dotnet.go index b941a8bba..9daa38462 100644 --- a/cmd/syft/internal/options/dotnet.go +++ b/cmd/syft/internal/options/dotnet.go @@ -13,6 +13,8 @@ type dotnetConfig struct { PropagateDLLClaimsToParents bool `mapstructure:"propagate-dll-claims-to-parents" json:"propagate-dll-claims-to-parents" yaml:"propagate-dll-claims-to-parents"` RelaxDLLClaimsWhenBundlingDetected bool `mapstructure:"relax-dll-claims-when-bundling-detected" json:"relax-dll-claims-when-bundling-detected" yaml:"relax-dll-claims-when-bundling-detected"` + + ExcludeProjectReferences bool `mapstructure:"exclude-project-references" json:"exclude-project-references" yaml:"exclude-project-references"` } var _ interface { @@ -24,6 +26,7 @@ func (o *dotnetConfig) DescribeFields(descriptions clio.FieldDescriptionSet) { descriptions.Add(&o.DepPackagesMustClaimDLL, `only keep dep.json packages which have a runtime/resource DLL claimed in the deps.json targets section (but not necessarily found on disk). The package is also included if any child package claims a DLL, even if the package itself does not claim a DLL.`) descriptions.Add(&o.PropagateDLLClaimsToParents, `treat DLL claims or on-disk evidence for child packages as DLL claims or on-disk evidence for any parent package`) descriptions.Add(&o.RelaxDLLClaimsWhenBundlingDetected, `show all packages from the deps.json if bundling tooling is present as a dependency (e.g. 
ILRepack)`) + descriptions.Add(&o.ExcludeProjectReferences, `exclude packages with type "project" from deps.json output (these are internal project references, not NuGet packages)`) } func defaultDotnetConfig() dotnetConfig { @@ -33,5 +36,6 @@ func defaultDotnetConfig() dotnetConfig { DepPackagesMustClaimDLL: def.DepPackagesMustClaimDLL, PropagateDLLClaimsToParents: def.PropagateDLLClaimsToParents, RelaxDLLClaimsWhenBundlingDetected: def.RelaxDLLClaimsWhenBundlingDetected, + ExcludeProjectReferences: def.ExcludeProjectReferences, } } diff --git a/cmd/syft/internal/test/integration/catalog_packages_cases_test.go b/cmd/syft/internal/test/integration/catalog_packages_cases_test.go index 3014be2a0..2ed19dee7 100644 --- a/cmd/syft/internal/test/integration/catalog_packages_cases_test.go +++ b/cmd/syft/internal/test/integration/catalog_packages_cases_test.go @@ -97,7 +97,7 @@ var imageOnlyTestCases = []testCase{ "Serilog.Sinks.Console": "4.0.1", //"System.Diagnostics.DiagnosticSource": "6.0.0", // no dll claims in deps.json targets section //"System.Runtime.CompilerServices.Unsafe": "6.0.0", // no dll claims in deps.json targets section - "TestCommon": "1.0.0", + //"TestCommon": "1.0.0", "TestLibrary": "1.0.0", }, }, @@ -274,7 +274,7 @@ var dirOnlyTestCases = []testCase{ "Serilog.Sinks.Console": "4.0.1", //"System.Diagnostics.DiagnosticSource": "6.0.0", // no dll claims in deps.json targets section //"System.Runtime.CompilerServices.Unsafe": "6.0.0", // no dll claims in deps.json targets section - "TestCommon": "1.0.0", + //"TestCommon": "1.0.0", "TestLibrary": "1.0.0", }, }, diff --git a/go.mod b/go.mod index 9ff604092..052f70ac8 100644 --- a/go.mod +++ b/go.mod @@ -163,7 +163,7 @@ require ( github.com/go-logr/logr v1.4.3 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/go-restruct/restruct v1.2.0-alpha // indirect - github.com/goccy/go-yaml v1.19.0 + github.com/goccy/go-yaml v1.19.1 github.com/gogo/protobuf v1.3.2 // indirect github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e // indirect diff --git a/go.sum b/go.sum index 6607f720c..06572102e 100644 --- a/go.sum +++ b/go.sum @@ -431,8 +431,8 @@ github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlnd github.com/gobwas/httphead v0.1.0/go.mod h1:O/RXo79gxV8G+RqlR/otEwx4Q36zl9rqC5u12GKvMCM= github.com/gobwas/pool v0.2.1/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= github.com/gobwas/ws v1.2.1/go.mod h1:hRKAFb8wOxFROYNsT1bqfWnhX+b5MFeJM9r2ZSwg/KY= -github.com/goccy/go-yaml v1.19.0 h1:EmkZ9RIsX+Uq4DYFowegAuJo8+xdX3T/2dwNPXbxEYE= -github.com/goccy/go-yaml v1.19.0/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA= +github.com/goccy/go-yaml v1.19.1 h1:3rG3+v8pkhRqoQ/88NYNMHYVGYztCOCIZ7UQhu7H+NE= +github.com/goccy/go-yaml v1.19.1/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA= github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= diff --git a/internal/constants.go b/internal/constants.go index f9be7bcd0..482b5be96 100644 --- a/internal/constants.go +++ b/internal/constants.go @@ -3,11 +3,12 @@ package internal const ( // JSONSchemaVersion is the current schema version output by the JSON encoder // This is roughly following the "SchemaVer" guidelines for versioning the JSON schema. 
Please see schema/json/README.md for details on how to increment. - JSONSchemaVersion = "16.1.2" + JSONSchemaVersion = "16.1.3" // Changelog // 16.1.0 - reformulated the python pdm fields (added "URL" and removed the unused "path" field). // 16.1.1 - correct elf package osCpe field according to the document of systemd (also add appCpe field) - // 16.1.2 - add file executable toolchain and symbol information + // 16.1.2 - add DotnetDepsEntry.type package metadata field + // 16.1.3 - add file executable toolchain and symbol information ) diff --git a/schema/json/schema-16.1.2.json b/schema/json/schema-16.1.2.json index 9e83026ee..ed2ebee78 100644 --- a/schema/json/schema-16.1.2.json +++ b/schema/json/schema-16.1.2.json @@ -853,6 +853,10 @@ "type": "string", "description": "HashPath is the relative path to the .nupkg.sha512 hash file (e.g. \"app.metrics.3.0.0.nupkg.sha512\")" }, + "type": { + "type": "string", + "description": "Type is type of entry could be package or project for internal refs" + }, "executables": { "patternProperties": { ".*": { @@ -1283,20 +1287,6 @@ "elfSecurityFeatures": { "$ref": "#/$defs/ELFSecurityFeatures", "description": "ELFSecurityFeatures contains ELF-specific security hardening information when Format is ELF." - }, - "symbolNames": { - "items": { - "type": "string" - }, - "type": "array", - "description": "Symbols captures the selection from the symbol table found in the binary." - }, - "toolchains": { - "items": { - "$ref": "#/$defs/Toolchain" - }, - "type": "array", - "description": "Toolchains captures information about the compiler, linker, runtime, or other toolchains used to build (or otherwise exist within) the executable." } }, "type": "object", @@ -4239,27 +4229,6 @@ ], "description": "TerraformLockProviderEntry represents a single provider entry in a Terraform dependency lock file (.terraform.lock.hcl)." }, - "Toolchain": { - "properties": { - "name": { - "type": "string", - "description": "Name is the name of the toolchain (e.g., \"gcc\", \"clang\", \"ld\", etc.)." - }, - "version": { - "type": "string", - "description": "Version is the version of the toolchain." - }, - "kind": { - "type": "string", - "description": "Kind indicates the type of toolchain (e.g., compiler, linker, runtime)." - } - }, - "type": "object", - "required": [ - "name", - "kind" - ] - }, "WordpressPluginEntry": { "properties": { "pluginInstallDirectory": { diff --git a/schema/json/schema-16.1.3.json b/schema/json/schema-16.1.3.json new file mode 100644 index 000000000..be086ef18 --- /dev/null +++ b/schema/json/schema-16.1.3.json @@ -0,0 +1,4301 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "anchore.io/schema/syft/json/16.1.3/document", + "$ref": "#/$defs/Document", + "$defs": { + "AlpmDbEntry": { + "properties": { + "basepackage": { + "type": "string", + "description": "BasePackage is the base package name this package was built from (source package in Arch build system)" + }, + "package": { + "type": "string", + "description": "Package is the package name as found in the desc file" + }, + "version": { + "type": "string", + "description": "Version is the package version as found in the desc file" + }, + "description": { + "type": "string", + "description": "Description is a human-readable package description" + }, + "architecture": { + "type": "string", + "description": "Architecture is the target CPU architecture as defined in Arch architecture spec (e.g. 
x86_64, aarch64, or \"any\" for arch-independent packages)" + }, + "size": { + "type": "integer", + "description": "Size is the installed size in bytes" + }, + "packager": { + "type": "string", + "description": "Packager is the name and email of the person who packaged this (RFC822 format)" + }, + "url": { + "type": "string", + "description": "URL is the upstream project URL" + }, + "validation": { + "type": "string", + "description": "Validation is the validation method used for package integrity (e.g. pgp signature, sha256 checksum)" + }, + "reason": { + "type": "integer", + "description": "Reason is the installation reason tracked by pacman (0=explicitly installed by user, 1=installed as dependency)" + }, + "files": { + "items": { + "$ref": "#/$defs/AlpmFileRecord" + }, + "type": "array", + "description": "Files are the files installed by this package" + }, + "backup": { + "items": { + "$ref": "#/$defs/AlpmFileRecord" + }, + "type": "array", + "description": "Backup is the list of configuration files that pacman backs up before upgrades" + }, + "provides": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Provides are virtual packages provided by this package (allows other packages to depend on capabilities rather than specific packages)" + }, + "depends": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Depends are the runtime dependencies required by this package" + } + }, + "type": "object", + "required": [ + "basepackage", + "package", + "version", + "description", + "architecture", + "size", + "packager", + "url", + "validation", + "reason", + "files", + "backup" + ], + "description": "AlpmDBEntry is a struct that represents the package data stored in the pacman flat-file stores for arch linux." + }, + "AlpmFileRecord": { + "properties": { + "path": { + "type": "string", + "description": "Path is the file path relative to the filesystem root" + }, + "type": { + "type": "string", + "description": "Type is the file type (e.g. regular file, directory, symlink)" + }, + "uid": { + "type": "string", + "description": "UID is the file owner user ID as recorded by pacman" + }, + "gid": { + "type": "string", + "description": "GID is the file owner group ID as recorded by pacman" + }, + "time": { + "type": "string", + "format": "date-time", + "description": "Time is the file modification timestamp" + }, + "size": { + "type": "string", + "description": "Size is the file size in bytes" + }, + "link": { + "type": "string", + "description": "Link is the symlink target path if this is a symlink" + }, + "digest": { + "items": { + "$ref": "#/$defs/Digest" + }, + "type": "array", + "description": "Digests contains file content hashes for integrity verification" + } + }, + "type": "object", + "description": "AlpmFileRecord represents a single file entry within an Arch Linux package with its associated metadata tracked by pacman." 
+ }, + "ApkDbEntry": { + "properties": { + "package": { + "type": "string", + "description": "Package is the package name as found in the installed file" + }, + "originPackage": { + "type": "string", + "description": "OriginPackage is the original source package name this binary was built from (used to track which aport/source built this)" + }, + "maintainer": { + "type": "string", + "description": "Maintainer is the package maintainer name and email" + }, + "version": { + "type": "string", + "description": "Version is the package version as found in the installed file" + }, + "architecture": { + "type": "string", + "description": "Architecture is the target CPU architecture" + }, + "url": { + "type": "string", + "description": "URL is the upstream project URL" + }, + "description": { + "type": "string", + "description": "Description is a human-readable package description" + }, + "size": { + "type": "integer", + "description": "Size is the package archive size in bytes (.apk file size)" + }, + "installedSize": { + "type": "integer", + "description": "InstalledSize is the total size of installed files in bytes" + }, + "pullDependencies": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Dependencies are the runtime dependencies required by this package" + }, + "provides": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Provides are virtual packages provided by this package (for capability-based dependencies)" + }, + "pullChecksum": { + "type": "string", + "description": "Checksum is the package content checksum for integrity verification" + }, + "gitCommitOfApkPort": { + "type": "string", + "description": "GitCommit is the git commit hash of the APK port definition in Alpine's aports repository" + }, + "files": { + "items": { + "$ref": "#/$defs/ApkFileRecord" + }, + "type": "array", + "description": "Files are the files installed by this package" + } + }, + "type": "object", + "required": [ + "package", + "originPackage", + "maintainer", + "version", + "architecture", + "url", + "description", + "size", + "installedSize", + "pullDependencies", + "provides", + "pullChecksum", + "gitCommitOfApkPort", + "files" + ], + "description": "ApkDBEntry represents all captured data for the alpine linux package manager flat-file store." + }, + "ApkFileRecord": { + "properties": { + "path": { + "type": "string", + "description": "Path is the file path relative to the filesystem root" + }, + "ownerUid": { + "type": "string", + "description": "OwnerUID is the file owner user ID" + }, + "ownerGid": { + "type": "string", + "description": "OwnerGID is the file owner group ID" + }, + "permissions": { + "type": "string", + "description": "Permissions is the file permission mode string (e.g. \"0755\", \"0644\")" + }, + "digest": { + "$ref": "#/$defs/Digest", + "description": "Digest is the file content hash for integrity verification" + } + }, + "type": "object", + "required": [ + "path" + ], + "description": "ApkFileRecord represents a single file listing and metadata from a APK DB entry (which may have many of these file records)." + }, + "BinarySignature": { + "properties": { + "matches": { + "items": { + "$ref": "#/$defs/ClassifierMatch" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "matches" + ], + "description": "BinarySignature represents a set of matched values within a binary file." 
+ }, + "BitnamiSbomEntry": { + "properties": { + "name": { + "type": "string", + "description": "Name is the package name as found in the Bitnami SPDX file" + }, + "arch": { + "type": "string", + "description": "Architecture is the target CPU architecture (amd64 or arm64 in Bitnami images)" + }, + "distro": { + "type": "string", + "description": "Distro is the distribution name this package is for (base OS like debian, ubuntu, etc.)" + }, + "revision": { + "type": "string", + "description": "Revision is the Bitnami-specific package revision number (incremented for Bitnami rebuilds of same upstream version)" + }, + "version": { + "type": "string", + "description": "Version is the package version as found in the Bitnami SPDX file" + }, + "path": { + "type": "string", + "description": "Path is the installation path in the filesystem where the package is located" + }, + "files": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Files are the file paths owned by this package (tracked via SPDX relationships)" + } + }, + "type": "object", + "required": [ + "name", + "arch", + "distro", + "revision", + "version", + "path", + "files" + ], + "description": "BitnamiSBOMEntry represents all captured data from Bitnami packages described in Bitnami' SPDX files." + }, + "CConanFileEntry": { + "properties": { + "ref": { + "type": "string", + "description": "Ref is the package reference string in format name/version@user/channel" + } + }, + "type": "object", + "required": [ + "ref" + ], + "description": "ConanfileEntry represents a single \"Requires\" entry from a conanfile.txt." + }, + "CConanInfoEntry": { + "properties": { + "ref": { + "type": "string", + "description": "Ref is the package reference string in format name/version@user/channel" + }, + "package_id": { + "type": "string", + "description": "PackageID is a unique package variant identifier" + } + }, + "type": "object", + "required": [ + "ref" + ], + "description": "ConaninfoEntry represents a single \"full_requires\" entry from a conaninfo.txt." + }, + "CConanLockEntry": { + "properties": { + "ref": { + "type": "string", + "description": "Ref is the package reference string in format name/version@user/channel" + }, + "package_id": { + "type": "string", + "description": "PackageID is a unique package variant identifier computed from settings/options (static hash in Conan 1.x, can have collisions with complex dependency graphs)" + }, + "prev": { + "type": "string", + "description": "Prev is the previous lock entry reference for versioning" + }, + "requires": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Requires are the runtime package dependencies" + }, + "build_requires": { + "items": { + "type": "string" + }, + "type": "array", + "description": "BuildRequires are the build-time dependencies (e.g. cmake, compilers)" + }, + "py_requires": { + "items": { + "type": "string" + }, + "type": "array", + "description": "PythonRequires are the Python dependencies needed for Conan recipes" + }, + "options": { + "$ref": "#/$defs/KeyValues", + "description": "Options are package configuration options as key-value pairs (e.g. 
shared=True, fPIC=True)" + }, + "path": { + "type": "string", + "description": "Path is the filesystem path to the package in Conan cache" + }, + "context": { + "type": "string", + "description": "Context is the build context information" + } + }, + "type": "object", + "required": [ + "ref" + ], + "description": "ConanV1LockEntry represents a single \"node\" entry from a conan.lock V1 file." + }, + "CConanLockV2Entry": { + "properties": { + "ref": { + "type": "string", + "description": "Ref is the package reference string in format name/version@user/channel" + }, + "packageID": { + "type": "string", + "description": "PackageID is a unique package variant identifier (dynamic in Conan 2.0, more accurate than V1)" + }, + "username": { + "type": "string", + "description": "Username is the Conan user/organization name" + }, + "channel": { + "type": "string", + "description": "Channel is the Conan channel name indicating stability/purpose (e.g. stable, testing, experimental)" + }, + "recipeRevision": { + "type": "string", + "description": "RecipeRevision is a git-like revision hash (RREV) of the recipe" + }, + "packageRevision": { + "type": "string", + "description": "PackageRevision is a git-like revision hash of the built binary package" + }, + "timestamp": { + "type": "string", + "description": "TimeStamp is when this package was built/locked" + } + }, + "type": "object", + "required": [ + "ref" + ], + "description": "ConanV2LockEntry represents a single \"node\" entry from a conan.lock V2 file." + }, + "CPE": { + "properties": { + "cpe": { + "type": "string", + "description": "Value is the CPE string identifier." + }, + "source": { + "type": "string", + "description": "Source is the source where this CPE was obtained or generated from." + } + }, + "type": "object", + "required": [ + "cpe" + ], + "description": "CPE represents a Common Platform Enumeration identifier used for matching packages to known vulnerabilities in security databases." + }, + "ClassifierMatch": { + "properties": { + "classifier": { + "type": "string" + }, + "location": { + "$ref": "#/$defs/Location" + } + }, + "type": "object", + "required": [ + "classifier", + "location" + ], + "description": "ClassifierMatch represents a single matched value within a binary file and the \"class\" name the search pattern represents." + }, + "CocoaPodfileLockEntry": { + "properties": { + "checksum": { + "type": "string", + "description": "Checksum is the SHA-1 hash of the podspec file for integrity verification (generated via `pod ipc spec ... | openssl sha1`), ensuring all team members use the same pod specification version" + } + }, + "type": "object", + "required": [ + "checksum" + ], + "description": "CocoaPodfileLockEntry represents a single entry from the \"Pods\" section of a Podfile.lock file." + }, + "CondaLink": { + "properties": { + "source": { + "type": "string", + "description": "Source is the original path where the package was extracted from cache." + }, + "type": { + "type": "integer", + "description": "Type indicates the link type (1 for hard link, 2 for soft link, 3 for copy)." + } + }, + "type": "object", + "required": [ + "source", + "type" + ], + "description": "CondaLink represents link metadata from a Conda package's link.json file describing package installation source." + }, + "CondaMetadataEntry": { + "properties": { + "arch": { + "type": "string", + "description": "Arch is the target CPU architecture for the package (e.g., \"arm64\", \"x86_64\")." 
+ }, + "name": { + "type": "string", + "description": "Name is the package name as found in the conda-meta JSON file." + }, + "version": { + "type": "string", + "description": "Version is the package version as found in the conda-meta JSON file." + }, + "build": { + "type": "string", + "description": "Build is the build string identifier (e.g., \"h90dfc92_1014\")." + }, + "build_number": { + "type": "integer", + "description": "BuildNumber is the sequential build number for this version." + }, + "channel": { + "type": "string", + "description": "Channel is the Conda channel URL where the package was retrieved from." + }, + "subdir": { + "type": "string", + "description": "Subdir is the subdirectory within the channel (e.g., \"osx-arm64\", \"linux-64\")." + }, + "noarch": { + "type": "string", + "description": "Noarch indicates if the package is platform-independent (e.g., \"python\", \"generic\")." + }, + "license": { + "type": "string", + "description": "License is the package license identifier." + }, + "license_family": { + "type": "string", + "description": "LicenseFamily is the general license category (e.g., \"MIT\", \"Apache\", \"GPL\")." + }, + "md5": { + "type": "string", + "description": "MD5 is the MD5 hash of the package archive." + }, + "sha256": { + "type": "string", + "description": "SHA256 is the SHA-256 hash of the package archive." + }, + "size": { + "type": "integer", + "description": "Size is the package archive size in bytes." + }, + "timestamp": { + "type": "integer", + "description": "Timestamp is the Unix timestamp when the package was built." + }, + "fn": { + "type": "string", + "description": "Filename is the original package archive filename (e.g., \"zlib-1.2.11-h90dfc92_1014.tar.bz2\")." + }, + "url": { + "type": "string", + "description": "URL is the full download URL for the package archive." + }, + "extracted_package_dir": { + "type": "string", + "description": "ExtractedPackageDir is the local cache directory where the package was extracted." + }, + "depends": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Depends is the list of runtime dependencies with version constraints." + }, + "files": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Files is the list of files installed by this package." + }, + "paths_data": { + "$ref": "#/$defs/CondaPathsData", + "description": "PathsData contains detailed file metadata from the paths.json file." + }, + "link": { + "$ref": "#/$defs/CondaLink", + "description": "Link contains installation source metadata from the link.json file." + } + }, + "type": "object", + "required": [ + "name", + "version", + "build", + "build_number" + ], + "description": "CondaMetaPackage represents metadata for a Conda package extracted from the conda-meta/*.json files." + }, + "CondaPathData": { + "properties": { + "_path": { + "type": "string", + "description": "Path is the file path relative to the Conda environment root." + }, + "path_type": { + "type": "string", + "description": "PathType indicates the link type for the file (e.g., \"hardlink\", \"softlink\", \"directory\")." + }, + "sha256": { + "type": "string", + "description": "SHA256 is the SHA-256 hash of the file contents." + }, + "sha256_in_prefix": { + "type": "string", + "description": "SHA256InPrefix is the SHA-256 hash of the file after prefix replacement during installation." + }, + "size_in_bytes": { + "type": "integer", + "description": "SizeInBytes is the file size in bytes." 
+ } + }, + "type": "object", + "required": [ + "_path", + "path_type", + "sha256", + "sha256_in_prefix", + "size_in_bytes" + ], + "description": "CondaPathData represents metadata for a single file within a Conda package from the paths.json file." + }, + "CondaPathsData": { + "properties": { + "paths_version": { + "type": "integer", + "description": "PathsVersion is the schema version of the paths data format." + }, + "paths": { + "items": { + "$ref": "#/$defs/CondaPathData" + }, + "type": "array", + "description": "Paths is the list of file metadata entries for all files in the package." + } + }, + "type": "object", + "required": [ + "paths_version", + "paths" + ], + "description": "CondaPathsData represents the paths.json file structure from a Conda package containing file metadata." + }, + "Coordinates": { + "properties": { + "path": { + "type": "string", + "description": "RealPath is the canonical absolute form of the path accessed (all symbolic links have been followed and relative path components like '.' and '..' have been removed)." + }, + "layerID": { + "type": "string", + "description": "FileSystemID is an ID representing and entire filesystem. For container images, this is a layer digest. For directories or a root filesystem, this is blank." + } + }, + "type": "object", + "required": [ + "path" + ], + "description": "Coordinates contains the minimal information needed to describe how to find a file within any possible source object (e.g." + }, + "DartPubspec": { + "properties": { + "homepage": { + "type": "string", + "description": "Homepage is the package homepage URL" + }, + "repository": { + "type": "string", + "description": "Repository is the source code repository URL" + }, + "documentation": { + "type": "string", + "description": "Documentation is the documentation site URL" + }, + "publish_to": { + "type": "string", + "description": "PublishTo is the package repository to publish to, or \"none\" to prevent accidental publishing" + }, + "environment": { + "$ref": "#/$defs/DartPubspecEnvironment", + "description": "Environment is SDK version constraints for Dart and Flutter" + }, + "platforms": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Platforms are the supported platforms (Android, iOS, web, etc.)" + }, + "ignored_advisories": { + "items": { + "type": "string" + }, + "type": "array", + "description": "IgnoredAdvisories are the security advisories to explicitly ignore for this package" + } + }, + "type": "object", + "description": "DartPubspec is a struct that represents a package described in a pubspec.yaml file" + }, + "DartPubspecEnvironment": { + "properties": { + "sdk": { + "type": "string", + "description": "SDK is the Dart SDK version constraint (e.g. \"\u003e=2.12.0 \u003c3.0.0\")" + }, + "flutter": { + "type": "string", + "description": "Flutter is the Flutter SDK version constraint if this is a Flutter package" + } + }, + "type": "object", + "description": "DartPubspecEnvironment represents SDK version constraints from the environment section of pubspec.yaml." 
+ }, + "DartPubspecLockEntry": { + "properties": { + "name": { + "type": "string", + "description": "Name is the package name as found in the pubspec.lock file" + }, + "version": { + "type": "string", + "description": "Version is the package version as found in the pubspec.lock file" + }, + "hosted_url": { + "type": "string", + "description": "HostedURL is the URL of the package repository for hosted packages (typically pub.dev, but can be custom repository identified by hosted-url). When PUB_HOSTED_URL environment variable changes, lockfile tracks the source." + }, + "vcs_url": { + "type": "string", + "description": "VcsURL is the URL of the VCS repository for git/path dependencies (for packages fetched from version control systems like Git)" + } + }, + "type": "object", + "required": [ + "name", + "version" + ], + "description": "DartPubspecLockEntry is a struct that represents a single entry found in the \"packages\" section in a Dart pubspec.lock file." + }, + "Descriptor": { + "properties": { + "name": { + "type": "string", + "description": "Name is the name of the tool that generated this SBOM (e.g., \"syft\")." + }, + "version": { + "type": "string", + "description": "Version is the version of the tool that generated this SBOM." + }, + "configuration": { + "description": "Configuration contains the tool configuration used during SBOM generation." + } + }, + "type": "object", + "required": [ + "name", + "version" + ], + "description": "Descriptor identifies the tool that generated this SBOM document, including its name, version, and configuration used during catalog generation." + }, + "Digest": { + "properties": { + "algorithm": { + "type": "string", + "description": "Algorithm specifies the hash algorithm used (e.g., \"sha256\", \"md5\")." + }, + "value": { + "type": "string", + "description": "Value is the hexadecimal string representation of the hash." + } + }, + "type": "object", + "required": [ + "algorithm", + "value" + ], + "description": "Digest represents a cryptographic hash of file contents." + }, + "Document": { + "properties": { + "artifacts": { + "items": { + "$ref": "#/$defs/Package" + }, + "type": "array" + }, + "artifactRelationships": { + "items": { + "$ref": "#/$defs/Relationship" + }, + "type": "array" + }, + "files": { + "items": { + "$ref": "#/$defs/File" + }, + "type": "array" + }, + "source": { + "$ref": "#/$defs/Source" + }, + "distro": { + "$ref": "#/$defs/LinuxRelease" + }, + "descriptor": { + "$ref": "#/$defs/Descriptor" + }, + "schema": { + "$ref": "#/$defs/Schema" + } + }, + "type": "object", + "required": [ + "artifacts", + "artifactRelationships", + "source", + "distro", + "descriptor", + "schema" + ], + "description": "Document represents the syft cataloging findings as a JSON document" + }, + "DotnetDepsEntry": { + "properties": { + "name": { + "type": "string", + "description": "Name is the package name as found in the deps.json file" + }, + "version": { + "type": "string", + "description": "Version is the package version as found in the deps.json file" + }, + "path": { + "type": "string", + "description": "Path is the relative path to the package within the deps structure (e.g. 
\"app.metrics/3.0.0\")" + }, + "sha512": { + "type": "string", + "description": "Sha512 is the SHA-512 hash of the NuGet package content WITHOUT the signed content for verification (won't match hash from NuGet API or manual calculation of .nupkg file)" + }, + "hashPath": { + "type": "string", + "description": "HashPath is the relative path to the .nupkg.sha512 hash file (e.g. \"app.metrics.3.0.0.nupkg.sha512\")" + }, + "type": { + "type": "string", + "description": "Type is type of entry could be package or project for internal refs" + }, + "executables": { + "patternProperties": { + ".*": { + "$ref": "#/$defs/DotnetPortableExecutableEntry" + } + }, + "type": "object", + "description": "Executables are the map of .NET Portable Executable files within this package with their version resources" + } + }, + "type": "object", + "required": [ + "name", + "version", + "path", + "sha512", + "hashPath" + ], + "description": "DotnetDepsEntry is a struct that represents a single entry found in the \"libraries\" section in a .NET [*.]deps.json file." + }, + "DotnetPackagesLockEntry": { + "properties": { + "name": { + "type": "string", + "description": "Name is the package name as found in the packages.lock.json file" + }, + "version": { + "type": "string", + "description": "Version is the package version as found in the packages.lock.json file" + }, + "contentHash": { + "type": "string", + "description": "ContentHash is the hash of the package content for verification" + }, + "type": { + "type": "string", + "description": "Type is the dependency type indicating how this dependency was added (Direct=explicit in project file, Transitive=pulled in by another package, Project=project reference)" + } + }, + "type": "object", + "required": [ + "name", + "version", + "contentHash", + "type" + ], + "description": "DotnetPackagesLockEntry is a struct that represents a single entry found in the \"dependencies\" section in a .NET packages.lock.json file." + }, + "DotnetPortableExecutableEntry": { + "properties": { + "assemblyVersion": { + "type": "string", + "description": "AssemblyVersion is the .NET assembly version number (strong-named version)" + }, + "legalCopyright": { + "type": "string", + "description": "LegalCopyright is the copyright notice string" + }, + "comments": { + "type": "string", + "description": "Comments are additional comments or description embedded in PE resources" + }, + "internalName": { + "type": "string", + "description": "InternalName is the internal name of the file" + }, + "companyName": { + "type": "string", + "description": "CompanyName is the company that produced the file" + }, + "productName": { + "type": "string", + "description": "ProductName is the name of the product this file is part of" + }, + "productVersion": { + "type": "string", + "description": "ProductVersion is the version of the product (may differ from AssemblyVersion)" + } + }, + "type": "object", + "required": [ + "assemblyVersion", + "legalCopyright", + "companyName", + "productName", + "productVersion" + ], + "description": "DotnetPortableExecutableEntry is a struct that represents a single entry found within \"VersionResources\" section of a .NET Portable Executable binary file." 
+ }, + "DpkgArchiveEntry": { + "properties": { + "package": { + "type": "string", + "description": "Package is the package name as found in the status file" + }, + "source": { + "type": "string", + "description": "Source is the source package name this binary was built from (one source can produce multiple binary packages)" + }, + "version": { + "type": "string", + "description": "Version is the binary package version as found in the status file" + }, + "sourceVersion": { + "type": "string", + "description": "SourceVersion is the source package version (may differ from binary version when binNMU rebuilds occur)" + }, + "architecture": { + "type": "string", + "description": "Architecture is the target architecture per Debian spec (specific arch like amd64/arm64, wildcard like any, architecture-independent \"all\", or \"source\" for source packages)" + }, + "maintainer": { + "type": "string", + "description": "Maintainer is the package maintainer's name and email in RFC822 format (name must come first, then email in angle brackets)" + }, + "installedSize": { + "type": "integer", + "description": "InstalledSize is the total size of installed files in kilobytes" + }, + "provides": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Provides are the virtual packages provided by this package (allows other packages to depend on capabilities. Can include versioned provides like \"libdigest-md5-perl (= 2.55.01)\")" + }, + "depends": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Depends are the packages required for this package to function (will not be installed unless these requirements are met, creates strict ordering constraint)" + }, + "preDepends": { + "items": { + "type": "string" + }, + "type": "array", + "description": "PreDepends are the packages that must be installed and configured BEFORE even starting installation of this package (stronger than Depends, discouraged unless absolutely necessary as it adds strict constraints for apt)" + }, + "files": { + "items": { + "$ref": "#/$defs/DpkgFileRecord" + }, + "type": "array", + "description": "Files are the files installed by this package" + } + }, + "type": "object", + "required": [ + "package", + "source", + "version", + "sourceVersion", + "architecture", + "maintainer", + "installedSize", + "files" + ], + "description": "DpkgArchiveEntry represents package metadata extracted from a .deb archive file." 
+ }, + "DpkgDbEntry": { + "properties": { + "package": { + "type": "string", + "description": "Package is the package name as found in the status file" + }, + "source": { + "type": "string", + "description": "Source is the source package name this binary was built from (one source can produce multiple binary packages)" + }, + "version": { + "type": "string", + "description": "Version is the binary package version as found in the status file" + }, + "sourceVersion": { + "type": "string", + "description": "SourceVersion is the source package version (may differ from binary version when binNMU rebuilds occur)" + }, + "architecture": { + "type": "string", + "description": "Architecture is the target architecture per Debian spec (specific arch like amd64/arm64, wildcard like any, architecture-independent \"all\", or \"source\" for source packages)" + }, + "maintainer": { + "type": "string", + "description": "Maintainer is the package maintainer's name and email in RFC822 format (name must come first, then email in angle brackets)" + }, + "installedSize": { + "type": "integer", + "description": "InstalledSize is the total size of installed files in kilobytes" + }, + "provides": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Provides are the virtual packages provided by this package (allows other packages to depend on capabilities. Can include versioned provides like \"libdigest-md5-perl (= 2.55.01)\")" + }, + "depends": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Depends are the packages required for this package to function (will not be installed unless these requirements are met, creates strict ordering constraint)" + }, + "preDepends": { + "items": { + "type": "string" + }, + "type": "array", + "description": "PreDepends are the packages that must be installed and configured BEFORE even starting installation of this package (stronger than Depends, discouraged unless absolutely necessary as it adds strict constraints for apt)" + }, + "files": { + "items": { + "$ref": "#/$defs/DpkgFileRecord" + }, + "type": "array", + "description": "Files are the files installed by this package" + } + }, + "type": "object", + "required": [ + "package", + "source", + "version", + "sourceVersion", + "architecture", + "maintainer", + "installedSize", + "files" + ], + "description": "DpkgDBEntry represents all captured data for a Debian package DB entry; available fields are described at http://manpages.ubuntu.com/manpages/xenial/man1/dpkg-query.1.html in the --showformat section." + }, + "DpkgFileRecord": { + "properties": { + "path": { + "type": "string", + "description": "Path is the file path relative to the filesystem root" + }, + "digest": { + "$ref": "#/$defs/Digest", + "description": "Digest is the file content hash (typically MD5 for dpkg compatibility with legacy systems)" + }, + "isConfigFile": { + "type": "boolean", + "description": "IsConfigFile is whether this file is marked as a configuration file (dpkg will preserve user modifications during upgrades)" + } + }, + "type": "object", + "required": [ + "path", + "isConfigFile" + ], + "description": "DpkgFileRecord represents a single file attributed to a debian package." + }, + "ELFSecurityFeatures": { + "properties": { + "symbolTableStripped": { + "type": "boolean", + "description": "SymbolTableStripped indicates whether debugging symbols have been removed." + }, + "stackCanary": { + "type": "boolean", + "description": "StackCanary indicates whether stack smashing protection is enabled." 
+ }, + "nx": { + "type": "boolean", + "description": "NoExecutable indicates whether NX (no-execute) protection is enabled for the stack." + }, + "relRO": { + "type": "string", + "description": "RelocationReadOnly indicates the RELRO protection level." + }, + "pie": { + "type": "boolean", + "description": "PositionIndependentExecutable indicates whether the binary is compiled as PIE." + }, + "dso": { + "type": "boolean", + "description": "DynamicSharedObject indicates whether the binary is a shared library." + }, + "safeStack": { + "type": "boolean", + "description": "LlvmSafeStack represents a compiler-based security mechanism that separates the stack into a safe stack for storing return addresses and other critical data, and an unsafe stack for everything else, to mitigate stack-based memory corruption errors\nsee https://clang.llvm.org/docs/SafeStack.html" + }, + "cfi": { + "type": "boolean", + "description": "ControlFlowIntegrity represents runtime checks to ensure a program's control flow adheres to the legal paths determined at compile time, thus protecting against various types of control-flow hijacking attacks\nsee https://clang.llvm.org/docs/ControlFlowIntegrity.html" + }, + "fortify": { + "type": "boolean", + "description": "ClangFortifySource is a broad suite of extensions to libc aimed at catching misuses of common library functions\nsee https://android.googlesource.com/platform//bionic/+/d192dbecf0b2a371eb127c0871f77a9caf81c4d2/docs/clang_fortify_anatomy.md" + } + }, + "type": "object", + "required": [ + "symbolTableStripped", + "nx", + "relRO", + "pie", + "dso" + ], + "description": "ELFSecurityFeatures captures security hardening and protection mechanisms in ELF binaries." + }, + "ElfBinaryPackageNoteJsonPayload": { + "properties": { + "type": { + "type": "string", + "description": "Type is the type of the package (e.g. \"rpm\", \"deb\", \"apk\", etc.)" + }, + "architecture": { + "type": "string", + "description": "Architecture of the binary package (e.g. \"amd64\", \"arm\", etc.)" + }, + "osCPE": { + "type": "string", + "description": "OSCPE is a CPE name for the OS, typically corresponding to CPE_NAME in os-release (e.g. cpe:/o:fedoraproject:fedora:33)\n\nDeprecated: in Syft 2.0 the struct tag will be corrected to `osCpe` to match the systemd spec casing." + }, + "appCpe": { + "type": "string", + "description": "AppCpe is a CPE name for the upstream Application, as found in NVD CPE search (e.g. cpe:2.3:a:gnu:coreutils:5.0)" + }, + "os": { + "type": "string", + "description": "OS is the OS name, typically corresponding to ID in os-release (e.g. \"fedora\")" + }, + "osVersion": { + "type": "string", + "description": "osVersion is the version of the OS, typically corresponding to VERSION_ID in os-release (e.g. 
\"33\")" + }, + "system": { + "type": "string", + "description": "System is a context-specific name for the system that the binary package is intended to run on or a part of" + }, + "vendor": { + "type": "string", + "description": "Vendor is the individual or organization that produced the source code for the binary" + }, + "sourceRepo": { + "type": "string", + "description": "SourceRepo is the URL to the source repository for which the binary was built from" + }, + "commit": { + "type": "string", + "description": "Commit is the commit hash of the source repository for which the binary was built from" + } + }, + "type": "object", + "description": "ELFBinaryPackageNoteJSONPayload Represents metadata captured from the .note.package section of an ELF-formatted binary" + }, + "ElixirMixLockEntry": { + "properties": { + "name": { + "type": "string", + "description": "Name is the package name as found in the mix.lock file" + }, + "version": { + "type": "string", + "description": "Version is the package version as found in the mix.lock file" + }, + "pkgHash": { + "type": "string", + "description": "PkgHash is the outer checksum (SHA-256) of the entire Hex package tarball for integrity verification (preferred method, replaces deprecated inner checksum)" + }, + "pkgHashExt": { + "type": "string", + "description": "PkgHashExt is the extended package hash format (inner checksum is deprecated - SHA-256 of concatenated file contents excluding CHECKSUM file, now replaced by outer checksum)" + } + }, + "type": "object", + "required": [ + "name", + "version", + "pkgHash", + "pkgHashExt" + ], + "description": "ElixirMixLockEntry is a struct that represents a single entry in a mix.lock file" + }, + "ErlangRebarLockEntry": { + "properties": { + "name": { + "type": "string", + "description": "Name is the package name as found in the rebar.lock file" + }, + "version": { + "type": "string", + "description": "Version is the package version as found in the rebar.lock file" + }, + "pkgHash": { + "type": "string", + "description": "PkgHash is the outer checksum (SHA-256) of the entire Hex package tarball for integrity verification (preferred method over deprecated inner checksum)" + }, + "pkgHashExt": { + "type": "string", + "description": "PkgHashExt is the extended package hash format (inner checksum deprecated - was SHA-256 of concatenated file contents)" + } + }, + "type": "object", + "required": [ + "name", + "version", + "pkgHash", + "pkgHashExt" + ], + "description": "ErlangRebarLockEntry represents a single package entry from the \"deps\" section within an Erlang rebar.lock file." + }, + "Executable": { + "properties": { + "format": { + "type": "string", + "description": "Format denotes either ELF, Mach-O, or PE" + }, + "hasExports": { + "type": "boolean", + "description": "HasExports indicates whether the binary exports symbols." + }, + "hasEntrypoint": { + "type": "boolean", + "description": "HasEntrypoint indicates whether the binary has an entry point function." + }, + "importedLibraries": { + "items": { + "type": "string" + }, + "type": "array", + "description": "ImportedLibraries lists the shared libraries required by this executable." + }, + "elfSecurityFeatures": { + "$ref": "#/$defs/ELFSecurityFeatures", + "description": "ELFSecurityFeatures contains ELF-specific security hardening information when Format is ELF." + }, + "symbolNames": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Symbols captures the selection from the symbol table found in the binary." 
+ }, + "toolchains": { + "items": { + "$ref": "#/$defs/Toolchain" + }, + "type": "array", + "description": "Toolchains captures information about the compiler, linker, runtime, or other toolchains used to build (or otherwise exist within) the executable." + } + }, + "type": "object", + "required": [ + "format", + "hasExports", + "hasEntrypoint", + "importedLibraries" + ], + "description": "Executable contains metadata about binary files and their security features." + }, + "File": { + "properties": { + "id": { + "type": "string", + "description": "ID is a unique identifier for this file within the SBOM." + }, + "location": { + "$ref": "#/$defs/Coordinates", + "description": "Location is the file path and layer information where this file was found." + }, + "metadata": { + "$ref": "#/$defs/FileMetadataEntry", + "description": "Metadata contains filesystem metadata such as permissions, ownership, and file type." + }, + "contents": { + "type": "string", + "description": "Contents is the file contents for small files." + }, + "digests": { + "items": { + "$ref": "#/$defs/Digest" + }, + "type": "array", + "description": "Digests contains cryptographic hashes of the file contents." + }, + "licenses": { + "items": { + "$ref": "#/$defs/FileLicense" + }, + "type": "array", + "description": "Licenses contains license information discovered within this file." + }, + "executable": { + "$ref": "#/$defs/Executable", + "description": "Executable contains executable metadata if this file is a binary." + }, + "unknowns": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Unknowns contains unknown fields for forward compatibility." + } + }, + "type": "object", + "required": [ + "id", + "location" + ], + "description": "File represents a file discovered during cataloging with its metadata, content digests, licenses, and relationships to packages." + }, + "FileLicense": { + "properties": { + "value": { + "type": "string", + "description": "Value is the raw license identifier or text as found in the file." + }, + "spdxExpression": { + "type": "string", + "description": "SPDXExpression is the parsed SPDX license expression." + }, + "type": { + "type": "string", + "description": "Type is the license type classification (e.g., declared, concluded, discovered)." + }, + "evidence": { + "$ref": "#/$defs/FileLicenseEvidence", + "description": "Evidence contains supporting evidence for this license detection." + } + }, + "type": "object", + "required": [ + "value", + "spdxExpression", + "type" + ], + "description": "FileLicense represents license information discovered within a file's contents or metadata, including the matched license text and SPDX expression." + }, + "FileLicenseEvidence": { + "properties": { + "confidence": { + "type": "integer", + "description": "Confidence is the confidence score for this license detection (0-100)." + }, + "offset": { + "type": "integer", + "description": "Offset is the byte offset where the license text starts in the file." + }, + "extent": { + "type": "integer", + "description": "Extent is the length of the license text in bytes." + } + }, + "type": "object", + "required": [ + "confidence", + "offset", + "extent" + ], + "description": "FileLicenseEvidence contains supporting evidence for a license detection in a file, including the byte offset, extent, and confidence level." + }, + "FileMetadataEntry": { + "properties": { + "mode": { + "type": "integer", + "description": "Mode is the Unix file permission mode in octal format." 
+ }, + "type": { + "type": "string", + "description": "Type is the file type (e.g., \"RegularFile\", \"Directory\", \"SymbolicLink\")." + }, + "linkDestination": { + "type": "string", + "description": "LinkDestination is the target path for symbolic links." + }, + "userID": { + "type": "integer", + "description": "UserID is the file owner user ID." + }, + "groupID": { + "type": "integer", + "description": "GroupID is the file owner group ID." + }, + "mimeType": { + "type": "string", + "description": "MIMEType is the MIME type of the file contents." + }, + "size": { + "type": "integer", + "description": "Size is the file size in bytes." + } + }, + "type": "object", + "required": [ + "mode", + "type", + "userID", + "groupID", + "mimeType", + "size" + ], + "description": "FileMetadataEntry contains filesystem-level metadata attributes such as permissions, ownership, type, and size for a cataloged file." + }, + "GgufFileHeader": { + "properties": { + "ggufVersion": { + "type": "integer", + "description": "GGUFVersion is the GGUF format version (e.g., 3)" + }, + "fileSize": { + "type": "integer", + "description": "FileSize is the size of the GGUF file in bytes (best-effort if available from resolver)" + }, + "architecture": { + "type": "string", + "description": "Architecture is the model architecture (from general.architecture, e.g., \"qwen3moe\", \"llama\")" + }, + "quantization": { + "type": "string", + "description": "Quantization is the quantization type (e.g., \"IQ4_NL\", \"Q4_K_M\")" + }, + "parameters": { + "type": "integer", + "description": "Parameters is the number of model parameters (if present in header)" + }, + "tensorCount": { + "type": "integer", + "description": "TensorCount is the number of tensors in the model" + }, + "header": { + "type": "object", + "description": "RemainingKeyValues contains the remaining key-value pairs from the GGUF header that are not already\nrepresented as typed fields above. This preserves additional metadata fields for reference\n(namespaced with general.*, llama.*, etc.) while avoiding duplication." + }, + "metadataHash": { + "type": "string", + "description": "MetadataKeyValuesHash is a xx64 hash of all key-value pairs from the GGUF header metadata.\nThis hash is computed over the complete header metadata (including the fields extracted\ninto typed fields above) and provides a stable identifier for the model configuration\nacross different file locations or remotes. It allows matching identical models even\nwhen stored in different repositories or with different filenames." + } + }, + "type": "object", + "required": [ + "ggufVersion", + "tensorCount" + ], + "description": "GGUFFileHeader represents metadata extracted from a GGUF (GPT-Generated Unified Format) model file." + }, + "GithubActionsUseStatement": { + "properties": { + "value": { + "type": "string", + "description": "Value is the action reference (e.g. \"actions/checkout@v3\")" + }, + "comment": { + "type": "string", + "description": "Comment is the inline comment associated with this uses statement" + } + }, + "type": "object", + "required": [ + "value" + ], + "description": "GitHubActionsUseStatement represents a single 'uses' statement in a GitHub Actions workflow file referencing an action or reusable workflow." + }, + "GoModuleBuildinfoEntry": { + "properties": { + "goBuildSettings": { + "$ref": "#/$defs/KeyValues", + "description": "BuildSettings contains the Go build settings and flags used to compile the binary (e.g., GOARCH, GOOS, CGO_ENABLED)." 
+ }, + "goCompiledVersion": { + "type": "string", + "description": "GoCompiledVersion is the version of Go used to compile the binary." + }, + "architecture": { + "type": "string", + "description": "Architecture is the target CPU architecture for the binary (extracted from GOARCH build setting)." + }, + "h1Digest": { + "type": "string", + "description": "H1Digest is the Go module hash in h1: format for the main module from go.sum." + }, + "mainModule": { + "type": "string", + "description": "MainModule is the main module path for the binary (e.g., \"github.com/anchore/syft\")." + }, + "goCryptoSettings": { + "items": { + "type": "string" + }, + "type": "array", + "description": "GoCryptoSettings contains FIPS and cryptographic configuration settings if present." + }, + "goExperiments": { + "items": { + "type": "string" + }, + "type": "array", + "description": "GoExperiments lists experimental Go features enabled during compilation (e.g., \"arenas\", \"cgocheck2\")." + } + }, + "type": "object", + "required": [ + "goCompiledVersion", + "architecture" + ], + "description": "GolangBinaryBuildinfoEntry represents all captured data for a Golang binary" + }, + "GoModuleEntry": { + "properties": { + "h1Digest": { + "type": "string", + "description": "H1Digest is the Go module hash in h1: format from go.sum for verifying module contents." + } + }, + "type": "object", + "description": "GolangModuleEntry represents all captured data for a Golang source scan with go.mod/go.sum" + }, + "GoSourceEntry": { + "properties": { + "h1Digest": { + "type": "string", + "description": "H1Digest is the Go module hash in h1: format from go.sum for verifying module contents." + }, + "os": { + "type": "string", + "description": "OperatingSystem is the target OS for build constraints (e.g., \"linux\", \"darwin\", \"windows\")." + }, + "architecture": { + "type": "string", + "description": "Architecture is the target CPU architecture for build constraints (e.g., \"amd64\", \"arm64\")." + }, + "buildTags": { + "type": "string", + "description": "BuildTags are the build tags used to conditionally compile code (e.g., \"integration,debug\")." + }, + "cgoEnabled": { + "type": "boolean", + "description": "CgoEnabled indicates whether CGO was enabled for this package." + } + }, + "type": "object", + "required": [ + "cgoEnabled" + ], + "description": "GolangSourceEntry represents all captured data for a Golang package found through source analysis" + }, + "HaskellHackageStackEntry": { + "properties": { + "pkgHash": { + "type": "string", + "description": "PkgHash is the package content hash for verification" + } + }, + "type": "object", + "description": "HackageStackYamlEntry represents a single entry from the \"extra-deps\" section of a stack.yaml file." + }, + "HaskellHackageStackLockEntry": { + "properties": { + "pkgHash": { + "type": "string", + "description": "PkgHash is the package content hash for verification" + }, + "snapshotURL": { + "type": "string", + "description": "SnapshotURL is the URL to the Stack snapshot this package came from" + } + }, + "type": "object", + "description": "HackageStackYamlLockEntry represents a single entry from the \"packages\" section of a stack.yaml.lock file." + }, + "HomebrewFormula": { + "properties": { + "tap": { + "type": "string", + "description": "Tap is Homebrew tap this formula belongs to (e.g. 
\"homebrew/core\")" + }, + "homepage": { + "type": "string", + "description": "Homepage is the upstream project homepage URL" + }, + "description": { + "type": "string", + "description": "Description is a human-readable formula description" + } + }, + "type": "object", + "description": "HomebrewFormula represents metadata about a Homebrew formula package extracted from formula JSON files." + }, + "IDLikes": { + "items": { + "type": "string" + }, + "type": "array", + "description": "IDLikes represents a list of distribution IDs that this Linux distribution is similar to or derived from, as defined in os-release ID_LIKE field." + }, + "JavaArchive": { + "properties": { + "virtualPath": { + "type": "string", + "description": "VirtualPath is path within the archive hierarchy, where nested entries are delimited with ':' (for nested JARs)" + }, + "manifest": { + "$ref": "#/$defs/JavaManifest", + "description": "Manifest is parsed META-INF/MANIFEST.MF contents" + }, + "pomProperties": { + "$ref": "#/$defs/JavaPomProperties", + "description": "PomProperties is parsed pom.properties file contents" + }, + "pomProject": { + "$ref": "#/$defs/JavaPomProject", + "description": "PomProject is parsed pom.xml file contents" + }, + "digest": { + "items": { + "$ref": "#/$defs/Digest" + }, + "type": "array", + "description": "ArchiveDigests is cryptographic hashes of the archive file" + } + }, + "type": "object", + "required": [ + "virtualPath" + ], + "description": "JavaArchive encapsulates all Java ecosystem metadata for a package as well as an (optional) parent relationship." + }, + "JavaJvmInstallation": { + "properties": { + "release": { + "$ref": "#/$defs/JavaVMRelease", + "description": "Release is JVM release information and version details" + }, + "files": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Files are the list of files that are part of this JVM installation" + } + }, + "type": "object", + "required": [ + "release", + "files" + ], + "description": "JavaVMInstallation represents a Java Virtual Machine installation discovered on the system with its release information and file list." + }, + "JavaManifest": { + "properties": { + "main": { + "$ref": "#/$defs/KeyValues", + "description": "Main is main manifest attributes as key-value pairs" + }, + "sections": { + "items": { + "$ref": "#/$defs/KeyValues" + }, + "type": "array", + "description": "Sections are the named sections from the manifest (e.g. per-entry attributes)" + } + }, + "type": "object", + "description": "JavaManifest represents the fields of interest extracted from a Java archive's META-INF/MANIFEST.MF file." 
+ }, + "JavaPomParent": { + "properties": { + "groupId": { + "type": "string", + "description": "GroupID is the parent Maven group identifier" + }, + "artifactId": { + "type": "string", + "description": "ArtifactID is the parent Maven artifact identifier" + }, + "version": { + "type": "string", + "description": "Version is the parent version (child inherits configuration from this specific version of parent POM)" + } + }, + "type": "object", + "required": [ + "groupId", + "artifactId", + "version" + ], + "description": "JavaPomParent contains the fields within the \u003cparent\u003e tag in a pom.xml file" + }, + "JavaPomProject": { + "properties": { + "path": { + "type": "string", + "description": "Path is path to the pom.xml file within the archive" + }, + "parent": { + "$ref": "#/$defs/JavaPomParent", + "description": "Parent is the parent POM reference for inheritance (child POMs inherit configuration from parent)" + }, + "groupId": { + "type": "string", + "description": "GroupID is Maven group identifier (reversed domain name like org.apache.maven)" + }, + "artifactId": { + "type": "string", + "description": "ArtifactID is Maven artifact identifier (project name)" + }, + "version": { + "type": "string", + "description": "Version is project version (together with groupId and artifactId forms Maven coordinates groupId:artifactId:version)" + }, + "name": { + "type": "string", + "description": "Name is a human-readable project name (displayed in Maven-generated documentation)" + }, + "description": { + "type": "string", + "description": "Description is detailed project description" + }, + "url": { + "type": "string", + "description": "URL is the project URL (typically project website or repository)" + } + }, + "type": "object", + "required": [ + "path", + "groupId", + "artifactId", + "version", + "name" + ], + "description": "JavaPomProject represents fields of interest extracted from a Java archive's pom.xml file." + }, + "JavaPomProperties": { + "properties": { + "path": { + "type": "string", + "description": "Path is path to the pom.properties file within the archive" + }, + "name": { + "type": "string", + "description": "Name is the project name" + }, + "groupId": { + "type": "string", + "description": "GroupID is Maven group identifier uniquely identifying the project across all projects (follows reversed domain name convention like com.company.project)" + }, + "artifactId": { + "type": "string", + "description": "ArtifactID is Maven artifact identifier, the name of the jar/artifact (unique within the groupId scope)" + }, + "version": { + "type": "string", + "description": "Version is artifact version" + }, + "scope": { + "type": "string", + "description": "Scope is dependency scope determining when dependency is available (compile=default all phases, test=test compilation/execution only, runtime=runtime and test not compile, provided=expected from JDK or container)" + }, + "extraFields": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object", + "description": "Extra is additional custom properties not in standard Maven coordinates" + } + }, + "type": "object", + "required": [ + "path", + "name", + "groupId", + "artifactId", + "version" + ], + "description": "JavaPomProperties represents the fields of interest extracted from a Java archive's pom.properties file." 
+ }, + "JavaVMRelease": { + "properties": { + "implementor": { + "type": "string", + "description": "Implementor is extracted with the `java.vendor` JVM property" + }, + "implementorVersion": { + "type": "string", + "description": "ImplementorVersion is extracted with the `java.vendor.version` JVM property" + }, + "javaRuntimeVersion": { + "type": "string", + "description": "JavaRuntimeVersion is extracted from the 'java.runtime.version' JVM property" + }, + "javaVersion": { + "type": "string", + "description": "JavaVersion matches that from `java -version` command output" + }, + "javaVersionDate": { + "type": "string", + "description": "JavaVersionDate is extracted from the 'java.version.date' JVM property" + }, + "libc": { + "type": "string", + "description": "Libc can either be 'glibc' or 'musl'" + }, + "modules": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Modules is a list of JVM modules that are packaged" + }, + "osArch": { + "type": "string", + "description": "OsArch is the target CPU architecture" + }, + "osName": { + "type": "string", + "description": "OsName is the name of the target runtime operating system environment" + }, + "osVersion": { + "type": "string", + "description": "OsVersion is the version of the target runtime operating system environment" + }, + "source": { + "type": "string", + "description": "Source refers to the origin repository of OpenJDK source" + }, + "buildSource": { + "type": "string", + "description": "BuildSource Git SHA of the build repository" + }, + "buildSourceRepo": { + "type": "string", + "description": "BuildSourceRepo refers to rhe repository URL for the build source" + }, + "sourceRepo": { + "type": "string", + "description": "SourceRepo refers to the OpenJDK repository URL" + }, + "fullVersion": { + "type": "string", + "description": "FullVersion is extracted from the 'java.runtime.version' JVM property" + }, + "semanticVersion": { + "type": "string", + "description": "SemanticVersion is derived from the OpenJDK version" + }, + "buildInfo": { + "type": "string", + "description": "BuildInfo contains additional build information" + }, + "jvmVariant": { + "type": "string", + "description": "JvmVariant specifies the JVM variant (e.g., Hotspot or OpenJ9)" + }, + "jvmVersion": { + "type": "string", + "description": "JvmVersion is extracted from the 'java.vm.version' JVM property" + }, + "imageType": { + "type": "string", + "description": "ImageType can be 'JDK' or 'JRE'" + }, + "buildType": { + "type": "string", + "description": "BuildType can be 'commercial' (used in some older oracle JDK distributions)" + } + }, + "type": "object", + "description": "JavaVMRelease represents JVM version and build information extracted from the release file in a Java installation." 
+ }, + "JavascriptNpmPackage": { + "properties": { + "name": { + "type": "string", + "description": "Name is the package name as found in package.json" + }, + "version": { + "type": "string", + "description": "Version is the package version as found in package.json" + }, + "author": { + "type": "string", + "description": "Author is package author name" + }, + "homepage": { + "type": "string", + "description": "Homepage is project homepage URL" + }, + "description": { + "type": "string", + "description": "Description is a human-readable package description" + }, + "url": { + "type": "string", + "description": "URL is repository or project URL" + }, + "private": { + "type": "boolean", + "description": "Private is whether this is a private package" + } + }, + "type": "object", + "required": [ + "name", + "version", + "author", + "homepage", + "description", + "url", + "private" + ], + "description": "NpmPackage represents the contents of a javascript package.json file." + }, + "JavascriptNpmPackageLockEntry": { + "properties": { + "resolved": { + "type": "string", + "description": "Resolved is URL where this package was downloaded from (registry source)" + }, + "integrity": { + "type": "string", + "description": "Integrity is Subresource Integrity hash for verification using standard SRI format (sha512-... or sha1-...). npm changed from SHA-1 to SHA-512 in newer versions. For registry sources this is the integrity from registry, for remote tarballs it's SHA-512 of the file. npm verifies tarball matches this hash before unpacking, throwing EINTEGRITY error if mismatch detected." + }, + "dependencies": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object", + "description": "Dependencies is a map of dependencies and their version markers, i.e. \"lodash\": \"^1.0.0\"" + } + }, + "type": "object", + "required": [ + "resolved", + "integrity", + "dependencies" + ], + "description": "NpmPackageLockEntry represents a single entry within the \"packages\" section of a package-lock.json file." + }, + "JavascriptPnpmLockEntry": { + "properties": { + "resolution": { + "$ref": "#/$defs/PnpmLockResolution", + "description": "Resolution is the resolution information for the package" + }, + "dependencies": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object", + "description": "Dependencies is a map of dependencies and their versions" + } + }, + "type": "object", + "required": [ + "resolution", + "dependencies" + ], + "description": "PnpmLockEntry represents a single entry in the \"packages\" section of a pnpm-lock.yaml file." + }, + "JavascriptYarnLockEntry": { + "properties": { + "resolved": { + "type": "string", + "description": "Resolved is URL where this package was downloaded from" + }, + "integrity": { + "type": "string", + "description": "Integrity is Subresource Integrity hash for verification (SRI format)" + }, + "dependencies": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object", + "description": "Dependencies is a map of dependencies and their versions" + } + }, + "type": "object", + "required": [ + "resolved", + "integrity", + "dependencies" + ], + "description": "YarnLockEntry represents a single entry section of a yarn.lock file." 
+ }, + "KeyValue": { + "properties": { + "key": { + "type": "string", + "description": "Key is the key name" + }, + "value": { + "type": "string", + "description": "Value is the value associated with the key" + } + }, + "type": "object", + "required": [ + "key", + "value" + ], + "description": "KeyValue represents a single key-value pair." + }, + "KeyValues": { + "items": { + "$ref": "#/$defs/KeyValue" + }, + "type": "array", + "description": "KeyValues represents an ordered collection of key-value pairs that preserves insertion order." + }, + "License": { + "properties": { + "value": { + "type": "string", + "description": "Value is the raw license identifier or expression as found." + }, + "spdxExpression": { + "type": "string", + "description": "SPDXExpression is the parsed SPDX license expression." + }, + "type": { + "type": "string", + "description": "Type is the license type classification (e.g., declared, concluded, discovered)." + }, + "urls": { + "items": { + "type": "string" + }, + "type": "array", + "description": "URLs are URLs where license text or information can be found." + }, + "locations": { + "items": { + "$ref": "#/$defs/Location" + }, + "type": "array", + "description": "Locations are file locations where this license was discovered." + }, + "contents": { + "type": "string", + "description": "Contents is the full license text content." + } + }, + "type": "object", + "required": [ + "value", + "spdxExpression", + "type", + "urls", + "locations" + ], + "description": "License represents software license information discovered for a package, including SPDX expressions and supporting evidence locations." + }, + "LinuxKernelArchive": { + "properties": { + "name": { + "type": "string", + "description": "Name is kernel name (typically \"Linux\")" + }, + "architecture": { + "type": "string", + "description": "Architecture is the target CPU architecture" + }, + "version": { + "type": "string", + "description": "Version is kernel version string" + }, + "extendedVersion": { + "type": "string", + "description": "ExtendedVersion is additional version information" + }, + "buildTime": { + "type": "string", + "description": "BuildTime is when the kernel was built" + }, + "author": { + "type": "string", + "description": "Author is who built the kernel" + }, + "format": { + "type": "string", + "description": "Format is kernel image format (e.g. 
bzImage, zImage)" + }, + "rwRootFS": { + "type": "boolean", + "description": "RWRootFS is whether root filesystem is mounted read-write" + }, + "swapDevice": { + "type": "integer", + "description": "SwapDevice is swap device number" + }, + "rootDevice": { + "type": "integer", + "description": "RootDevice is root device number" + }, + "videoMode": { + "type": "string", + "description": "VideoMode is default video mode setting" + } + }, + "type": "object", + "required": [ + "name", + "architecture", + "version" + ], + "description": "LinuxKernel represents all captured data for a Linux kernel" + }, + "LinuxKernelModule": { + "properties": { + "name": { + "type": "string", + "description": "Name is module name" + }, + "version": { + "type": "string", + "description": "Version is module version string" + }, + "sourceVersion": { + "type": "string", + "description": "SourceVersion is the source code version identifier" + }, + "path": { + "type": "string", + "description": "Path is the filesystem path to the .ko kernel object file (absolute path)" + }, + "description": { + "type": "string", + "description": "Description is a human-readable module description" + }, + "author": { + "type": "string", + "description": "Author is module author name and email" + }, + "license": { + "type": "string", + "description": "License is module license (e.g. GPL, BSD) which must be compatible with kernel" + }, + "kernelVersion": { + "type": "string", + "description": "KernelVersion is kernel version this module was built for" + }, + "versionMagic": { + "type": "string", + "description": "VersionMagic is version magic string for compatibility checking (includes kernel version, SMP status, module loading capabilities like \"3.17.4-302.fc21.x86_64 SMP mod_unload modversions\"). Module will NOT load if vermagic doesn't match running kernel." + }, + "parameters": { + "patternProperties": { + ".*": { + "$ref": "#/$defs/LinuxKernelModuleParameter" + } + }, + "type": "object", + "description": "Parameters are the module parameters that can be configured at load time (user-settable values like module options)" + } + }, + "type": "object", + "description": "LinuxKernelModule represents a loadable kernel module (.ko file) with its metadata, parameters, and dependencies." + }, + "LinuxKernelModuleParameter": { + "properties": { + "type": { + "type": "string", + "description": "Type is parameter data type (e.g. int, string, bool, array types)" + }, + "description": { + "type": "string", + "description": "Description is a human-readable parameter description explaining what the parameter controls" + } + }, + "type": "object", + "description": "LinuxKernelModuleParameter represents a configurable parameter for a kernel module with its type and description." + }, + "LinuxRelease": { + "properties": { + "prettyName": { + "type": "string", + "description": "PrettyName is a human-readable operating system name with version." + }, + "name": { + "type": "string", + "description": "Name is the operating system name without version information." + }, + "id": { + "type": "string", + "description": "ID is the lower-case operating system identifier (e.g., \"ubuntu\", \"rhel\")." + }, + "idLike": { + "$ref": "#/$defs/IDLikes", + "description": "IDLike is a list of operating system IDs this distribution is similar to or derived from." + }, + "version": { + "type": "string", + "description": "Version is the operating system version including codename if available." 
+ }, + "versionID": { + "type": "string", + "description": "VersionID is the operating system version number or identifier." + }, + "versionCodename": { + "type": "string", + "description": "VersionCodename is the operating system release codename (e.g., \"jammy\", \"bullseye\")." + }, + "buildID": { + "type": "string", + "description": "BuildID is a build identifier for the operating system." + }, + "imageID": { + "type": "string", + "description": "ImageID is an identifier for container or cloud images." + }, + "imageVersion": { + "type": "string", + "description": "ImageVersion is the version for container or cloud images." + }, + "variant": { + "type": "string", + "description": "Variant is the operating system variant name (e.g., \"Server\", \"Workstation\")." + }, + "variantID": { + "type": "string", + "description": "VariantID is the lower-case operating system variant identifier." + }, + "homeURL": { + "type": "string", + "description": "HomeURL is the homepage URL for the operating system." + }, + "supportURL": { + "type": "string", + "description": "SupportURL is the support or help URL for the operating system." + }, + "bugReportURL": { + "type": "string", + "description": "BugReportURL is the bug reporting URL for the operating system." + }, + "privacyPolicyURL": { + "type": "string", + "description": "PrivacyPolicyURL is the privacy policy URL for the operating system." + }, + "cpeName": { + "type": "string", + "description": "CPEName is the Common Platform Enumeration name for the operating system." + }, + "supportEnd": { + "type": "string", + "description": "SupportEnd is the end of support date or version identifier." + }, + "extendedSupport": { + "type": "boolean", + "description": "ExtendedSupport indicates whether extended security or support is available." + } + }, + "type": "object", + "description": "LinuxRelease contains Linux distribution identification and version information extracted from /etc/os-release or similar system files." + }, + "Location": { + "properties": { + "path": { + "type": "string", + "description": "RealPath is the canonical absolute form of the path accessed (all symbolic links have been followed and relative path components like '.' and '..' have been removed)." + }, + "layerID": { + "type": "string", + "description": "FileSystemID is an ID representing and entire filesystem. For container images, this is a layer digest. For directories or a root filesystem, this is blank." + }, + "accessPath": { + "type": "string", + "description": "AccessPath is the path used to retrieve file contents (which may or may not have hardlinks / symlinks in the path)" + }, + "annotations": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object" + } + }, + "type": "object", + "required": [ + "path", + "accessPath" + ], + "description": "Location represents a path relative to a particular filesystem resolved to a specific file.Reference." 
+ }, + "LuarocksPackage": { + "properties": { + "name": { + "type": "string", + "description": "Name is the package name as found in the .rockspec file" + }, + "version": { + "type": "string", + "description": "Version is the package version as found in the .rockspec file" + }, + "license": { + "type": "string", + "description": "License is license identifier" + }, + "homepage": { + "type": "string", + "description": "Homepage is project homepage URL" + }, + "description": { + "type": "string", + "description": "Description is a human-readable package description" + }, + "url": { + "type": "string", + "description": "URL is the source download URL" + }, + "dependencies": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object", + "description": "Dependencies are the map of dependency names to version constraints" + } + }, + "type": "object", + "required": [ + "name", + "version", + "license", + "homepage", + "description", + "url", + "dependencies" + ], + "description": "LuaRocksPackage represents a Lua package managed by the LuaRocks package manager with metadata from .rockspec files." + }, + "MicrosoftKbPatch": { + "properties": { + "product_id": { + "type": "string", + "description": "ProductID is MSRC Product ID (e.g. \"Windows 10 Version 1703 for 32-bit Systems\")" + }, + "kb": { + "type": "string", + "description": "Kb is Knowledge Base article number (e.g. \"5001028\")" + } + }, + "type": "object", + "required": [ + "product_id", + "kb" + ], + "description": "MicrosoftKbPatch represents a Windows Knowledge Base patch identifier associated with a specific Microsoft product from the MSRC (Microsoft Security Response Center)." + }, + "NixDerivation": { + "properties": { + "path": { + "type": "string", + "description": "Path is path to the .drv file in Nix store" + }, + "system": { + "type": "string", + "description": "System is target system string indicating where derivation can be built (e.g. \"x86_64-linux\", \"aarch64-darwin\"). Must match current system for local builds." + }, + "inputDerivations": { + "items": { + "$ref": "#/$defs/NixDerivationReference" + }, + "type": "array", + "description": "InputDerivations are the list of other derivations that were inputs to this build (dependencies)" + }, + "inputSources": { + "items": { + "type": "string" + }, + "type": "array", + "description": "InputSources are the list of source file paths that were inputs to this build" + } + }, + "type": "object", + "description": "NixDerivation represents a Nix .drv file that describes how to build a package including inputs, outputs, and build instructions." + }, + "NixDerivationReference": { + "properties": { + "path": { + "type": "string", + "description": "Path is path to the referenced .drv file" + }, + "outputs": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Outputs are which outputs of the referenced derivation were used (e.g. [\"out\"], [\"bin\", \"dev\"])" + } + }, + "type": "object", + "description": "NixDerivationReference represents a reference to another derivation used as a build input or runtime dependency." + }, + "NixStoreEntry": { + "properties": { + "path": { + "type": "string", + "description": "Path is full store path for this output (e.g. 
/nix/store/abc123...-package-1.0)" + }, + "output": { + "type": "string", + "description": "Output is the specific output name for multi-output packages (empty string for default \"out\" output, can be \"bin\", \"dev\", \"doc\", etc.)" + }, + "outputHash": { + "type": "string", + "description": "OutputHash is hash prefix of the store path basename (first part before the dash)" + }, + "derivation": { + "$ref": "#/$defs/NixDerivation", + "description": "Derivation is information about the .drv file that describes how this package was built" + }, + "files": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Files are the list of files under the nix/store path for this package" + } + }, + "type": "object", + "required": [ + "outputHash" + ], + "description": "NixStoreEntry represents a package in the Nix store (/nix/store) with its derivation information and metadata." + }, + "OpamPackage": { + "properties": { + "name": { + "type": "string", + "description": "Name is the package name as found in the .opam file" + }, + "version": { + "type": "string", + "description": "Version is the package version as found in the .opam file" + }, + "licenses": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Licenses are the list of applicable licenses" + }, + "url": { + "type": "string", + "description": "URL is download URL for the package source" + }, + "checksum": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Checksums are the list of checksums for verification" + }, + "homepage": { + "type": "string", + "description": "Homepage is project homepage URL" + }, + "dependencies": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Dependencies are the list of required dependencies" + } + }, + "type": "object", + "required": [ + "name", + "version", + "licenses", + "url", + "checksum", + "homepage", + "dependencies" + ], + "description": "OpamPackage represents an OCaml package managed by the OPAM package manager with metadata from .opam files." 
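A rough sketch of a NixStoreEntry-shaped object from the definition above; the store path and hash prefix are fabricated and shortened for readability.

// Hypothetical NixStoreEntry object; path and outputHash are invented examples.
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	storeEntry := map[string]any{
		"path":       "/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-hello-2.12", // full store path
		"output":     "",                                                        // empty string means the default "out" output
		"outputHash": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",                        // hash prefix of the store path basename
		"files":      []string{"bin/hello"},
	}
	out, _ := json.Marshal(storeEntry)
	fmt.Println(string(out))
}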
+ }, + "Package": { + "properties": { + "id": { + "type": "string" + }, + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "type": { + "type": "string" + }, + "foundBy": { + "type": "string" + }, + "locations": { + "items": { + "$ref": "#/$defs/Location" + }, + "type": "array" + }, + "licenses": { + "$ref": "#/$defs/licenses" + }, + "language": { + "type": "string" + }, + "cpes": { + "$ref": "#/$defs/cpes" + }, + "purl": { + "type": "string" + }, + "metadataType": { + "type": "string" + }, + "metadata": { + "anyOf": [ + { + "type": "null" + }, + { + "$ref": "#/$defs/AlpmDbEntry" + }, + { + "$ref": "#/$defs/ApkDbEntry" + }, + { + "$ref": "#/$defs/BinarySignature" + }, + { + "$ref": "#/$defs/BitnamiSbomEntry" + }, + { + "$ref": "#/$defs/CConanFileEntry" + }, + { + "$ref": "#/$defs/CConanInfoEntry" + }, + { + "$ref": "#/$defs/CConanLockEntry" + }, + { + "$ref": "#/$defs/CConanLockV2Entry" + }, + { + "$ref": "#/$defs/CocoaPodfileLockEntry" + }, + { + "$ref": "#/$defs/CondaMetadataEntry" + }, + { + "$ref": "#/$defs/DartPubspec" + }, + { + "$ref": "#/$defs/DartPubspecLockEntry" + }, + { + "$ref": "#/$defs/DotnetDepsEntry" + }, + { + "$ref": "#/$defs/DotnetPackagesLockEntry" + }, + { + "$ref": "#/$defs/DotnetPortableExecutableEntry" + }, + { + "$ref": "#/$defs/DpkgArchiveEntry" + }, + { + "$ref": "#/$defs/DpkgDbEntry" + }, + { + "$ref": "#/$defs/ElfBinaryPackageNoteJsonPayload" + }, + { + "$ref": "#/$defs/ElixirMixLockEntry" + }, + { + "$ref": "#/$defs/ErlangRebarLockEntry" + }, + { + "$ref": "#/$defs/GgufFileHeader" + }, + { + "$ref": "#/$defs/GithubActionsUseStatement" + }, + { + "$ref": "#/$defs/GoModuleBuildinfoEntry" + }, + { + "$ref": "#/$defs/GoModuleEntry" + }, + { + "$ref": "#/$defs/GoSourceEntry" + }, + { + "$ref": "#/$defs/HaskellHackageStackEntry" + }, + { + "$ref": "#/$defs/HaskellHackageStackLockEntry" + }, + { + "$ref": "#/$defs/HomebrewFormula" + }, + { + "$ref": "#/$defs/JavaArchive" + }, + { + "$ref": "#/$defs/JavaJvmInstallation" + }, + { + "$ref": "#/$defs/JavascriptNpmPackage" + }, + { + "$ref": "#/$defs/JavascriptNpmPackageLockEntry" + }, + { + "$ref": "#/$defs/JavascriptPnpmLockEntry" + }, + { + "$ref": "#/$defs/JavascriptYarnLockEntry" + }, + { + "$ref": "#/$defs/LinuxKernelArchive" + }, + { + "$ref": "#/$defs/LinuxKernelModule" + }, + { + "$ref": "#/$defs/LuarocksPackage" + }, + { + "$ref": "#/$defs/MicrosoftKbPatch" + }, + { + "$ref": "#/$defs/NixStoreEntry" + }, + { + "$ref": "#/$defs/OpamPackage" + }, + { + "$ref": "#/$defs/PeBinary" + }, + { + "$ref": "#/$defs/PhpComposerInstalledEntry" + }, + { + "$ref": "#/$defs/PhpComposerLockEntry" + }, + { + "$ref": "#/$defs/PhpPearEntry" + }, + { + "$ref": "#/$defs/PhpPeclEntry" + }, + { + "$ref": "#/$defs/PortageDbEntry" + }, + { + "$ref": "#/$defs/PythonPackage" + }, + { + "$ref": "#/$defs/PythonPdmLockEntry" + }, + { + "$ref": "#/$defs/PythonPipRequirementsEntry" + }, + { + "$ref": "#/$defs/PythonPipfileLockEntry" + }, + { + "$ref": "#/$defs/PythonPoetryLockEntry" + }, + { + "$ref": "#/$defs/PythonUvLockEntry" + }, + { + "$ref": "#/$defs/RDescription" + }, + { + "$ref": "#/$defs/RpmArchive" + }, + { + "$ref": "#/$defs/RpmDbEntry" + }, + { + "$ref": "#/$defs/RubyGemspec" + }, + { + "$ref": "#/$defs/RustCargoAuditEntry" + }, + { + "$ref": "#/$defs/RustCargoLockEntry" + }, + { + "$ref": "#/$defs/SnapEntry" + }, + { + "$ref": "#/$defs/SwiftPackageManagerLockEntry" + }, + { + "$ref": "#/$defs/SwiplpackPackage" + }, + { + "$ref": "#/$defs/TerraformLockProviderEntry" + }, + { + "$ref": 
"#/$defs/WordpressPluginEntry" + } + ] + } + }, + "type": "object", + "required": [ + "id", + "name", + "version", + "type", + "foundBy", + "locations", + "licenses", + "language", + "cpes", + "purl" + ], + "description": "Package represents a pkg.Package object specialized for JSON marshaling and unmarshalling." + }, + "PeBinary": { + "properties": { + "VersionResources": { + "$ref": "#/$defs/KeyValues", + "description": "VersionResources contains key-value pairs extracted from the PE file's version resource section (e.g., FileVersion, ProductName, CompanyName)." + } + }, + "type": "object", + "required": [ + "VersionResources" + ], + "description": "PEBinary represents metadata captured from a Portable Executable formatted binary (dll, exe, etc.)" + }, + "PhpComposerAuthors": { + "properties": { + "name": { + "type": "string", + "description": "Name is author's full name" + }, + "email": { + "type": "string", + "description": "Email is author's email address" + }, + "homepage": { + "type": "string", + "description": "Homepage is author's personal or company website" + } + }, + "type": "object", + "required": [ + "name" + ], + "description": "PhpComposerAuthors represents author information for a PHP Composer package from the authors field in composer.json." + }, + "PhpComposerExternalReference": { + "properties": { + "type": { + "type": "string", + "description": "Type is reference type (git for source VCS, zip/tar for dist archives)" + }, + "url": { + "type": "string", + "description": "URL is the URL to the resource (git repository URL or archive download URL)" + }, + "reference": { + "type": "string", + "description": "Reference is git commit hash or version tag for source, or archive version for dist" + }, + "shasum": { + "type": "string", + "description": "Shasum is SHA hash of the archive file for integrity verification (dist only)" + } + }, + "type": "object", + "required": [ + "type", + "url", + "reference" + ], + "description": "PhpComposerExternalReference represents source or distribution information for a PHP package, indicating where the package code is retrieved from." + }, + "PhpComposerInstalledEntry": { + "properties": { + "name": { + "type": "string", + "description": "Name is package name in vendor/package format (e.g. symfony/console)" + }, + "version": { + "type": "string", + "description": "Version is the package version" + }, + "source": { + "$ref": "#/$defs/PhpComposerExternalReference", + "description": "Source is the source repository information for development (typically git repo, used when passing --prefer-source). Originates from source code repository." + }, + "dist": { + "$ref": "#/$defs/PhpComposerExternalReference", + "description": "Dist is distribution archive information for production (typically zip/tar, default install method). Packaged version of released code." 
+ }, + "require": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object", + "description": "Require is runtime dependencies with version constraints (package will not install unless these requirements can be met)" + }, + "provide": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object", + "description": "Provide is virtual packages/functionality provided by this package (allows other packages to depend on capabilities)" + }, + "require-dev": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object", + "description": "RequireDev is development-only dependencies (not installed in production, only when developing this package or running tests)" + }, + "suggest": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object", + "description": "Suggest is optional but recommended dependencies (suggestions for packages that would extend functionality)" + }, + "license": { + "items": { + "type": "string" + }, + "type": "array", + "description": "License is the list of license identifiers (SPDX format)" + }, + "type": { + "type": "string", + "description": "Type is package type indicating purpose (library=reusable code, project=application, metapackage=aggregates dependencies, etc.)" + }, + "notification-url": { + "type": "string", + "description": "NotificationURL is the URL to notify when package is installed (for tracking/statistics)" + }, + "bin": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Bin is the list of binary/executable files that should be added to PATH" + }, + "authors": { + "items": { + "$ref": "#/$defs/PhpComposerAuthors" + }, + "type": "array", + "description": "Authors are the list of package authors with name/email/homepage" + }, + "description": { + "type": "string", + "description": "Description is a human-readable package description" + }, + "homepage": { + "type": "string", + "description": "Homepage is project homepage URL" + }, + "keywords": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Keywords are the list of keywords for package discovery/search" + }, + "time": { + "type": "string", + "description": "Time is timestamp when this package version was released" + } + }, + "type": "object", + "required": [ + "name", + "version", + "source", + "dist" + ], + "description": "PhpComposerInstalledEntry represents a single package entry from a composer v1/v2 \"installed.json\" files (very similar to composer.lock files)." + }, + "PhpComposerLockEntry": { + "properties": { + "name": { + "type": "string", + "description": "Name is package name in vendor/package format (e.g. symfony/console)" + }, + "version": { + "type": "string", + "description": "Version is the package version" + }, + "source": { + "$ref": "#/$defs/PhpComposerExternalReference", + "description": "Source is the source repository information for development (typically git repo, used when passing --prefer-source). Originates from source code repository." + }, + "dist": { + "$ref": "#/$defs/PhpComposerExternalReference", + "description": "Dist is distribution archive information for production (typically zip/tar, default install method). Packaged version of released code." 
+ }, + "require": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object", + "description": "Require is runtime dependencies with version constraints (package will not install unless these requirements can be met)" + }, + "provide": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object", + "description": "Provide is virtual packages/functionality provided by this package (allows other packages to depend on capabilities)" + }, + "require-dev": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object", + "description": "RequireDev is development-only dependencies (not installed in production, only when developing this package or running tests)" + }, + "suggest": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object", + "description": "Suggest is optional but recommended dependencies (suggestions for packages that would extend functionality)" + }, + "license": { + "items": { + "type": "string" + }, + "type": "array", + "description": "License is the list of license identifiers (SPDX format)" + }, + "type": { + "type": "string", + "description": "Type is package type indicating purpose (library=reusable code, project=application, metapackage=aggregates dependencies, etc.)" + }, + "notification-url": { + "type": "string", + "description": "NotificationURL is the URL to notify when package is installed (for tracking/statistics)" + }, + "bin": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Bin is the list of binary/executable files that should be added to PATH" + }, + "authors": { + "items": { + "$ref": "#/$defs/PhpComposerAuthors" + }, + "type": "array", + "description": "Authors are the list of package authors with name/email/homepage" + }, + "description": { + "type": "string", + "description": "Description is a human-readable package description" + }, + "homepage": { + "type": "string", + "description": "Homepage is project homepage URL" + }, + "keywords": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Keywords are the list of keywords for package discovery/search" + }, + "time": { + "type": "string", + "description": "Time is timestamp when this package version was released" + } + }, + "type": "object", + "required": [ + "name", + "version", + "source", + "dist" + ], + "description": "PhpComposerLockEntry represents a single package entry found from a composer.lock file." + }, + "PhpPearEntry": { + "properties": { + "name": { + "type": "string", + "description": "Name is the package name" + }, + "channel": { + "type": "string", + "description": "Channel is PEAR channel this package is from" + }, + "version": { + "type": "string", + "description": "Version is the package version" + }, + "license": { + "items": { + "type": "string" + }, + "type": "array", + "description": "License is the list of applicable licenses" + } + }, + "type": "object", + "required": [ + "name", + "version" + ], + "description": "PhpPearEntry represents a single package entry found within php pear metadata files." 
+ }, + "PhpPeclEntry": { + "properties": { + "name": { + "type": "string", + "description": "Name is the package name" + }, + "channel": { + "type": "string", + "description": "Channel is PEAR channel this package is from" + }, + "version": { + "type": "string", + "description": "Version is the package version" + }, + "license": { + "items": { + "type": "string" + }, + "type": "array", + "description": "License is the list of applicable licenses" + } + }, + "type": "object", + "required": [ + "name", + "version" + ], + "description": "PhpPeclEntry represents a single package entry found within php pecl metadata files." + }, + "PnpmLockResolution": { + "properties": { + "integrity": { + "type": "string", + "description": "Integrity is Subresource Integrity hash for verification (SRI format)" + } + }, + "type": "object", + "required": [ + "integrity" + ], + "description": "PnpmLockResolution contains package resolution metadata from pnpm lockfiles, including the integrity hash used for verification." + }, + "PortageDbEntry": { + "properties": { + "installedSize": { + "type": "integer", + "description": "InstalledSize is total size of installed files in bytes" + }, + "licenses": { + "type": "string", + "description": "Licenses is license string which may be an expression (e.g. \"GPL-2 OR Apache-2.0\")" + }, + "files": { + "items": { + "$ref": "#/$defs/PortageFileRecord" + }, + "type": "array", + "description": "Files are the files installed by this package (tracked in CONTENTS file)" + } + }, + "type": "object", + "required": [ + "installedSize", + "files" + ], + "description": "PortageEntry represents a single package entry in the portage DB flat-file store." + }, + "PortageFileRecord": { + "properties": { + "path": { + "type": "string", + "description": "Path is the file path relative to the filesystem root" + }, + "digest": { + "$ref": "#/$defs/Digest", + "description": "Digest is file content hash (MD5 for regular files in CONTENTS format: \"obj filename md5hash mtime\")" + } + }, + "type": "object", + "required": [ + "path" + ], + "description": "PortageFileRecord represents a single file attributed to a portage package." + }, + "PythonDirectURLOriginInfo": { + "properties": { + "url": { + "type": "string", + "description": "URL is the source URL from which the package was installed." + }, + "commitId": { + "type": "string", + "description": "CommitID is the VCS commit hash if installed from version control." + }, + "vcs": { + "type": "string", + "description": "VCS is the version control system type (e.g., \"git\", \"hg\")." + } + }, + "type": "object", + "required": [ + "url" + ], + "description": "PythonDirectURLOriginInfo represents installation source metadata from direct_url.json for packages installed from VCS or direct URLs." + }, + "PythonFileDigest": { + "properties": { + "algorithm": { + "type": "string", + "description": "Algorithm is the hash algorithm used (e.g., \"sha256\")." + }, + "value": { + "type": "string", + "description": "Value is the hex-encoded hash digest value." + } + }, + "type": "object", + "required": [ + "algorithm", + "value" + ], + "description": "PythonFileDigest represents the file metadata for a single file attributed to a python package." + }, + "PythonFileRecord": { + "properties": { + "path": { + "type": "string", + "description": "Path is the installed file path from the RECORD file." + }, + "digest": { + "$ref": "#/$defs/PythonFileDigest", + "description": "Digest contains the hash algorithm and value for file integrity verification." 
+ }, + "size": { + "type": "string", + "description": "Size is the file size in bytes as a string." + } + }, + "type": "object", + "required": [ + "path" + ], + "description": "PythonFileRecord represents a single entry within a RECORD file for a python wheel or egg package" + }, + "PythonPackage": { + "properties": { + "name": { + "type": "string", + "description": "Name is the package name from the Name field in PKG-INFO or METADATA." + }, + "version": { + "type": "string", + "description": "Version is the package version from the Version field in PKG-INFO or METADATA." + }, + "author": { + "type": "string", + "description": "Author is the package author name from the Author field." + }, + "authorEmail": { + "type": "string", + "description": "AuthorEmail is the package author's email address from the Author-Email field." + }, + "platform": { + "type": "string", + "description": "Platform indicates the target platform for the package (e.g., \"any\", \"linux\", \"win32\")." + }, + "files": { + "items": { + "$ref": "#/$defs/PythonFileRecord" + }, + "type": "array", + "description": "Files are the installed files listed in the RECORD file for wheels or installed-files.txt for eggs." + }, + "sitePackagesRootPath": { + "type": "string", + "description": "SitePackagesRootPath is the root directory path containing the package (e.g., \"/usr/lib/python3.9/site-packages\")." + }, + "topLevelPackages": { + "items": { + "type": "string" + }, + "type": "array", + "description": "TopLevelPackages are the top-level Python module names from top_level.txt file." + }, + "directUrlOrigin": { + "$ref": "#/$defs/PythonDirectURLOriginInfo", + "description": "DirectURLOrigin contains VCS or direct URL installation information from direct_url.json." + }, + "requiresPython": { + "type": "string", + "description": "RequiresPython specifies the Python version requirement (e.g., \"\u003e=3.6\")." + }, + "requiresDist": { + "items": { + "type": "string" + }, + "type": "array", + "description": "RequiresDist lists the package dependencies with version specifiers from Requires-Dist fields." + }, + "providesExtra": { + "items": { + "type": "string" + }, + "type": "array", + "description": "ProvidesExtra lists optional feature names that can be installed via extras (e.g., \"dev\", \"test\")." + } + }, + "type": "object", + "required": [ + "name", + "version", + "author", + "authorEmail", + "platform", + "sitePackagesRootPath" + ], + "description": "PythonPackage represents all captured data for a python egg or wheel package (specifically as outlined in the PyPA core metadata specification https://packaging.python.org/en/latest/specifications/core-metadata/)." 
+ }, + "PythonPdmFileEntry": { + "properties": { + "url": { + "type": "string", + "description": "URL is the file download URL" + }, + "digest": { + "$ref": "#/$defs/PythonFileDigest", + "description": "Digest is the hash digest of the file hosted at the URL" + } + }, + "type": "object", + "required": [ + "url", + "digest" + ] + }, + "PythonPdmLockEntry": { + "properties": { + "summary": { + "type": "string", + "description": "Summary provides a description of the package" + }, + "files": { + "items": { + "$ref": "#/$defs/PythonPdmFileEntry" + }, + "type": "array", + "description": "Files are the package files with their paths and hash digests (for the base package without extras)" + }, + "marker": { + "type": "string", + "description": "Marker is the \"environment\" --conditional expressions that determine whether a package should be installed based on the runtime environment" + }, + "requiresPython": { + "type": "string", + "description": "RequiresPython specifies the Python version requirement (e.g., \"\u003e=3.6\")." + }, + "dependencies": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Dependencies are the dependency specifications for the base package (without extras)" + }, + "extras": { + "items": { + "$ref": "#/$defs/PythonPdmLockExtraVariant" + }, + "type": "array", + "description": "Extras contains variants for different extras combinations (PDM may have multiple entries per package)" + } + }, + "type": "object", + "required": [ + "summary", + "files" + ], + "description": "PythonPdmLockEntry represents a single package entry within a pdm.lock file." + }, + "PythonPdmLockExtraVariant": { + "properties": { + "extras": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Extras are the optional extras enabled for this variant (e.g., [\"toml\"], [\"dev\"], or [\"toml\", \"dev\"])" + }, + "dependencies": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Dependencies are the dependencies specific to this extras variant" + }, + "files": { + "items": { + "$ref": "#/$defs/PythonPdmFileEntry" + }, + "type": "array", + "description": "Files are the package files specific to this variant (only populated if different from base)" + }, + "marker": { + "type": "string", + "description": "Marker is the environment conditional expression for this variant (e.g., \"python_version \u003c \\\"3.11\\\"\")" + } + }, + "type": "object", + "required": [ + "extras" + ], + "description": "PythonPdmLockExtraVariant represents a specific extras combination variant within a PDM lock file." + }, + "PythonPipRequirementsEntry": { + "properties": { + "name": { + "type": "string", + "description": "Name is the package name from the requirements file." + }, + "extras": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Extras are the optional features to install from the package (e.g., package[dev,test])." + }, + "versionConstraint": { + "type": "string", + "description": "VersionConstraint specifies version requirements (e.g., \"\u003e=1.0,\u003c2.0\")." + }, + "url": { + "type": "string", + "description": "URL is the direct download URL or VCS URL if specified instead of a PyPI package." + }, + "markers": { + "type": "string", + "description": "Markers are environment marker expressions for conditional installation (e.g., \"python_version \u003e= '3.8'\")." 
+ } + }, + "type": "object", + "required": [ + "name", + "versionConstraint" + ], + "description": "PythonRequirementsEntry represents a single entry within a [*-]requirements.txt file." + }, + "PythonPipfileLockEntry": { + "properties": { + "hashes": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Hashes are the package file hash values in the format \"algorithm:digest\" for integrity verification." + }, + "index": { + "type": "string", + "description": "Index is the PyPI index name where the package should be fetched from." + } + }, + "type": "object", + "required": [ + "hashes", + "index" + ], + "description": "PythonPipfileLockEntry represents a single package entry within a Pipfile.lock file." + }, + "PythonPoetryLockDependencyEntry": { + "properties": { + "name": { + "type": "string", + "description": "Name is the dependency package name." + }, + "version": { + "type": "string", + "description": "Version is the locked version or version constraint for the dependency." + }, + "optional": { + "type": "boolean", + "description": "Optional indicates whether this dependency is optional (only needed for certain extras)." + }, + "markers": { + "type": "string", + "description": "Markers are environment marker expressions that conditionally enable the dependency (e.g., \"python_version \u003e= '3.8'\")." + }, + "extras": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Extras are the optional feature names from the dependency that should be installed." + } + }, + "type": "object", + "required": [ + "name", + "version", + "optional" + ], + "description": "PythonPoetryLockDependencyEntry represents a single dependency entry within a Poetry lock file." + }, + "PythonPoetryLockEntry": { + "properties": { + "index": { + "type": "string", + "description": "Index is the package repository name where the package should be fetched from." + }, + "dependencies": { + "items": { + "$ref": "#/$defs/PythonPoetryLockDependencyEntry" + }, + "type": "array", + "description": "Dependencies are the package's runtime dependencies with version constraints." + }, + "extras": { + "items": { + "$ref": "#/$defs/PythonPoetryLockExtraEntry" + }, + "type": "array", + "description": "Extras are optional feature groups that include additional dependencies." + } + }, + "type": "object", + "required": [ + "index", + "dependencies" + ], + "description": "PythonPoetryLockEntry represents a single package entry within a Pipfile.lock file." + }, + "PythonPoetryLockExtraEntry": { + "properties": { + "name": { + "type": "string", + "description": "Name is the optional feature name (e.g., \"dev\", \"test\")." + }, + "dependencies": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Dependencies are the package names required when this extra is installed." + } + }, + "type": "object", + "required": [ + "name", + "dependencies" + ], + "description": "PythonPoetryLockExtraEntry represents an optional feature group in a Poetry lock file." + }, + "PythonUvLockDependencyEntry": { + "properties": { + "name": { + "type": "string", + "description": "Name is the dependency package name." + }, + "optional": { + "type": "boolean", + "description": "Optional indicates whether this dependency is optional (only needed for certain extras)." + }, + "markers": { + "type": "string", + "description": "Markers are environment marker expressions that conditionally enable the dependency (e.g., \"python_version \u003e= '3.8'\")." 
+ }, + "extras": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Extras are the optional feature names from the dependency that should be installed." + } + }, + "type": "object", + "required": [ + "name", + "optional" + ], + "description": "PythonUvLockDependencyEntry represents a single dependency entry within a uv lock file." + }, + "PythonUvLockEntry": { + "properties": { + "index": { + "type": "string", + "description": "Index is the package repository name where the package should be fetched from." + }, + "dependencies": { + "items": { + "$ref": "#/$defs/PythonUvLockDependencyEntry" + }, + "type": "array", + "description": "Dependencies are the package's runtime dependencies with version constraints." + }, + "extras": { + "items": { + "$ref": "#/$defs/PythonUvLockExtraEntry" + }, + "type": "array", + "description": "Extras are optional feature groups that include additional dependencies." + } + }, + "type": "object", + "required": [ + "index", + "dependencies" + ], + "description": "PythonUvLockEntry represents a single package entry within a uv.lock file." + }, + "PythonUvLockExtraEntry": { + "properties": { + "name": { + "type": "string", + "description": "Name is the optional feature name (e.g., \"dev\", \"test\")." + }, + "dependencies": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Dependencies are the package names required when this extra is installed." + } + }, + "type": "object", + "required": [ + "name", + "dependencies" + ], + "description": "PythonUvLockExtraEntry represents an optional feature group in a uv lock file." + }, + "RDescription": { + "properties": { + "title": { + "type": "string", + "description": "Title is short one-line package title" + }, + "description": { + "type": "string", + "description": "Description is detailed package description" + }, + "author": { + "type": "string", + "description": "Author is package author(s)" + }, + "maintainer": { + "type": "string", + "description": "Maintainer is current package maintainer" + }, + "url": { + "items": { + "type": "string" + }, + "type": "array", + "description": "URL is the list of related URLs" + }, + "repository": { + "type": "string", + "description": "Repository is CRAN or other repository name" + }, + "built": { + "type": "string", + "description": "Built is R version and platform this was built with" + }, + "needsCompilation": { + "type": "boolean", + "description": "NeedsCompilation is whether this package requires compilation" + }, + "imports": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Imports are the packages imported in the NAMESPACE" + }, + "depends": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Depends are the packages this package depends on" + }, + "suggests": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Suggests are the optional packages that extend functionality" + } + }, + "type": "object", + "description": "RDescription represents metadata from an R package DESCRIPTION file containing package information, dependencies, and author details." + }, + "Relationship": { + "properties": { + "parent": { + "type": "string", + "description": "Parent is the ID of the parent artifact in this relationship." + }, + "child": { + "type": "string", + "description": "Child is the ID of the child artifact in this relationship." 
+ }, + "type": { + "type": "string", + "description": "Type is the relationship type (e.g., \"contains\", \"dependency-of\", \"ancestor-of\")." + }, + "metadata": { + "description": "Metadata contains additional relationship-specific metadata." + } + }, + "type": "object", + "required": [ + "parent", + "child", + "type" + ], + "description": "Relationship represents a directed relationship between two artifacts in the SBOM, such as package-contains-file or package-depends-on-package." + }, + "RpmArchive": { + "properties": { + "name": { + "type": "string", + "description": "Name is the RPM package name as found in the RPM database." + }, + "version": { + "type": "string", + "description": "Version is the upstream version of the package." + }, + "epoch": { + "oneOf": [ + { + "type": "integer", + "description": "Epoch is the version epoch used to force upgrade ordering (null if not set)." + }, + { + "type": "null" + } + ] + }, + "architecture": { + "type": "string", + "description": "Arch is the target CPU architecture (e.g., \"x86_64\", \"aarch64\", \"noarch\")." + }, + "release": { + "type": "string", + "description": "Release is the package release number or distribution-specific version suffix." + }, + "sourceRpm": { + "type": "string", + "description": "SourceRpm is the source RPM filename that was used to build this package." + }, + "signatures": { + "items": { + "$ref": "#/$defs/RpmSignature" + }, + "type": "array", + "description": "Signatures contains GPG signature metadata for package verification." + }, + "size": { + "type": "integer", + "description": "Size is the total installed size of the package in bytes." + }, + "vendor": { + "type": "string", + "description": "Vendor is the organization that packaged the software." + }, + "modularityLabel": { + "type": "string", + "description": "ModularityLabel identifies the module stream for modular RPM packages (e.g., \"nodejs:12:20200101\")." + }, + "provides": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Provides lists the virtual packages and capabilities this package provides." + }, + "requires": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Requires lists the dependencies required by this package." + }, + "files": { + "items": { + "$ref": "#/$defs/RpmFileRecord" + }, + "type": "array", + "description": "Files are the file records for all files owned by this package." + } + }, + "type": "object", + "required": [ + "name", + "version", + "epoch", + "architecture", + "release", + "sourceRpm", + "size", + "vendor", + "files" + ], + "description": "RpmArchive represents package metadata extracted directly from a .rpm archive file, containing the same information as an RPM database entry." + }, + "RpmDbEntry": { + "properties": { + "name": { + "type": "string", + "description": "Name is the RPM package name as found in the RPM database." + }, + "version": { + "type": "string", + "description": "Version is the upstream version of the package." + }, + "epoch": { + "oneOf": [ + { + "type": "integer", + "description": "Epoch is the version epoch used to force upgrade ordering (null if not set)." + }, + { + "type": "null" + } + ] + }, + "architecture": { + "type": "string", + "description": "Arch is the target CPU architecture (e.g., \"x86_64\", \"aarch64\", \"noarch\")." + }, + "release": { + "type": "string", + "description": "Release is the package release number or distribution-specific version suffix." 
+ }, + "sourceRpm": { + "type": "string", + "description": "SourceRpm is the source RPM filename that was used to build this package." + }, + "signatures": { + "items": { + "$ref": "#/$defs/RpmSignature" + }, + "type": "array", + "description": "Signatures contains GPG signature metadata for package verification." + }, + "size": { + "type": "integer", + "description": "Size is the total installed size of the package in bytes." + }, + "vendor": { + "type": "string", + "description": "Vendor is the organization that packaged the software." + }, + "modularityLabel": { + "type": "string", + "description": "ModularityLabel identifies the module stream for modular RPM packages (e.g., \"nodejs:12:20200101\")." + }, + "provides": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Provides lists the virtual packages and capabilities this package provides." + }, + "requires": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Requires lists the dependencies required by this package." + }, + "files": { + "items": { + "$ref": "#/$defs/RpmFileRecord" + }, + "type": "array", + "description": "Files are the file records for all files owned by this package." + } + }, + "type": "object", + "required": [ + "name", + "version", + "epoch", + "architecture", + "release", + "sourceRpm", + "size", + "vendor", + "files" + ], + "description": "RpmDBEntry represents all captured data from a RPM DB package entry." + }, + "RpmFileRecord": { + "properties": { + "path": { + "type": "string", + "description": "Path is the absolute file path where the file is installed." + }, + "mode": { + "type": "integer", + "description": "Mode is the file permission mode bits following Unix stat.h conventions." + }, + "size": { + "type": "integer", + "description": "Size is the file size in bytes." + }, + "digest": { + "$ref": "#/$defs/Digest", + "description": "Digest contains the hash algorithm and value for file integrity verification." + }, + "userName": { + "type": "string", + "description": "UserName is the owner username for the file." + }, + "groupName": { + "type": "string", + "description": "GroupName is the group name for the file." + }, + "flags": { + "type": "string", + "description": "Flags indicates the file type (e.g., \"%config\", \"%doc\", \"%ghost\")." + } + }, + "type": "object", + "required": [ + "path", + "mode", + "size", + "digest", + "userName", + "groupName", + "flags" + ], + "description": "RpmFileRecord represents the file metadata for a single file attributed to a RPM package." + }, + "RpmSignature": { + "properties": { + "algo": { + "type": "string", + "description": "PublicKeyAlgorithm is the public key algorithm used for signing (e.g., \"RSA\")." + }, + "hash": { + "type": "string", + "description": "HashAlgorithm is the hash algorithm used for the signature (e.g., \"SHA256\")." + }, + "created": { + "type": "string", + "description": "Created is the timestamp when the signature was created." + }, + "issuer": { + "type": "string", + "description": "IssuerKeyID is the GPG key ID that created the signature." + } + }, + "type": "object", + "required": [ + "algo", + "hash", + "created", + "issuer" + ], + "description": "RpmSignature represents a GPG signature for an RPM package used for authenticity verification." 
+ }, + "RubyGemspec": { + "properties": { + "name": { + "type": "string", + "description": "Name is gem name as specified in the gemspec" + }, + "version": { + "type": "string", + "description": "Version is gem version as specified in the gemspec" + }, + "files": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Files is logical list of files in the gem (NOT directly usable as filesystem paths. Example: bundler gem lists \"lib/bundler/vendor/uri/lib/uri/ldap.rb\" but actual path is \"/usr/local/lib/ruby/3.2.0/bundler/vendor/uri/lib/uri/ldap.rb\". Would need gem installation path, ruby version, and env vars like GEM_HOME to resolve actual paths.)" + }, + "authors": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Authors are the list of gem authors (stored as array regardless of using `author` or `authors` method in gemspec)" + }, + "homepage": { + "type": "string", + "description": "Homepage is project homepage URL" + } + }, + "type": "object", + "required": [ + "name", + "version" + ], + "description": "RubyGemspec represents all metadata parsed from the *.gemspec file" + }, + "RustCargoAuditEntry": { + "properties": { + "name": { + "type": "string", + "description": "Name is crate name as specified in audit section of the build binary" + }, + "version": { + "type": "string", + "description": "Version is crate version as specified in audit section of the build binary" + }, + "source": { + "type": "string", + "description": "Source is the source registry or repository where this crate came from" + } + }, + "type": "object", + "required": [ + "name", + "version", + "source" + ], + "description": "RustBinaryAuditEntry represents Rust crate metadata extracted from a compiled binary using cargo-auditable format." + }, + "RustCargoLockEntry": { + "properties": { + "name": { + "type": "string", + "description": "Name is crate name as specified in Cargo.toml" + }, + "version": { + "type": "string", + "description": "Version is crate version as specified in Cargo.toml" + }, + "source": { + "type": "string", + "description": "Source is the source registry or repository URL in format \"registry+https://github.com/rust-lang/crates.io-index\" for registry packages" + }, + "checksum": { + "type": "string", + "description": "Checksum is content checksum for registry packages only (hexadecimal string). Cargo doesn't require or include checksums for git dependencies. Used to detect MITM attacks by verifying downloaded crate matches lockfile checksum." + }, + "dependencies": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Dependencies are the list of dependencies with version constraints" + } + }, + "type": "object", + "required": [ + "name", + "version", + "source", + "checksum", + "dependencies" + ], + "description": "RustCargoLockEntry represents a locked dependency from a Cargo.lock file with precise version and checksum information." + }, + "Schema": { + "properties": { + "version": { + "type": "string", + "description": "Version is the JSON schema version for this document format." + }, + "url": { + "type": "string", + "description": "URL is the URL to the JSON schema definition document." + } + }, + "type": "object", + "required": [ + "version", + "url" + ], + "description": "Schema specifies the JSON schema version and URL reference that defines the structure and validation rules for this document format." 
+ }, + "SnapEntry": { + "properties": { + "snapType": { + "type": "string", + "description": "SnapType indicates the snap type (base, kernel, app, gadget, or snapd)." + }, + "base": { + "type": "string", + "description": "Base is the base snap name that this snap depends on (e.g., \"core20\", \"core22\")." + }, + "snapName": { + "type": "string", + "description": "SnapName is the snap package name." + }, + "snapVersion": { + "type": "string", + "description": "SnapVersion is the snap package version." + }, + "architecture": { + "type": "string", + "description": "Architecture is the target CPU architecture (e.g., \"amd64\", \"arm64\")." + } + }, + "type": "object", + "required": [ + "snapType", + "base", + "snapName", + "snapVersion", + "architecture" + ], + "description": "SnapEntry represents metadata for a Snap package extracted from snap.yaml or snapcraft.yaml files." + }, + "Source": { + "properties": { + "id": { + "type": "string", + "description": "ID is a unique identifier for the analyzed source artifact." + }, + "name": { + "type": "string", + "description": "Name is the name of the analyzed artifact (e.g., image name, directory path)." + }, + "version": { + "type": "string", + "description": "Version is the version of the analyzed artifact (e.g., image tag)." + }, + "supplier": { + "type": "string", + "description": "Supplier is supplier information, which can be user-provided for NTIA minimum elements compliance." + }, + "type": { + "type": "string", + "description": "Type is the source type (e.g., \"image\", \"directory\", \"file\")." + }, + "metadata": { + "description": "Metadata contains additional source-specific metadata." + } + }, + "type": "object", + "required": [ + "id", + "name", + "version", + "type", + "metadata" + ], + "description": "Source represents the artifact that was analyzed to generate this SBOM, such as a container image, directory, or file archive." + }, + "SwiftPackageManagerLockEntry": { + "properties": { + "revision": { + "type": "string", + "description": "Revision is git commit hash of the resolved package" + } + }, + "type": "object", + "required": [ + "revision" + ], + "description": "SwiftPackageManagerResolvedEntry represents a resolved dependency from a Package.resolved file with its locked version and source location." + }, + "SwiplpackPackage": { + "properties": { + "name": { + "type": "string", + "description": "Name is the package name as found in the .toml file" + }, + "version": { + "type": "string", + "description": "Version is the package version as found in the .toml file" + }, + "author": { + "type": "string", + "description": "Author is author name" + }, + "authorEmail": { + "type": "string", + "description": "AuthorEmail is author email address" + }, + "packager": { + "type": "string", + "description": "Packager is packager name (if different from author)" + }, + "packagerEmail": { + "type": "string", + "description": "PackagerEmail is packager email address" + }, + "homepage": { + "type": "string", + "description": "Homepage is project homepage URL" + }, + "dependencies": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Dependencies are the list of required dependencies" + } + }, + "type": "object", + "required": [ + "name", + "version", + "author", + "authorEmail", + "packager", + "packagerEmail", + "homepage", + "dependencies" + ], + "description": "SwiplPackEntry represents a SWI-Prolog package from the pack system with metadata about the package and its dependencies." 
+ }, + "TerraformLockProviderEntry": { + "properties": { + "url": { + "type": "string", + "description": "URL is the provider source address (e.g., \"registry.terraform.io/hashicorp/aws\")." + }, + "constraints": { + "type": "string", + "description": "Constraints specifies the version constraints for the provider (e.g., \"~\u003e 4.0\")." + }, + "version": { + "type": "string", + "description": "Version is the locked provider version selected during terraform init." + }, + "hashes": { + "items": { + "type": "string" + }, + "type": "array", + "description": "Hashes are cryptographic checksums for the provider plugin archives across different platforms." + } + }, + "type": "object", + "required": [ + "url", + "constraints", + "version", + "hashes" + ], + "description": "TerraformLockProviderEntry represents a single provider entry in a Terraform dependency lock file (.terraform.lock.hcl)." + }, + "Toolchain": { + "properties": { + "name": { + "type": "string", + "description": "Name is the name of the toolchain (e.g., \"gcc\", \"clang\", \"ld\", etc.)." + }, + "version": { + "type": "string", + "description": "Version is the version of the toolchain." + }, + "kind": { + "type": "string", + "description": "Kind indicates the type of toolchain (e.g., compiler, linker, runtime)." + } + }, + "type": "object", + "required": [ + "name", + "kind" + ] + }, + "WordpressPluginEntry": { + "properties": { + "pluginInstallDirectory": { + "type": "string", + "description": "PluginInstallDirectory is directory name where the plugin is installed" + }, + "author": { + "type": "string", + "description": "Author is plugin author name" + }, + "authorUri": { + "type": "string", + "description": "AuthorURI is author's website URL" + } + }, + "type": "object", + "required": [ + "pluginInstallDirectory" + ], + "description": "WordpressPluginEntry represents all metadata parsed from the wordpress plugin file" + }, + "cpes": { + "items": { + "$ref": "#/$defs/CPE" + }, + "type": "array" + }, + "licenses": { + "items": { + "$ref": "#/$defs/License" + }, + "type": "array" + } + } +} diff --git a/schema/json/schema-latest.json b/schema/json/schema-latest.json index 9e83026ee..be086ef18 100644 --- a/schema/json/schema-latest.json +++ b/schema/json/schema-latest.json @@ -1,6 +1,6 @@ { "$schema": "https://json-schema.org/draft/2020-12/schema", - "$id": "anchore.io/schema/syft/json/16.1.2/document", + "$id": "anchore.io/schema/syft/json/16.1.3/document", "$ref": "#/$defs/Document", "$defs": { "AlpmDbEntry": { @@ -853,6 +853,10 @@ "type": "string", "description": "HashPath is the relative path to the .nupkg.sha512 hash file (e.g. \"app.metrics.3.0.0.nupkg.sha512\")" }, + "type": { + "type": "string", + "description": "Type is type of entry could be package or project for internal refs" + }, "executables": { "patternProperties": { ".*": { diff --git a/syft/pkg/cataloger/dotnet/cataloger_test.go b/syft/pkg/cataloger/dotnet/cataloger_test.go index 8b333bc8b..3db3122ef 100644 --- a/syft/pkg/cataloger/dotnet/cataloger_test.go +++ b/syft/pkg/cataloger/dotnet/cataloger_test.go @@ -516,6 +516,7 @@ func TestCataloger(t *testing.T) { Path: "newtonsoft.json/13.0.3", Sha512: "sha512-HrC5BXdl00IP9zeV+0Z848QWPAoCr9P3bDEZguI+gkLcBKAOxix/tLEAAHC+UvDNPv4a2d18lOReHMOagPa+zQ==", HashPath: "newtonsoft.json.13.0.3.nupkg.sha512", + Type: "package", Executables: nil, // important! 
}, } @@ -550,6 +551,7 @@ func TestCataloger(t *testing.T) { Path: "newtonsoft.json/13.0.3", Sha512: "sha512-HrC5BXdl00IP9zeV+0Z848QWPAoCr9P3bDEZguI+gkLcBKAOxix/tLEAAHC+UvDNPv4a2d18lOReHMOagPa+zQ==", HashPath: "newtonsoft.json.13.0.3.nupkg.sha512", + Type: "package", Executables: nil, // important! }, } @@ -605,6 +607,7 @@ func TestCataloger(t *testing.T) { Metadata: pkg.DotnetDepsEntry{ Name: "dotnetapp", Version: "1.0.0", + Type: "project", // note: the main package does not have a hash/path/etc Executables: map[string]pkg.DotnetPortableExecutableEntry{ "dotnetapp.dll": { @@ -1387,6 +1390,7 @@ func TestParseDotnetDeps(t *testing.T) { Metadata: pkg.DotnetDepsEntry{ Name: "TestLibrary", Version: "1.0.0", + Type: "project", }, } testCommon := pkg.Package{ @@ -1399,6 +1403,7 @@ func TestParseDotnetDeps(t *testing.T) { Metadata: pkg.DotnetDepsEntry{ Name: "TestCommon", Version: "1.0.0", + Type: "project", }, } awssdkcore := pkg.Package{ @@ -1414,6 +1419,7 @@ func TestParseDotnetDeps(t *testing.T) { Sha512: "sha512-kHBB+QmosVaG6DpngXQ8OlLVVNMzltNITfsRr68Z90qO7dSqJ2EHNd8dtBU1u3AQQLqqFHOY0lfmbpexeH6Pew==", Path: "awssdk.core/3.7.10.6", HashPath: "awssdk.core.3.7.10.6.nupkg.sha512", + Type: "package", }, } msftDependencyInjectionAbstractions := pkg.Package{ @@ -1429,6 +1435,7 @@ func TestParseDotnetDeps(t *testing.T) { Sha512: "sha512-xlzi2IYREJH3/m6+lUrQlujzX8wDitm4QGnUu6kUXTQAWPuZY8i+ticFJbzfqaetLA6KR/rO6Ew/HuYD+bxifg==", Path: "microsoft.extensions.dependencyinjection.abstractions/6.0.0", HashPath: "microsoft.extensions.dependencyinjection.abstractions.6.0.0.nupkg.sha512", + Type: "package", }, } msftDependencyInjection := pkg.Package{ @@ -1444,6 +1451,7 @@ func TestParseDotnetDeps(t *testing.T) { Sha512: "sha512-k6PWQMuoBDGGHOQTtyois2u4AwyVcIwL2LaSLlTZQm2CYcJ1pxbt6jfAnpWmzENA/wfrYRI/X9DTLoUkE4AsLw==", Path: "microsoft.extensions.dependencyinjection/6.0.0", HashPath: "microsoft.extensions.dependencyinjection.6.0.0.nupkg.sha512", + Type: "package", }, } msftLoggingAbstractions := pkg.Package{ @@ -1459,6 +1467,7 @@ func TestParseDotnetDeps(t *testing.T) { Sha512: "sha512-/HggWBbTwy8TgebGSX5DBZ24ndhzi93sHUBDvP1IxbZD7FDokYzdAr6+vbWGjw2XAfR2EJ1sfKUotpjHnFWPxA==", Path: "microsoft.extensions.logging.abstractions/6.0.0", HashPath: "microsoft.extensions.logging.abstractions.6.0.0.nupkg.sha512", + Type: "package", }, } msftExtensionsLogging := pkg.Package{ @@ -1474,6 +1483,7 @@ func TestParseDotnetDeps(t *testing.T) { Sha512: "sha512-eIbyj40QDg1NDz0HBW0S5f3wrLVnKWnDJ/JtZ+yJDFnDj90VoPuoPmFkeaXrtu+0cKm5GRAwoDf+dBWXK0TUdg==", Path: "microsoft.extensions.logging/6.0.0", HashPath: "microsoft.extensions.logging.6.0.0.nupkg.sha512", + Type: "package", }, } msftExtensionsOptions := pkg.Package{ @@ -1489,6 +1499,7 @@ func TestParseDotnetDeps(t *testing.T) { Sha512: "sha512-dzXN0+V1AyjOe2xcJ86Qbo233KHuLEY0njf/P2Kw8SfJU+d45HNS2ctJdnEnrWbM9Ye2eFgaC5Mj9otRMU6IsQ==", Path: "microsoft.extensions.options/6.0.0", HashPath: "microsoft.extensions.options.6.0.0.nupkg.sha512", + Type: "package", }, } msftExtensionsPrimitives := pkg.Package{ @@ -1504,6 +1515,7 @@ func TestParseDotnetDeps(t *testing.T) { Sha512: "sha512-9+PnzmQFfEFNR9J2aDTfJGGupShHjOuGw4VUv+JB044biSHrnmCIMD+mJHmb2H7YryrfBEXDurxQ47gJZdCKNQ==", Path: "microsoft.extensions.primitives/6.0.0", HashPath: "microsoft.extensions.primitives.6.0.0.nupkg.sha512", + Type: "package", }, } newtonsoftJson := pkg.Package{ @@ -1519,6 +1531,7 @@ func TestParseDotnetDeps(t *testing.T) { Sha512: 
"sha512-ppPFpBcvxdsfUonNcvITKqLl3bqxWbDCZIzDWHzjpdAHRFfZe0Dw9HmA0+za13IdyrgJwpkDTDA9fHaxOrt20A==", Path: "newtonsoft.json/13.0.1", HashPath: "newtonsoft.json.13.0.1.nupkg.sha512", + Type: "package", }, } serilogSinksConsole := pkg.Package{ @@ -1534,6 +1547,7 @@ func TestParseDotnetDeps(t *testing.T) { Sha512: "sha512-apLOvSJQLlIbKlbx+Y2UDHSP05kJsV7mou+fvJoRGs/iR+jC22r8cuFVMjjfVxz/AD4B2UCltFhE1naRLXwKNw==", Path: "serilog.sinks.console/4.0.1", HashPath: "serilog.sinks.console.4.0.1.nupkg.sha512", + Type: "package", }, } serilog := pkg.Package{ @@ -1549,6 +1563,7 @@ func TestParseDotnetDeps(t *testing.T) { Sha512: "sha512-+QX0hmf37a0/OZLxM3wL7V6/ADvC1XihXN4Kq/p6d8lCPfgkRdiuhbWlMaFjR9Av0dy5F0+MBeDmDdRZN/YwQA==", Path: "serilog/2.10.0", HashPath: "serilog.2.10.0.nupkg.sha512", + Type: "package", }, } systemDiagnosticsDiagnosticsource := pkg.Package{ @@ -1564,6 +1579,7 @@ func TestParseDotnetDeps(t *testing.T) { Sha512: "sha512-frQDfv0rl209cKm1lnwTgFPzNigy2EKk1BS3uAvHvlBVKe5cymGyHO+Sj+NLv5VF/AhHsqPIUUwya5oV4CHMUw==", Path: "system.diagnostics.diagnosticsource/6.0.0", HashPath: "system.diagnostics.diagnosticsource.6.0.0.nupkg.sha512", + Type: "package", }, } systemRuntimeCompilerServicesUnsafe := pkg.Package{ @@ -1579,6 +1595,7 @@ func TestParseDotnetDeps(t *testing.T) { Sha512: "sha512-/iUeP3tq1S0XdNNoMz5C9twLSrM/TH+qElHkXWaPvuNOt+99G75NrV0OS2EqHx5wMN7popYjpc8oTjC1y16DLg==", Path: "system.runtime.compilerservices.unsafe/6.0.0", HashPath: "system.runtime.compilerservices.unsafe.6.0.0.nupkg.sha512", + Type: "package", }} expectedPkgs := []pkg.Package{ diff --git a/syft/pkg/cataloger/dotnet/config.go b/syft/pkg/cataloger/dotnet/config.go index b959747d9..074eaa289 100644 --- a/syft/pkg/cataloger/dotnet/config.go +++ b/syft/pkg/cataloger/dotnet/config.go @@ -18,6 +18,11 @@ type CatalogerConfig struct { // and, if found (and this config option is enabled), will relax the DepPackagesMustClaimDLL value to `false` only in those cases. // app-config: dotnet.relax-dll-claims-when-bundling-detected RelaxDLLClaimsWhenBundlingDetected bool `mapstructure:"relax-dll-claims-when-bundling-detected" json:"relax-dll-claims-when-bundling-detected" yaml:"relax-dll-claims-when-bundling-detected"` + + // ExcludeProjectReferences excludes packages with type "project" from deps.json output. + // These are internal project references, not external NuGet packages. 
+ // app-config: dotnet.exclude-project-references + ExcludeProjectReferences bool `mapstructure:"exclude-project-references" json:"exclude-project-references" yaml:"exclude-project-references"` } func (c CatalogerConfig) WithDepPackagesMustHaveDLL(requireDlls bool) CatalogerConfig { @@ -40,11 +45,17 @@ func (c CatalogerConfig) WithPropagateDLLClaimsToParents(propagate bool) Catalog return c } +func (c CatalogerConfig) WithExcludeProjectReferences(exclude bool) CatalogerConfig { + c.ExcludeProjectReferences = exclude + return c +} + func DefaultCatalogerConfig() CatalogerConfig { return CatalogerConfig{ DepPackagesMustHaveDLL: false, DepPackagesMustClaimDLL: true, PropagateDLLClaimsToParents: true, RelaxDLLClaimsWhenBundlingDetected: true, + ExcludeProjectReferences: true, } } diff --git a/syft/pkg/cataloger/dotnet/deps_binary_cataloger.go b/syft/pkg/cataloger/dotnet/deps_binary_cataloger.go index b26be3cfd..5fcf3c588 100644 --- a/syft/pkg/cataloger/dotnet/deps_binary_cataloger.go +++ b/syft/pkg/cataloger/dotnet/deps_binary_cataloger.go @@ -291,6 +291,12 @@ func packagesFromLogicalDepsJSON(doc logicalDepsJSON, config CatalogerConfig) (* continue } lp := doc.PackagesByNameVersion[nameVersion] + + if config.ExcludeProjectReferences && lp.Library != nil && lp.Library.Type == "project" { + skippedDepPkgs[nameVersion] = lp + continue + } + if config.DepPackagesMustHaveDLL && !lp.FoundDLLs(config.PropagateDLLClaimsToParents) { // could not find a paired DLL and the user required this... skippedDepPkgs[nameVersion] = lp diff --git a/syft/pkg/cataloger/dotnet/package.go b/syft/pkg/cataloger/dotnet/package.go index 7a463d419..6363535a7 100644 --- a/syft/pkg/cataloger/dotnet/package.go +++ b/syft/pkg/cataloger/dotnet/package.go @@ -132,12 +132,13 @@ func newDotnetDepsEntry(lp logicalDepsJSONPackage) pkg.DotnetDepsEntry { } } - var path, sha, hashPath string + var path, sha, hashPath, libType string lib := lp.Library if lib != nil { path = lib.Path sha = lib.Sha512 hashPath = lib.HashPath + libType = lib.Type } return pkg.DotnetDepsEntry{ @@ -146,6 +147,7 @@ func newDotnetDepsEntry(lp logicalDepsJSONPackage) pkg.DotnetDepsEntry { Path: path, Sha512: sha, HashPath: hashPath, + Type: libType, Executables: pes, } } @@ -184,6 +186,12 @@ func getDepsJSONFilePrefix(p string) string { if len(match) > 1 { return match[1] } + + r = regexp.MustCompile(`([^\\\/]+)\.exe$`) + match = r.FindStringSubmatch(p) + if len(match) > 1 { + return match[1] + } return "" } diff --git a/syft/pkg/dotnet.go b/syft/pkg/dotnet.go index 10e7851c8..9ce12fed7 100644 --- a/syft/pkg/dotnet.go +++ b/syft/pkg/dotnet.go @@ -17,6 +17,9 @@ type DotnetDepsEntry struct { // HashPath is the relative path to the .nupkg.sha512 hash file (e.g. 
"app.metrics.3.0.0.nupkg.sha512") HashPath string `mapstructure:"hashPath" json:"hashPath"` + // Type is type of entry could be package or project for internal refs + Type string `mapstructure:"type" json:"type,omitempty"` + // Executables are the map of .NET Portable Executable files within this package with their version resources Executables map[string]DotnetPortableExecutableEntry `json:"executables,omitempty"` } diff --git a/syft/source/snapsource/snap.go b/syft/source/snapsource/snap.go index 930cee375..c32ed86b6 100644 --- a/syft/source/snapsource/snap.go +++ b/syft/source/snapsource/snap.go @@ -7,6 +7,7 @@ import ( "os" "path" "path/filepath" + "strconv" "strings" "github.com/spf13/afero" @@ -31,21 +32,28 @@ type remoteSnap struct { URL string } +const NotSpecifiedRevision int = 0 + type snapIdentity struct { Name string Channel string Architecture string + Revision int } func (s snapIdentity) String() string { parts := []string{s.Name} + // revision will supersede channel + if s.Revision != NotSpecifiedRevision { + parts = append(parts, fmt.Sprintf(":%d", s.Revision)) + } else { + if s.Channel != "" { + parts = append(parts, fmt.Sprintf("@%s", s.Channel)) + } - if s.Channel != "" { - parts = append(parts, fmt.Sprintf("@%s", s.Channel)) - } - - if s.Architecture != "" { - parts = append(parts, fmt.Sprintf(" (%s)", s.Architecture)) + if s.Architecture != "" { + parts = append(parts, fmt.Sprintf(" (%s)", s.Architecture)) + } } return strings.Join(parts, "") @@ -166,17 +174,21 @@ func getSnapFileInfo(ctx context.Context, fs afero.Fs, path string, hashes []cry // The request can be: // - A snap name (e.g., "etcd") // - A snap name with channel (e.g., "etcd@beta" or "etcd@2.3/stable") +// - A snap name with revision (e.g. etcd:249@stable) func resolveRemoteSnap(request, architecture string) (*remoteSnap, error) { if architecture == "" { architecture = defaultArchitecture } - snapName, channel := parseSnapRequest(request) - + snapName, revision, channel, err := parseSnapRequest(request) + if err != nil { + return nil, err + } id := snapIdentity{ Name: snapName, Channel: channel, Architecture: architecture, + Revision: revision, } client := newSnapcraftClient() @@ -194,15 +206,26 @@ func resolveRemoteSnap(request, architecture string) (*remoteSnap, error) { }, nil } -// parseSnapRequest parses a snap request into name and channel +// parseSnapRequest parses a snap request into name and revision/channel // Examples: // - "etcd" -> name="etcd", channel="stable" (default) // - "etcd@beta" -> name="etcd", channel="beta" // - "etcd@2.3/stable" -> name="etcd", channel="2.3/stable" -func parseSnapRequest(request string) (name, channel string) { +// - "etcd:249@2.3/stable" -> name="etcd" revision=249 (channel not working because revision has been assigned) +func parseSnapRequest(request string) (name string, revision int, channel string, err error) { parts := strings.SplitN(request, "@", 2) name = parts[0] + divisions := strings.Split(parts[0], ":") + // handle revision first + if len(divisions) == 2 { + name = divisions[0] + revision, err = strconv.Atoi(divisions[1]) + if err != nil { + return "", NotSpecifiedRevision, "", err + } + return name, revision, "", err + } if len(parts) == 2 { channel = parts[1] } @@ -210,8 +233,7 @@ func parseSnapRequest(request string) (name, channel string) { if channel == "" { channel = defaultChannel } - - return name, channel + return name, NotSpecifiedRevision, channel, err } func downloadSnap(getter intFile.Getter, info *remoteSnap, dest string) error { diff 
--git a/syft/source/snapsource/snap_test.go b/syft/source/snapsource/snap_test.go index 24f01a42a..2a0dbbe48 100644 --- a/syft/source/snapsource/snap_test.go +++ b/syft/source/snapsource/snap_test.go @@ -508,78 +508,109 @@ func TestDownloadSnap(t *testing.T) { func TestParseSnapRequest(t *testing.T) { tests := []struct { - name string - request string - expectedName string - expectedChannel string + name string + request string + expectedName string + expectedRevision int + expectedChannel string + expectedError require.ErrorAssertionFunc }{ { name: "snap name only - uses default channel", request: "etcd", expectedName: "etcd", expectedChannel: "stable", + expectedError: require.NoError, }, { name: "snap with beta channel", request: "etcd@beta", expectedName: "etcd", expectedChannel: "beta", + expectedError: require.NoError, }, { name: "snap with edge channel", request: "etcd@edge", expectedName: "etcd", expectedChannel: "edge", + expectedError: require.NoError, }, { name: "snap with version track", request: "etcd@2.3/stable", expectedName: "etcd", expectedChannel: "2.3/stable", + expectedError: require.NoError, }, { name: "snap with complex channel path", request: "mysql@8.0/candidate", expectedName: "mysql", expectedChannel: "8.0/candidate", + expectedError: require.NoError, }, { name: "snap with multiple @ symbols - only first is delimiter", request: "app@beta@test", expectedName: "app", expectedChannel: "beta@test", + expectedError: require.NoError, + }, + { + name: "snap with revision", + request: "etcd:249", + expectedName: "etcd", + expectedRevision: 249, + expectedError: require.NoError, + }, + { + name: "snap with revision so the channel doesn't work", + request: "etcd:249@2.3/beta", + expectedName: "etcd", + expectedRevision: 249, + expectedError: require.NoError, }, { name: "empty snap name with channel", request: "@stable", expectedName: "", expectedChannel: "stable", + expectedError: require.NoError, }, { name: "snap name with empty channel - uses default", request: "etcd@", expectedName: "etcd", expectedChannel: "stable", + expectedError: require.NoError, }, { name: "hyphenated snap name", request: "hello-world@stable", expectedName: "hello-world", expectedChannel: "stable", + expectedError: require.NoError, }, { name: "snap name with numbers", request: "app123", expectedName: "app123", expectedChannel: "stable", + expectedError: require.NoError, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - name, channel := parseSnapRequest(tt.request) + name, revision, channel, err := parseSnapRequest(tt.request) assert.Equal(t, tt.expectedName, name) - assert.Equal(t, tt.expectedChannel, channel) + if tt.expectedRevision != NotSpecifiedRevision { + assert.Equal(t, tt.expectedRevision, revision) + } else { + assert.Equal(t, tt.expectedChannel, channel) + } + require.NoError(t, err) }) } } diff --git a/syft/source/snapsource/snapcraft_api.go b/syft/source/snapsource/snapcraft_api.go index 8c00dcbca..0acc82d46 100644 --- a/syft/source/snapsource/snapcraft_api.go +++ b/syft/source/snapsource/snapcraft_api.go @@ -5,6 +5,9 @@ import ( "fmt" "io" "net/http" + "regexp" + "strconv" + "strings" "github.com/anchore/syft/internal/log" ) @@ -58,17 +61,133 @@ type snapFindResponse struct { } `json:"results"` } +type SnapRisk string + +const ( + RiskStable SnapRisk = "stable" + RiskCandidate SnapRisk = "candidate" + RiskBeta SnapRisk = "beta" + RiskEdge SnapRisk = "edge" + RiskUnknown SnapRisk = "unknown" +) + +func isValidSnapRisk(r SnapRisk) bool { + switch r { + case 
RiskStable, RiskCandidate, RiskBeta, RiskEdge: + return true + default: + return false + } +} + +func stringToSnapRisk(s string) SnapRisk { + r := SnapRisk(s) + if !isValidSnapRisk(r) { + return RiskUnknown + } + return r +} + +func getRevisionFromURL(cm snapChannelMapEntry) (rev int, err error) { + re := regexp.MustCompile(`(\d+)\.snap$`) + match := re.FindStringSubmatch(cm.Download.URL) + if len(match) < 2 { + err = fmt.Errorf("could not determine revision from %s", cm.Download.URL) + return + } + rev, err = strconv.Atoi(match[1]) + return +} + +// isEligibleChannel determines whether a candidate channel satisfies a requested +// channel. Both channels are parsed into {track, risk, branch} parts. +// +// Matching rules: +// - If the request includes a track, both track and risk must match exactly. +// - If the request omits the track (e.g., "stable"), any candidate track is +// accepted as long as the risk matches (a branch in the request, when present, must also match). +// +// Examples: +// +// candidate="3.2/stable", request="stable" -> true +// candidate="3.2/stable", request="3.2/stable" -> true +// candidate="3.2/stable", request="3.2/beta" -> false +// candidate="3.2/beta", request="stable" -> false +// candidate="3.2/alpha", request="alpha" -> false (alpha is an invalid risk level) +// candidate="3.2/stable/fix-for-bug123", request="stable" -> true +// candidate="3.2/stable/fix-for-bug123", request="3.2/stable" -> true +func isEligibleChannel(candidate, request string) (bool, error) { + cTrack, cRisk, cBranch := splitChannel(candidate) + rTrack, rRisk, rBranch := splitChannel(request) + if rTrack == "" && rRisk == "" && rBranch == "" { + return false, fmt.Errorf("there is no such risk in the channel (only stable/candidate/beta/edge are valid)") + } + + if rTrack != "" { + return cTrack == rTrack && cRisk == rRisk && (cBranch == rBranch || rBranch == ""), nil + } + + return cRisk == rRisk && (cBranch == rBranch || rBranch == ""), nil +} + +func splitChannel(ch string) (track string, risk string, branch string) { + parts := strings.SplitN(ch, "/", 3) + if stringToSnapRisk(parts[0]) != RiskUnknown { + if len(parts) == 1 { + return "", parts[0], "" // no track + } else if len(parts) == 2 { + return "", parts[0], parts[1] + } + } else if len(parts) >= 2 && stringToSnapRisk(parts[1]) != RiskUnknown { + if len(parts) == 3 { + return parts[0], parts[1], parts[2] + } else if len(parts) == 2 { + return parts[0], parts[1], "" + } + } + + return "", "", "" +} + +func matchSnapDownloadURL(cm snapChannelMapEntry, id snapIdentity) (string, error) { + // a specified revision supersedes the channel + if id.Revision != NotSpecifiedRevision { + rev, err2 := getRevisionFromURL(cm) + if err2 == nil && rev == id.Revision { + return cm.Download.URL, nil + } + } else if cm.Channel.Architecture == id.Architecture { + matched, err2 := isEligibleChannel(cm.Channel.Name, id.Channel) + if err2 != nil { + return "", err2 + } + if matched { + return cm.Download.URL, nil + } + } + return "", nil +} + // GetSnapDownloadURL retrieves the download URL for a snap package func (c *snapcraftClient) GetSnapDownloadURL(id snapIdentity) (string, error) { apiURL := c.InfoAPIURL + id.Name - log.WithFields("name", id.Name, "channel", id.Channel, "architecture", id.Architecture).Trace("requesting snap info") + if id.Revision == NotSpecifiedRevision { + log.WithFields("name", id.Name, "channel", id.Channel, "architecture", id.Architecture).Trace("requesting snap info") + } else { + log.WithFields("name", id.Name, "revision", id.Revision, "architecture", id.Architecture).Trace("requesting snap info") +
} req, err := http.NewRequest(http.MethodGet, apiURL, nil) if err != nil { return "", fmt.Errorf("failed to create HTTP request: %w", err) } + if id.Revision != NotSpecifiedRevision { + q := req.URL.Query() + q.Add("revision", fmt.Sprintf("%d", id.Revision)) + req.URL.RawQuery = q.Encode() + } req.Header.Set("Snap-Device-Series", defaultSeries) resp, err := c.HTTPClient.Do(req) @@ -107,9 +226,11 @@ func (c *snapcraftClient) GetSnapDownloadURL(id snapIdentity) (string, error) { } for _, cm := range info.ChannelMap { - if cm.Channel.Architecture == id.Architecture && cm.Channel.Name == id.Channel { - return cm.Download.URL, nil + url, err2 := matchSnapDownloadURL(cm, id) + if url == "" && err2 == nil { + continue } + return url, err2 } return "", fmt.Errorf("no matching snap found for %s", id.String()) diff --git a/syft/source/snapsource/snapcraft_api_test.go b/syft/source/snapsource/snapcraft_api_test.go index c13516d12..a9473a88c 100644 --- a/syft/source/snapsource/snapcraft_api_test.go +++ b/syft/source/snapsource/snapcraft_api_test.go @@ -161,6 +161,126 @@ func TestSnapcraftClient_GetSnapDownloadURL(t *testing.T) { expectedURL: "https://api.snapcraft.io/api/v1/snaps/download/etcd_123.snap", expectError: require.NoError, }, + { + name: "successful download URL retrieval (w/ track)", + snapID: snapIdentity{ + Name: "etcd", + Channel: "stable", + Architecture: "amd64", + }, + infoStatusCode: http.StatusOK, + infoResponse: snapcraftInfo{ + ChannelMap: []snapChannelMapEntry{ + { + Channel: snapChannel{ + Architecture: "amd64", + Name: "3.2/stable", + }, + Download: snapDownload{ + URL: "https://api.snapcraft.io/api/v1/snaps/download/etcd_123.snap", + }, + }, + }, + }, + expectedURL: "https://api.snapcraft.io/api/v1/snaps/download/etcd_123.snap", + expectError: require.NoError, + }, + { + name: "successful download URL retrieval (w/ track&branch)", + snapID: snapIdentity{ + Name: "etcd", + Channel: "stable", + Architecture: "amd64", + }, + infoStatusCode: http.StatusOK, + infoResponse: snapcraftInfo{ + ChannelMap: []snapChannelMapEntry{ + { + Channel: snapChannel{ + Architecture: "amd64", + Name: "3.2/stable/fix-for-bug123", + }, + Download: snapDownload{ + URL: "https://api.snapcraft.io/api/v1/snaps/download/etcd_123.snap", + }, + }, + }, + }, + expectedURL: "https://api.snapcraft.io/api/v1/snaps/download/etcd_123.snap", + expectError: require.NoError, + }, + { + name: "branch unmatched", + snapID: snapIdentity{ + Name: "etcd", + Channel: "stable/fix-for-bug124", + Architecture: "amd64", + }, + infoStatusCode: http.StatusOK, + infoResponse: snapcraftInfo{ + ChannelMap: []snapChannelMapEntry{ + { + Channel: snapChannel{ + Architecture: "amd64", + Name: "3.2/stable/fix-for-bug123", + }, + Download: snapDownload{ + URL: "https://api.snapcraft.io/api/v1/snaps/download/etcd_123.snap", + }, + }, + }, + }, + expectError: require.Error, + errorContains: "no matching snap found", + }, + { + name: "risk unmatched", + snapID: snapIdentity{ + Name: "etcd", + Channel: "stable", + Architecture: "amd64", + }, + infoStatusCode: http.StatusOK, + infoResponse: snapcraftInfo{ + ChannelMap: []snapChannelMapEntry{ + { + Channel: snapChannel{ + Architecture: "amd64", + Name: "latest/beta", + }, + Download: snapDownload{ + URL: "https://api.snapcraft.io/api/v1/snaps/download/etcd_123.snap", + }, + }, + }, + }, + expectError: require.Error, + errorContains: "no matching snap found", + }, + { + name: "illegal risk", + snapID: snapIdentity{ + Name: "etcd", + Channel: "foobar", + Architecture: "amd64", + }, + 
infoStatusCode: http.StatusOK, + infoResponse: snapcraftInfo{ + ChannelMap: []snapChannelMapEntry{ + { + Channel: snapChannel{ + Architecture: "amd64", + Name: "latest/beta", + }, + Download: snapDownload{ + URL: "https://api.snapcraft.io/api/v1/snaps/download/etcd_123.snap", + }, + }, + }, + }, + expectError: require.Error, + errorContains: "there is no such risk", + }, { name: "region-locked snap - exists but unavailable", snapID: snapIdentity{ @@ -351,6 +471,214 @@ func TestSnapcraftClient_GetSnapDownloadURL(t *testing.T) { } } +func TestSnapcraftClient_GetSnapDownloadURL_WithVersion(t *testing.T) { + tests := []struct { + name string + snapID snapIdentity + infoResponse snapcraftInfo + infoStatusCode int + findResponse *snapFindResponse + findStatusCode int + expectedURL string + expectError require.ErrorAssertionFunc + errorContains string + }{ + { + name: "successful download URL retrieval", + snapID: snapIdentity{ + Name: "etcd", + Channel: "stable", + Architecture: "amd64", + Revision: 249, + }, + infoStatusCode: http.StatusOK, + infoResponse: snapcraftInfo{ + ChannelMap: []snapChannelMapEntry{ + { + Channel: snapChannel{ + Architecture: "amd64", + Name: "stable", + }, + Download: snapDownload{ + URL: "https://api.snapcraft.io/api/v1/snaps/download/TKebVGcPeDKoOqAmNmczU2oWLtsojKD5_249.snap", + }, + }, + }, + }, + expectedURL: "https://api.snapcraft.io/api/v1/snaps/download/TKebVGcPeDKoOqAmNmczU2oWLtsojKD5_249.snap", + expectError: require.NoError, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if tt.expectError == nil { + tt.expectError = require.NoError + } + + infoServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, defaultSeries, r.Header.Get("Snap-Device-Series")) + + expectedPath := "/" + tt.snapID.Name + assert.Equal(t, expectedPath, r.URL.Path) + + w.WriteHeader(tt.infoStatusCode) + + if tt.infoStatusCode == http.StatusOK { + responseBytes, err := json.Marshal(tt.infoResponse) + require.NoError(t, err) + w.Write(responseBytes) + } + })) + defer infoServer.Close() + + var findServer *httptest.Server + if tt.findResponse != nil || tt.findStatusCode != 0 { + findServer = httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, defaultSeries, r.Header.Get("Snap-Device-Series")) + assert.Equal(t, tt.snapID.Name, r.URL.Query().Get("name-startswith")) + + statusCode := tt.findStatusCode + if statusCode == 0 { + statusCode = http.StatusOK + } + w.WriteHeader(statusCode) + + if tt.findResponse != nil && statusCode == http.StatusOK { + responseBytes, err := json.Marshal(tt.findResponse) + require.NoError(t, err) + w.Write(responseBytes) + } + })) + defer findServer.Close() + } + + client := &snapcraftClient{ + InfoAPIURL: infoServer.URL + "/", + HTTPClient: &http.Client{}, + } + if findServer != nil { + client.FindAPIURL = findServer.URL + } + + url, err := client.GetSnapDownloadURL(tt.snapID) + tt.expectError(t, err) + if err != nil { + if tt.errorContains != "" { + assert.Contains(t, err.Error(), tt.errorContains) + } + return + } + assert.Equal(t, tt.expectedURL, url) + }) + } +} + +func TestSnapcraftClient_GetSnapDownloadURL_DoesntExist(t *testing.T) { + tests := []struct { + name string + snapID snapIdentity + infoResponse snapcraftInfo + infoStatusCode int + findResponse *snapFindResponse + findStatusCode int + expectedURL string + expectError require.ErrorAssertionFunc + errorContains string + }{ + { + name: "non-existent snap with revision", + 
snapID: snapIdentity{ + Name: "etcd", + Channel: "stable", + Architecture: "amd64", + Revision: 248, + }, + infoStatusCode: http.StatusOK, + infoResponse: snapcraftInfo{ + ChannelMap: []snapChannelMapEntry{ + { + Channel: snapChannel{ + Architecture: "amd64", + Name: "stable", + }, + Download: snapDownload{ + URL: "https://api.snapcraft.io/api/v1/snaps/download/TKebVGcPeDKoOqAmNmczU2oWLtsojKD5_249.snap", + }, + }, + }, + }, + expectedURL: "https://api.snapcraft.io/api/v1/snaps/download/TKebVGcPeDKoOqAmNmczU2oWLtsojKD5_249.snap", + expectError: func(t require.TestingT, err error, msgAndArgs ...interface{}) { + require.EqualError(t, err, "no matching snap found for etcd:248") + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if tt.expectError == nil { + tt.expectError = require.NoError + } + + infoServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, defaultSeries, r.Header.Get("Snap-Device-Series")) + + expectedPath := "/" + tt.snapID.Name + assert.Equal(t, expectedPath, r.URL.Path) + + w.WriteHeader(tt.infoStatusCode) + + if tt.infoStatusCode == http.StatusOK { + responseBytes, err := json.Marshal(tt.infoResponse) + require.NoError(t, err) + w.Write(responseBytes) + } + })) + defer infoServer.Close() + + var findServer *httptest.Server + if tt.findResponse != nil || tt.findStatusCode != 0 { + findServer = httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, defaultSeries, r.Header.Get("Snap-Device-Series")) + assert.Equal(t, tt.snapID.Name, r.URL.Query().Get("name-startswith")) + + statusCode := tt.findStatusCode + if statusCode == 0 { + statusCode = http.StatusOK + } + w.WriteHeader(statusCode) + + if tt.findResponse != nil && statusCode == http.StatusOK { + responseBytes, err := json.Marshal(tt.findResponse) + require.NoError(t, err) + w.Write(responseBytes) + } + })) + defer findServer.Close() + } + + client := &snapcraftClient{ + InfoAPIURL: infoServer.URL + "/", + HTTPClient: &http.Client{}, + } + if findServer != nil { + client.FindAPIURL = findServer.URL + } + + url, err := client.GetSnapDownloadURL(tt.snapID) + tt.expectError(t, err) + if err != nil { + if tt.errorContains != "" { + assert.Contains(t, err.Error(), tt.errorContains) + } + return + } + assert.Equal(t, tt.expectedURL, url) + }) + } +} + func TestSnapcraftClient_GetSnapDownloadURL_InvalidJSON(t *testing.T) { infoServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusOK) diff --git a/test/cli/scan_cmd_test.go b/test/cli/scan_cmd_test.go index 133b1d171..197799294 100644 --- a/test/cli/scan_cmd_test.go +++ b/test/cli/scan_cmd_test.go @@ -9,7 +9,7 @@ import ( const ( // this is the number of packages that should be found in the image-pkg-coverage fixture image // when analyzed with the squashed scope. - coverageImageSquashedPackageCount = 43 + coverageImageSquashedPackageCount = 42 ) func TestPackagesCmdFlags(t *testing.T) {
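To illustrate the dotnet changes above: the new DotnetDepsEntry.Type field and the ExcludeProjectReferences option both hinge on the "type" value that deps.json records for each entry in its "libraries" section, "package" for restored NuGet packages and "project" for project references within the same solution, as the test fixtures above show. The standalone sketch below uses local types rather than syft's internal logicalDepsJSON structures and shows the filtering that the deps_binary_cataloger change applies when the option is enabled.

package main

import (
	"encoding/json"
	"fmt"
)

// libraryEntry mirrors only the deps.json "libraries" fields relevant to the new Type handling.
type libraryEntry struct {
	Type     string `json:"type"`
	Path     string `json:"path"`
	HashPath string `json:"hashPath"`
}

const depsFragment = `{
	"TestLibrary/1.0.0": {"type": "project"},
	"newtonsoft.json/13.0.3": {"type": "package", "path": "newtonsoft.json/13.0.3", "hashPath": "newtonsoft.json.13.0.3.nupkg.sha512"}
}`

func main() {
	var libraries map[string]libraryEntry
	if err := json.Unmarshal([]byte(depsFragment), &libraries); err != nil {
		panic(err)
	}
	excludeProjectReferences := true // mirrors the new default in DefaultCatalogerConfig
	for nameVersion, lib := range libraries {
		if excludeProjectReferences && lib.Type == "project" {
			continue // internal project reference, skipped from the results
		}
		fmt.Println("keep:", nameVersion, lib.Type)
	}
}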
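ExcludeProjectReferences defaults to true (see DefaultCatalogerConfig above) and can also be set through the dotnet.exclude-project-references application config key. A caller building a config programmatically can opt back in to project references; the sketch below assumes the package import path follows the file layout in this diff.

package main

import (
	"fmt"

	"github.com/anchore/syft/syft/pkg/cataloger/dotnet"
)

func main() {
	// keep "project" entries from deps.json in the results instead of skipping them
	cfg := dotnet.DefaultCatalogerConfig().WithExcludeProjectReferences(false)
	fmt.Printf("%+v\n", cfg)
}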
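getDepsJSONFilePrefix gains a second pattern so that a prefix can also be recovered from paths ending in ".exe"; the earlier pattern in that function is not shown in this diff. A minimal demonstration of just the added pattern:

package main

import (
	"fmt"
	"regexp"
)

// exePrefix applies the fallback pattern added to getDepsJSONFilePrefix: it pulls the
// file name (without extension) out of a path ending in ".exe".
func exePrefix(p string) string {
	r := regexp.MustCompile(`([^\\\/]+)\.exe$`)
	if m := r.FindStringSubmatch(p); len(m) > 1 {
		return m[1]
	}
	return ""
}

func main() {
	fmt.Println(exePrefix(`/app/publish/dotnetapp.exe`)) // dotnetapp
	fmt.Println(exePrefix(`C:\publish\dotnetapp.exe`))   // dotnetapp
	fmt.Println(exePrefix(`/app/dotnetapp.dll`))         // "" (not an .exe path)
}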
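On the snap side, parseSnapRequest now accepts an optional ":<revision>" after the snap name, and a revision takes precedence over any channel in the same request. The sketch below is a standalone re-implementation of that grammar for illustration only (parseSnapRequest itself is unexported); the expected results mirror the examples in the doc comment and tests above.

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// parse handles "<name>", "<name>@<channel>", and "<name>:<revision>[@<channel>]".
// When a revision is present the channel is ignored, matching the behavior in the diff.
func parse(request string) (name string, revision int, channel string, err error) {
	nameAndRev, chanPart, hasChannel := strings.Cut(request, "@")
	if n, rev, ok := strings.Cut(nameAndRev, ":"); ok {
		revision, err = strconv.Atoi(rev)
		if err != nil {
			return "", 0, "", err
		}
		return n, revision, "", nil
	}
	name = nameAndRev
	channel = "stable" // default channel, per the tests above
	if hasChannel && chanPart != "" {
		channel = chanPart
	}
	return name, 0, channel, nil
}

func main() {
	for _, req := range []string{"etcd", "etcd@beta", "etcd@2.3/stable", "etcd:249", "etcd:249@2.3/stable"} {
		name, rev, ch, err := parse(req)
		fmt.Printf("%-20s -> name=%q revision=%d channel=%q err=%v\n", req, name, rev, ch, err)
	}
}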
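When a revision was requested, matchSnapDownloadURL compares it against the revision encoded at the end of each channel-map download URL, using the same pattern as getRevisionFromURL above. Exercising that extraction on its own:

package main

import (
	"fmt"
	"regexp"
	"strconv"
)

// revisionFromURL reads the trailing "<revision>.snap" number off a download URL.
func revisionFromURL(url string) (int, error) {
	m := regexp.MustCompile(`(\d+)\.snap$`).FindStringSubmatch(url)
	if len(m) < 2 {
		return 0, fmt.Errorf("could not determine revision from %s", url)
	}
	return strconv.Atoi(m[1])
}

func main() {
	rev, err := revisionFromURL("https://api.snapcraft.io/api/v1/snaps/download/TKebVGcPeDKoOqAmNmczU2oWLtsojKD5_249.snap")
	fmt.Println(rev, err) // 249 <nil>
}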
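For channel-based requests, splitChannel and isEligibleChannel treat a channel as "<track>/<risk>/<branch>" with track and branch optional and risk limited to stable, candidate, beta, and edge. The following standalone sketch reproduces those matching rules for the documented examples; it is intentionally simplified (it returns false rather than an error for an invalid requested risk) and is not the code under review.

package main

import (
	"fmt"
	"strings"
)

var validRisks = map[string]bool{"stable": true, "candidate": true, "beta": true, "edge": true}

// split mirrors splitChannel: "<risk>", "<risk>/<branch>", "<track>/<risk>", or "<track>/<risk>/<branch>".
func split(ch string) (track, risk, branch string) {
	parts := strings.SplitN(ch, "/", 3)
	switch {
	case validRisks[parts[0]] && len(parts) < 3:
		risk = parts[0]
		if len(parts) == 2 {
			branch = parts[1]
		}
	case len(parts) >= 2 && validRisks[parts[1]]:
		track, risk = parts[0], parts[1]
		if len(parts) == 3 {
			branch = parts[2]
		}
	}
	return
}

// eligible mirrors isEligibleChannel: a requested track and branch must match when present,
// and the risk must always match.
func eligible(candidate, request string) bool {
	cTrack, cRisk, cBranch := split(candidate)
	rTrack, rRisk, rBranch := split(request)
	if rRisk == "" {
		return false // invalid risk in the request
	}
	if rTrack != "" && cTrack != rTrack {
		return false
	}
	return cRisk == rRisk && (cBranch == rBranch || rBranch == "")
}

func main() {
	fmt.Println(eligible("3.2/stable", "stable"))                    // true
	fmt.Println(eligible("3.2/stable", "3.2/beta"))                  // false
	fmt.Println(eligible("3.2/beta", "stable"))                      // false
	fmt.Println(eligible("3.2/stable/fix-for-bug123", "3.2/stable")) // true
	fmt.Println(eligible("3.2/alpha", "alpha"))                      // false (alpha is not a valid risk)
}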
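When a revision is specified, the info request itself also changes: GetSnapDownloadURL adds a revision query parameter before sending it. A small sketch of that request shaping follows; the endpoint URL and the "16" device series value are assumptions standing in for the client's configured InfoAPIURL and defaultSeries, which are not shown in this diff.

package main

import (
	"fmt"
	"net/http"
)

func main() {
	req, err := http.NewRequest(http.MethodGet, "https://api.snapcraft.io/v2/snaps/info/etcd", nil)
	if err != nil {
		panic(err)
	}
	// pass the requested revision as a query parameter, as in GetSnapDownloadURL above
	q := req.URL.Query()
	q.Add("revision", fmt.Sprintf("%d", 249))
	req.URL.RawQuery = q.Encode()
	req.Header.Set("Snap-Device-Series", "16") // value assumed for illustration
	fmt.Println(req.URL.String())              // https://api.snapcraft.io/v2/snaps/info/etcd?revision=249
}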