Improve support for cataloging nix package relationships (#3837)

* add nix DB cataloger

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

* add derivation path to nix store pkg metadata

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

* go mod tidy

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

* allow for derivation path to be optional

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

* repin build image and disable syscall filtering

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

* bump storage capacity

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

* track nix derivation details on packages

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

* image fixture should have derivation examples

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

* address comments

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

---------

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>
This commit is contained in:
Alex Goodman 2025-05-05 11:35:13 -04:00 committed by GitHub
parent 7505a04aad
commit d47a6c3a6d
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
28 changed files with 5002 additions and 239 deletions

View File

@ -29,7 +29,8 @@ jobs:
Unit-Test:
# Note: changing this job name requires making the same update in the .github/workflows/release.yaml pipeline
name: "Unit tests"
runs-on: ubuntu-24.04
# we need more storage than what's on the default runner
runs-on: ubuntu-22.04-4core-16gb
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2

View File

@ -24,6 +24,7 @@ import (
"github.com/anchore/syft/syft/pkg/cataloger/java"
"github.com/anchore/syft/syft/pkg/cataloger/javascript"
"github.com/anchore/syft/syft/pkg/cataloger/kernel"
"github.com/anchore/syft/syft/pkg/cataloger/nix"
"github.com/anchore/syft/syft/pkg/cataloger/python"
"github.com/anchore/syft/syft/source"
)
@ -48,6 +49,7 @@ type Catalog struct {
Java javaConfig `yaml:"java" json:"java" mapstructure:"java"`
JavaScript javaScriptConfig `yaml:"javascript" json:"javascript" mapstructure:"javascript"`
LinuxKernel linuxKernelConfig `yaml:"linux-kernel" json:"linux-kernel" mapstructure:"linux-kernel"`
Nix nixConfig `yaml:"nix" json:"nix" mapstructure:"nix"`
Python pythonConfig `yaml:"python" json:"python" mapstructure:"python"`
// configuration for the source (the subject being analyzed)
@ -75,6 +77,7 @@ func DefaultCatalog() Catalog {
Package: defaultPackageConfig(),
License: defaultLicenseConfig(),
LinuxKernel: defaultLinuxKernelConfig(),
Nix: defaultNixConfig(),
Dotnet: defaultDotnetConfig(),
Golang: defaultGolangConfig(),
Java: defaultJavaConfig(),
@ -193,6 +196,8 @@ func (cfg Catalog) ToPackagesConfig() pkgcataloging.Config {
LinuxKernel: kernel.LinuxKernelCatalogerConfig{
CatalogModules: cfg.LinuxKernel.CatalogModules,
},
Nix: nix.DefaultConfig().
WithCaptureOwnedFiles(cfg.Nix.CaptureOwnedFiles),
Python: python.CatalogerConfig{
GuessUnpinnedRequirements: cfg.Python.GuessUnpinnedRequirements,
},

View File

@ -0,0 +1,25 @@
package options
import (
"github.com/anchore/clio"
"github.com/anchore/syft/syft/pkg/cataloger/nix"
)
// nixConfig holds the application-level (CLI/config-file) options for the nix cataloger.
type nixConfig struct {
	// CaptureOwnedFiles enables enumerating all files owned by packages found within Nix store paths.
	CaptureOwnedFiles bool `json:"capture-owned-files" yaml:"capture-owned-files" mapstructure:"capture-owned-files"`
}
// defaultNixConfig mirrors the nix cataloger's default configuration into the
// application options so the two stay in sync.
func defaultNixConfig() nixConfig {
	def := nix.DefaultConfig()
	return nixConfig{
		// use a keyed literal so adding fields to nixConfig cannot silently
		// misalign the positional initialization (flagged by vet-style checks)
		CaptureOwnedFiles: def.CaptureOwnedFiles,
	}
}
// compile-time check that nixConfig participates in CLI field descriptions
var _ interface {
	clio.FieldDescriber
} = (*nixConfig)(nil)

// DescribeFields registers a human-readable description for each configurable
// field, surfaced in CLI help output.
func (o *nixConfig) DescribeFields(descriptions clio.FieldDescriptionSet) {
	descriptions.Add(&o.CaptureOwnedFiles, `enumerate all files owned by packages found within Nix store paths`)
}

View File

@ -56,6 +56,7 @@ func TestAllPackageCatalogersReachableInTasks(t *testing.T) {
// not reachable since they are deprecated
"dotnet-portable-executable-cataloger",
"dotnet-deps-cataloger",
"nix-store-cataloger",
"php-pecl-serialized-cataloger",
// not reachable by design
"sbom-cataloger",

7
go.mod
View File

@ -19,6 +19,7 @@ require (
github.com/anchore/go-homedir v0.0.0-20250319154043-c29668562e4d
github.com/anchore/go-logger v0.0.0-20250318195838-07ae343dd722
github.com/anchore/go-macholibre v0.0.0-20220308212642-53e6d0aaf6fb
github.com/anchore/go-sync v0.0.0-20250326131806-4eda43a485b6
github.com/anchore/go-testutils v0.0.0-20200925183923-d5f45b0d3c04
github.com/anchore/go-version v1.2.2-0.20200701162849-18adb9c92b9b
github.com/anchore/packageurl-go v0.1.1-0.20250220190351-d62adb6e1115
@ -60,6 +61,7 @@ require (
github.com/magiconair/properties v1.8.10
github.com/mholt/archives v0.1.1
github.com/moby/sys/mountinfo v0.7.2
github.com/nix-community/go-nix v0.0.0-20250101154619-4bdde671e0a1
github.com/olekukonko/tablewriter v0.0.5
github.com/opencontainers/go-digest v1.0.0
github.com/pelletier/go-toml v1.9.5
@ -175,6 +177,7 @@ require (
github.com/mattn/go-localereader v0.0.2-0.20220822084749-2491eb6c1c75 // indirect
github.com/mattn/go-runewidth v0.0.16 // indirect
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect
github.com/minio/minlz v1.0.0 // indirect
github.com/mitchellh/copystructure v1.2.0 // indirect
github.com/mitchellh/go-homedir v1.1.0 // indirect
github.com/mitchellh/go-wordwrap v1.0.1 // indirect
@ -256,10 +259,6 @@ require (
modernc.org/memory v1.9.1 // indirect
)
require github.com/anchore/go-sync v0.0.0-20250326131806-4eda43a485b6
require github.com/minio/minlz v1.0.0 // indirect
retract (
v0.53.2
v0.53.1 // Published accidentally with incorrect license in dependencies

4
go.sum
View File

@ -654,6 +654,10 @@ github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4=
github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
github.com/nix-community/go-nix v0.0.0-20250101154619-4bdde671e0a1 h1:kpt9ZfKcm+EDG4s40hMwE//d5SBgDjUOrITReV2u4aA=
github.com/nix-community/go-nix v0.0.0-20250101154619-4bdde671e0a1/go.mod h1:qgCw4bBKZX8qMgGeEZzGFVT3notl42dBjNqO2jut0M0=
github.com/nsf/jsondiff v0.0.0-20210926074059-1e845ec5d249 h1:NHrXEjTNQY7P0Zfx1aMrNhpgxHmow66XQtm0aQLY0AE=
github.com/nsf/jsondiff v0.0.0-20210926074059-1e845ec5d249/go.mod h1:mpRZBD8SJ55OIICQ3iWH0Yz3cjzA61JdqMLoWXeB2+8=
github.com/nwaples/rardecode v1.1.3 h1:cWCaZwfM5H7nAD6PyEdcVnczzV8i/JtotnyW/dD9lEc=
github.com/nwaples/rardecode v1.1.3/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0=
github.com/nwaples/rardecode/v2 v2.1.0 h1:JQl9ZoBPDy+nIZGb1mx8+anfHp/LV3NE2MjMiv0ct/U=

View File

@ -3,5 +3,5 @@ package internal
const (
// JSONSchemaVersion is the current schema version output by the JSON encoder
// This is roughly following the "SchemaVer" guidelines for versioning the JSON schema. Please see schema/json/README.md for details on how to increment.
JSONSchemaVersion = "16.0.27"
JSONSchemaVersion = "16.0.28"
)

View File

@ -137,7 +137,12 @@ func DefaultPackageTaskFactories() Factories {
pkgcataloging.DirectoryTag, pkgcataloging.InstalledTag, pkgcataloging.ImageTag, pkgcataloging.LanguageTag, Java, Maven,
),
newSimplePackageTaskFactory(java.NewNativeImageCataloger, pkgcataloging.DirectoryTag, pkgcataloging.InstalledTag, pkgcataloging.ImageTag, pkgcataloging.LanguageTag, Java),
newSimplePackageTaskFactory(nix.NewStoreCataloger, pkgcataloging.DirectoryTag, pkgcataloging.InstalledTag, pkgcataloging.ImageTag, pkgcataloging.LanguageTag, "nix"),
newPackageTaskFactory(
func(cfg CatalogingFactoryConfig) pkg.Cataloger {
return nix.NewCataloger(cfg.PackagesConfig.Nix)
},
pkgcataloging.DirectoryTag, pkgcataloging.InstalledTag, pkgcataloging.ImageTag, pkgcataloging.LanguageTag, "nix",
),
newSimplePackageTaskFactory(lua.NewPackageCataloger, pkgcataloging.DirectoryTag, pkgcataloging.InstalledTag, pkgcataloging.ImageTag, pkgcataloging.LanguageTag, "lua"),
// other package catalogers ///////////////////////////////////////////////////////////////////////////
@ -167,5 +172,7 @@ func DefaultPackageTaskFactories() Factories {
newSimplePackageTaskFactory(dotnet.NewDotnetDepsCataloger, pkgcataloging.DeprecatedTag), // TODO: remove in syft v2.0
newSimplePackageTaskFactory(dotnet.NewDotnetPortableExecutableCataloger, pkgcataloging.DeprecatedTag), // TODO: remove in syft v2.0
newSimplePackageTaskFactory(php.NewPeclCataloger, pkgcataloging.DeprecatedTag), // TODO: remove in syft v2.0
newSimplePackageTaskFactory(nix.NewStoreCataloger, pkgcataloging.DeprecatedTag), // TODO: remove in syft v2.0
}
}

File diff suppressed because it is too large Load Diff

View File

@ -1,6 +1,6 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "anchore.io/schema/syft/json/16.0.27/document",
"$id": "anchore.io/schema/syft/json/16.0.28/document",
"$ref": "#/$defs/Document",
"$defs": {
"AlpmDbEntry": {
@ -1660,14 +1660,57 @@
"kb"
]
},
"NixDerivation": {
"properties": {
"path": {
"type": "string"
},
"system": {
"type": "string"
},
"inputDerivations": {
"items": {
"$ref": "#/$defs/NixDerivationReference"
},
"type": "array"
},
"inputSources": {
"items": {
"type": "string"
},
"type": "array"
}
},
"type": "object"
},
"NixDerivationReference": {
"properties": {
"path": {
"type": "string"
},
"outputs": {
"items": {
"type": "string"
},
"type": "array"
}
},
"type": "object"
},
"NixStoreEntry": {
"properties": {
"outputHash": {
"path": {
"type": "string"
},
"output": {
"type": "string"
},
"outputHash": {
"type": "string"
},
"derivation": {
"$ref": "#/$defs/NixDerivation"
},
"files": {
"items": {
"type": "string"
@ -1677,8 +1720,7 @@
},
"type": "object",
"required": [
"outputHash",
"files"
"outputHash"
]
},
"OpamPackage": {

View File

@ -7,6 +7,7 @@ import (
"github.com/anchore/syft/syft/pkg/cataloger/java"
"github.com/anchore/syft/syft/pkg/cataloger/javascript"
"github.com/anchore/syft/syft/pkg/cataloger/kernel"
"github.com/anchore/syft/syft/pkg/cataloger/nix"
"github.com/anchore/syft/syft/pkg/cataloger/python"
)
@ -17,6 +18,7 @@ type Config struct {
JavaArchive java.ArchiveCatalogerConfig `yaml:"java-archive" json:"java-archive" mapstructure:"java-archive"`
JavaScript javascript.CatalogerConfig `yaml:"javascript" json:"javascript" mapstructure:"javascript"`
LinuxKernel kernel.LinuxKernelCatalogerConfig `yaml:"linux-kernel" json:"linux-kernel" mapstructure:"linux-kernel"`
Nix nix.Config `yaml:"nix" json:"nix" mapstructure:"nix"`
Python python.CatalogerConfig `yaml:"python" json:"python" mapstructure:"python"`
}
@ -25,12 +27,18 @@ func DefaultConfig() Config {
Binary: binary.DefaultClassifierCatalogerConfig(),
Dotnet: dotnet.DefaultCatalogerConfig(),
Golang: golang.DefaultCatalogerConfig(),
LinuxKernel: kernel.DefaultLinuxKernelCatalogerConfig(),
Python: python.DefaultCatalogerConfig(),
JavaArchive: java.DefaultArchiveCatalogerConfig(),
LinuxKernel: kernel.DefaultLinuxKernelCatalogerConfig(),
Nix: nix.DefaultConfig(),
Python: python.DefaultCatalogerConfig(),
}
}
func (c Config) WithNixConfig(cfg nix.Config) Config {
c.Nix = cfg
return c
}
func (c Config) WithBinaryConfig(cfg binary.ClassifierCatalogerConfig) Config {
c.Binary = cfg
return c

View File

@ -1,104 +1,57 @@
/*
Package nix provides a concrete Cataloger implementation for packages within the Nix packaging ecosystem.
*/
package nix
import (
"context"
"fmt"
"github.com/bmatcuk/doublestar/v4"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/pkg"
)
const catalogerName = "nix-store-cataloger"
// storeCataloger finds package outputs installed in the Nix store location (/nix/store/*).
type storeCataloger struct{}
func NewStoreCataloger() pkg.Cataloger {
return &storeCataloger{}
// Config specifies the behavior of the nix cataloger.
type Config struct {
	// CaptureOwnedFiles enables enumerating all files owned by packages found within Nix store paths.
	CaptureOwnedFiles bool `json:"capture-owned-files" yaml:"capture-owned-files" mapstructure:"capture-owned-files"`
}
func (c *storeCataloger) Name() string {
return catalogerName
// WithCaptureOwnedFiles returns a copy of the config with the
// capture-owned-files behavior set to the given value.
func (c Config) WithCaptureOwnedFiles(set bool) Config {
	c.CaptureOwnedFiles = set
	return c
}
func (c *storeCataloger) Catalog(ctx context.Context, resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) {
// we want to search for only directories, which isn't possible via the stereoscope API, so we need to apply the glob manually on all returned paths
var pkgs []pkg.Package
var filesByPath = make(map[string]*file.LocationSet)
ctx, cancel := context.WithCancel(ctx)
defer cancel()
for location := range resolver.AllLocations(ctx) {
matchesStorePath, err := doublestar.Match("**/nix/store/*", location.RealPath)
if err != nil {
return nil, nil, fmt.Errorf("failed to match nix store path: %w", err)
}
parentStorePath := findParentNixStorePath(location.RealPath)
if parentStorePath != "" {
if _, ok := filesByPath[parentStorePath]; !ok {
s := file.NewLocationSet()
filesByPath[parentStorePath] = &s
}
filesByPath[parentStorePath].Add(location)
}
if !matchesStorePath {
continue
}
storePath := parseNixStorePath(location.RealPath)
if storePath == nil || !storePath.isValidPackage() {
continue
}
p := newNixStorePackage(*storePath, location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation))
pkgs = append(pkgs, p)
func DefaultConfig() Config {
return Config{
CaptureOwnedFiles: false,
}
// add file sets to packages
for i := range pkgs {
p := &pkgs[i]
locations := p.Locations.ToSlice()
if len(locations) == 0 {
log.WithFields("package", p.Name).Debug("nix package has no evidence locations associated")
continue
}
parentStorePath := locations[0].RealPath
files, ok := filesByPath[parentStorePath]
if !ok {
log.WithFields("path", parentStorePath, "nix-store-path", parentStorePath).Debug("found a nix store file for a non-existent package")
continue
}
appendFiles(p, files.ToSlice()...)
}
return pkgs, nil, nil
}
func appendFiles(p *pkg.Package, location ...file.Location) {
metadata, ok := p.Metadata.(pkg.NixStoreEntry)
if !ok {
log.WithFields("package", p.Name).Debug("nix package metadata missing")
return
}
for _, l := range location {
metadata.Files = append(metadata.Files, l.RealPath)
}
if metadata.Files == nil {
// note: we always have an allocated collection for output
metadata.Files = []string{}
}
p.Metadata = metadata
p.SetID()
// cataloger finds package outputs installed in the Nix store location (/nix/store/*) or in the internal nix database (/nix/var/nix/db/db.sqlite).
type cataloger struct {
	dbParser       dbCataloger    // catalogs from the nix sqlite DB (preferred, more detailed source)
	storeCataloger storeCataloger // fallback that parses /nix/store/* paths directly
}
// NewCataloger returns a nix cataloger that prefers the nix database when
// present and otherwise falls back to parsing nix store paths.
func NewCataloger(cfg Config) pkg.Cataloger {
	const catalogerName = "nix-cataloger"
	dbC := newDBCataloger(cfg, catalogerName)
	storeC := newStoreCataloger(cfg, catalogerName)
	return cataloger{
		dbParser:       dbC,
		storeCataloger: storeC,
	}
}
// Name returns the cataloger's name (shared by both underlying catalogers).
func (c cataloger) Name() string {
	return c.dbParser.catalogerName
}
// Catalog discovers nix packages, preferring the nix DB (recorded by actions
// taken by nix tooling) and falling back to the less detailed store path
// cataloger only when the DB yields no packages.
func (c cataloger) Catalog(ctx context.Context, resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) {
	dbPkgs, dbRels, err := c.dbParser.catalog(resolver)
	if err != nil {
		return nil, nil, fmt.Errorf("failed to catalog nix packages from database: %w", err)
	}
	if len(dbPkgs) == 0 {
		// no DB results; use the store path cataloger instead
		return c.storeCataloger.Catalog(ctx, resolver)
	}
	return dbPkgs, dbRels, nil
}

View File

@ -3,13 +3,145 @@ package nix
import (
"testing"
"github.com/google/go-cmp/cmp"
"github.com/scylladb/go-set/strset"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
_ "modernc.org/sqlite"
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest"
)
func TestCataloger_Catalog(t *testing.T) {
func TestCataloger_Image(t *testing.T) {
tests := []struct {
fixture string
wantPkgs []string
wantRel []string
}{
{
// $ nix-store -q --tree $(which jq)
//
// /nix/store/nzwfgsp28vgxv7n2gl5fxqkca9awh4dz-jq-1.6-bin3.4
// ├───/nix/store/02mqs1by2vab9yzw0qc4j7463w78p3ps-glibc-2.37-8
// │ ├───/nix/store/cw8fpl8r1x9rmaqj55fwbfnnrgw7b40k-libidn2-2.3.4
// │ │ ├───/nix/store/h1ysk4vvw48winwmh38rvnsj0dlsz7c1-libunistring-1.1
// │ │ │ └───/nix/store/h1ysk4vvw48winwmh38rvnsj0dlsz7c1-libunistring-1.1 [...]
// │ │ └───/nix/store/cw8fpl8r1x9rmaqj55fwbfnnrgw7b40k-libidn2-2.3.4 [...]
// │ ├───/nix/store/fmz62d844wf4blb11k21f4m0q6n6hdfp-xgcc-12.3.0-libgcc
// │ └───/nix/store/02mqs1by2vab9yzw0qc4j7463w78p3ps-glibc-2.37-8 [...]
// ├───/nix/store/mzj90j6m3c3a1vv8j9pl920f98i2yz9q-oniguruma-6.9.8-lib
// │ ├───/nix/store/02mqs1by2vab9yzw0qc4j7463w78p3ps-glibc-2.37-8 [...]
// │ └───/nix/store/mzj90j6m3c3a1vv8j9pl920f98i2yz9q-oniguruma-6.9.8-lib [...]
// └───/nix/store/1x3s2v9wc9m302cspfqcn2iwar0b5w99-jq-1.6-lib
// ├───/nix/store/02mqs1by2vab9yzw0qc4j7463w78p3ps-glibc-2.37-8 [...]
// ├───/nix/store/mzj90j6m3c3a1vv8j9pl920f98i2yz9q-oniguruma-6.9.8-lib [...]
// └───/nix/store/1x3s2v9wc9m302cspfqcn2iwar0b5w99-jq-1.6-lib [...]
fixture: "image-nixos-jq-pkg-db",
wantPkgs: []string{
"glibc @ 2.37-8 (/nix/var/nix/db/db.sqlite)",
"jq @ 1.6 (/nix/var/nix/db/db.sqlite)", // lib output
"jq @ 1.6 (/nix/var/nix/db/db.sqlite)", // bin output
"libidn2 @ 2.3.4 (/nix/var/nix/db/db.sqlite)",
"libunistring @ 1.1 (/nix/var/nix/db/db.sqlite)",
"oniguruma @ 6.9.8 (/nix/var/nix/db/db.sqlite)",
"xgcc @ 12.3.0 (/nix/var/nix/db/db.sqlite)",
},
wantRel: []string{
// used the DB cataloger, thus has a complete dependency graph
"glibc @ 2.37-8 (/nix/var/nix/db/db.sqlite) [dependency-of] jq @ 1.6 (/nix/var/nix/db/db.sqlite)", // jq bin output
"glibc @ 2.37-8 (/nix/var/nix/db/db.sqlite) [dependency-of] jq @ 1.6 (/nix/var/nix/db/db.sqlite)", // jq lib output
"glibc @ 2.37-8 (/nix/var/nix/db/db.sqlite) [dependency-of] oniguruma @ 6.9.8 (/nix/var/nix/db/db.sqlite)",
"jq @ 1.6 (/nix/var/nix/db/db.sqlite) [dependency-of] jq @ 1.6 (/nix/var/nix/db/db.sqlite)", // jq bin to lib output dependency
"libidn2 @ 2.3.4 (/nix/var/nix/db/db.sqlite) [dependency-of] glibc @ 2.37-8 (/nix/var/nix/db/db.sqlite)",
"libunistring @ 1.1 (/nix/var/nix/db/db.sqlite) [dependency-of] libidn2 @ 2.3.4 (/nix/var/nix/db/db.sqlite)",
"oniguruma @ 6.9.8 (/nix/var/nix/db/db.sqlite) [dependency-of] jq @ 1.6 (/nix/var/nix/db/db.sqlite)", // jq bin output
"oniguruma @ 6.9.8 (/nix/var/nix/db/db.sqlite) [dependency-of] jq @ 1.6 (/nix/var/nix/db/db.sqlite)", // jq lib output
"xgcc @ 12.3.0 (/nix/var/nix/db/db.sqlite) [dependency-of] glibc @ 2.37-8 (/nix/var/nix/db/db.sqlite)",
},
},
}
for _, tt := range tests {
t.Run(tt.fixture, func(t *testing.T) {
pkgtest.NewCatalogTester().
WithImageResolver(t, tt.fixture).
ExpectsPackageStrings(tt.wantPkgs).
ExpectsRelationshipStrings(tt.wantRel).
TestCataloger(t, NewCataloger(DefaultConfig()))
})
}
}
func TestCataloger_Image_FilesListing(t *testing.T) {
tests := []struct {
fixture string
wantPkgFiles map[string][]string
}{
{
fixture: "image-nixos-jq-pkg-db",
wantPkgFiles: map[string][]string{
"libidn2": {
"/nix/store/k8ivghpggjrq1n49xp8sj116i4sh8lia-libidn2-2.3.4/lib/libidn2.la",
"/nix/store/k8ivghpggjrq1n49xp8sj116i4sh8lia-libidn2-2.3.4/lib/libidn2.so.0.3.8",
"/nix/store/k8ivghpggjrq1n49xp8sj116i4sh8lia-libidn2-2.3.4/share/locale/cs/LC_MESSAGES/libidn2.mo",
"/nix/store/k8ivghpggjrq1n49xp8sj116i4sh8lia-libidn2-2.3.4/share/locale/da/LC_MESSAGES/libidn2.mo",
"/nix/store/k8ivghpggjrq1n49xp8sj116i4sh8lia-libidn2-2.3.4/share/locale/de/LC_MESSAGES/libidn2.mo",
"/nix/store/k8ivghpggjrq1n49xp8sj116i4sh8lia-libidn2-2.3.4/share/locale/eo/LC_MESSAGES/libidn2.mo",
"/nix/store/k8ivghpggjrq1n49xp8sj116i4sh8lia-libidn2-2.3.4/share/locale/es/LC_MESSAGES/libidn2.mo",
"/nix/store/k8ivghpggjrq1n49xp8sj116i4sh8lia-libidn2-2.3.4/share/locale/fi/LC_MESSAGES/libidn2.mo",
"/nix/store/k8ivghpggjrq1n49xp8sj116i4sh8lia-libidn2-2.3.4/share/locale/fr/LC_MESSAGES/libidn2.mo",
"/nix/store/k8ivghpggjrq1n49xp8sj116i4sh8lia-libidn2-2.3.4/share/locale/fur/LC_MESSAGES/libidn2.mo",
"/nix/store/k8ivghpggjrq1n49xp8sj116i4sh8lia-libidn2-2.3.4/share/locale/hr/LC_MESSAGES/libidn2.mo",
"/nix/store/k8ivghpggjrq1n49xp8sj116i4sh8lia-libidn2-2.3.4/share/locale/hu/LC_MESSAGES/libidn2.mo",
"/nix/store/k8ivghpggjrq1n49xp8sj116i4sh8lia-libidn2-2.3.4/share/locale/id/LC_MESSAGES/libidn2.mo",
"/nix/store/k8ivghpggjrq1n49xp8sj116i4sh8lia-libidn2-2.3.4/share/locale/it/LC_MESSAGES/libidn2.mo",
"/nix/store/k8ivghpggjrq1n49xp8sj116i4sh8lia-libidn2-2.3.4/share/locale/ja/LC_MESSAGES/libidn2.mo",
"/nix/store/k8ivghpggjrq1n49xp8sj116i4sh8lia-libidn2-2.3.4/share/locale/ka/LC_MESSAGES/libidn2.mo",
"/nix/store/k8ivghpggjrq1n49xp8sj116i4sh8lia-libidn2-2.3.4/share/locale/ko/LC_MESSAGES/libidn2.mo",
"/nix/store/k8ivghpggjrq1n49xp8sj116i4sh8lia-libidn2-2.3.4/share/locale/nl/LC_MESSAGES/libidn2.mo",
"/nix/store/k8ivghpggjrq1n49xp8sj116i4sh8lia-libidn2-2.3.4/share/locale/pl/LC_MESSAGES/libidn2.mo",
"/nix/store/k8ivghpggjrq1n49xp8sj116i4sh8lia-libidn2-2.3.4/share/locale/pt_BR/LC_MESSAGES/libidn2.mo",
"/nix/store/k8ivghpggjrq1n49xp8sj116i4sh8lia-libidn2-2.3.4/share/locale/ro/LC_MESSAGES/libidn2.mo",
"/nix/store/k8ivghpggjrq1n49xp8sj116i4sh8lia-libidn2-2.3.4/share/locale/ru/LC_MESSAGES/libidn2.mo",
"/nix/store/k8ivghpggjrq1n49xp8sj116i4sh8lia-libidn2-2.3.4/share/locale/sr/LC_MESSAGES/libidn2.mo",
"/nix/store/k8ivghpggjrq1n49xp8sj116i4sh8lia-libidn2-2.3.4/share/locale/sv/LC_MESSAGES/libidn2.mo",
"/nix/store/k8ivghpggjrq1n49xp8sj116i4sh8lia-libidn2-2.3.4/share/locale/uk/LC_MESSAGES/libidn2.mo",
"/nix/store/k8ivghpggjrq1n49xp8sj116i4sh8lia-libidn2-2.3.4/share/locale/vi/LC_MESSAGES/libidn2.mo",
"/nix/store/k8ivghpggjrq1n49xp8sj116i4sh8lia-libidn2-2.3.4/share/locale/zh_CN/LC_MESSAGES/libidn2.mo",
},
},
},
}
for _, tt := range tests {
t.Run(tt.fixture, func(t *testing.T) {
pkgtest.NewCatalogTester().
WithImageResolver(t, tt.fixture).
ExpectsAssertion(func(t *testing.T, pkgs []pkg.Package, relationships []artifact.Relationship) {
found := strset.New()
for _, p := range pkgs {
if files, ok := tt.wantPkgFiles[p.Name]; ok {
m, ok := p.Metadata.(pkg.NixStoreEntry)
require.True(t, ok)
if d := cmp.Diff(files, m.Files); d != "" {
t.Errorf("unexpected files for package %q: %s", p.Name, d)
}
found.Add(p.Name)
}
}
expected := strset.New()
for n := range tt.wantPkgFiles {
expected.Add(n)
}
assert.ElementsMatch(t, expected.List(), found.List())
}).
TestCataloger(t, NewCataloger(Config{CaptureOwnedFiles: true}))
})
}
}
func TestCataloger_Directory(t *testing.T) {
tests := []struct {
fixture string
@ -20,22 +152,95 @@ func TestCataloger_Catalog(t *testing.T) {
fixture: "test-fixtures/fixture-1",
wantPkgs: []pkg.Package{
{
Name: "glibc",
Version: "2.34-210",
PURL: "pkg:nix/glibc@2.34-210?output=bin&outputhash=h0cnbmfcn93xm5dg2x27ixhag1cwndga",
Locations: file.NewLocationSet(file.NewLocation("nix/store/h0cnbmfcn93xm5dg2x27ixhag1cwndga-glibc-2.34-210-bin")),
FoundBy: catalogerName,
Type: pkg.NixPkg,
Name: "glibc",
Version: "2.34-210",
PURL: "pkg:nix/glibc@2.34-210?drvpath=5av396z8xa13jg89g9jws145c0k26k2x-glibc-2.34-210.drv&output=bin&outputhash=h0cnbmfcn93xm5dg2x27ixhag1cwndga",
Locations: file.NewLocationSet(
file.NewLocation("nix/store/h0cnbmfcn93xm5dg2x27ixhag1cwndga-glibc-2.34-210-bin").WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
file.NewLocation("nix/store/5av396z8xa13jg89g9jws145c0k26k2x-glibc-2.34-210.drv").WithAnnotation(pkg.EvidenceAnnotationKey, pkg.SupportingEvidenceAnnotation),
),
FoundBy: "nix-cataloger",
Type: pkg.NixPkg,
Metadata: pkg.NixStoreEntry{
Path: "/nix/store/h0cnbmfcn93xm5dg2x27ixhag1cwndga-glibc-2.34-210-bin",
Derivation: pkg.NixDerivation{
Path: "nix/store/5av396z8xa13jg89g9jws145c0k26k2x-glibc-2.34-210.drv",
System: "aarch64-linux",
InputDerivations: []pkg.NixDerivationReference{
{
Path: "/nix/store/1zi0k7y01rhqr2gfqb42if0icswg65sj-locale-C.diff.drv",
Outputs: []string{"out"},
},
{
Path: "/nix/store/45j86ggi8mlpfslcrgvjf7m6phia21fp-raw.drv",
Outputs: []string{"out"},
},
{
Path: "/nix/store/4fnfsd9sc7bam6886hwyaprdsww66dg3-bison-3.8.2.drv",
Outputs: []string{"out"},
},
{
Path: "/nix/store/51azdrrvcqrk2hbky7ryphlwd99yz25d-linux-headers-5.18.drv",
Outputs: []string{"out"},
},
{
Path: "/nix/store/67s0qc21gyarmdwc181bqmjc3qzv8zkz-libidn2-2.3.2.drv",
Outputs: []string{"out"},
},
{
Path: "/nix/store/9rhliwskh3mrrs5nfzgz0x6wrccyfg7k-bootstrap-stage0-glibc-bootstrap.drv",
Outputs: []string{"out"},
},
{
Path: "/nix/store/cl1wcw2v1ifzjlkzi50h32a6lms9m25s-binutils-2.38.drv",
Outputs: []string{"out"},
},
{
Path: "/nix/store/ghjc8bkfk8lh53z14mk2nk7h059zh7vx-python3-minimal-3.10.5.drv",
Outputs: []string{"out"},
},
{
Path: "/nix/store/k3786wfzw637r7sylccdmm92saqp73d8-glibc-2.34.tar.xz.drv",
Outputs: []string{"out"},
},
{
Path: "/nix/store/l5zr5m1agvvnic49fg6qc44g5fgj3la1-glibc-reinstate-prlimit64-fallback.patch?id=eab07e78b691ae7866267fc04d31c7c3ad6b0eeb.drv",
Outputs: []string{"out"},
},
{
Path: "/nix/store/mf5kz6d01ab8h0rswzyr04mbcd6g5x9n-bootstrap-stage2-stdenv-linux.drv",
Outputs: []string{"out"},
},
{
Path: "/nix/store/nd1zy67vp028707pbh466qhrfqh4cpq6-bootstrap-stage2-gcc-wrapper-.drv",
Outputs: []string{"out"},
},
{
Path: "/nix/store/ra77ww7p2xx8jh8n4m9vmj6wc8wxijdb-bootstrap-tools.drv",
Outputs: []string{"out"},
},
{
Path: "/nix/store/wlldapf5bg58kivw520ll5bw0fmlaid7-raw.drv",
Outputs: []string{"out"},
},
},
InputSources: []string{
"/nix/store/001gp43bjqzx60cg345n2slzg7131za8-nix-nss-open-files.patch",
"/nix/store/7kw224hdyxd7115lrqh9a4dv2x8msq2s-fix-x64-abi.patch",
"/nix/store/8haph3ng4mgsqr6p4024vj8k6kg3mqc4-nix-locale-archive.patch",
"/nix/store/95hp6hs9g73h93safadb8x6vajyqkv6q-0001-Revert-Remove-all-usage-of-BASH-or-BASH-in-installed.patch",
"/nix/store/9krlzvny65gdc8s7kpb6lkx8cd02c25b-default-builder.sh",
"/nix/store/b1w7zbvm39ff1i52iyjggyvw2rdxz104-dont-use-system-ld-so-cache.patch",
"/nix/store/ikmqczy0y20n04a2b8qfflzwihv8139g-separate-debug-info.sh",
"/nix/store/mgx19wbmgrh3rblbxhs6vi47sha15n11-2.34-master.patch.gz",
"/nix/store/mnglr8rr7nl444h7p50ysyq8qd0fm1lm-dont-use-system-ld-so-preload.patch",
"/nix/store/xkd50xxii6k7l1kmw4l5x6xzbhamcs87-allow-kernel-2.6.32.patch",
"/nix/store/za0pg7fmysrcwrqcal26fnmzw6vycgdn-fix_path_attribute_in_getconf.patch",
},
},
OutputHash: "h0cnbmfcn93xm5dg2x27ixhag1cwndga",
Output: "bin",
Files: []string{
"nix/store/h0cnbmfcn93xm5dg2x27ixhag1cwndga-glibc-2.34-210-bin/lib",
"nix/store/h0cnbmfcn93xm5dg2x27ixhag1cwndga-glibc-2.34-210-bin/lib/glibc.so",
"nix/store/h0cnbmfcn93xm5dg2x27ixhag1cwndga-glibc-2.34-210-bin/share",
"nix/store/h0cnbmfcn93xm5dg2x27ixhag1cwndga-glibc-2.34-210-bin/share/man",
"nix/store/h0cnbmfcn93xm5dg2x27ixhag1cwndga-glibc-2.34-210-bin/share/man/glibc.1",
},
Files: nil, // default cataloger configure does not capture owned files
},
},
},
@ -43,12 +248,10 @@ func TestCataloger_Catalog(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.fixture, func(t *testing.T) {
c := NewStoreCataloger()
pkgtest.NewCatalogTester().
FromDirectory(t, tt.fixture).
Expects(tt.wantPkgs, tt.wantRel).
TestCataloger(t, c)
TestCataloger(t, NewCataloger(DefaultConfig()))
})
}
}

View File

@ -0,0 +1,138 @@
package nix
import (
"fmt"
"io"
"path"
"strconv"
"strings"
"github.com/anchore/syft/internal"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/internal/unknown"
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/pkg"
)
// defaultSchema is the nix database schema version assumed when the on-disk
// schema file is missing or unreadable.
const defaultSchema = 10

// dbProcessor parses a single nix database file (at dbLocation) into packages and relationships.
type dbProcessor func(config Config, dbLocation file.Location, resolver file.Resolver, catalogerName string) ([]pkg.Package, []artifact.Relationship, error)

// dbCataloger catalogs nix packages from the internal nix database (/nix/var/nix/db/db.sqlite).
type dbCataloger struct {
	config          Config
	schemaProcessor map[int]dbProcessor // schema version -> processor able to parse that version
	catalogerName   string
}
// newDBCataloger creates a dbCataloger wired with all supported schema processors.
func newDBCataloger(cfg Config, catalogerName string) dbCataloger {
	processors := map[int]dbProcessor{
		10: processV10DB,
	}
	return dbCataloger{
		config:          cfg,
		catalogerName:   catalogerName,
		schemaProcessor: processors,
	}
}
// dbPackageEntry is an intermediate representation of a single ValidPaths row
// from the nix database, joined with any derivation information found for it.
type dbPackageEntry struct {
	ID    int // ValidPaths.id
	DrvID int // NOTE(review): presumably the row id of the deriver — populated elsewhere; confirm against the derivation-output extraction
	nixStorePath
	DeriverPath string // path to the .drv file that produced this store path (may be empty)
	*derivationFile
	Location *file.Location // the nix DB location this entry was read from
	Files    []string       // files owned by this store path (populated only when CaptureOwnedFiles is enabled)
}
// catalog discovers nix packages by parsing every nix database the resolver
// can find. Finding no database at all is not an error (returns empty results).
// Per-database failures are accumulated as "unknown" errors rather than
// aborting the whole catalog.
func (c dbCataloger) catalog(resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) {
	locations, err := resolver.FilesByGlob("**/nix/var/nix/db/db.sqlite")
	if err != nil {
		return nil, nil, fmt.Errorf("failed to find Nix database: %w", err)
	}
	if len(locations) == 0 {
		return nil, nil, nil
	}

	var (
		allPkgs []pkg.Package
		allRels []artifact.Relationship
		errs    error
	)
	for _, loc := range locations {
		processor, schema := c.selectDBParser(loc, resolver)
		if processor == nil {
			errs = unknown.Appendf(errs, loc.Coordinates, "unsupported Nix database schema for schema=%d at %q", schema, loc.RealPath)
			continue
		}
		ps, rs, err := processor(c.config, loc, resolver, c.catalogerName)
		if err != nil {
			errs = unknown.Append(errs, loc.Coordinates, err)
			continue
		}
		allPkgs = append(allPkgs, ps...)
		allRels = append(allRels, rs...)
	}
	return allPkgs, allRels, errs
}
// selectDBParser determines which schema processor to use for the given
// database by reading the sibling "schema" file next to the DB. Any failure to
// locate, read, or parse the schema file falls back to the default schema
// processor (with the schema reported as 0). An unsupported-but-parsed schema
// is mapped to the closest supported schema. Returns the processor (possibly
// nil when no processors are registered) and the detected schema version.
func (c dbCataloger) selectDBParser(dbLocation file.Location, resolver file.Resolver) (dbProcessor, int) {
	loc := resolver.RelativeFileByPath(dbLocation, path.Join(path.Dir(dbLocation.RealPath), "schema"))
	if loc == nil {
		log.WithFields("path", dbLocation.RealPath).Tracef("failed to detect Nix database schema, assuming %d", defaultSchema)
		return c.schemaProcessor[defaultSchema], 0
	}

	schemaContents, err := resolver.FileContentsByLocation(*loc)
	defer internal.CloseAndLogError(schemaContents, loc.RealPath)
	if err != nil {
		log.WithFields("path", loc.RealPath).Tracef("failed to open Nix database schema file, assuming %d", defaultSchema)
		return c.schemaProcessor[defaultSchema], 0
	}

	contents, err := io.ReadAll(schemaContents)
	if err != nil {
		log.WithFields("path", loc.RealPath).Tracef("failed to read Nix database schema file, assuming %d", defaultSchema)
		return c.schemaProcessor[defaultSchema], 0
	}

	schema, err := strconv.Atoi(strings.TrimSpace(string(contents)))
	if err != nil {
		log.WithFields("path", loc.RealPath).Tracef("failed to parse Nix database schema file, assuming %d", defaultSchema)
		return c.schemaProcessor[defaultSchema], 0
	}

	processor := c.schemaProcessor[schema]
	if processor == nil {
		closestSchema := c.findClosestSchema(schema)
		if closestSchema == 0 {
			// bugfix: previously this reassigned schema and still looked up
			// schemaProcessor[0], returning a nil processor even though a
			// default processor exists; fall back to the default schema instead
			closestSchema = defaultSchema
		}
		processor = c.schemaProcessor[closestSchema]
		log.WithFields("path", loc.RealPath).Tracef("unsupported Nix database schema (%d), treating as closest available schema (%d)", schema, closestSchema)
	}

	return processor, schema
}
// findClosestSchema returns the registered schema version nearest to the given
// version (exact matches returned immediately), or 0 when no schema processors
// are registered.
func (c dbCataloger) findClosestSchema(got int) int {
	var closest int
	closestDiff := -1 // -1 indicates no candidate has been considered yet
	for schema := range c.schemaProcessor {
		if schema == got {
			return schema
		}
		diff := schema - got
		if diff < 0 {
			diff = -diff
		}
		// bugfix: closestDiff was previously initialized to 0, so
		// "diff < closestDiff" was never true and this function always
		// returned 0 for any non-exact match, defeating the closest-schema
		// fallback entirely
		if closestDiff == -1 || diff < closestDiff {
			closestDiff = diff
			closest = schema
		}
	}
	return closest
}

View File

@ -0,0 +1,249 @@
package nix
import (
"database/sql"
"fmt"
"io"
"os"
"github.com/anchore/syft/internal"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/pkg"
)
// compile-time proof that processV10DB satisfies the dbProcessor contract
var _ dbProcessor = processV10DB

// processV10DB catalogs packages and relationships from a schema v10 nix
// database. Since the resolver only exposes the DB as a stream, the contents
// are copied to a temporary file on disk so the sqlite driver can open it; the
// temp file is removed when processing completes.
func processV10DB(config Config, dbLocation file.Location, resolver file.Resolver, catalogerName string) ([]pkg.Package, []artifact.Relationship, error) {
	dbContents, err := resolver.FileContentsByLocation(dbLocation)
	// note: deferred before the error check — assumes CloseAndLogError tolerates a nil reader
	defer internal.CloseAndLogError(dbContents, dbLocation.RealPath)
	if err != nil {
		return nil, nil, fmt.Errorf("unable to read Nix database: %w", err)
	}

	tempDB, err := createTempDB(dbContents)
	if err != nil {
		return nil, nil, fmt.Errorf("failed to create temporary database: %w", err)
	}
	defer os.RemoveAll(tempDB.Name())

	db, err := sql.Open("sqlite", tempDB.Name())
	if err != nil {
		return nil, nil, fmt.Errorf("failed to open database: %w", err)
	}
	// disable connection expiry; the DB is a short-lived local file
	db.SetConnMaxLifetime(0)
	defer db.Close()

	// phase 1: read raw package entries from the DB tables
	packageEntries, err := extractV10DBPackages(config, db, dbLocation, resolver)
	if err != nil {
		return nil, nil, err
	}

	// phase 2: convert entries into packages and dependency relationships
	pkgs, relationships, err := finalizeV10DBResults(db, packageEntries, catalogerName)
	if err != nil {
		return nil, nil, err
	}

	return pkgs, relationships, nil
}
// extractV10DBPackages pulls all package entries out of the database: first the
// installed store paths (ValidPaths), then the derivation-output details that
// enrich them (DerivationOutputs).
func extractV10DBPackages(config Config, db *sql.DB, dbLocation file.Location, resolver file.Resolver) (map[int]*dbPackageEntry, error) {
	entries, err := extractV10DBValidPaths(config, db, dbLocation, resolver)
	if err != nil {
		return nil, err
	}
	if err := extractV10DBDerivationOutputs(db, entries); err != nil {
		return nil, err
	}
	return entries, nil
}
// extractV10DBValidPaths reads the ValidPaths table (one row per installed store path)
// and returns a package entry per row, keyed by the DB row ID. Rows with NULL paths
// are skipped; missing derivation files only reduce enrichment and are not fatal.
func extractV10DBValidPaths(config Config, db *sql.DB, dbLocation file.Location, resolver file.Resolver) (map[int]*dbPackageEntry, error) {
	packages := make(map[int]*dbPackageEntry)

	rows, err := db.Query("SELECT id, path, hash, deriver FROM ValidPaths")
	if err != nil {
		return nil, fmt.Errorf("failed to query ValidPaths: %w", err)
	}
	defer rows.Close()

	for rows.Next() {
		var id int
		var path, hash, deriver sql.NullString
		if err := rows.Scan(&id, &path, &hash, &deriver); err != nil {
			return nil, fmt.Errorf("failed to scan ValidPaths row: %w", err)
		}

		if !path.Valid {
			continue
		}

		nsp := parseNixStorePath(path.String)
		if nsp == nil {
			nsp = &nixStorePath{}
		}

		// always trust the DB values over string parsing
		nsp.OutputHash = hash.String
		nsp.StorePath = path.String

		var files []string
		if config.CaptureOwnedFiles {
			files = listOutputPaths(path.String, resolver)
		}

		df, err := newDerivationFromPath(deriver.String, resolver)
		if err != nil {
			// best effort: the .drv file may legitimately be absent from the store
			log.WithFields("path", deriver.String, "error", err).Trace("unable to find derivation")
			df = nil
		}

		packages[id] = &dbPackageEntry{
			ID:             id,
			nixStorePath:   *nsp,
			derivationFile: df,
			DeriverPath:    deriver.String,
			Location:       &dbLocation,
			Files:          files,
		}
	}
	// bug fix: surface errors encountered during row iteration (previously dropped)
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("failed iterating ValidPaths rows: %w", err)
	}

	return packages, nil
}
// listOutputPaths returns all file paths found (recursively) under the given store
// path, or nil when the path is empty or cannot be searched.
func listOutputPaths(storePath string, resolver file.Resolver) []string {
	if storePath == "" {
		return nil
	}
	locations, err := resolver.FilesByGlob(storePath + "/**/*")
	if err != nil {
		// best effort: owned-file listing only enriches the package
		log.WithFields("path", storePath, "error", err).Trace("unable to find output paths")
		return nil
	}
	return filePaths(locations)
}
// extractV10DBDerivationOutputs augments the given package entries (keyed by DB row ID)
// with the derivation ID and output name found in the DerivationOutputs table, matching
// rows to entries by the output's store path.
func extractV10DBDerivationOutputs(db *sql.DB, packages map[int]*dbPackageEntry) error {
	outputRows, err := db.Query("SELECT drv, id, path FROM DerivationOutputs")
	if err != nil {
		return fmt.Errorf("failed to query DerivationOutputs: %w", err)
	}
	defer outputRows.Close()

	// index entries by store path for O(1) matching against DerivationOutputs rows
	pkgsByPath := make(map[string]*dbPackageEntry, len(packages))
	for _, p := range packages {
		pkgsByPath[p.StorePath] = p
	}

	for outputRows.Next() {
		var drvID int
		var outputID, outputPath string
		if err := outputRows.Scan(&drvID, &outputID, &outputPath); err != nil {
			return fmt.Errorf("failed to scan DerivationOutputs row: %w", err)
		}
		// idiom fix: one map lookup instead of three
		entry, ok := pkgsByPath[outputPath]
		if !ok {
			continue
		}
		entry.Output = outputID
		entry.DrvID = drvID
	}
	// bug fix: surface errors encountered during row iteration (previously dropped)
	if err := outputRows.Err(); err != nil {
		return fmt.Errorf("failed iterating DerivationOutputs rows: %w", err)
	}
	return nil
}
// finalizeV10DBResults converts the DB package entries into syft packages and builds
// dependency relationships from the Refs table (a referrer depends on its reference).
// Self-references are skipped and duplicate edges are deduplicated.
func finalizeV10DBResults(db *sql.DB, packageEntries map[int]*dbPackageEntry, catalogerName string) ([]pkg.Package, []artifact.Relationship, error) {
	// make Syft packages for each package entry
	syftPackages := make(map[int]pkg.Package, len(packageEntries))
	for id, entry := range packageEntries {
		syftPackages[id] = newDBPackage(entry, catalogerName)
	}

	var relationships []artifact.Relationship

	// join against ValidPaths on both sides so only refs between known paths are seen
	query := `
	SELECT r.referrer, r.reference
	FROM Refs r
	JOIN ValidPaths v1 ON r.referrer = v1.id
	JOIN ValidPaths v2 ON r.reference = v2.id
	`
	refRows, err := db.Query(query)
	if err != nil {
		return nil, nil, fmt.Errorf("failed to query Refs with ValidPaths JOIN: %w", err)
	}
	defer refRows.Close()

	relExists := make(map[int]map[int]bool)
	for refRows.Next() {
		var referrerID, referenceID int
		if err := refRows.Scan(&referrerID, &referenceID); err != nil {
			return nil, nil, fmt.Errorf("failed to scan Refs row: %w", err)
		}
		if referrerID == referenceID {
			// skip self-references
			continue
		}
		referrer, refExists := syftPackages[referrerID]
		reference, refeeExists := syftPackages[referenceID]
		if !refExists || !refeeExists {
			// only include relationships for packages we have discovered
			continue
		}
		if _, ok := relExists[referrerID]; !ok {
			relExists[referrerID] = make(map[int]bool)
		}
		if relExists[referrerID][referenceID] {
			// deduplicate existing relationships
			continue
		}
		relExists[referrerID][referenceID] = true

		relationships = append(relationships, artifact.Relationship{
			From: reference,
			To:   referrer,
			Type: artifact.DependencyOfRelationship,
		})
	}
	// bug fix: surface errors encountered during row iteration (previously dropped)
	if err := refRows.Err(); err != nil {
		return nil, nil, fmt.Errorf("failed iterating Refs rows: %w", err)
	}

	var pkgs []pkg.Package
	for _, p := range syftPackages {
		pkgs = append(pkgs, p)
	}
	return pkgs, relationships, nil
}
func createTempDB(content io.ReadCloser) (*os.File, error) {
tempFile, err := os.CreateTemp("", "nix-db-*.sqlite")
if err != nil {
return nil, err
}
_, err = io.Copy(tempFile, content)
if err != nil {
tempFile.Close()
os.Remove(tempFile.Name())
return nil, err
}
return tempFile, nil
}

View File

@ -0,0 +1,122 @@
package nix
import (
"fmt"
"strings"
"github.com/nix-community/go-nix/pkg/derivation"
"github.com/anchore/syft/internal"
"github.com/anchore/syft/syft/file"
)
// derivationFile pairs a parsed Nix derivation with the location of the .drv file
// it was read from.
type derivationFile struct {
	// Location is where the .drv file was found within the scanned source
	Location file.Location
	derivation.Derivation
}
// newDerivationFromPath parses the derivation file at the given store path. Both the
// derivation and error are nil when no file exists at that path, since derivations
// are optional.
func newDerivationFromPath(drvPath string, resolver file.Resolver) (*derivationFile, error) {
	locations, err := resolver.FilesByPath(drvPath)
	if err != nil {
		return nil, fmt.Errorf("failed to find derivation: %w", err)
	}
	if len(locations) == 0 {
		// the derivation may legitimately be absent from the scanned source
		return nil, nil
	}
	// multiple references may exist; only the first is used
	return newDerivationFromLocation(locations[0], resolver)
}
// newDerivationFromLocation reads and parses the .drv file at the given location.
func newDerivationFromLocation(loc file.Location, resolver file.Resolver) (*derivationFile, error) {
	rdr, err := resolver.FileContentsByLocation(loc)
	if err != nil {
		return nil, fmt.Errorf("failed to read derivation: %w", err)
	}
	defer internal.CloseAndLogError(rdr, loc.RealPath)

	parsed, err := derivation.ReadDerivation(rdr)
	if err != nil {
		return nil, fmt.Errorf("failed to parse derivation: %w", err)
	}

	df := derivationFile{
		Location:   loc,
		Derivation: *parsed,
	}
	return &df, nil
}
// derivations indexes parsed derivation files so they can be looked up either by the
// .drv file's own path or by any of the output store paths the derivation produces.
type derivations struct {
	// derivationsByDrvPath maps a .drv file path to its parsed contents
	derivationsByDrvPath map[string]derivationFile
	// drvPathByOutputPath maps an output store path back to the .drv file that produces it
	drvPathByOutputPath map[string]string
}

// newDerivations returns an empty, ready-to-use derivations index.
func newDerivations() *derivations {
	return &derivations{
		derivationsByDrvPath: make(map[string]derivationFile),
		drvPathByOutputPath:  make(map[string]string),
	}
}
// add indexes the given derivation file by its .drv path and by each of its
// non-empty output paths.
func (c *derivations) add(df derivationFile) {
	drvPath := df.Location.RealPath
	c.derivationsByDrvPath[drvPath] = df
	for _, out := range df.Outputs {
		if out == nil || out.Path == "" {
			// outputs without a path cannot be mapped back to this derivation
			continue
		}
		c.drvPathByOutputPath[out.Path] = drvPath
	}
}
// findDerivationForOutputPath returns the derivation that produces the given output
// store path, or nil when none has been indexed. A missing leading slash is added
// before lookup.
func (c *derivations) findDerivationForOutputPath(outputPath string) *derivationFile {
	if !strings.HasPrefix(outputPath, "/") {
		outputPath = "/" + outputPath
	}
	drvPath, ok := c.drvPathByOutputPath[outputPath]
	if !ok {
		return nil
	}
	df, ok := c.derivationsByDrvPath[drvPath]
	if !ok {
		// the mapping may point at a derivation that was never indexed
		return nil
	}
	return &df
}
// given a path as input, assuming it's an output path for a derivation, find all input store paths needed for this particular output path.
func (c *derivations) findDependencies(p string) []string {
	// case 1: p is itself a .drv path — gather its inputs directly
	if d, ok := c.derivationsByDrvPath[p]; ok {
		var deps []string
		// resolve each input derivation's named outputs to concrete store paths
		for drvPath, names := range d.InputDerivations {
			if len(names) == 0 {
				continue
			}
			for _, n := range names {
				outputPath := c.namedOutputStorePath(drvPath, n)
				if outputPath == "" {
					// the input derivation (or that output name) is not indexed
					continue
				}
				deps = append(deps, outputPath)
			}
		}
		// input sources (e.g. patches, source files) are store paths themselves
		for _, inputSrc := range d.InputSources {
			if inputSrc == "" {
				continue
			}
			deps = append(deps, inputSrc)
		}
		return deps
	}
	// case 2: p is an output path — map it to its producing .drv and recurse once
	if drvPath, ok := c.drvPathByOutputPath[p]; ok {
		return c.findDependencies(drvPath)
	}
	return nil
}
// namedOutputStorePath resolves the store path for a specific named output (e.g. "out",
// "dev") of the given derivation, returning "" when either is unknown.
func (c *derivations) namedOutputStorePath(drvPath, name string) string {
	d, ok := c.derivationsByDrvPath[drvPath]
	if !ok {
		return ""
	}
	out, ok := d.Outputs[name]
	if !ok {
		return ""
	}
	return out.Path
}

View File

@ -0,0 +1,407 @@
package nix
import (
"testing"
"github.com/nix-community/go-nix/pkg/derivation"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/anchore/syft/syft/file"
)
// TestDerivationCollection_Add verifies that adding a derivation indexes it by its
// .drv path and maps each of its output paths back to that .drv path.
func TestDerivationCollection_Add(t *testing.T) {
	c := newDerivations()
	d := derivationFile{
		Location: file.NewLocation("/nix/store/xyz789-foo.drv"),
		Derivation: derivation.Derivation{
			Outputs: map[string]*derivation.Output{
				"out": {
					Path: "/nix/store/abc123-foo",
				},
				"dev": {
					Path: "/nix/store/def456-foo-dev",
				},
			},
		},
	}
	c.add(d)
	assert.Len(t, c.derivationsByDrvPath, 1)
	assert.Len(t, c.drvPathByOutputPath, 2)
	assert.Equal(t, "/nix/store/xyz789-foo.drv", c.drvPathByOutputPath["/nix/store/abc123-foo"])
	assert.Equal(t, "/nix/store/xyz789-foo.drv", c.drvPathByOutputPath["/nix/store/def456-foo-dev"])
}
// TestDerivationCollection_AddNilOutputs verifies that nil outputs and outputs with
// empty paths are skipped when indexing (the derivation itself is still indexed).
func TestDerivationCollection_AddNilOutputs(t *testing.T) {
	c := newDerivations()
	d := derivationFile{
		Location: file.NewLocation("/nix/store/xyz789-foo.drv"),
		Derivation: derivation.Derivation{
			Outputs: map[string]*derivation.Output{
				"out": nil,
				"dev": {
					Path: "",
				},
			},
		},
	}
	c.add(d)
	assert.Len(t, c.derivationsByDrvPath, 1)
	assert.Empty(t, c.drvPathByOutputPath)
}
// TestDerivationCollection_FindDerivationForOutputPath covers the output-path-to-derivation
// lookup: slash normalization, multi-output derivations, special characters, duplicate
// output paths (last add wins), and dangling drv-path mappings.
func TestDerivationCollection_FindDerivationForOutputPath(t *testing.T) {
	c := newDerivations()
	// standard derivation
	standardDrv := derivationFile{
		Location: file.NewLocation("/nix/store/xyz789-foo.drv"),
		Derivation: derivation.Derivation{
			Outputs: map[string]*derivation.Output{
				"out": {
					Path: "/nix/store/abc123-foo",
				},
			},
		},
	}
	c.add(standardDrv)
	// derivation with multiple outputs
	multiOutputDrv := derivationFile{
		Location: file.NewLocation("/nix/store/multi-output.drv"),
		Derivation: derivation.Derivation{
			Outputs: map[string]*derivation.Output{
				"out": {
					Path: "/nix/store/multi-out-path",
				},
				"dev": {
					Path: "/nix/store/multi-dev-path",
				},
				"doc": {
					Path: "/nix/store/multi-doc-path",
				},
			},
		},
	}
	c.add(multiOutputDrv)
	// derivation with special characters in path
	specialCharsDrv := derivationFile{
		Location: file.NewLocation("/nix/store/special-chars+_.drv"),
		Derivation: derivation.Derivation{
			Outputs: map[string]*derivation.Output{
				"out": {
					Path: "/nix/store/special-chars+_-output",
				},
			},
		},
	}
	c.add(specialCharsDrv)
	// derivation with same output path as another (should override)
	duplicateOutputDrv := derivationFile{
		Location: file.NewLocation("/nix/store/duplicate.drv"),
		Derivation: derivation.Derivation{
			Outputs: map[string]*derivation.Output{
				"out": {
					Path: "/nix/store/abc123-foo", // same as standardDrv output
				},
			},
		},
	}
	c.add(duplicateOutputDrv)
	tests := []struct {
		name       string
		outputPath string
		expected   *derivationFile
	}{
		{
			name:       "output path exists",
			outputPath: "/nix/store/abc123-foo",
			expected:   &duplicateOutputDrv,
		},
		{
			name:       "output path exists without leading slash",
			outputPath: "nix/store/abc123-foo",
			expected:   &duplicateOutputDrv,
		},
		{
			name:       "output path does not exist",
			outputPath: "/nix/store/nonexistent",
		},
		{
			name:       "multiple output derivation - out path",
			outputPath: "/nix/store/multi-out-path",
			expected:   &multiOutputDrv,
		},
		{
			name:       "multiple output derivation - dev path",
			outputPath: "/nix/store/multi-dev-path",
			expected:   &multiOutputDrv,
		},
		{
			name:       "special characters in path",
			outputPath: "/nix/store/special-chars+_-output",
			expected:   &specialCharsDrv,
		},
		{
			name:       "empty string path",
			outputPath: "",
		},
		{
			name:       "path with just a slash",
			outputPath: "/",
		},
		{
			name:       "drv path exists in mapping but not in derivations",
			outputPath: "/nix/store/missing",
		},
	}
	// add a path mapping to a derivation that doesn't exist
	c.drvPathByOutputPath["/nix/store/missing"] = "/nix/store/nonexistent.drv"
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := c.findDerivationForOutputPath(tt.outputPath)
			if tt.expected == nil {
				assert.Nil(t, result)
			} else {
				require.NotNil(t, result)
				assert.Equal(t, tt.expected.Location.RealPath, result.Location.RealPath)
			}
		})
	}
}
// TestDerivationCollection_FindDependencies covers dependency resolution through the
// derivation index: lookup by .drv path and by output path, input sources, and the
// empty-input edge cases.
func TestDerivationCollection_FindDependencies(t *testing.T) {
	c := newDerivations()
	// set up a dependency tree:
	// - foo depends on bar and baz
	// - bar depends on qux
	// create "qux" derivation
	quxDrv := derivationFile{
		Location: file.NewLocation("/nix/store/qux.drv"),
		Derivation: derivation.Derivation{
			Outputs: map[string]*derivation.Output{
				"out": {
					Path: "/nix/store/qux-path",
				},
			},
		},
	}
	c.add(quxDrv)
	// create "bar" derivation which depends on qux
	barDrv := derivationFile{
		Location: file.NewLocation("/nix/store/bar.drv"),
		Derivation: derivation.Derivation{
			Outputs: map[string]*derivation.Output{
				"out": {
					Path: "/nix/store/bar-path",
				},
			},
			InputDerivations: map[string][]string{
				"/nix/store/qux.drv": {"out"},
			},
		},
	}
	c.add(barDrv)
	// create "baz" derivation
	bazDrv := derivationFile{
		Location: file.NewLocation("/nix/store/baz.drv"),
		Derivation: derivation.Derivation{
			Outputs: map[string]*derivation.Output{
				"out": {
					Path: "/nix/store/baz-path",
				},
			},
		},
	}
	c.add(bazDrv)
	// create "foo" derivation which depends on bar and baz
	fooDrv := derivationFile{
		Location: file.NewLocation("/nix/store/foo.drv"),
		Derivation: derivation.Derivation{
			Outputs: map[string]*derivation.Output{
				"out": {
					Path: "/nix/store/foo-path",
				},
			},
			InputDerivations: map[string][]string{
				"/nix/store/bar.drv": {"out"},
				"/nix/store/baz.drv": {"out"},
			},
			InputSources: []string{
				"/nix/store/src1",
				"/nix/store/src2",
			},
		},
	}
	c.add(fooDrv)
	// add a test case for empty input names
	emptyNamesDrv := derivationFile{
		Location: file.NewLocation("/nix/store/empty-names.drv"),
		Derivation: derivation.Derivation{
			Outputs: map[string]*derivation.Output{
				"out": {
					Path: "/nix/store/empty-names-path",
				},
			},
			InputDerivations: map[string][]string{
				"/nix/store/bar.drv": {},
			},
		},
	}
	c.add(emptyNamesDrv)
	// add a test case for empty input sources
	emptySourcesDrv := derivationFile{
		Location: file.NewLocation("/nix/store/empty-sources.drv"),
		Derivation: derivation.Derivation{
			Outputs: map[string]*derivation.Output{
				"out": {
					Path: "/nix/store/empty-sources-path",
				},
			},
			InputDerivations: map[string][]string{
				"/nix/store/bar.drv": {"out"},
			},
			InputSources: []string{
				"",
			},
		},
	}
	c.add(emptySourcesDrv)
	tests := []struct {
		name     string
		path     string
		expected []string
	}{
		{
			name: "lookup by derivation path",
			path: "/nix/store/foo.drv",
			expected: []string{
				"/nix/store/bar-path",
				"/nix/store/baz-path",
				"/nix/store/src1",
				"/nix/store/src2",
			},
		},
		{
			name: "lookup by output path",
			path: "/nix/store/foo-path",
			expected: []string{
				"/nix/store/bar-path",
				"/nix/store/baz-path",
				"/nix/store/src1",
				"/nix/store/src2",
			},
		},
		{
			name:     "lookup by derivation with no inputs",
			path:     "/nix/store/qux.drv",
			expected: nil,
		},
		{
			name:     "lookup nonexistent path",
			path:     "/nix/store/nonexistent",
			expected: nil,
		},
		{
			name:     "lookup derivation with empty input names",
			path:     "/nix/store/empty-names.drv",
			expected: nil,
		},
		{
			name: "lookup derivation with empty input sources",
			path: "/nix/store/empty-sources.drv",
			expected: []string{
				"/nix/store/bar-path",
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := c.findDependencies(tt.path)
			if tt.expected == nil {
				assert.Nil(t, result)
			} else {
				require.NotNil(t, result)
				assert.ElementsMatch(t, tt.expected, result)
			}
		})
	}
}
// TestDerivationCollection_NamedOutputStorePath verifies resolving a (drv path, output
// name) pair to the output's store path, including missing-output and missing-drv cases.
func TestDerivationCollection_NamedOutputStorePath(t *testing.T) {
	c := newDerivations()
	d := derivationFile{
		Location: file.NewLocation("/nix/store/xyz789-foo.drv"),
		Derivation: derivation.Derivation{
			Outputs: map[string]*derivation.Output{
				"out": {
					Path: "/nix/store/abc123-foo",
				},
				"dev": {
					Path: "/nix/store/def456-foo-dev",
				},
			},
		},
	}
	c.add(d)
	tests := []struct {
		name     string
		drvPath  string
		outName  string
		expected string
	}{
		{
			name:     "existing drv and output",
			drvPath:  "/nix/store/xyz789-foo.drv",
			outName:  "out",
			expected: "/nix/store/abc123-foo",
		},
		{
			name:     "existing drv and dev output",
			drvPath:  "/nix/store/xyz789-foo.drv",
			outName:  "dev",
			expected: "/nix/store/def456-foo-dev",
		},
		{
			name:     "existing drv but nonexistent output",
			drvPath:  "/nix/store/xyz789-foo.drv",
			outName:  "nonexistent",
			expected: "",
		},
		{
			name:     "nonexistent drv",
			drvPath:  "/nix/store/nonexistent.drv",
			outName:  "out",
			expected: "",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := c.namedOutputStorePath(tt.drvPath, tt.outName)
			assert.Equal(t, tt.expected, result)
		})
	}
}

View File

@ -1,22 +1,43 @@
package nix
import (
"path"
"sort"
"github.com/anchore/packageurl-go"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/pkg"
)
func newNixStorePackage(storePath nixStorePath, locations ...file.Location) pkg.Package {
type nixStorePackage struct {
Location *file.Location
Files []string
*derivationFile
nixStorePath
}
func newNixStorePackage(pp nixStorePackage, catalogerName string) pkg.Package {
locations := file.NewLocationSet(pp.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation))
var derivationPath string
if pp.derivationFile != nil {
derivationPath = pp.derivationFile.Location.RealPath
locations.Add(pp.derivationFile.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.SupportingEvidenceAnnotation))
}
p := pkg.Package{
Name: storePath.name,
Version: storePath.version,
Name: pp.Name,
Version: pp.Version,
FoundBy: catalogerName,
Locations: file.NewLocationSet(locations...),
Locations: locations,
Type: pkg.NixPkg,
PURL: packageURL(storePath),
PURL: packageURL(pp.nixStorePath, derivationPath),
Metadata: pkg.NixStoreEntry{
OutputHash: storePath.outputHash,
Output: storePath.output,
Path: pp.StorePath,
Derivation: newDerivation(pp.derivationFile),
OutputHash: pp.OutputHash,
Output: pp.Output,
Files: pp.Files,
},
}
@ -25,24 +46,90 @@ func newNixStorePackage(storePath nixStorePath, locations ...file.Location) pkg.
return p
}
func packageURL(storePath nixStorePath) string {
// newDBPackage creates a syft package from a Nix DB entry. The DB file itself is
// annotated as primary evidence; the store path and (when present) the .drv file are
// annotated as supporting evidence.
func newDBPackage(entry *dbPackageEntry, catalogerName string) pkg.Package {
	locations := file.NewLocationSet(
		entry.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
		file.NewLocation(entry.StorePath).WithAnnotation(pkg.EvidenceAnnotationKey, pkg.SupportingEvidenceAnnotation),
	)
	if entry.derivationFile != nil {
		// the derivation file is optional; include it as evidence only when found
		locations.Add(entry.derivationFile.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.SupportingEvidenceAnnotation))
	}
	p := pkg.Package{
		Name:      entry.Name,
		Version:   entry.Version,
		FoundBy:   catalogerName,
		Locations: locations,
		Type:      pkg.NixPkg,
		PURL:      packageURL(entry.nixStorePath, entry.DeriverPath),
		Metadata: pkg.NixStoreEntry{
			Path:       entry.StorePath,
			Derivation: newDerivation(entry.derivationFile),
			OutputHash: entry.OutputHash,
			Output:     entry.Output,
			Files:      entry.Files,
		},
	}
	p.SetID()
	return p
}
// newDerivation converts a parsed derivation file into its pkg metadata representation,
// with input derivations and sources in deterministic (sorted) order. A nil input
// yields the zero value.
func newDerivation(df *derivationFile) pkg.NixDerivation {
	if df == nil {
		return pkg.NixDerivation{}
	}

	var inputDerivations []pkg.NixDerivationReference
	for drvPath, names := range df.InputDerivations {
		// bug fix: sort a copy so we don't mutate the slice owned by the parsed
		// derivation (sort.Strings sorts in place)
		sortedNames := append([]string(nil), names...)
		sort.Strings(sortedNames)
		inputDerivations = append(inputDerivations, pkg.NixDerivationReference{
			Path:    drvPath,
			Outputs: sortedNames,
		})
	}
	sort.Slice(inputDerivations, func(i, j int) bool {
		return inputDerivations[i].Path < inputDerivations[j].Path
	})

	// likewise, sort a copy of the input sources rather than the original slice
	sources := append([]string(nil), df.InputSources...)
	sort.Strings(sources)

	return pkg.NixDerivation{
		Path:             df.Location.RealPath,
		System:           df.Platform,
		InputDerivations: inputDerivations,
		InputSources:     sources,
	}
}
func packageURL(storePath nixStorePath, drvPath string) string {
var qualifiers packageurl.Qualifiers
if storePath.output != "" {
if storePath.Output != "" {
// since there is no nix pURL type yet, this is a guess, however, it is reasonable to assume that
// if only a single output is installed the pURL should be able to express this.
qualifiers = append(qualifiers,
packageurl.Qualifier{
Key: "output",
Value: storePath.output,
Value: storePath.Output,
},
)
}
if storePath.outputHash != "" {
// it's not immediately clear if the hash found in the store path should be encoded in the pURL
if storePath.OutputHash != "" {
qualifiers = append(qualifiers,
packageurl.Qualifier{
Key: "outputhash",
Value: storePath.outputHash,
Value: storePath.OutputHash,
},
)
}
if drvPath != "" {
qualifiers = append(qualifiers,
packageurl.Qualifier{
Key: "drvpath",
Value: path.Base(drvPath),
},
)
}
@ -50,8 +137,8 @@ func packageURL(storePath nixStorePath) string {
// TODO: nix pURL type has not been accepted yet (only proposed at this time)
"nix",
"",
storePath.name,
storePath.version,
storePath.Name,
storePath.Version,
qualifiers,
"")
return pURL.ToString()

View File

@ -11,39 +11,49 @@ func Test_packageURL(t *testing.T) {
tests := []struct {
name string
storePath nixStorePath
drvPath string
want string
}{
{
name: "name + version",
storePath: nixStorePath{
name: "glibc",
version: "2.34",
Name: "glibc",
Version: "2.34",
},
want: "pkg:nix/glibc@2.34",
},
{
name: "hash qualifier",
storePath: nixStorePath{
name: "glibc",
version: "2.34",
outputHash: "h0cnbmfcn93xm5dg2x27ixhag1cwndga",
Name: "glibc",
Version: "2.34",
OutputHash: "h0cnbmfcn93xm5dg2x27ixhag1cwndga",
},
want: "pkg:nix/glibc@2.34?outputhash=h0cnbmfcn93xm5dg2x27ixhag1cwndga",
},
{
name: "output qualifier",
storePath: nixStorePath{
name: "glibc",
version: "2.34",
outputHash: "h0cnbmfcn93xm5dg2x27ixhag1cwndga",
output: "bin",
Name: "glibc",
Version: "2.34",
OutputHash: "h0cnbmfcn93xm5dg2x27ixhag1cwndga",
Output: "bin",
},
want: "pkg:nix/glibc@2.34?output=bin&outputhash=h0cnbmfcn93xm5dg2x27ixhag1cwndga",
},
{
name: "derivation qualifier",
storePath: nixStorePath{
Name: "glibc",
Version: "2.34",
},
drvPath: "/nix/store/h0cnbmfcn93xm5dg2x27ixhag1cwndga-glibc-2.34.drv",
want: "pkg:nix/glibc@2.34?drvpath=h0cnbmfcn93xm5dg2x27ixhag1cwndga-glibc-2.34.drv",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
assert.Equal(t, tt.want, packageURL(tt.storePath))
assert.Equal(t, tt.want, packageURL(tt.storePath, tt.drvPath))
})
}
}

View File

@ -18,7 +18,7 @@ var (
// minor: "34"
// patch: "210"
// (there are other capture groups, but they can be ignored)
rightMostVersionIshPattern = regexp.MustCompile(`-(?P<version>(?P<major>[0-9][a-zA-Z0-9]*)(\.(?P<minor>[0-9][a-zA-Z0-9]*))?(\.(?P<patch>0|[1-9][a-zA-Z0-9]*)){0,3}(?:-(?P<prerelease>\d*[.a-zA-Z-][.0-9a-zA-Z-]*)*)?(?:\+(?P<metadata>[.0-9a-zA-Z-]+(?:\.[.0-9a-zA-Z-]+)*))?)`)
rightMostVersionIshPattern = regexp.MustCompile(`-(?P<version>(?P<major>[0-9][a-zA-Z0-9]*)(\.(?P<minor>[0-9][a-zA-Z0-9]*))?(\.(?P<patch>0|[1-9][a-zA-Z0-9]*)){0,3}(?:-(?P<prerelease>\d*[.0-9a-zA-Z-]*)*)?(?:\+(?P<metadata>[.0-9a-zA-Z-]+(?:\.[.0-9a-zA-Z-]+)*))?)`)
unstableVersion = regexp.MustCompile(`-(?P<version>unstable-\d{4}-\d{2}-\d{2})$`)
)
@ -26,14 +26,15 @@ var (
// checkout the package naming conventions here: https://nixos.org/manual/nixpkgs/stable/#sec-package-naming
type nixStorePath struct {
outputHash string
name string
version string
output string
StorePath string
OutputHash string
Name string
Version string
Output string
}
func (p nixStorePath) isValidPackage() bool {
return p.name != "" && p.version != ""
return p.Name != "" && p.Version != ""
}
func findParentNixStorePath(source string) string {
@ -54,13 +55,13 @@ func findParentNixStorePath(source string) string {
return source[0:startOfSubPath]
}
func parseNixStorePath(source string) *nixStorePath {
if strings.HasSuffix(source, ".drv") {
func parseNixStorePath(og string) *nixStorePath {
if strings.HasSuffix(og, ".drv") {
// ignore derivations
return nil
}
source = path.Base(source)
source := path.Base(og)
versionStartIdx, versionIsh, prerelease := findVersionIsh(source)
if versionStartIdx == -1 {
@ -85,11 +86,16 @@ func parseNixStorePath(source string) *nixStorePath {
output = lastPrereleaseField
}
if og != "" && !strings.HasPrefix(og, "/") {
og = fmt.Sprintf("/%s", og)
}
return &nixStorePath{
outputHash: hash,
name: name,
version: version,
output: output,
StorePath: og,
OutputHash: hash,
Name: name,
Version: version,
Output: output,
}
}
@ -102,33 +108,25 @@ func findVersionIsh(input string) (int, string, string) {
// note that the match indices are in the form of [start, end, start, end, ...]. Also note that the
// capture group for version in both regexes are the same index, but if the regexes are changed
// this code will start to fail.
versionGroup := 1
match := unstableVersion.FindAllStringSubmatchIndex(input, -1)
if len(match) > 0 && len(match[0]) > 0 {
return match[0][versionGroup*2], input[match[0][versionGroup*2]:match[0][(versionGroup*2)+1]], ""
// check for unstable version pattern first
if match := unstableVersion.FindStringSubmatch(input); match != nil {
indices := unstableVersion.FindStringSubmatchIndex(input)
versionStart := indices[2] // index of first capture group's start
version := match[1] // first capture group is the version
return versionStart, version, ""
}
match = rightMostVersionIshPattern.FindAllStringSubmatchIndex(input, -1)
if len(match) == 0 || len(match[0]) == 0 {
// try the regular version pattern
match := rightMostVersionIshPattern.FindStringSubmatch(input)
if match == nil {
return -1, "", ""
}
var version string
versionStart, versionStop := match[0][versionGroup*2], match[0][(versionGroup*2)+1]
if versionStart != -1 || versionStop != -1 {
version = input[versionStart:versionStop]
}
version := match[1] // capture group 1 is the version
indices := rightMostVersionIshPattern.FindStringSubmatchIndex(input)
versionStart := indices[2] // index of first capture group's start
prerelease := match[7] // capture group 7 is the prerelease version
prereleaseGroup := 7
var prerelease string
prereleaseStart, prereleaseStop := match[0][prereleaseGroup*2], match[0][(prereleaseGroup*2)+1]
if prereleaseStart != -1 && prereleaseStop != -1 {
prerelease = input[prereleaseStart:prereleaseStop]
}
return versionStart,
version,
prerelease
return versionStart, version, prerelease
}

View File

@ -1,7 +1,6 @@
package nix
import (
"path"
"testing"
"github.com/stretchr/testify/assert"
@ -125,13 +124,21 @@ func Test_findVersionIsh(t *testing.T) {
wantVersion: "unstable-2021-08-16",
wantPreRelease: "",
},
{
name: "version with release suffix and no output name",
input: "/nix/store/02mqs1by2vab9yzw0qc4j7463w78p3ps-glibc-2.37-8",
wantIdx: 50,
wantVersion: "2.37-8",
wantPreRelease: "8",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
gotIdx, gotVersion, gotPreRelease := findVersionIsh(tt.input)
assert.Equal(t, tt.wantIdx, gotIdx)
assert.Equal(t, tt.wantVersion, gotVersion)
assert.Equal(t, tt.wantPreRelease, gotPreRelease)
assert.Equal(t, tt.wantIdx, gotIdx, "bad index")
assert.Equal(t, tt.wantVersion, gotVersion, "bad version")
assert.Equal(t, tt.wantPreRelease, gotPreRelease, "bad pre-release")
})
}
}
@ -139,121 +146,132 @@ func Test_findVersionIsh(t *testing.T) {
func Test_parseNixStorePath(t *testing.T) {
tests := []struct {
source string
want *nixStorePath
name string
want *nixStorePath
}{
{
source: "/nix/store/h0cnbmfcn93xm5dg2x27ixhag1cwndga-glibc-2.34-210-bin",
name: "/nix/store/h0cnbmfcn93xm5dg2x27ixhag1cwndga-glibc-2.34-210-bin",
want: &nixStorePath{
outputHash: "h0cnbmfcn93xm5dg2x27ixhag1cwndga",
name: "glibc",
version: "2.34-210",
output: "bin",
OutputHash: "h0cnbmfcn93xm5dg2x27ixhag1cwndga",
Name: "glibc",
Version: "2.34-210",
Output: "bin",
},
},
{
source: "/nix/store/0296qxvn30z9b2ah1g5p97k5wr9k8y78-busybox-static-x86_64-unknown-linux-musl-1.35.0",
name: "/nix/store/02mqs1by2vab9yzw0qc4j7463w78p3ps-glibc-2.37-8",
want: &nixStorePath{
outputHash: "0296qxvn30z9b2ah1g5p97k5wr9k8y78",
name: "busybox-static-x86_64-unknown-linux-musl",
version: "1.35.0",
OutputHash: "02mqs1by2vab9yzw0qc4j7463w78p3ps",
Name: "glibc",
Version: "2.37-8",
},
},
{
source: "/nix/store/5zzrvdmlkc5rh3k5862krd3wfb3pqhyf-perl5.34.1-TimeDate-2.33",
name: "/nix/store/0296qxvn30z9b2ah1g5p97k5wr9k8y78-busybox-static-x86_64-unknown-linux-musl-1.35.0",
want: &nixStorePath{
outputHash: "5zzrvdmlkc5rh3k5862krd3wfb3pqhyf",
name: "perl5.34.1-TimeDate",
version: "2.33",
OutputHash: "0296qxvn30z9b2ah1g5p97k5wr9k8y78",
Name: "busybox-static-x86_64-unknown-linux-musl",
Version: "1.35.0",
},
},
{
source: "/nix/store/q38q8ng57zwjg1h15ry5zx0lb0xyax4b-libcap-2.63-lib",
name: "/nix/store/5zzrvdmlkc5rh3k5862krd3wfb3pqhyf-perl5.34.1-TimeDate-2.33",
want: &nixStorePath{
outputHash: "q38q8ng57zwjg1h15ry5zx0lb0xyax4b",
name: "libcap",
version: "2.63",
output: "lib",
OutputHash: "5zzrvdmlkc5rh3k5862krd3wfb3pqhyf",
Name: "perl5.34.1-TimeDate",
Version: "2.33",
},
},
{
source: "/nix/store/p0y8fbpbqr2jm5zfrdll0rgyg2lvp5g2-util-linux-minimal-2.37.4-bin",
name: "/nix/store/q38q8ng57zwjg1h15ry5zx0lb0xyax4b-libcap-2.63-lib",
want: &nixStorePath{
outputHash: "p0y8fbpbqr2jm5zfrdll0rgyg2lvp5g2",
name: "util-linux-minimal",
version: "2.37.4",
output: "bin",
OutputHash: "q38q8ng57zwjg1h15ry5zx0lb0xyax4b",
Name: "libcap",
Version: "2.63",
Output: "lib",
},
},
{
source: "/nix/store/z24qs6f5d1mmwdp73n1jfc3swj4v2c5s-krb5-1.19.3.9.10",
name: "/nix/store/p0y8fbpbqr2jm5zfrdll0rgyg2lvp5g2-util-linux-minimal-2.37.4-bin",
want: &nixStorePath{
outputHash: "z24qs6f5d1mmwdp73n1jfc3swj4v2c5s",
name: "krb5",
version: "1.19.3.9.10",
OutputHash: "p0y8fbpbqr2jm5zfrdll0rgyg2lvp5g2",
Name: "util-linux-minimal",
Version: "2.37.4",
Output: "bin",
},
},
{
source: "/nix/store/zkgyp2vra0bgqm0dv1qi514l5fd0aksx-bash-interactive-5.1-p16-man",
name: "/nix/store/z24qs6f5d1mmwdp73n1jfc3swj4v2c5s-krb5-1.19.3.9.10",
want: &nixStorePath{
outputHash: "zkgyp2vra0bgqm0dv1qi514l5fd0aksx",
name: "bash-interactive",
version: "5.1-p16",
output: "man",
OutputHash: "z24qs6f5d1mmwdp73n1jfc3swj4v2c5s",
Name: "krb5",
Version: "1.19.3.9.10",
},
},
{
source: "/nix/store/nwf2y0nc48ybim56308cr5ccvwkabcqc-openssl-1.1.1q",
name: "/nix/store/zkgyp2vra0bgqm0dv1qi514l5fd0aksx-bash-interactive-5.1-p16-man",
want: &nixStorePath{
outputHash: "nwf2y0nc48ybim56308cr5ccvwkabcqc",
name: "openssl",
version: "1.1.1q",
OutputHash: "zkgyp2vra0bgqm0dv1qi514l5fd0aksx",
Name: "bash-interactive",
Version: "5.1-p16",
Output: "man",
},
},
{
source: "/nix/store/nwv742f1bxv6g78hy9yc6slxdbxlmqhb-kmod-29",
name: "/nix/store/nwf2y0nc48ybim56308cr5ccvwkabcqc-openssl-1.1.1q",
want: &nixStorePath{
outputHash: "nwv742f1bxv6g78hy9yc6slxdbxlmqhb",
name: "kmod",
version: "29",
OutputHash: "nwf2y0nc48ybim56308cr5ccvwkabcqc",
Name: "openssl",
Version: "1.1.1q",
},
},
{
source: "/nix/store/n83qx7m848kg51lcjchwbkmlgdaxfckf-tzdata-2022a",
name: "/nix/store/nwv742f1bxv6g78hy9yc6slxdbxlmqhb-kmod-29",
want: &nixStorePath{
outputHash: "n83qx7m848kg51lcjchwbkmlgdaxfckf",
name: "tzdata",
version: "2022a",
OutputHash: "nwv742f1bxv6g78hy9yc6slxdbxlmqhb",
Name: "kmod",
Version: "29",
},
},
{
source: "'/nix/store/q5dhwzcn82by5ndc7g0q83wsnn13qkqw-webdav-server-rs-unstable-2021-08-16",
name: "/nix/store/n83qx7m848kg51lcjchwbkmlgdaxfckf-tzdata-2022a",
want: &nixStorePath{
outputHash: "q5dhwzcn82by5ndc7g0q83wsnn13qkqw",
name: "webdav-server-rs",
version: "unstable-2021-08-16",
OutputHash: "n83qx7m848kg51lcjchwbkmlgdaxfckf",
Name: "tzdata",
Version: "2022a",
},
},
{
name: "/nix/store/q5dhwzcn82by5ndc7g0q83wsnn13qkqw-webdav-server-rs-unstable-2021-08-16",
want: &nixStorePath{
OutputHash: "q5dhwzcn82by5ndc7g0q83wsnn13qkqw",
Name: "webdav-server-rs",
Version: "unstable-2021-08-16",
},
},
// negative cases...
{
source: "'z33yk02rsr6b4rb56lgb80bnvxx6yw39-?id=21ee35dde73aec5eba35290587d479218c6dd824.drv'",
name: "'z33yk02rsr6b4rb56lgb80bnvxx6yw39-?id=21ee35dde73aec5eba35290587d479218c6dd824.drv'",
},
{
source: "/nix/store/yzahni8aig6mdrvcsccgwm2515lcpi5q-git-minimal-2.36.0.drv",
name: "/nix/store/yzahni8aig6mdrvcsccgwm2515lcpi5q-git-minimal-2.36.0.drv",
},
{
source: "/nix/store/z9yvxs0s3xdkp5jgmzis4g50bfq3dgvm-0018-pkg-config-derive-prefix-from-prefix.patch",
name: "/nix/store/z9yvxs0s3xdkp5jgmzis4g50bfq3dgvm-0018-pkg-config-derive-prefix-from-prefix.patch",
},
{
source: "/nix/store/w3hl7zrmc9qvzadc0k7cp9ysxiyz88j6-base-system",
name: "/nix/store/w3hl7zrmc9qvzadc0k7cp9ysxiyz88j6-base-system",
},
{
source: "/nix/store/zz1lc28x25fcx6al6xwk3dk8kp7wx47y-Test-RequiresInternet-0.05.tar.gz.drv",
name: "/nix/store/zz1lc28x25fcx6al6xwk3dk8kp7wx47y-Test-RequiresInternet-0.05.tar.gz.drv",
},
}
for _, tt := range tests {
t.Run(path.Base(tt.source), func(t *testing.T) {
assert.Equal(t, tt.want, parseNixStorePath(tt.source))
t.Run(tt.name, func(t *testing.T) {
if tt.want != nil {
tt.want.StorePath = tt.name
}
assert.Equal(t, tt.want, parseNixStorePath(tt.name))
})
}
}

View File

@ -0,0 +1,196 @@
/*
Package nix provides a concrete Cataloger implementation for packages within the Nix packaging ecosystem.
*/
package nix
import (
"context"
"errors"
"fmt"
"github.com/bmatcuk/doublestar/v4"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/internal/unknown"
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/pkg"
)
// storeCataloger finds package outputs installed in the Nix store location (/nix/store/*).
type storeCataloger struct {
	// config controls optional behavior (e.g. whether owned files are captured)
	config Config
	// name identifies this cataloger instance (returned by Name())
	name string
}
// NewStoreCataloger returns a new cataloger object initialized for Nix store files.
// Deprecated: please use NewCataloger instead
func NewStoreCataloger() pkg.Cataloger {
	// the legacy cataloger always captures owned files; the newer config-driven
	// constructor lets callers opt out of this behavior
	cfg := Config{
		CaptureOwnedFiles: true,
	}
	return newStoreCataloger(cfg, "nix-store-cataloger")
}
// newStoreCataloger constructs a store cataloger with the given configuration and
// the cataloger name it should report.
func newStoreCataloger(cfg Config, name string) storeCataloger {
	return storeCataloger{config: cfg, name: name}
}
// Name returns the configured name of this cataloger.
func (c storeCataloger) Name() string {
	return c.name
}
// Catalog discovers packages from nix store paths and enriches them with information
// parsed from derivation (.drv) files, returning packages, the dependency relationships
// recoverable from those derivations, and any unknown-coordinate errors worth surfacing.
func (c storeCataloger) Catalog(ctx context.Context, resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) {
	prototypes, err := c.findPackagesFromStore(ctx, resolver)
	if err != nil {
		return nil, nil, fmt.Errorf("failed to find nix packages: %w", err)
	}

	drvs, err := c.findDerivationsFromStore(resolver, prototypes)
	if err != nil {
		// preserve unknown errors, but suppress would-be fatal errors
		var cErr *unknown.CoordinateError
		if !errors.As(err, &cErr) {
			// derivations only enrich packages, so a fatal error here should not fail
			// the catalog; log the original error before discarding it (the previous
			// implementation nil-ed the error first, so the log field was always nil)
			log.WithFields("error", err).Trace("failed to find nix derivations")
			drvs = newDerivations()
			err = nil
		}
	}

	pkgs, rels := c.finalizeStorePackages(prototypes, drvs)
	return pkgs, rels, err
}
// finalizeStorePackages converts package prototypes into concrete packages and derives
// dependency relationships between them from the parsed derivation information.
func (c storeCataloger) finalizeStorePackages(pkgPrototypes []nixStorePackage, drvs *derivations) ([]pkg.Package, []artifact.Relationship) {
	packagesByPath := make(map[string]pkg.Package)
	var packages []pkg.Package
	for _, prototype := range pkgPrototypes {
		if prototype.Location == nil {
			// without an evidence location there is nothing to anchor the package to
			continue
		}
		finalized := newNixStorePackage(prototype, c.name)
		packages = append(packages, finalized)
		packagesByPath[prototype.Location.RealPath] = finalized
	}

	var relationships []artifact.Relationship
	for storePath, owner := range packagesByPath {
		for _, depPath := range drvs.findDependencies(storePath) {
			dep, ok := packagesByPath[depPath]
			if !ok {
				// the dependency was not cataloged as a package, so no edge to record
				continue
			}
			relationships = append(relationships, artifact.Relationship{
				From: dep,
				To:   owner,
				Type: artifact.DependencyOfRelationship,
			})
		}
	}
	return packages, relationships
}
// findDerivationsFromStore parses all derivation (.drv) files found in the nix store and
// attaches the matching derivation to each package prototype (matched by output path).
// Parse failures are accumulated as unknown-coordinate errors rather than aborting.
func (c storeCataloger) findDerivationsFromStore(resolver file.Resolver, pkgPrototypes []nixStorePackage) (*derivations, error) {
	locs, err := resolver.FilesByGlob("**/nix/store/*.drv")
	if err != nil {
		return nil, fmt.Errorf("failed to find derivations: %w", err)
	}

	var errs error
	dvs := newDerivations()
	for _, loc := range locs {
		d, err := newDerivationFromLocation(loc, resolver)
		if err != nil {
			// record the failure against the specific file but keep processing the rest
			errs = unknown.Append(errs, loc.Coordinates, err)
			continue
		}
		if d == nil {
			// not a usable derivation; nothing to track
			continue
		}
		dvs.add(*d)
	}

	// attach derivations to the packages they belong to
	for i := range pkgPrototypes {
		p := &pkgPrototypes[i]
		if p.Location == nil {
			// defensive: mirrors the nil-location guard used by the other functions in
			// this cataloger and prevents a nil dereference below
			continue
		}
		p.derivationFile = dvs.findDerivationForOutputPath(p.Location.RealPath)
	}
	return dvs, errs
}
// findPackagesFromStore walks every resolved location, building package prototypes from
// top-level nix store paths (/nix/store/<hash>-<name>...) and, when configured, collecting
// the set of non-directory files that live under each store path.
func (c storeCataloger) findPackagesFromStore(ctx context.Context, resolver file.Resolver) ([]nixStorePackage, error) {
	// we want to search for only directories, which isn't possible via the stereoscope API, so we need to apply the glob manually on all returned paths
	var prototypes []nixStorePackage
	var filesByStorePath = make(map[string]*file.LocationSet)
	// derive a cancellable context so AllLocations is stopped if we return early
	ctx, cancel := context.WithCancel(ctx)
	defer cancel()
	for location := range resolver.AllLocations(ctx) {
		// does this path look like a top-level store entry (a package output root)?
		matchesStorePath, err := doublestar.Match("**/nix/store/*", location.RealPath)
		if err != nil {
			return nil, fmt.Errorf("failed to match nix store path: %w", err)
		}
		// non-empty only when the location is nested somewhere under a store path
		parentStorePath := findParentNixStorePath(location.RealPath)
		if c.config.CaptureOwnedFiles && parentStorePath != "" {
			fileInfo, err := resolver.FileMetadataByLocation(location)
			if err != nil {
				log.WithFields("path", location.RealPath).Trace("failed to get file metadata")
				continue
			}
			if fileInfo.IsDir() {
				// we should only add non-directories to the file set
				continue
			}
			if _, ok := filesByStorePath[parentStorePath]; !ok {
				s := file.NewLocationSet()
				filesByStorePath[parentStorePath] = &s
			}
			filesByStorePath[parentStorePath].Add(location)
		}
		if !matchesStorePath {
			continue
		}
		storePath := parseNixStorePath(location.RealPath)
		if storePath == nil || !storePath.isValidPackage() {
			continue
		}
		// NOTE(review): taking the address of the range variable is only safe with Go's
		// per-iteration loop variables (go >= 1.22 in go.mod) -- confirm the module version
		prototypes = append(prototypes, nixStorePackage{
			Location:     &location,
			nixStorePath: *storePath,
		})
	}
	// add file sets to packages
	for i := range prototypes {
		p := &prototypes[i]
		if p.Location == nil {
			log.WithFields("package", p.nixStorePath.Name).Debug("nix package has no evidence locations associated")
			continue
		}
		// the prototype's own real path is the store path key used when collecting files
		parentStorePath := p.Location.RealPath
		files, ok := filesByStorePath[parentStorePath]
		if !ok {
			log.WithFields("path", parentStorePath, "nix-store-path", parentStorePath).Debug("found a nix store file for a non-existent package")
			continue
		}
		p.Files = filePaths(files.ToSlice())
	}
	return prototypes, nil
}
// filePaths returns the real paths for the given locations, preserving input order.
// An empty input yields nil (matching the prior behavior of returning an un-appended slice).
func filePaths(files []file.Location) []string {
	if len(files) == 0 {
		return nil
	}
	// pre-size to avoid repeated growth copies
	paths := make([]string, 0, len(files))
	for _, f := range files {
		paths = append(paths, f.RealPath)
	}
	return paths
}

View File

@ -0,0 +1,183 @@
package nix
import (
"testing"
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest"
)
// TestStoreCataloger_Image exercises the store cataloger against a container image
// fixture, asserting both the packages parsed from /nix/store paths and the (partial)
// dependency relationships recovered from derivation files in the image.
func TestStoreCataloger_Image(t *testing.T) {
	tests := []struct {
		fixture  string   // image fixture name to build/load
		wantPkgs []string // expected package strings ("name @ version (store path)")
		wantRel  []string // expected relationship strings ("dep [dependency-of] pkg")
	}{
		{
			// $ nix-store -q --tree $(which jq)
			//
			// /nix/store/nzwfgsp28vgxv7n2gl5fxqkca9awh4dz-jq-1.6-bin3.4
			// ├───/nix/store/02mqs1by2vab9yzw0qc4j7463w78p3ps-glibc-2.37-8
			// │   ├───/nix/store/cw8fpl8r1x9rmaqj55fwbfnnrgw7b40k-libidn2-2.3.4
			// │   │   ├───/nix/store/h1ysk4vvw48winwmh38rvnsj0dlsz7c1-libunistring-1.1
			// │   │   │   └───/nix/store/h1ysk4vvw48winwmh38rvnsj0dlsz7c1-libunistring-1.1 [...]
			// │   │   └───/nix/store/cw8fpl8r1x9rmaqj55fwbfnnrgw7b40k-libidn2-2.3.4 [...]
			// │   ├───/nix/store/fmz62d844wf4blb11k21f4m0q6n6hdfp-xgcc-12.3.0-libgcc
			// │   └───/nix/store/02mqs1by2vab9yzw0qc4j7463w78p3ps-glibc-2.37-8 [...]
			// ├───/nix/store/mzj90j6m3c3a1vv8j9pl920f98i2yz9q-oniguruma-6.9.8-lib
			// │   ├───/nix/store/02mqs1by2vab9yzw0qc4j7463w78p3ps-glibc-2.37-8 [...]
			// │   └───/nix/store/mzj90j6m3c3a1vv8j9pl920f98i2yz9q-oniguruma-6.9.8-lib [...]
			// └───/nix/store/1x3s2v9wc9m302cspfqcn2iwar0b5w99-jq-1.6-lib
			//     ├───/nix/store/02mqs1by2vab9yzw0qc4j7463w78p3ps-glibc-2.37-8 [...]
			//     ├───/nix/store/mzj90j6m3c3a1vv8j9pl920f98i2yz9q-oniguruma-6.9.8-lib [...]
			//     └───/nix/store/1x3s2v9wc9m302cspfqcn2iwar0b5w99-jq-1.6-lib [...]
			fixture: "image-nixos-jq-pkg-store",
			wantPkgs: []string{
				"glibc @ 2.37-8 (/nix/store/aw2fw9ag10wr9pf0qk4nk5sxi0q0bn56-glibc-2.37-8)",
				"jq @ 1.6 (/nix/store/3xpzpmcqmzsdblkzqa9d9s6l302pnk4g-jq-1.6-lib)", // jq lib output
				"jq @ 1.6 (/nix/store/aj8lqifsyynq8iknivvxkrsqnblj7qzs-jq-1.6-bin)", // jq bin output
				"libidn2 @ 2.3.4 (/nix/store/k8ivghpggjrq1n49xp8sj116i4sh8lia-libidn2-2.3.4)",
				"libunistring @ 1.1 (/nix/store/s2gi8pfjszy6rq3ydx0z1vwbbskw994i-libunistring-1.1)",
				"oniguruma @ 6.9.8 (/nix/store/dpcyirvyblnflf7cp14dnr1420va93zx-oniguruma-6.9.8-lib)",
				"xgcc @ 12.3.0 (/nix/store/jbwb8d8l28lg9z0xzl784wyb9vlbwss6-xgcc-12.3.0-libgcc)",
			},
			wantRel: []string{
				// note: parsing all relationships from only derivations results in partial results! (this is why the DB cataloger exists)
				"libidn2 @ 2.3.4 (/nix/store/k8ivghpggjrq1n49xp8sj116i4sh8lia-libidn2-2.3.4) [dependency-of] glibc @ 2.37-8 (/nix/store/aw2fw9ag10wr9pf0qk4nk5sxi0q0bn56-glibc-2.37-8)",
				"libunistring @ 1.1 (/nix/store/s2gi8pfjszy6rq3ydx0z1vwbbskw994i-libunistring-1.1) [dependency-of] libidn2 @ 2.3.4 (/nix/store/k8ivghpggjrq1n49xp8sj116i4sh8lia-libidn2-2.3.4)",
				"xgcc @ 12.3.0 (/nix/store/jbwb8d8l28lg9z0xzl784wyb9vlbwss6-xgcc-12.3.0-libgcc) [dependency-of] glibc @ 2.37-8 (/nix/store/aw2fw9ag10wr9pf0qk4nk5sxi0q0bn56-glibc-2.37-8)",
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.fixture, func(t *testing.T) {
			c := NewStoreCataloger()
			pkgtest.NewCatalogTester().
				WithImageResolver(t, tt.fixture).
				ExpectsPackageStrings(tt.wantPkgs).
				ExpectsRelationshipStrings(tt.wantRel).
				TestCataloger(t, c)
		})
	}
}
// TestStoreCataloger_Directory exercises the store cataloger against a directory fixture,
// asserting the fully-populated package (including derivation metadata parsed from the
// sibling .drv file and the owned-files listing captured by the legacy cataloger).
func TestStoreCataloger_Directory(t *testing.T) {
	tests := []struct {
		fixture  string
		wantPkgs []pkg.Package
		wantRel  []artifact.Relationship
	}{
		{
			fixture: "test-fixtures/fixture-1",
			wantPkgs: []pkg.Package{
				{
					Name:    "glibc",
					Version: "2.34-210",
					PURL:    "pkg:nix/glibc@2.34-210?drvpath=5av396z8xa13jg89g9jws145c0k26k2x-glibc-2.34-210.drv&output=bin&outputhash=h0cnbmfcn93xm5dg2x27ixhag1cwndga",
					Locations: file.NewLocationSet(
						// the store path itself is primary evidence; the .drv file is supporting
						file.NewLocation("nix/store/h0cnbmfcn93xm5dg2x27ixhag1cwndga-glibc-2.34-210-bin").WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
						file.NewLocation("nix/store/5av396z8xa13jg89g9jws145c0k26k2x-glibc-2.34-210.drv").WithAnnotation(pkg.EvidenceAnnotationKey, pkg.SupportingEvidenceAnnotation),
					),
					FoundBy: "nix-store-cataloger",
					Type:    pkg.NixPkg,
					Metadata: pkg.NixStoreEntry{
						Path: "/nix/store/h0cnbmfcn93xm5dg2x27ixhag1cwndga-glibc-2.34-210-bin",
						Derivation: pkg.NixDerivation{
							Path:   "nix/store/5av396z8xa13jg89g9jws145c0k26k2x-glibc-2.34-210.drv",
							System: "aarch64-linux",
							InputDerivations: []pkg.NixDerivationReference{
								{
									Path:    "/nix/store/1zi0k7y01rhqr2gfqb42if0icswg65sj-locale-C.diff.drv",
									Outputs: []string{"out"},
								},
								{
									Path:    "/nix/store/45j86ggi8mlpfslcrgvjf7m6phia21fp-raw.drv",
									Outputs: []string{"out"},
								},
								{
									Path:    "/nix/store/4fnfsd9sc7bam6886hwyaprdsww66dg3-bison-3.8.2.drv",
									Outputs: []string{"out"},
								},
								{
									Path:    "/nix/store/51azdrrvcqrk2hbky7ryphlwd99yz25d-linux-headers-5.18.drv",
									Outputs: []string{"out"},
								},
								{
									Path:    "/nix/store/67s0qc21gyarmdwc181bqmjc3qzv8zkz-libidn2-2.3.2.drv",
									Outputs: []string{"out"},
								},
								{
									Path:    "/nix/store/9rhliwskh3mrrs5nfzgz0x6wrccyfg7k-bootstrap-stage0-glibc-bootstrap.drv",
									Outputs: []string{"out"},
								},
								{
									Path:    "/nix/store/cl1wcw2v1ifzjlkzi50h32a6lms9m25s-binutils-2.38.drv",
									Outputs: []string{"out"},
								},
								{
									Path:    "/nix/store/ghjc8bkfk8lh53z14mk2nk7h059zh7vx-python3-minimal-3.10.5.drv",
									Outputs: []string{"out"},
								},
								{
									Path:    "/nix/store/k3786wfzw637r7sylccdmm92saqp73d8-glibc-2.34.tar.xz.drv",
									Outputs: []string{"out"},
								},
								{
									Path:    "/nix/store/l5zr5m1agvvnic49fg6qc44g5fgj3la1-glibc-reinstate-prlimit64-fallback.patch?id=eab07e78b691ae7866267fc04d31c7c3ad6b0eeb.drv",
									Outputs: []string{"out"},
								},
								{
									Path:    "/nix/store/mf5kz6d01ab8h0rswzyr04mbcd6g5x9n-bootstrap-stage2-stdenv-linux.drv",
									Outputs: []string{"out"},
								},
								{
									Path:    "/nix/store/nd1zy67vp028707pbh466qhrfqh4cpq6-bootstrap-stage2-gcc-wrapper-.drv",
									Outputs: []string{"out"},
								},
								{
									Path:    "/nix/store/ra77ww7p2xx8jh8n4m9vmj6wc8wxijdb-bootstrap-tools.drv",
									Outputs: []string{"out"},
								},
								{
									Path:    "/nix/store/wlldapf5bg58kivw520ll5bw0fmlaid7-raw.drv",
									Outputs: []string{"out"},
								},
							},
							InputSources: []string{
								"/nix/store/001gp43bjqzx60cg345n2slzg7131za8-nix-nss-open-files.patch",
								"/nix/store/7kw224hdyxd7115lrqh9a4dv2x8msq2s-fix-x64-abi.patch",
								"/nix/store/8haph3ng4mgsqr6p4024vj8k6kg3mqc4-nix-locale-archive.patch",
								"/nix/store/95hp6hs9g73h93safadb8x6vajyqkv6q-0001-Revert-Remove-all-usage-of-BASH-or-BASH-in-installed.patch",
								"/nix/store/9krlzvny65gdc8s7kpb6lkx8cd02c25b-default-builder.sh",
								"/nix/store/b1w7zbvm39ff1i52iyjggyvw2rdxz104-dont-use-system-ld-so-cache.patch",
								"/nix/store/ikmqczy0y20n04a2b8qfflzwihv8139g-separate-debug-info.sh",
								"/nix/store/mgx19wbmgrh3rblbxhs6vi47sha15n11-2.34-master.patch.gz",
								"/nix/store/mnglr8rr7nl444h7p50ysyq8qd0fm1lm-dont-use-system-ld-so-preload.patch",
								"/nix/store/xkd50xxii6k7l1kmw4l5x6xzbhamcs87-allow-kernel-2.6.32.patch",
								"/nix/store/za0pg7fmysrcwrqcal26fnmzw6vycgdn-fix_path_attribute_in_getconf.patch",
							},
						},
						OutputHash: "h0cnbmfcn93xm5dg2x27ixhag1cwndga",
						Output:     "bin",
						Files: []string{
							// the legacy cataloger captures files by default
							"nix/store/h0cnbmfcn93xm5dg2x27ixhag1cwndga-glibc-2.34-210-bin/lib/glibc.so",
							"nix/store/h0cnbmfcn93xm5dg2x27ixhag1cwndga-glibc-2.34-210-bin/share/man/glibc.1",
						},
					},
				},
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.fixture, func(t *testing.T) {
			c := NewStoreCataloger()
			pkgtest.NewCatalogTester().
				FromDirectory(t, tt.fixture).
				Expects(tt.wantPkgs, tt.wantRel).
				TestCataloger(t, c)
		})
	}
}

View File

@ -0,0 +1,43 @@
# Builds a minimal test-fixture image: a pruned nix store containing only jq's runtime
# closure (plus all .drv files) and a nix DB (sqlite) pruned to match.
FROM --platform=linux/amd64 nixos/nix:2.28.2@sha256:4215204b5f65c7b756b26a6dd47a6af77f1d906e5edf62b184c95420a7dfa08f AS builder
# cross-platform builds cannot use bpf features
RUN mkdir -p /etc/nix && \
    echo 'filter-syscalls = false' > /etc/nix/nix.conf && \
    echo 'experimental-features = nix-command flakes' >> /etc/nix/nix.conf
# pin Nixpkgs to a specific commit (2023.11.17)
RUN mkdir -p /root/nix && \
    echo 'import (fetchTarball "https://github.com/NixOS/nixpkgs/archive/46688f8eb5.tar.gz") {}' > /root/nix/pinned-nixpkgs.nix
# install jq + sqlite using the pinned Nixpkgs
RUN nix-env -f /root/nix/pinned-nixpkgs.nix -iA jq sqlite
COPY clean_db.sql /tmp/clean_db.sql
# the first line ("path") serves as the header row for the sqlite ".import" below
RUN echo "path" > /tmp/required_paths.txt
RUN . /root/.nix-profile/etc/profile.d/nix.sh && \
    PAGER='' nix-store -q --requisites $(which jq) >> /tmp/required_paths.txt
# NOTE(review): a TEMP table only lives for one sqlite3 invocation; the ".import" in the
# next RUN will (re)create RequiredPaths as a regular table -- confirm this is intended
RUN sqlite3 /nix/var/nix/db/db.sqlite "CREATE TEMP TABLE IF NOT EXISTS RequiredPaths (path TEXT PRIMARY KEY);"
RUN sqlite3 /nix/var/nix/db/db.sqlite ".mode list" ".import /tmp/required_paths.txt RequiredPaths"
# prune the DB down to only the required paths (see clean_db.sql)
RUN sqlite3 /nix/var/nix/db/db.sqlite < /tmp/clean_db.sql
# create a directory with only the required dependencies + any derivations
RUN mkdir -p /nix-minimal && \
    for dep in $(nix-store -q --requisites $(which jq)); do \
        mkdir -p /nix-minimal$(dirname $dep) && \
        cp -a $dep /nix-minimal$dep; \
    done
# now add all the drv files from the store
RUN for drv in $(find /nix/store -name "*.drv"); do \
        mkdir -p /nix-minimal$(dirname $drv) && \
        cp -a $drv /nix-minimal$drv; \
    done
FROM scratch
# get packages + relationships from here
COPY --from=builder /nix/var/nix/db/db.sqlite /nix/var/nix/db/db.sqlite
# get files owned by each package here
COPY --from=builder /nix-minimal/nix/store /nix/store

View File

@ -0,0 +1,15 @@
-- Prunes the nix database down to only the store paths listed in RequiredPaths
-- (populated externally via a sqlite ".import"), shrinking the test fixture DB.

-- Delete DerivationOutputs where path is not in RequiredPaths
DELETE FROM DerivationOutputs
WHERE path NOT IN (SELECT path FROM RequiredPaths);

-- Delete ValidPaths where path is not in RequiredPaths
DELETE FROM ValidPaths
WHERE path NOT IN (SELECT path FROM RequiredPaths);

-- Delete reference edges where either endpoint does not resolve to a required path
DELETE FROM Refs
WHERE referrer NOT IN (SELECT id FROM ValidPaths WHERE path IN (SELECT path FROM RequiredPaths))
   OR reference NOT IN (SELECT id FROM ValidPaths WHERE path IN (SELECT path FROM RequiredPaths));

-- Run VACUUM to clean up the database file
VACUUM;

View File

@ -0,0 +1,30 @@
# Builds a minimal test-fixture image containing only jq's runtime closure plus all
# .drv files from the store (no nix DB -- this fixture exercises the store cataloger).
FROM --platform=linux/amd64 nixos/nix:2.28.2@sha256:4215204b5f65c7b756b26a6dd47a6af77f1d906e5edf62b184c95420a7dfa08f AS builder
# cross-platform builds cannot use bpf features
RUN mkdir -p /etc/nix && \
    echo 'filter-syscalls = false' > /etc/nix/nix.conf && \
    echo 'experimental-features = nix-command flakes' >> /etc/nix/nix.conf
# pin Nixpkgs to a specific commit (2023.11.17)
RUN mkdir -p /root/nix && \
    echo 'import (fetchTarball "https://github.com/NixOS/nixpkgs/archive/46688f8eb5.tar.gz") {}' > /root/nix/pinned-nixpkgs.nix
# install jq using the pinned Nixpkgs
RUN nix-env -f /root/nix/pinned-nixpkgs.nix -iA jq
# create a directory with only the required dependencies + any derivations
RUN mkdir -p /nix-minimal && \
    for dep in $(nix-store -q --requisites $(which jq)); do \
        mkdir -p /nix-minimal$(dirname $dep) && \
        cp -a $dep /nix-minimal$dep; \
    done
# now add all the drv files from the store
RUN for drv in $(find /nix/store -name "*.drv"); do \
        mkdir -p /nix-minimal$(dirname $drv) && \
        cp -a $drv /nix-minimal$drv; \
    done
FROM scratch
COPY --from=builder /nix-minimal/nix/store /nix/store

View File

@ -7,15 +7,43 @@ import (
)
// NixStoreEntry is syft metadata for a package discovered from a nix store path.
// Note: the diff-merge in this view left a duplicate OutputHash field declaration,
// which is invalid Go; this body declares each field exactly once.
type NixStoreEntry struct {
	// Path is the store path for this output
	Path string `mapstructure:"path" json:"path,omitempty"`

	// Output allows for optionally specifying the specific nix package output this package represents (for packages that support multiple outputs).
	// Note: the default output for a package is an empty string, so will not be present in the output.
	Output string `mapstructure:"output" json:"output,omitempty"`

	// OutputHash is the prefix of the nix store basename path
	OutputHash string `mapstructure:"outputHash" json:"outputHash"`

	// Derivation is any information about the derivation file that was used to build this package
	// (NOTE(review): json omitempty has no effect on struct-typed fields, so an empty
	// derivation still serializes as {} -- confirm whether that is intended)
	Derivation NixDerivation `mapstructure:"derivation" json:"derivation,omitempty"`

	// Files is a listing of files that are under the nix/store path for this package
	Files []string `mapstructure:"files" json:"files,omitempty"`
}

// NixDerivation describes the derivation (.drv) file used to build a package.
type NixDerivation struct {
	// Path is the path to the derivation file
	Path string `mapstructure:"path" json:"path,omitempty"`

	// System is the nix system string that this derivation was built for
	System string `mapstructure:"system" json:"system,omitempty"`

	// InputDerivations is a list of derivation paths that were used to build this package
	InputDerivations []NixDerivationReference `mapstructure:"inputDerivations" json:"inputDerivations,omitempty"`

	// InputSources is a list of source paths that were used to build this package
	InputSources []string `mapstructure:"inputSources" json:"inputSources,omitempty"`
}

// NixDerivationReference points to an input derivation and the outputs consumed from it.
type NixDerivationReference struct {
	// Path is the path to the derivation file
	Path string `mapstructure:"path" json:"path,omitempty"`

	// Outputs is a list of output names that were used to build this package
	Outputs []string `mapstructure:"outputs" json:"outputs,omitempty"`
}
func (m NixStoreEntry) OwnedFiles() (result []string) {