Mirror of https://github.com/anchore/syft.git (synced 2026-02-12 02:26:42 +01:00)
Detect ELF security features (#2443)
* add detection of ELF security features
* fix linting
* update json schema with file executable data
* update expected fixture when no tty present
* more detailed differ
* use json differ
* fix tests
* remove json schema addition
* regenerate json schema
* fix mimetype set ref

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>
parent: 3da679066e
commit: 3023a5a7bc
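At its core, this change inspects ELF binaries with Go's standard debug/elf package and records hardening features such as stack canaries, a non-executable stack (NX), and RELRO. As a rough standalone sketch of the same idea (this is not syft code; the command-line handling and the printed output are purely illustrative), the checks boil down to reading dynamic symbols and program headers:

package main

import (
    "debug/elf"
    "fmt"
    "log"
    "os"
)

func main() {
    if len(os.Args) < 2 {
        log.Fatal("usage: elfcheck <path-to-elf-binary>")
    }

    // open any ELF binary; the path is taken from the command line for illustration
    f, err := elf.Open(os.Args[1])
    if err != nil {
        log.Fatal(err)
    }
    defer f.Close()

    // stack canary: look for the glibc canary symbols in the dynamic symbol table
    canary := false
    if syms, err := f.DynamicSymbols(); err == nil {
        for _, s := range syms {
            if s.Name == "__stack_chk_fail" || s.Name == "__stack_chk_guard" {
                canary = true
                break
            }
        }
    }

    // NX: the GNU_STACK program header should not be marked executable
    nx := false
    for _, p := range f.Progs {
        if p.Type == elf.PT_GNU_STACK {
            nx = p.Flags&elf.PF_X == 0
        }
    }

    // RELRO (at least partial): presence of a GNU_RELRO segment
    relro := false
    for _, p := range f.Progs {
        if p.Type == elf.PT_GNU_RELRO {
            relro = true
        }
    }

    fmt.Printf("stack canary: %v, NX: %v, RELRO segment: %v\n", canary, nx, relro)
}

The real implementation added by this commit (syft/file/cataloger/executable/elf.go below) applies the same kind of probes, plus PIE/DSO, BIND_NOW, SafeStack, CFI, and fortify-source checks.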
.github/workflows/validations.yaml (vendored): 6 changes
@@ -36,6 +36,12 @@ jobs:
       - name: Bootstrap environment
         uses: ./.github/actions/bootstrap

+      - name: Restore file executable test-fixture cache
+        uses: actions/cache@704facf57e6136b1bc63b828d79edcd491f0ee84 #v3.3.2
+        with:
+          path: syft/file/cataloger/executable/test-fixtures/bin
+          key: ${{ runner.os }}-unit-file-executable-cache-${{ hashFiles( 'syft/file/cataloger/executable/test-fixtures/cache.fingerprint' ) }}
+
       - name: Restore Java test-fixture cache
         uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 #v4.0.0
         with:
@@ -266,6 +266,7 @@ tasks:
    desc: Generate test fixture fingerprints
    generates:
      - cmd/syft/internal/test/integration/test-fixtures/cache.fingerprint
+     - syft/file/cataloger/executable/test-fixtures/cache.fingerprint
      - syft/pkg/cataloger/binary/test-fixtures/cache.fingerprint
      - syft/pkg/cataloger/java/test-fixtures/java-builds/cache.fingerprint
      - syft/pkg/cataloger/golang/test-fixtures/archs/binaries.fingerprint
@@ -274,17 +275,19 @@ tasks:
      - test/install/cache.fingerprint
      - test/cli/test-fixtures/cache.fingerprint
    cmds:
+     # for EXECUTABLE unit test fixtures
+     - "cd syft/file/cataloger/executable/test-fixtures && make cache.fingerprint"
      # for IMAGE integration test fixtures
      - "cd cmd/syft/internal/test/integration/test-fixtures && make cache.fingerprint"
-     # for BINARY test fixtures
+     # for BINARY unit test fixtures
      - "cd syft/pkg/cataloger/binary/test-fixtures && make cache.fingerprint"
-     # for JAVA BUILD test fixtures
+     # for JAVA BUILD unit test fixtures
      - "cd syft/pkg/cataloger/java/test-fixtures/java-builds && make cache.fingerprint"
-     # for GO BINARY test fixtures
+     # for GO BINARY unit test fixtures
      - "cd syft/pkg/cataloger/golang/test-fixtures/archs && make binaries.fingerprint"
-     # for RPM test fixtures
+     # for RPM unit test fixtures
      - "cd syft/pkg/cataloger/redhat/test-fixtures && make rpms.fingerprint"
-     # for Kernel test fixtures
+     # for Kernel unit test fixtures
      - "cd syft/pkg/cataloger/kernel/test-fixtures && make cache.fingerprint"
      # for INSTALL test fixtures
      - "cd test/install && make cache.fingerprint"
@@ -294,6 +297,7 @@ tasks:
  fixtures:
    desc: Generate test fixtures
    cmds:
+     - "cd syft/file/cataloger/executable/test-fixtures && make"
      - "cd syft/pkg/cataloger/java/test-fixtures/java-builds && make"
      - "cd syft/pkg/cataloger/redhat/test-fixtures && make"
      - "cd syft/pkg/cataloger/binary/test-fixtures && make"
@@ -15,6 +15,7 @@ import (
     "github.com/anchore/syft/syft/cataloging"
     "github.com/anchore/syft/syft/cataloging/filecataloging"
     "github.com/anchore/syft/syft/cataloging/pkgcataloging"
+    "github.com/anchore/syft/syft/file/cataloger/executable"
     "github.com/anchore/syft/syft/file/cataloger/filecontent"
     "github.com/anchore/syft/syft/pkg/cataloger/binary"
     "github.com/anchore/syft/syft/pkg/cataloger/golang"
@@ -111,6 +112,10 @@ func (cfg Catalog) ToFilesConfig() filecataloging.Config {
             Globs:              cfg.File.Content.Globs,
             SkipFilesAboveSize: cfg.File.Content.SkipFilesAboveSize,
         },
+        Executable: executable.Config{
+            MIMETypes: executable.DefaultConfig().MIMETypes,
+            Globs:     cfg.File.Executable.Globs,
+        },
     }
 }

@@ -13,6 +13,7 @@ import (
 type fileConfig struct {
     Metadata   fileMetadata   `yaml:"metadata" json:"metadata" mapstructure:"metadata"`
     Content    fileContent    `yaml:"content" json:"content" mapstructure:"content"`
+    Executable fileExecutable `yaml:"executable" json:"executable" mapstructure:"executable"`
 }

 type fileMetadata struct {
@@ -25,6 +26,10 @@ type fileContent struct {
     Globs []string `yaml:"globs" json:"globs" mapstructure:"globs"`
 }

+type fileExecutable struct {
+    Globs []string `yaml:"globs" json:"globs" mapstructure:"globs"`
+}
+
 func defaultFileConfig() fileConfig {
     return fileConfig{
         Metadata: fileMetadata{
@@ -34,6 +39,9 @@ func defaultFileConfig() fileConfig {
         Content: fileContent{
             SkipFilesAboveSize: 250 * intFile.KB,
         },
+        Executable: fileExecutable{
+            Globs: nil,
+        },
     }
 }

@@ -3,5 +3,5 @@ package internal
 const (
     // JSONSchemaVersion is the current schema version output by the JSON encoder
     // This is roughly following the "SchemaVer" guidelines for versioning the JSON schema. Please see schema/json/README.md for details on how to increment.
-    JSONSchemaVersion = "16.0.0"
+    JSONSchemaVersion = "16.0.1"
 )
@@ -8,6 +8,7 @@ import (
     "github.com/anchore/syft/internal/sbomsync"
     "github.com/anchore/syft/syft/artifact"
     "github.com/anchore/syft/syft/file"
+    "github.com/anchore/syft/syft/file/cataloger/executable"
     "github.com/anchore/syft/syft/file/cataloger/filecontent"
     "github.com/anchore/syft/syft/file/cataloger/filedigest"
     "github.com/anchore/syft/syft/file/cataloger/filemetadata"
@@ -100,6 +101,31 @@ func NewFileContentCatalogerTask(cfg filecontent.Config) Task {
     return NewTask("file-content-cataloger", fn)
 }

+func NewExecutableCatalogerTask(selection file.Selection, cfg executable.Config) Task {
+    if selection == file.NoFilesSelection {
+        return nil
+    }
+
+    cat := executable.NewCataloger(cfg)
+
+    fn := func(ctx context.Context, resolver file.Resolver, builder sbomsync.Builder) error {
+        accessor := builder.(sbomsync.Accessor)
+
+        result, err := cat.Catalog(resolver)
+        if err != nil {
+            return err
+        }
+
+        accessor.WriteToSBOM(func(sbom *sbom.SBOM) {
+            sbom.Artifacts.Executables = result
+        })
+
+        return nil
+    }
+
+    return NewTask("file-executable-cataloger", fn)
+}
+
 // TODO: this should be replaced with a fix that allows passing a coordinate or location iterator to the cataloger
 // Today internal to both cataloger this functions differently: a slice of coordinates vs a channel of locations
 func coordinatesForSelection(selection file.Selection, accessor sbomsync.Accessor) ([]file.Coordinates, bool) {
schema/json/schema-16.0.1.json (new file, 2220 lines)
File diff suppressed because it is too large.
@@ -9,6 +9,7 @@ import (
     intFile "github.com/anchore/syft/internal/file"
     "github.com/anchore/syft/internal/log"
     "github.com/anchore/syft/syft/file"
+    "github.com/anchore/syft/syft/file/cataloger/executable"
     "github.com/anchore/syft/syft/file/cataloger/filecontent"
 )

@@ -16,6 +17,7 @@ type Config struct {
     Selection  file.Selection     `yaml:"selection" json:"selection" mapstructure:"selection"`
     Hashers    []crypto.Hash      `yaml:"hashers" json:"hashers" mapstructure:"hashers"`
     Content    filecontent.Config `yaml:"content" json:"content" mapstructure:"content"`
+    Executable executable.Config  `yaml:"executable" json:"executable" mapstructure:"executable"`
 }

 type configMarshaledForm struct {
@@ -33,6 +35,7 @@ func DefaultConfig() Config {
         Selection:  file.FilesOwnedByPackageSelection,
         Hashers:    hashers,
         Content:    filecontent.DefaultConfig(),
+        Executable: executable.DefaultConfig(),
     }
 }

@@ -185,6 +185,9 @@ func (c *CreateSBOMConfig) fileTasks() []task.Task {
     if t := task.NewFileContentCatalogerTask(c.Files.Content); t != nil {
         tsks = append(tsks, t)
     }
+    if t := task.NewExecutableCatalogerTask(c.Files.Selection, c.Files.Executable); t != nil {
+        tsks = append(tsks, t)
+    }

     return tsks
 }
@@ -88,7 +88,7 @@ func TestCreateSBOMConfig_makeTaskGroups(t *testing.T) {
             wantTaskNames: [][]string{
                 environmentCatalogerNames(),
                 pkgCatalogerNamesWithTagOrName(t, "image"),
-                fileCatalogerNames(true, true),
+                fileCatalogerNames(true, true, true),
                 relationshipCatalogerNames(),
             },
             wantManifest: &catalogerManifest{
@@ -106,7 +106,7 @@ func TestCreateSBOMConfig_makeTaskGroups(t *testing.T) {
             wantTaskNames: [][]string{
                 environmentCatalogerNames(),
                 pkgCatalogerNamesWithTagOrName(t, "directory"),
-                fileCatalogerNames(true, true),
+                fileCatalogerNames(true, true, true),
                 relationshipCatalogerNames(),
             },
             wantManifest: &catalogerManifest{
@@ -125,7 +125,7 @@ func TestCreateSBOMConfig_makeTaskGroups(t *testing.T) {
             wantTaskNames: [][]string{
                 environmentCatalogerNames(),
                 pkgCatalogerNamesWithTagOrName(t, "directory"),
-                fileCatalogerNames(true, true),
+                fileCatalogerNames(true, true, true),
                 relationshipCatalogerNames(),
             },
             wantManifest: &catalogerManifest{
@@ -143,7 +143,7 @@ func TestCreateSBOMConfig_makeTaskGroups(t *testing.T) {
             wantTaskNames: [][]string{
                 environmentCatalogerNames(),
                 pkgCatalogerNamesWithTagOrName(t, "image"),
-                fileCatalogerNames(false, true), // note: the digest cataloger is not included
+                fileCatalogerNames(false, true, true), // note: the digest cataloger is not included
                 relationshipCatalogerNames(),
             },
             wantManifest: &catalogerManifest{
@@ -181,7 +181,7 @@ func TestCreateSBOMConfig_makeTaskGroups(t *testing.T) {
                 // note: there is a single group of catalogers for pkgs and files
                 append(
                     pkgCatalogerNamesWithTagOrName(t, "image"),
-                    fileCatalogerNames(true, true)...,
+                    fileCatalogerNames(true, true, true)...,
                 ),
                 relationshipCatalogerNames(),
             },
@@ -202,7 +202,7 @@ func TestCreateSBOMConfig_makeTaskGroups(t *testing.T) {
             wantTaskNames: [][]string{
                 environmentCatalogerNames(),
                 addTo(pkgCatalogerNamesWithTagOrName(t, "image"), "persistent"),
-                fileCatalogerNames(true, true),
+                fileCatalogerNames(true, true, true),
                 relationshipCatalogerNames(),
             },
             wantManifest: &catalogerManifest{
@@ -222,7 +222,7 @@ func TestCreateSBOMConfig_makeTaskGroups(t *testing.T) {
             wantTaskNames: [][]string{
                 environmentCatalogerNames(),
                 addTo(pkgCatalogerNamesWithTagOrName(t, "directory"), "persistent"),
-                fileCatalogerNames(true, true),
+                fileCatalogerNames(true, true, true),
                 relationshipCatalogerNames(),
             },
             wantManifest: &catalogerManifest{
@@ -242,7 +242,7 @@ func TestCreateSBOMConfig_makeTaskGroups(t *testing.T) {
             wantTaskNames: [][]string{
                 environmentCatalogerNames(),
                 addTo(pkgIntersect("image", "javascript"), "persistent"),
-                fileCatalogerNames(true, true),
+                fileCatalogerNames(true, true, true),
                 relationshipCatalogerNames(),
             },
             wantManifest: &catalogerManifest{
@@ -263,7 +263,7 @@ func TestCreateSBOMConfig_makeTaskGroups(t *testing.T) {
             wantTaskNames: [][]string{
                 environmentCatalogerNames(),
                 addTo(pkgCatalogerNamesWithTagOrName(t, "image"), "user-provided"),
-                fileCatalogerNames(true, true),
+                fileCatalogerNames(true, true, true),
                 relationshipCatalogerNames(),
             },
             wantManifest: &catalogerManifest{
@@ -283,7 +283,7 @@ func TestCreateSBOMConfig_makeTaskGroups(t *testing.T) {
             wantTaskNames: [][]string{
                 environmentCatalogerNames(),
                 pkgCatalogerNamesWithTagOrName(t, "image"),
-                fileCatalogerNames(true, true),
+                fileCatalogerNames(true, true, true),
                 relationshipCatalogerNames(),
             },
             wantManifest: &catalogerManifest{
@@ -367,11 +367,14 @@ func pkgCatalogerNamesWithTagOrName(t *testing.T, token string) []string {
     return names
 }

-func fileCatalogerNames(digest, metadata bool) []string {
+func fileCatalogerNames(digest, metadata, executable bool) []string {
     var names []string
     if digest {
         names = append(names, "file-digest-cataloger")
     }
+    if executable {
+        names = append(names, "file-executable-cataloger")
+    }
     if metadata {
         names = append(names, "file-metadata-cataloger")
     }
syft/file/cataloger/executable/cataloger.go (new file, 234 lines)
@@ -0,0 +1,234 @@
package executable

import (
    "bytes"
    "debug/elf"
    "debug/macho"
    "encoding/binary"
    "fmt"

    "github.com/bmatcuk/doublestar/v4"
    "github.com/dustin/go-humanize"

    "github.com/anchore/syft/internal/bus"
    "github.com/anchore/syft/internal/log"
    "github.com/anchore/syft/internal/mimetype"
    "github.com/anchore/syft/syft/event/monitor"
    "github.com/anchore/syft/syft/file"
    "github.com/anchore/syft/syft/internal/unionreader"
)

type Config struct {
    MIMETypes []string `json:"mime-types" yaml:"mime-types" mapstructure:"mime-types"`
    Globs     []string `json:"globs" yaml:"globs" mapstructure:"globs"`
}

type Cataloger struct {
    config Config
}

func DefaultConfig() Config {
    return Config{
        MIMETypes: mimetype.ExecutableMIMETypeSet.List(),
        Globs:     nil,
    }
}

func NewCataloger(cfg Config) *Cataloger {
    return &Cataloger{
        config: cfg,
    }
}

func (i *Cataloger) Catalog(resolver file.Resolver) (map[file.Coordinates]file.Executable, error) {
    locs, err := resolver.FilesByMIMEType(i.config.MIMETypes...)
    if err != nil {
        return nil, fmt.Errorf("unable to get file locations for binaries: %w", err)
    }

    locs, err = filterByGlobs(locs, i.config.Globs)
    if err != nil {
        return nil, err
    }

    prog := catalogingProgress(int64(len(locs)))

    results := make(map[file.Coordinates]file.Executable)
    for _, loc := range locs {
        prog.AtomicStage.Set(loc.Path())

        reader, err := resolver.FileContentsByLocation(loc)
        if err != nil {
            // TODO: known-unknowns
            log.WithFields("error", err).Warnf("unable to get file contents for %q", loc.RealPath)
            continue
        }
        exec, err := processExecutable(loc, reader.(unionreader.UnionReader))
        if err != nil {
            log.WithFields("error", err).Warnf("unable to process executable %q", loc.RealPath)
        }
        if exec != nil {
            prog.Increment()
            results[loc.Coordinates] = *exec
        }
    }

    log.Debugf("executable cataloger processed %d files", len(results))

    prog.AtomicStage.Set(fmt.Sprintf("%s executables", humanize.Comma(prog.Current())))
    prog.SetCompleted()

    return results, nil
}

func catalogingProgress(locations int64) *monitor.CatalogerTaskProgress {
    info := monitor.GenericTask{
        Title: monitor.Title{
            Default: "Executables",
        },
        ParentID: monitor.TopLevelCatalogingTaskID,
    }

    return bus.StartCatalogerTask(info, locations, "")
}

func filterByGlobs(locs []file.Location, globs []string) ([]file.Location, error) {
    if len(globs) == 0 {
        return locs, nil
    }
    var filteredLocs []file.Location
    for _, loc := range locs {
        matches, err := locationMatchesGlob(loc, globs)
        if err != nil {
            return nil, err
        }
        if matches {
            filteredLocs = append(filteredLocs, loc)
        }
    }
    return filteredLocs, nil
}

func locationMatchesGlob(loc file.Location, globs []string) (bool, error) {
    for _, glob := range globs {
        for _, path := range []string{loc.RealPath, loc.AccessPath} {
            if path == "" {
                continue
            }
            matches, err := doublestar.Match(glob, path)
            if err != nil {
                return false, fmt.Errorf("unable to match glob %q to path %q: %w", glob, path, err)
            }
            if matches {
                return true, nil
            }
        }
    }
    return false, nil
}

func processExecutable(loc file.Location, reader unionreader.UnionReader) (*file.Executable, error) {
    data := file.Executable{}

    // determine the executable format

    format, err := findExecutableFormat(reader)
    if err != nil {
        return nil, fmt.Errorf("unable to determine executable kind: %w", err)
    }

    if format == "" {
        log.Debugf("unable to determine executable format for %q", loc.RealPath)
        return nil, nil
    }

    data.Format = format

    securityFeatures, err := findSecurityFeatures(format, reader)
    if err != nil {
        log.WithFields("error", err).Warnf("unable to determine security features for %q", loc.RealPath)
        return nil, nil
    }

    data.SecurityFeatures = securityFeatures

    return &data, nil
}

func findExecutableFormat(reader unionreader.UnionReader) (file.ExecutableFormat, error) {
    // read the first sector of the file
    buf := make([]byte, 512)
    n, err := reader.ReadAt(buf, 0)
    if err != nil {
        return "", fmt.Errorf("unable to read first sector of file: %w", err)
    }
    if n < 512 {
        return "", fmt.Errorf("unable to read enough bytes to determine executable format")
    }

    switch {
    case isMacho(buf):
        return file.MachO, nil
    case isPE(buf):
        return file.PE, nil
    case isELF(buf):
        return file.ELF, nil
    }

    return "", nil
}

func isMacho(by []byte) bool {
    // sourced from https://github.com/gabriel-vasile/mimetype/blob/02af149c0dfd1444d9256fc33c2012bb3153e1d2/internal/magic/binary.go#L44

    if classOrMachOFat(by) && by[7] < 20 {
        return true
    }

    if len(by) < 4 {
        return false
    }

    be := binary.BigEndian.Uint32(by)
    le := binary.LittleEndian.Uint32(by)

    return be == macho.Magic32 ||
        le == macho.Magic32 ||
        be == macho.Magic64 ||
        le == macho.Magic64
}

// Java bytecode and Mach-O binaries share the same magic number.
// More info here https://github.com/threatstack/libmagic/blob/master/magic/Magdir/cafebabe
func classOrMachOFat(in []byte) bool {
    // sourced from https://github.com/gabriel-vasile/mimetype/blob/02af149c0dfd1444d9256fc33c2012bb3153e1d2/internal/magic/binary.go#L44

    // There should be at least 8 bytes for both of them because the only way to
    // quickly distinguish them is by comparing byte at position 7
    if len(in) < 8 {
        return false
    }

    return bytes.HasPrefix(in, []byte{0xCA, 0xFE, 0xBA, 0xBE})
}

func isPE(by []byte) bool {
    return bytes.HasPrefix(by, []byte("MZ"))
}

func isELF(by []byte) bool {
    return bytes.HasPrefix(by, []byte(elf.ELFMAG))
}

func findSecurityFeatures(format file.ExecutableFormat, reader unionreader.UnionReader) (*file.ELFSecurityFeatures, error) {
    // TODO: add support for PE and MachO
    switch format { //nolint: gocritic
    case file.ELF:
        return findELFSecurityFeatures(reader) //nolint: gocritic
        // case file.PE:
        //	return findPESecurityFeatures(reader)
        // case file.MachO:
        //	return findMachOSecurityFeatures(reader)
    }
    return nil, fmt.Errorf("unsupported executable format: %q", format)
}
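The exported surface of this new package (Config, DefaultConfig, NewCataloger, Catalog) can be driven directly once a file.Resolver is in hand. A minimal usage sketch, assuming the resolver is obtained elsewhere and assuming the Coordinates.RealPath field from the existing syft codebase (neither of which is defined in this diff):

package example

import (
    "fmt"

    "github.com/anchore/syft/syft/file"
    "github.com/anchore/syft/syft/file/cataloger/executable"
)

// catalogExecutables is an illustrative helper (not part of this commit):
// it runs the new cataloger against whatever resolver is supplied and
// prints the detected format and RELRO classification per file.
func catalogExecutables(resolver file.Resolver) error {
    // the default config matches executables by MIME type, with no extra glob filtering
    cat := executable.NewCataloger(executable.DefaultConfig())

    results, err := cat.Catalog(resolver)
    if err != nil {
        return err
    }

    for coords, exec := range results {
        fmt.Printf("%s: format=%s", coords.RealPath, exec.Format)
        if exec.SecurityFeatures != nil {
            fmt.Printf(" relro=%s", exec.SecurityFeatures.RelocationReadOnly)
        }
        fmt.Println()
    }
    return nil
}

Within syft itself this wiring is done by NewExecutableCatalogerTask (shown earlier), which writes the results into sbom.Artifacts.Executables.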
syft/file/cataloger/executable/elf.go (new file, 221 lines)
@@ -0,0 +1,221 @@
package executable

import (
    "debug/elf"
    "regexp"
    "strings"

    "github.com/scylladb/go-set/strset"

    "github.com/anchore/syft/internal/log"
    "github.com/anchore/syft/syft/file"
    "github.com/anchore/syft/syft/internal/unionreader"
)

func findELFSecurityFeatures(reader unionreader.UnionReader) (*file.ELFSecurityFeatures, error) {
    f, err := elf.NewFile(reader)
    if err != nil {
        return nil, nil
    }

    features := file.ELFSecurityFeatures{
        SymbolTableStripped:           isElfSymbolTableStripped(f),
        StackCanary:                   checkElfStackCanary(f),
        NoExecutable:                  checkElfNXProtection(f),
        RelocationReadOnly:            checkElfRelROProtection(f),
        PositionIndependentExecutable: isELFPIE(f),
        DynamicSharedObject:           isELFDSO(f),
        LlvmSafeStack:                 checkLLVMSafeStack(f),
        LlvmControlFlowIntegrity:      checkLLVMControlFlowIntegrity(f),
        ClangFortifySource:            checkClangFortifySource(f),
    }

    return &features, nil
}

func isElfSymbolTableStripped(file *elf.File) bool {
    return file.Section(".symtab") == nil
}

func checkElfStackCanary(file *elf.File) *bool {
    return hasAnyDynamicSymbols(file, "__stack_chk_fail", "__stack_chk_guard")
}

func hasAnyDynamicSymbols(file *elf.File, symbolNames ...string) *bool {
    dynSyms, err := file.DynamicSymbols()
    if err != nil {
        // TODO: known-unknowns
        log.WithFields("error", err).Warn("unable to read dynamic symbols from elf file")
        return nil
    }

    nameSet := strset.New(symbolNames...)

    for _, sym := range dynSyms {
        if nameSet.Has(sym.Name) {
            return boolRef(true)
        }
    }
    return boolRef(false)
}

func boolRef(b bool) *bool {
    return &b
}

func checkElfNXProtection(file *elf.File) bool {
    // find the program headers until you find the GNU_STACK segment
    for _, prog := range file.Progs {
        if prog.Type == elf.PT_GNU_STACK {
            // check if the GNU_STACK segment is executable
            return prog.Flags&elf.PF_X == 0
        }
    }

    return false
}

func checkElfRelROProtection(f *elf.File) file.RelocationReadOnly {
    // background on relro https://www.redhat.com/en/blog/hardening-elf-binaries-using-relocation-read-only-relro
    hasRelro := false
    hasBindNow := hasBindNowDynTagOrFlag(f)

    for _, prog := range f.Progs {
        if prog.Type == elf.PT_GNU_RELRO {
            hasRelro = true
            break
        }
    }

    switch {
    case hasRelro && hasBindNow:
        return file.RelocationReadOnlyFull
    case hasRelro:
        return file.RelocationReadOnlyPartial
    default:
        return file.RelocationReadOnlyNone
    }
}

func hasBindNowDynTagOrFlag(f *elf.File) bool {
    if hasElfDynTag(f, elf.DT_BIND_NOW) {
        // support older binaries...
        return true
    }

    // "DT_BIND_NOW ... use has been superseded by the DF_BIND_NOW flag"
    // source: https://refspecs.linuxbase.org/elf/gabi4+/ch5.dynamic.html
    return hasElfDynFlag(f, elf.DF_BIND_NOW)
}

func hasElfDynFlag(f *elf.File, flag elf.DynFlag) bool {
    vals, err := f.DynValue(elf.DT_FLAGS)
    if err != nil {
        // TODO: known-unknowns
        log.WithFields("error", err).Warn("unable to read DT_FLAGS from elf file")
        return false
    }
    for _, val := range vals {
        if val&uint64(flag) != 0 {
            return true
        }
    }
    return false
}

func hasElfDynFlag1(f *elf.File, flag elf.DynFlag1) bool {
    vals, err := f.DynValue(elf.DT_FLAGS_1)
    if err != nil {
        // TODO: known-unknowns
        log.WithFields("error", err).Warn("unable to read DT_FLAGS_1 from elf file")
        return false
    }
    for _, val := range vals {
        if val&uint64(flag) != 0 {
            return true
        }
    }
    return false
}

func hasElfDynTag(f *elf.File, tag elf.DynTag) bool {
    // source https://github.com/golang/go/blob/9b4b3e5acca2dabe107fa2c3ed963097d78a4562/src/cmd/cgo/internal/testshared/shared_test.go#L280

    ds := f.SectionByType(elf.SHT_DYNAMIC)
    if ds == nil {
        return false
    }
    d, err := ds.Data()
    if err != nil {
        return false
    }

    for len(d) > 0 {
        var t elf.DynTag
        switch f.Class {
        case elf.ELFCLASS32:
            t = elf.DynTag(f.ByteOrder.Uint32(d[0:4]))
            d = d[8:]
        case elf.ELFCLASS64:
            t = elf.DynTag(f.ByteOrder.Uint64(d[0:8]))
            d = d[16:]
        }
        if t == tag {
            return true
        }
    }
    return false
}

func isELFPIE(f *elf.File) bool {
    // being a shared object is not sufficient to be a PIE, the explicit flag must be set also
    return isELFDSO(f) && hasElfDynFlag1(f, elf.DF_1_PIE)
}

func isELFDSO(f *elf.File) bool {
    return f.Type == elf.ET_DYN
}

func checkLLVMSafeStack(file *elf.File) *bool {
    // looking for the presence of https://github.com/microsoft/compiler-rt/blob/30b3b8cb5c9a0854f2f40f187c6f6773561a35f2/lib/safestack/safestack.cc#L207
    return hasAnyDynamicSymbols(file, "__safestack_init")
}

func checkLLVMControlFlowIntegrity(file *elf.File) *bool {
    // look for any symbols that are functions and end with ".cfi"
    dynSyms, err := file.Symbols()
    if err != nil {
        // TODO: known-unknowns
        log.WithFields("error", err).Trace("unable to read symbols from elf file")
        return nil
    }

    for _, sym := range dynSyms {
        if isFunction(sym) && strings.HasSuffix(sym.Name, ".cfi") {
            return boolRef(true)
        }
    }
    return boolRef(false)
}

func isFunction(sym elf.Symbol) bool {
    return elf.ST_TYPE(sym.Info) == elf.STT_FUNC
}

var fortifyPattern = regexp.MustCompile(`__\w+_chk@.+`)

func checkClangFortifySource(file *elf.File) *bool {
    dynSyms, err := file.Symbols()
    if err != nil {
        // TODO: known-unknowns
        log.WithFields("error", err).Trace("unable to read symbols from elf file")
        return nil
    }

    for _, sym := range dynSyms {
        if isFunction(sym) && fortifyPattern.MatchString(sym.Name) {
            return boolRef(true)
        }
    }
    return boolRef(false)
}
syft/file/cataloger/executable/elf_test.go (new file, 162 lines)
@@ -0,0 +1,162 @@
package executable

import (
    "os"
    "path/filepath"
    "testing"

    "github.com/google/go-cmp/cmp"
    "github.com/stretchr/testify/require"

    "github.com/anchore/syft/syft/file"
    "github.com/anchore/syft/syft/internal/unionreader"
)

func Test_findELFSecurityFeatures(t *testing.T) {

    readerForFixture := func(t *testing.T, fixture string) unionreader.UnionReader {
        t.Helper()
        f, err := os.Open(filepath.Join("test-fixtures", fixture))
        require.NoError(t, err)
        return f
    }

    tests := []struct {
        name         string
        fixture      string
        want         *file.ELFSecurityFeatures
        wantStripped bool
        wantErr      require.ErrorAssertionFunc
    }{
        {
            name:    "detect canary",
            fixture: "bin/with_canary",
            want: &file.ELFSecurityFeatures{
                StackCanary:              boolRef(true), // ! important !
                RelocationReadOnly:       file.RelocationReadOnlyNone,
                LlvmSafeStack:            boolRef(false),
                LlvmControlFlowIntegrity: boolRef(false),
                ClangFortifySource:       boolRef(false),
            },
        },
        {
            name:    "detect nx",
            fixture: "bin/with_nx",
            want: &file.ELFSecurityFeatures{
                StackCanary:              boolRef(false),
                NoExecutable:             true, // ! important !
                RelocationReadOnly:       file.RelocationReadOnlyNone,
                LlvmSafeStack:            boolRef(false),
                LlvmControlFlowIntegrity: boolRef(false),
                ClangFortifySource:       boolRef(false),
            },
        },
        {
            name:    "detect relro",
            fixture: "bin/with_relro",
            want: &file.ELFSecurityFeatures{
                StackCanary:              boolRef(false),
                RelocationReadOnly:       file.RelocationReadOnlyFull, // ! important !
                LlvmSafeStack:            boolRef(false),
                LlvmControlFlowIntegrity: boolRef(false),
                ClangFortifySource:       boolRef(false),
            },
        },
        {
            name:    "detect partial relro",
            fixture: "bin/with_partial_relro",
            want: &file.ELFSecurityFeatures{
                StackCanary:              boolRef(false),
                RelocationReadOnly:       file.RelocationReadOnlyPartial, // ! important !
                LlvmSafeStack:            boolRef(false),
                LlvmControlFlowIntegrity: boolRef(false),
                ClangFortifySource:       boolRef(false),
            },
        },
        {
            name:    "detect pie",
            fixture: "bin/with_pie",
            want: &file.ELFSecurityFeatures{
                StackCanary:                   boolRef(false),
                RelocationReadOnly:            file.RelocationReadOnlyNone,
                PositionIndependentExecutable: true, // ! important !
                DynamicSharedObject:           true, // ! important !
                LlvmSafeStack:                 boolRef(false),
                LlvmControlFlowIntegrity:      boolRef(false),
                ClangFortifySource:            boolRef(false),
            },
        },
        {
            name:    "detect dso",
            fixture: "bin/pie_false_positive.so",
            want: &file.ELFSecurityFeatures{
                StackCanary:                   boolRef(false),
                RelocationReadOnly:            file.RelocationReadOnlyPartial,
                NoExecutable:                  true,
                PositionIndependentExecutable: false, // ! important !
                DynamicSharedObject:           true,  // ! important !
                LlvmSafeStack:                 boolRef(false),
                LlvmControlFlowIntegrity:      boolRef(false),
                ClangFortifySource:            boolRef(false),
            },
        },
        {
            name:    "detect safestack",
            fixture: "bin/with_safestack",
            want: &file.ELFSecurityFeatures{
                NoExecutable:                  true,
                StackCanary:                   boolRef(false),
                RelocationReadOnly:            file.RelocationReadOnlyPartial,
                PositionIndependentExecutable: false,
                DynamicSharedObject:           false,
                LlvmSafeStack:                 boolRef(true), // ! important !
                LlvmControlFlowIntegrity:      boolRef(false),
                ClangFortifySource:            boolRef(false),
            },
        },
        {
            name:    "detect cfi",
            fixture: "bin/with_cfi",
            want: &file.ELFSecurityFeatures{
                NoExecutable:                  true,
                StackCanary:                   boolRef(false),
                RelocationReadOnly:            file.RelocationReadOnlyPartial,
                PositionIndependentExecutable: false,
                DynamicSharedObject:           false,
                LlvmSafeStack:                 boolRef(false),
                LlvmControlFlowIntegrity:      boolRef(true), // ! important !
                ClangFortifySource:            boolRef(false),
            },
        },
        {
            name:    "detect fortify",
            fixture: "bin/with_fortify",
            want: &file.ELFSecurityFeatures{
                NoExecutable:                  true,
                StackCanary:                   boolRef(false),
                RelocationReadOnly:            file.RelocationReadOnlyPartial,
                PositionIndependentExecutable: false,
                DynamicSharedObject:           false,
                LlvmSafeStack:                 boolRef(false),
                LlvmControlFlowIntegrity:      boolRef(false),
                ClangFortifySource:            boolRef(true), // ! important !
            },
        },
    }
    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            if tt.wantErr == nil {
                tt.wantErr = require.NoError
            }
            got, err := findELFSecurityFeatures(readerForFixture(t, tt.fixture))
            tt.wantErr(t, err)
            if err != nil {
                return
            }

            if d := cmp.Diff(tt.want, got); d != "" {
                t.Errorf("findELFSecurityFeatures() mismatch (-want +got):\n%s", d)
            }
        })
    }
}
syft/file/cataloger/executable/test-fixtures/.gitignore (vendored, new file, 3 lines)
@@ -0,0 +1,3 @@
bin
actual_verify
Dockerfile.sha256
syft/file/cataloger/executable/test-fixtures/Dockerfile (new file, 16 lines)
@@ -0,0 +1,16 @@
FROM gcc:9.5.0

RUN apt update -y && apt install -y clang cmake git make m4 pkg-config zlib1g-dev

## from https://github.com/runsafesecurity/selfrando/blob/tb-v0.4.2/docs/linux-build-instructions.md
#RUN git clone https://github.com/runsafesecurity/selfrando.git && \
#    export SR_ARCH=`uname -m | sed s/i686/x86/` && \
#    cd selfrando && \
#    cmake . -DSR_DEBUG_LEVEL=env -DCMAKE_BUILD_TYPE=Release -DSR_BUILD_LIBELF=1 \
#      -DSR_ARCH=$SR_ARCH -DSR_LOG=console \
#      -DSR_FORCE_INPLACE=1 -G "Unix Makefiles" \
#      -DCMAKE_INSTALL_PREFIX:PATH=$PWD/out/$SR_ARCH
#RUN cd selfrando && make -j`nprocs --all`
#RUN cd selfrando && make install

RUN curl -o /bin/checksec https://raw.githubusercontent.com/slimm609/checksec.sh/2.6.0/checksec && chmod +x /bin/checksec
syft/file/cataloger/executable/test-fixtures/Makefile (new file, 34 lines)
@@ -0,0 +1,34 @@
BIN=./bin
TOOL_IMAGE=localhost/syft-bin-build-tools:latest
VERIFY_FILE=actual_verify

all: build verify

tools-check:
	@sha256sum -c Dockerfile.sha256 || (echo "Tools Dockerfile has changed" && exit 1)

# for selfrando...
# docker buildx build --platform linux/amd64 -t $(TOOL_IMAGE) .

tools:
	@(docker inspect $(TOOL_IMAGE) > /dev/null && make tools-check) || (docker build -t $(TOOL_IMAGE) . && sha256sum Dockerfile > Dockerfile.sha256)

build: tools
	mkdir -p $(BIN)
	docker run -i -v $(shell pwd):/mount -w /mount/project $(TOOL_IMAGE) make

verify: tools
	@rm -f $(VERIFY_FILE)
	docker run -i -v $(shell pwd):/mount -w /mount/project $(TOOL_IMAGE) make verify > $(VERIFY_FILE)
	@python ./differ expected_verify $(VERIFY_FILE)

debug:
	docker run -i --rm -v $(shell pwd):/mount -w /mount/project $(TOOL_IMAGE) bash

cache.fingerprint:
	@find project Dockerfile Makefile -type f -exec md5sum {} + | awk '{print $1}' | sort | tee cache.fingerprint

clean:
	rm -f $(BIN)/*

.PHONY: build verify debug build-image build-bins clean dockerfile-check cache.fingerprint
@@ -0,0 +1,53 @@
import json
import sys


def load_json(file_path):
    with open(file_path, 'r') as file:
        return json.load(file)


def diff_json(json1, json2):
    differences_found = False

    for key in json1:
        if key not in json2:
            print(f'missing key "{key}"')
            continue

        differences = []
        for subkey in json1[key]:
            if subkey not in json2[key]:
                differences.append(f' - "{subkey}": expected "{json1[key][subkey]}" but was MISSING')
                continue

            if subkey in json2[key] and json1[key][subkey] != json2[key][subkey]:
                differences.append(f' - "{subkey}": expected "{json1[key][subkey]}" got "{json2[key][subkey]}"')

        if differences:
            differences_found = True
            print(f'{key}')
            for diff in differences:
                print(diff)
            print()

    return differences_found


def main():
    if len(sys.argv) != 3:
        print("Usage: python ./differ file1.json file2.json")
        sys.exit(1)

    json1 = load_json(sys.argv[1])
    json2 = load_json(sys.argv[2])

    if diff_json(json1, json2):
        print("FAIL: unexpected security feature differences")
        sys.exit(1)
    else:
        print("PASS: all security features accounted for")
        sys.exit(0)


main()
@@ -0,0 +1 @@
{ "dir": { "name":"../bin" }, "../bin/no_protection": { "relro":"no","canary":"no","nx":"no","pie":"no","clangcfi":"no","safestack":"no","rpath":"no","runpath":"yes","symbols":"yes","fortify_source":"no","fortified":"0","fortify-able":"2", "filename":"../bin/no_protection"}, "../bin/with_nx": { "relro":"no","canary":"no","nx":"yes","pie":"no","clangcfi":"no","safestack":"no","rpath":"no","runpath":"no","symbols":"yes","fortify_source":"no","fortified":"0","fortify-able":"2", "filename":"../bin/with_nx"}, "../bin/with_rpath": { "relro":"no","canary":"no","nx":"no","pie":"no","clangcfi":"no","safestack":"no","rpath":"yes","runpath":"no","symbols":"yes","fortify_source":"no","fortified":"0","fortify-able":"2", "filename":"../bin/with_rpath"}, "../bin/with_canary": { "relro":"no","canary":"yes","nx":"no","pie":"no","clangcfi":"no","safestack":"no","rpath":"no","runpath":"no","symbols":"yes","fortify_source":"no","fortified":"0","fortify-able":"2", "filename":"../bin/with_canary"}, "../bin/with_relro": { "relro":"full","canary":"no","nx":"no","pie":"no","clangcfi":"no","safestack":"no","rpath":"no","runpath":"no","symbols":"yes","fortify_source":"no","fortified":"0","fortify-able":"2", "filename":"../bin/with_relro"}, "../bin/with_pie": { "relro":"no","canary":"no","nx":"no","pie":"yes","clangcfi":"no","safestack":"no","rpath":"no","runpath":"no","symbols":"yes","fortify_source":"no","fortified":"0","fortify-able":"2", "filename":"../bin/with_pie"}, "../bin/with_runpath": { "relro":"no","canary":"no","nx":"no","pie":"no","clangcfi":"no","safestack":"no","rpath":"no","runpath":"yes","symbols":"yes","fortify_source":"no","fortified":"0","fortify-able":"2", "filename":"../bin/with_runpath"}, "../bin/with_safestack": { "relro":"partial","canary":"no","nx":"yes","pie":"no","clangcfi":"no","safestack":"yes","rpath":"no","runpath":"no","symbols":"yes","fortify_source":"no","fortified":"0","fortify-able":"3", "filename":"../bin/with_safestack"}, "../bin/pie_false_positive.so": { "relro":"partial","canary":"no","nx":"yes","pie":"dso","clangcfi":"no","safestack":"no","rpath":"no","runpath":"no","symbols":"yes","fortify_source":"no","fortified":"0","fortify-able":"0", "filename":"../bin/pie_false_positive.so"}, "../bin/with_cfi": { "relro":"partial","canary":"no","nx":"yes","pie":"no","clangcfi":"yes","safestack":"no","rpath":"no","runpath":"no","symbols":"yes","fortify_source":"no","fortified":"0","fortify-able":"2", "filename":"../bin/with_cfi"}, "../bin/with_partial_relro": { "relro":"partial","canary":"no","nx":"no","pie":"no","clangcfi":"no","safestack":"no","rpath":"no","runpath":"no","symbols":"yes","fortify_source":"no","fortified":"0","fortify-able":"2", "filename":"../bin/with_partial_relro"}, "../bin/protected": { "relro":"full","canary":"yes","nx":"yes","pie":"yes","clangcfi":"no","safestack":"no","rpath":"no","runpath":"no","symbols":"yes","fortify_source":"no","fortified":"0","fortify-able":"2", "filename":"../bin/protected"}, "../bin/with_fortify": { "relro":"partial","canary":"no","nx":"yes","pie":"no","clangcfi":"no","safestack":"no","rpath":"no","runpath":"no","symbols":"yes","fortify_source":"yes","fortified":"1","fortify-able":"2", "filename":"../bin/with_fortify"}}
@@ -0,0 +1,93 @@
### GCC Options ############################################
CANARY := -fstack-protector
NO_CANARY := -fno-stack-protector

SHARED_OBJ := -shared

RELRO := -z relro -z now
PARTIAL_RELRO := -z relro
NO_RELRO := -z norelro

NX := -z noexecstack
NO_NX := -z execstack

PIE := -fpic -pie
NO_PIE := -no-pie

# deprecated
RPATH := -Wl,--disable-new-dtags,-rpath,./libs

# replaces RPATH (thus us mutually exclusive with it)
RUNPATH := -Wl,-rpath,./libs

GCCFLAGS := -g

### Clang Options ############################################

SAFE_STACK := -fsanitize=safe-stack

CFI := -flto -fvisibility=hidden -fsanitize=cfi

FORTIFY := -O2 -D_FORTIFY_SOURCE=2

### Common Options ############################################

SRC := main.c
LIB_SRC := lib.c
BIN := ../bin

BINS := $(BIN)/no_protection $(BIN)/with_nx $(BIN)/pie_false_positive.so $(BIN)/with_pie $(BIN)/with_canary $(BIN)/with_relro $(BIN)/with_partial_relro $(BIN)/with_rpath $(BIN)/with_runpath $(BIN)/with_safestack $(BIN)/with_cfi $(BIN)/with_fortify $(BIN)/protected
#.PHONY: verify $(BIN)/no_protection $(BIN)/with_nx $(BIN)/pie_false_positive.so $(BIN)/with_pie $(BIN)/with_canary $(BIN)/with_relro $(BIN)/with_partial_relro $(BIN)/with_rpath $(BIN)/with_runpath $(BIN)/with_safestack $(BIN)/with_cfi $(BIN)/with_fortify $(BIN)/protected
.PHONY: verify clean all

all: $(BINS)


$(BIN)/no_protection : $(SRC)
	gcc $< -o $@ $(GCCFLAGS) $(NO_CANARY) $(NO_NX) $(NO_RELRO) $(NO_PIE) $(RUNPATH)

$(BIN)/with_nx : $(SRC)
	gcc $< -o $@ $(GCCFLAGS) $(NO_CANARY) $(NX) $(NO_RELRO) $(NO_PIE)

$(BIN)/pie_false_positive.so: $(LIB_SRC)
	gcc $< -c -Wall -Werror -fpic $(LIB_SRC)
	gcc -shared -o $@ lib.o ; rm lib.o

$(BIN)/with_pie: $(SRC)
	gcc $< -o $@ $(GCCFLAGS) $(NO_CANARY) $(NO_NX) $(NO_RELRO) $(PIE)

$(BIN)/with_canary: $(SRC)
	gcc $< -o $@ $(GCCFLAGS) $(CANARY) $(NO_NX) $(NO_RELRO) $(NO_PIE)

$(BIN)/with_relro: $(SRC)
	gcc $< -o $@ $(GCCFLAGS) $(NO_CANARY) $(NO_NX) $(RELRO) $(NO_PIE)

$(BIN)/with_partial_relro: $(SRC)
	gcc $< -o $@ $(GCCFLAGS) $(NO_CANARY) $(NO_NX) $(PARTIAL_RELRO) $(NO_PIE)

$(BIN)/with_rpath: $(SRC)
	gcc $< -o $@ $(GCCFLAGS) $(NO_CANARY) $(NO_NX) $(NO_RELRO) $(NO_PIE) $(RPATH)

$(BIN)/with_runpath: $(SRC)
	gcc $< -o $@ $(GCCFLAGS) $(NO_CANARY) $(NO_NX) $(NO_RELRO) $(NO_PIE) $(RUNPATH)

$(BIN)/with_safestack: $(SRC)
	clang $< -o $@ $(SAFE_STACK)

$(BIN)/with_cfi: $(SRC)
	clang $< -o $@ $(CFI)

$(BIN)/with_fortify: $(SRC)
	clang $< -o $@ $(FORTIFY)

#$(BIN)/with_selfrando: $(SRC)
#	srenv gcc $< -o $@ $(GCCFLAGS) $(NO_CANARY) $(NO_NX) $(NO_RELRO) $(NO_PIE)

$(BIN)/protected: $(SRC)
	gcc $< -o $@ $(GCCFLAGS) $(CANARY) $(NX) $(RELRO) $(PIE)

verify:
	@/bin/checksec --dir=$(BIN) --extended --output=json

clean:
	rm -rf $(BINS)
@@ -0,0 +1,6 @@
#include <stdio.h>

void foo(void)
{
    puts("Share me!");
}
@@ -0,0 +1,6 @@
#ifndef foo_h__
#define foo_h__

extern void foo(void);

#endif // foo_h__
syft/file/cataloger/executable/test-fixtures/project/main.c (new file, 106 lines)
@@ -0,0 +1,106 @@
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>

// source: https://github.com/trailofbits/clang-cfi-showcase/blob/master/cfi_icall.c

typedef int (*int_arg_fn)(int);
typedef int (*float_arg_fn)(float);

static int int_arg(int arg) {
    printf("In %s: (%d)\n", __FUNCTION__, arg);
    return 0;
}

static int float_arg(float arg) {
    printf("CFI should protect transfer to here\n");
    printf("In %s: (%f)\n", __FUNCTION__, (double)arg);
    return 0;
}

static int bad_int_arg(int arg) {
    printf("CFI will not protect transfer to here\n");
    printf("In %s: (%d)\n", __FUNCTION__, arg);
    return 0;
}

static int not_entry_point(int arg) {
    // nop sled for x86 / x86-64
    // these instructions act as a buffer
    // for an indirect control flow transfer to skip
    // a valid function entry point, but continue
    // to execute normal code
    __asm__ volatile (
        "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n"
        "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n"
        "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n"
        "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n"
        "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n"
        "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n"
        "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n"
        "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n" "nop\n"
    );
    printf("CFI ensures control flow only transfers to potentially valid destinations\n");
    printf("In %s: (%d)\n", __FUNCTION__, arg);
    // need to exit or the program will segfault anyway,
    // since the indirect call skipped the function preamble
    exit(arg);
}

struct foo {
    int_arg_fn int_funcs[1];
    int_arg_fn bad_int_funcs[1];
    float_arg_fn float_funcs[1];
    int_arg_fn not_entries[1];
};

// the struct aligns the function pointer arrays
// so indexing past the end will reliably
// call working function pointers
static struct foo f = {
    .int_funcs = {int_arg},
    .bad_int_funcs = {bad_int_arg},
    .float_funcs = {float_arg},
    .not_entries = {(int_arg_fn)((uintptr_t)(not_entry_point)+0x20)}
};

void simple1() {
    char buf[16];
    fgets(buf, sizeof(buf), stdin);
    printf(buf);
}

void simple2() {
    char buf[16];
    scanf("%s", buf);
}


int main(int argc, char **argv) {
    if(argc != 2) {
        printf("Usage: %s <option>\n", argv[0]);
        printf("Option values:\n");
        printf("\t0\tCall correct function\n");
        printf("\t1\tCall the wrong function but with the same signature\n");
        printf("\t2\tCall a float function with an int function signature\n");
        printf("\t3\tCall into the middle of a function\n");
        printf("\n");
        printf("\tAll other options are undefined, but should be caught by CFI :)\n");
        printf("\n\n");
        printf("Here are some pointers so clang doesn't optimize away members of `struct foo f`:\n");
        printf("\tint_funcs: %p\n", (void*)f.int_funcs);
        printf("\tbad_int_funcs: %p\n", (void*)f.bad_int_funcs);
        printf("\tfloat_funcs: %p\n", (void*)f.float_funcs);
        printf("\tnot_entries: %p\n", (void*)f.not_entries);
        return 1;
    }

    simple1();
    simple2();

    printf("Calling a function:\n");

    int idx = argv[1][0] - '0';

    return f.int_funcs[idx](idx);
}
51
syft/file/executable.go
Normal file
@@ -0,0 +1,51 @@
package file

type (
	ExecutableFormat   string
	RelocationReadOnly string
)

const (
	ELF   ExecutableFormat = "elf"
	MachO ExecutableFormat = "macho"
	PE    ExecutableFormat = "pe"

	RelocationReadOnlyNone    RelocationReadOnly = "none"
	RelocationReadOnlyPartial RelocationReadOnly = "partial"
	RelocationReadOnlyFull    RelocationReadOnly = "full"
)

type Executable struct {
	// Format denotes either ELF, Mach-O, or PE
	Format ExecutableFormat `json:"format" yaml:"format" mapstructure:"format"`

	SecurityFeatures *ELFSecurityFeatures `json:"elfSecurityFeatures,omitempty" yaml:"elfSecurityFeatures" mapstructure:"elfSecurityFeatures"`
}

type ELFSecurityFeatures struct {
	SymbolTableStripped bool `json:"symbolTableStripped" yaml:"symbolTableStripped" mapstructure:"symbolTableStripped"`

	// classic protections

	StackCanary                   *bool              `json:"stackCanary,omitempty" yaml:"stackCanary" mapstructure:"stackCanary"`
	NoExecutable                  bool               `json:"nx" yaml:"nx" mapstructure:"nx"`
	RelocationReadOnly            RelocationReadOnly `json:"relRO" yaml:"relRO" mapstructure:"relRO"`
	PositionIndependentExecutable bool               `json:"pie" yaml:"pie" mapstructure:"pie"`
	DynamicSharedObject           bool               `json:"dso" yaml:"dso" mapstructure:"dso"`

	// LlvmSafeStack represents a compiler-based security mechanism that separates the stack into a safe stack for storing return addresses and other critical data, and an unsafe stack for everything else, to mitigate stack-based memory corruption errors
	// see https://clang.llvm.org/docs/SafeStack.html
	LlvmSafeStack *bool `json:"safeStack,omitempty" yaml:"safeStack" mapstructure:"safeStack"`

	// ControlFlowIntegrity represents runtime checks to ensure a program's control flow adheres to the legal paths determined at compile time, thus protecting against various types of control-flow hijacking attacks
	// see https://clang.llvm.org/docs/ControlFlowIntegrity.html
	LlvmControlFlowIntegrity *bool `json:"cfi,omitempty" yaml:"cfi" mapstructure:"cfi"`

	// ClangFortifySource is a broad suite of extensions to libc aimed at catching misuses of common library functions
	// see https://android.googlesource.com/platform//bionic/+/d192dbecf0b2a371eb127c0871f77a9caf81c4d2/docs/clang_fortify_anatomy.md
	ClangFortifySource *bool `json:"fortify,omitempty" yaml:"fortify" mapstructure:"fortify"`

	//// Selfrando provides function order shuffling to defend against ROP and other types of code reuse
	//// see https://github.com/runsafesecurity/selfrando
	// Selfrando *bool `json:"selfrando,omitempty" yaml:"selfrando" mapstructure:"selfrando"`
}
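Reviewer note: the field comments above describe each protection at a high level. The snippet below is a minimal, illustrative sketch (not the cataloger implementation in this change) of how a few of these signals can be read from an ELF binary with Go's standard debug/elf package; the file name, heuristics, and output format are assumptions for demonstration only.

package main

import (
	"debug/elf"
	"fmt"
	"log"
	"os"
	"strings"
)

func main() {
	// usage (hypothetical): go run probe.go ./bin/protected
	f, err := elf.Open(os.Args[1])
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	var nx, relro bool
	for _, p := range f.Progs {
		switch p.Type {
		case elf.PT_GNU_STACK:
			// a PT_GNU_STACK segment without the execute flag implies a non-executable stack
			nx = p.Flags&elf.PF_X == 0
		case elf.PT_GNU_RELRO:
			// a PT_GNU_RELRO segment indicates at least partial RELRO
			relro = true
		}
	}

	// a dynamic symbol referencing __stack_chk_fail suggests stack canaries are present
	canary := false
	if syms, err := f.DynamicSymbols(); err == nil {
		for _, s := range syms {
			if strings.HasPrefix(s.Name, "__stack_chk_fail") {
				canary = true
				break
			}
		}
	}

	fmt.Printf("nx=%v relro=%v stackCanary=%v dso=%v\n", nx, relro, canary, f.Type == elf.ET_DYN)
}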
@@ -221,6 +221,22 @@ func Test_encodeDecodeFileMetadata(t *testing.T) {
 				},
 			},
 		},
+		Executables: map[file.Coordinates]file.Executable{
+			c: {
+				Format: file.ELF,
+				SecurityFeatures: &file.ELFSecurityFeatures{
+					SymbolTableStripped:           false,
+					StackCanary:                   boolRef(true),
+					NoExecutable:                  false,
+					RelocationReadOnly:            "partial",
+					PositionIndependentExecutable: false,
+					DynamicSharedObject:           false,
+					LlvmSafeStack:                 boolRef(false),
+					LlvmControlFlowIntegrity:      boolRef(true),
+					ClangFortifySource:            boolRef(true),
+				},
+			},
+		},
 		LinuxDistribution: &linux.Release{
 			PrettyName: "some os",
 			Name:       "os",
@@ -12,6 +12,7 @@ type File struct {
 	Contents   string           `json:"contents,omitempty"`
 	Digests    []file.Digest    `json:"digests,omitempty"`
 	Licenses   []FileLicense    `json:"licenses,omitempty"`
+	Executable *file.Executable `json:"executable,omitempty"`
 }
 
 type FileMetadataEntry struct {
@@ -119,6 +119,11 @@ func toFile(s sbom.SBOM) []model.File {
 		})
 	}
 
+	var executable *file.Executable
+	if exec, exists := artifacts.Executables[coordinates]; exists {
+		executable = &exec
+	}
+
 	results = append(results, model.File{
 		ID:       string(coordinates.ID()),
 		Location: coordinates,
@@ -126,6 +131,7 @@ func toFile(s sbom.SBOM) []model.File {
 		Digests:    digests,
 		Contents:   contents,
 		Licenses:   licenses,
+		Executable: executable,
 	})
 }
 
@@ -35,6 +35,7 @@ func toSyftModel(doc model.Document) *sbom.SBOM {
 			FileDigests:       fileArtifacts.FileDigests,
 			FileContents:      fileArtifacts.FileContents,
 			FileLicenses:      fileArtifacts.FileLicenses,
+			Executables:       fileArtifacts.Executables,
 			LinuxDistribution: toSyftLinuxRelease(doc.Distro),
 		},
 		Source: *toSyftSourceData(doc.Source),
@@ -69,6 +70,7 @@ func toSyftFiles(files []model.File) sbom.Artifacts {
 		FileDigests:  make(map[file.Coordinates][]file.Digest),
 		FileContents: make(map[file.Coordinates]string),
 		FileLicenses: make(map[file.Coordinates][]file.License),
+		Executables:  make(map[file.Coordinates]file.Executable),
 	}
 
 	for _, f := range files {
@@ -124,6 +126,10 @@ func toSyftFiles(files []model.File) sbom.Artifacts {
 				LicenseEvidence: evidence,
 			})
 		}
+
+		if f.Executable != nil {
+			ret.Executables[coord] = *f.Executable
+		}
 	}
 
 	return ret
@@ -230,6 +230,7 @@ func Test_toSyftFiles(t *testing.T) {
 			want: sbom.Artifacts{
 				FileMetadata: map[file.Coordinates]file.Metadata{},
 				FileDigests:  map[file.Coordinates][]file.Digest{},
+				Executables:  map[file.Coordinates]file.Executable{},
 			},
 		},
 		{
@@ -245,6 +246,7 @@ func Test_toSyftFiles(t *testing.T) {
 						Value: "123",
 					},
 				},
+				Executable: nil,
 			},
 		},
 		want: sbom.Artifacts{
@@ -257,6 +259,7 @@ func Test_toSyftFiles(t *testing.T) {
 					},
 				},
 			},
+			Executables: map[file.Coordinates]file.Executable{},
 		},
 	},
 	{
@@ -280,6 +283,20 @@ func Test_toSyftFiles(t *testing.T) {
 						Value: "123",
 					},
 				},
+				Executable: &file.Executable{
+					Format: file.ELF,
+					SecurityFeatures: &file.ELFSecurityFeatures{
+						SymbolTableStripped:           false,
+						StackCanary:                   boolRef(true),
+						NoExecutable:                  false,
+						RelocationReadOnly:            "partial",
+						PositionIndependentExecutable: false,
+						DynamicSharedObject:           false,
+						LlvmSafeStack:                 boolRef(false),
+						LlvmControlFlowIntegrity:      boolRef(true),
+						ClangFortifySource:            boolRef(true),
+					},
+				},
 			},
 		},
 		want: sbom.Artifacts{
@@ -306,6 +323,22 @@ func Test_toSyftFiles(t *testing.T) {
 					},
 				},
 			},
+			Executables: map[file.Coordinates]file.Executable{
+				coord: {
+					Format: file.ELF,
+					SecurityFeatures: &file.ELFSecurityFeatures{
+						SymbolTableStripped:           false,
+						StackCanary:                   boolRef(true),
+						NoExecutable:                  false,
+						RelocationReadOnly:            "partial",
+						PositionIndependentExecutable: false,
+						DynamicSharedObject:           false,
+						LlvmSafeStack:                 boolRef(false),
+						LlvmControlFlowIntegrity:      boolRef(true),
+						ClangFortifySource:            boolRef(true),
+					},
+				},
+			},
 		},
 	},
 }
@@ -318,7 +351,11 @@ func Test_toSyftFiles(t *testing.T) {
 	}
 }
 
-func Test_toSyfRelationship(t *testing.T) {
+func boolRef(b bool) *bool {
+	return &b
+}
+
+func Test_toSyftRelationship(t *testing.T) {
 	packageWithId := func(id string) *pkg.Package {
 		p := &pkg.Package{}
 		p.OverrideID(artifact.ID(id))
@@ -17,8 +17,8 @@ import (
 	"github.com/anchore/syft/internal/log"
 	"github.com/anchore/syft/syft/cpe"
 	"github.com/anchore/syft/syft/file"
+	"github.com/anchore/syft/syft/internal/unionreader"
 	"github.com/anchore/syft/syft/pkg"
-	"github.com/anchore/syft/syft/pkg/cataloger/internal/unionreader"
 )
 
 // Classifier is a generic package classifier that can be used to match a package definition
@@ -19,10 +19,10 @@ import (
 	"github.com/anchore/syft/internal"
 	"github.com/anchore/syft/syft/artifact"
 	"github.com/anchore/syft/syft/file"
+	"github.com/anchore/syft/syft/internal/unionreader"
 	"github.com/anchore/syft/syft/pkg"
 	"github.com/anchore/syft/syft/pkg/cataloger/generic"
 	"github.com/anchore/syft/syft/pkg/cataloger/golang/internal/xcoff"
-	"github.com/anchore/syft/syft/pkg/cataloger/internal/unionreader"
 )
 
 const goArch = "GOARCH"
@@ -9,7 +9,7 @@ import (
 	"github.com/kastenhq/goversion/version"
 
 	"github.com/anchore/syft/internal/log"
-	"github.com/anchore/syft/syft/pkg/cataloger/internal/unionreader"
+	"github.com/anchore/syft/syft/internal/unionreader"
 )
 
 type extendedBuildInfo struct {
@@ -20,8 +20,8 @@ import (
 	"github.com/anchore/syft/syft/artifact"
 	"github.com/anchore/syft/syft/cpe"
 	"github.com/anchore/syft/syft/file"
+	"github.com/anchore/syft/syft/internal/unionreader"
 	"github.com/anchore/syft/syft/pkg"
-	"github.com/anchore/syft/syft/pkg/cataloger/internal/unionreader"
 )
 
 type nativeImageCycloneDX struct {
@@ -13,8 +13,8 @@ import (
 	"github.com/stretchr/testify/assert"
 
 	"github.com/anchore/syft/syft/cpe"
+	"github.com/anchore/syft/syft/internal/unionreader"
 	"github.com/anchore/syft/syft/pkg"
-	"github.com/anchore/syft/syft/pkg/cataloger/internal/unionreader"
 )
 
 func TestParseNativeImage(t *testing.T) {
@@ -11,9 +11,9 @@ import (
 	"github.com/anchore/syft/internal/log"
 	"github.com/anchore/syft/syft/artifact"
 	"github.com/anchore/syft/syft/file"
+	"github.com/anchore/syft/syft/internal/unionreader"
 	"github.com/anchore/syft/syft/pkg"
 	"github.com/anchore/syft/syft/pkg/cataloger/generic"
-	"github.com/anchore/syft/syft/pkg/cataloger/internal/unionreader"
 )
 
 const linuxKernelMagicName = "Linux kernel"
@@ -8,9 +8,9 @@ import (
 
 	"github.com/anchore/syft/syft/artifact"
 	"github.com/anchore/syft/syft/file"
+	"github.com/anchore/syft/syft/internal/unionreader"
 	"github.com/anchore/syft/syft/pkg"
 	"github.com/anchore/syft/syft/pkg/cataloger/generic"
-	"github.com/anchore/syft/syft/pkg/cataloger/internal/unionreader"
 )
 
 const modinfoName = ".modinfo"
@@ -9,9 +9,9 @@ import (
 	"github.com/anchore/syft/internal/log"
 	"github.com/anchore/syft/syft/artifact"
 	"github.com/anchore/syft/syft/file"
+	"github.com/anchore/syft/syft/internal/unionreader"
 	"github.com/anchore/syft/syft/pkg"
 	"github.com/anchore/syft/syft/pkg/cataloger/generic"
-	"github.com/anchore/syft/syft/pkg/cataloger/internal/unionreader"
 )
 
 // Catalog identifies executables then attempts to read Rust dependency information from them
@@ -25,6 +25,7 @@ type Artifacts struct {
 	FileDigests       map[file.Coordinates][]file.Digest
 	FileContents      map[file.Coordinates]string
 	FileLicenses      map[file.Coordinates][]file.License
+	Executables       map[file.Coordinates]file.Executable
 	LinuxDistribution *linux.Release
 }
 
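As a closing illustration: once an SBOM has been produced or decoded, the new Executables map can be consumed alongside the other per-file artifacts. The helper below is a hypothetical consumer-side sketch, not part of this change; the package name, function name, and print format are assumptions.

package example

import (
	"fmt"

	"github.com/anchore/syft/syft/sbom"
)

// printELFSecurityFeatures walks the Executables map and reports a few of the
// detected protections for each cataloged binary.
func printELFSecurityFeatures(s sbom.SBOM) {
	for coords, exec := range s.Artifacts.Executables {
		if exec.SecurityFeatures == nil {
			// only ELF binaries carry security feature details in this change
			continue
		}
		fmt.Printf("%s: format=%s relro=%s pie=%t nx=%t\n",
			coords.RealPath,
			exec.Format,
			exec.SecurityFeatures.RelocationReadOnly,
			exec.SecurityFeatures.PositionIndependentExecutable,
			exec.SecurityFeatures.NoExecutable,
		)
	}
}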