Add secrets search capability (#367)

* add initial secrets cataloger

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>

* update ETUI elements with new catalogers (file metadata, digests, and secrets)

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>

* update secrets cataloger to read full contents into memory for searching

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>

* quick prototype of parallelized secret regex search

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>

* quick prototype with single aggregated regex

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>

* quick prototype for secret search line-by-line

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>

* quick prototype hybrid secrets search

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>

* add secrets cataloger with line strategy

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>

* adjust verbiage towards SearchResults instead of Secrets + add tests

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>

* update json schema with secrets cataloger results

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>

* address PR comments

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>

* update readme with secrets config options

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>

* ensure file catalogers call AllLocations once

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>
This commit is contained in:
Alex Goodman 2021-04-01 17:34:15 -04:00 committed by GitHub
parent 557ad8be49
commit 9ec09add67
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
53 changed files with 2549 additions and 72 deletions

View File

@ -123,6 +123,37 @@ file-metadata:
# SYFT_FILE_METADATA_DIGESTS env var # SYFT_FILE_METADATA_DIGESTS env var
digests: ["sha256"] digests: ["sha256"]
# cataloging secrets is exposed through the power-user subcommand
secrets:
cataloger:
# enable/disable cataloging of secrets
# SYFT_SECRETS_CATALOGER_ENABLED env var
enabled: true
# the search space to look for secrets (options: all-layers, squashed)
# SYFT_SECRETS_CATALOGER_SCOPE env var
scope: "all-layers"
# show extracted secret values in the final JSON report
# SYFT_SECRETS_REVEAL_VALUES env var
reveal-values: false
# skip searching a file entirely if it is above the given size (default = 10MB; unit = bytes)
# SYFT_SECRETS_SKIP_FILES_ABOVE_SIZE env var
skip-files-above-size: 10485760
# name-regex pairs to consider when searching files for secrets. Note: the regex must match single line patterns
# but may also have OPTIONAL multiline capture groups. Regexes with a named capture group of "value" will
# use the entire regex to match, but the secret value will be assumed to be entirely contained within the
# "value" named capture group.
# SYFT_SECRETS_ADDITIONAL_PATTERNS env var
additional-patterns: {}
# names to exclude from the secrets search, valid values are: "aws-access-key", "aws-secret-key", "pem-private-key",
# "docker-config-auth", and "generic-api-key". Note: this does not consider any names introduced in the
# "secrets.additional-patterns" config option.
# SYFT_SECRETS_EXCLUDE_PATTERN_NAMES env var
exclude-pattern-names: []
log: log:
# use structured logging # use structured logging
# same as SYFT_LOG_STRUCTURED env var # same as SYFT_LOG_STRUCTURED env var

View File

@ -2,6 +2,7 @@ package cmd
import ( import (
"fmt" "fmt"
"sync"
"github.com/anchore/syft/internal" "github.com/anchore/syft/internal"
@ -97,13 +98,20 @@ func powerUserExecWorker(userInput string) <-chan error {
ApplicationConfig: *appConfig, ApplicationConfig: *appConfig,
} }
wg := &sync.WaitGroup{}
for _, task := range tasks { for _, task := range tasks {
wg.Add(1)
go func(task powerUserTask) {
defer wg.Done()
if err = task(&analysisResults, src); err != nil { if err = task(&analysisResults, src); err != nil {
errs <- err errs <- err
return return
} }
}(task)
} }
wg.Wait()
bus.Publish(partybus.Event{ bus.Publish(partybus.Event{
Type: event.PresenterReady, Type: event.PresenterReady,
Value: poweruser.NewJSONPresenter(analysisResults), Value: poweruser.NewJSONPresenter(analysisResults),

View File

@ -18,7 +18,8 @@ func powerUserTasks() ([]powerUserTask, error) {
generators := []func() (powerUserTask, error){ generators := []func() (powerUserTask, error){
catalogPackagesTask, catalogPackagesTask,
catalogFileMetadataTask, catalogFileMetadataTask,
catalogFileDigestTask, catalogFileDigestsTask,
catalogSecretsTask,
} }
for _, generator := range generators { for _, generator := range generators {
@ -78,7 +79,7 @@ func catalogFileMetadataTask() (powerUserTask, error) {
return task, nil return task, nil
} }
func catalogFileDigestTask() (powerUserTask, error) { func catalogFileDigestsTask() (powerUserTask, error) {
if !appConfig.FileMetadata.Cataloger.Enabled { if !appConfig.FileMetadata.Cataloger.Enabled {
return nil, nil return nil, nil
} }
@ -123,3 +124,35 @@ func catalogFileDigestTask() (powerUserTask, error) {
return task, nil return task, nil
} }
// catalogSecretsTask builds the power-user task that searches file contents for
// secrets. It returns (nil, nil) when the secrets cataloger is disabled in config.
func catalogSecretsTask() (powerUserTask, error) {
	if !appConfig.Secrets.Cataloger.Enabled {
		return nil, nil
	}

	// merge the built-in patterns with any user-supplied additions/exclusions
	searchPatterns, err := file.GenerateSearchPatterns(file.DefaultSecretsPatterns, appConfig.Secrets.AdditionalPatterns, appConfig.Secrets.ExcludePatternNames)
	if err != nil {
		return nil, err
	}

	cataloger, err := file.NewSecretsCataloger(searchPatterns, appConfig.Secrets.RevealValues, appConfig.Secrets.SkipFilesAboveSize)
	if err != nil {
		return nil, err
	}

	// the returned task resolves files at the configured scope and records any
	// discovered secrets onto the JSON document config.
	return func(results *poweruser.JSONDocumentConfig, src source.Source) error {
		resolver, err := src.FileResolver(appConfig.Secrets.Cataloger.ScopeOpt)
		if err != nil {
			return err
		}

		found, err := cataloger.Catalog(resolver)
		if err != nil {
			return err
		}
		results.Secrets = found

		return nil
	}, nil
}

View File

@ -1,5 +1,7 @@
package config package config
import "github.com/spf13/viper"
type anchore struct { type anchore struct {
// upload options // upload options
Host string `yaml:"host" json:"host" mapstructure:"host"` // -H , hostname of the engine/enterprise instance to upload to (setting this value enables upload) Host string `yaml:"host" json:"host" mapstructure:"host"` // -H , hostname of the engine/enterprise instance to upload to (setting this value enables upload)
@ -11,3 +13,7 @@ type anchore struct {
Dockerfile string `yaml:"dockerfile" json:"dockerfile" mapstructure:"dockerfile"` // -d , dockerfile to attach for upload Dockerfile string `yaml:"dockerfile" json:"dockerfile" mapstructure:"dockerfile"` // -d , dockerfile to attach for upload
OverwriteExistingImage bool `yaml:"overwrite-existing-image" json:"overwrite-existing-image" mapstructure:"overwrite-existing-image"` // --overwrite-existing-image , if any of the SBOM components have already been uploaded this flag will ensure they are overwritten with the current upload OverwriteExistingImage bool `yaml:"overwrite-existing-image" json:"overwrite-existing-image" mapstructure:"overwrite-existing-image"` // --overwrite-existing-image , if any of the SBOM components have already been uploaded this flag will ensure they are overwritten with the current upload
} }
// loadDefaultValues registers the default values for the anchore config section
// on the given viper instance (invoked via Application.loadDefaultValues).
func (cfg anchore) loadDefaultValues(v *viper.Viper) {
	v.SetDefault("anchore.path", "")
}

View File

@ -4,10 +4,9 @@ import (
"errors" "errors"
"fmt" "fmt"
"path" "path"
"reflect"
"strings" "strings"
"github.com/anchore/syft/syft/source"
"github.com/adrg/xdg" "github.com/adrg/xdg"
"github.com/anchore/syft/internal" "github.com/anchore/syft/internal"
"github.com/mitchellh/go-homedir" "github.com/mitchellh/go-homedir"
@ -18,6 +17,14 @@ import (
var ErrApplicationConfigNotFound = fmt.Errorf("application config not found") var ErrApplicationConfigNotFound = fmt.Errorf("application config not found")
// defaultValueLoader is implemented by config sections that can register their
// own default values on the viper instance before the config is read.
// note: implementations use value receivers (see Application.loadDefaultValues).
type defaultValueLoader interface {
	loadDefaultValues(*viper.Viper)
}

// parser is implemented by config sections that need to inflate or validate
// simple config values after the raw config has been unmarshalled.
// note: implementations use pointer receivers (see Application.parseConfigValues).
type parser interface {
	parseConfigValues() error
}
// Application is the main syft application configuration. // Application is the main syft application configuration.
type Application struct { type Application struct {
ConfigPath string `yaml:",omitempty" json:"configPath"` // the location where the application config was read from (either from -c or discovered while loading) ConfigPath string `yaml:",omitempty" json:"configPath"` // the location where the application config was read from (either from -c or discovered while loading)
@ -30,34 +37,56 @@ type Application struct {
Anchore anchore `yaml:"anchore" json:"anchore" mapstructure:"anchore"` // options for interacting with Anchore Engine/Enterprise Anchore anchore `yaml:"anchore" json:"anchore" mapstructure:"anchore"` // options for interacting with Anchore Engine/Enterprise
Package Packages `yaml:"package" json:"package" mapstructure:"package"` Package Packages `yaml:"package" json:"package" mapstructure:"package"`
FileMetadata FileMetadata `yaml:"file-metadata" json:"file-metadata" mapstructure:"file-metadata"` FileMetadata FileMetadata `yaml:"file-metadata" json:"file-metadata" mapstructure:"file-metadata"`
Secrets Secrets `yaml:"secrets" json:"secrets" mapstructure:"secrets"`
}
func newApplicationConfig(v *viper.Viper, cliOpts CliOnlyOptions) *Application {
config := &Application{
CliOptions: cliOpts,
}
config.loadDefaultValues(v)
return config
} }
// LoadApplicationConfig populates the given viper object with application configuration discovered on disk // LoadApplicationConfig populates the given viper object with application configuration discovered on disk
func LoadApplicationConfig(v *viper.Viper, cliOpts CliOnlyOptions) (*Application, error) { func LoadApplicationConfig(v *viper.Viper, cliOpts CliOnlyOptions) (*Application, error) {
// the user may not have a config, and this is OK, we can use the default config + default cobra cli values instead // the user may not have a config, and this is OK, we can use the default config + default cobra cli values instead
setNonCliDefaultAppConfigValues(v) config := newApplicationConfig(v, cliOpts)
if err := readConfig(v, cliOpts.ConfigPath); err != nil && !errors.Is(err, ErrApplicationConfigNotFound) { if err := readConfig(v, cliOpts.ConfigPath); err != nil && !errors.Is(err, ErrApplicationConfigNotFound) {
return nil, err return nil, err
} }
config := &Application{
CliOptions: cliOpts,
}
if err := v.Unmarshal(config); err != nil { if err := v.Unmarshal(config); err != nil {
return nil, fmt.Errorf("unable to parse config: %w", err) return nil, fmt.Errorf("unable to parse config: %w", err)
} }
config.ConfigPath = v.ConfigFileUsed() config.ConfigPath = v.ConfigFileUsed()
if err := config.build(); err != nil { if err := config.parseConfigValues(); err != nil {
return nil, fmt.Errorf("invalid application config: %w", err) return nil, fmt.Errorf("invalid application config: %w", err)
} }
return config, nil return config, nil
} }
// loadDefaultValues registers every default configuration value on the viper
// instance before the config values are read and parsed.
func (cfg Application) loadDefaultValues(v *viper.Viper) {
	// defaults for primitive fields that live directly on this struct
	v.SetDefault("check-for-app-update", true)

	// walk the struct fields and delegate to any field that supplies its own
	// defaults (i.e. implements defaultValueLoader).
	// note: defaultValueLoader is satisfied by VALUE receivers, so the plain
	// field value (no Addr() indirection) is asserted here.
	structVal := reflect.ValueOf(cfg)
	for fieldIdx := 0; fieldIdx < structVal.NumField(); fieldIdx++ {
		if loader, ok := structVal.Field(fieldIdx).Interface().(defaultValueLoader); ok {
			loader.loadDefaultValues(v)
		}
	}
}
// build inflates simple config values into syft native objects (or other complex objects) after the config is fully read in. // build inflates simple config values into syft native objects (or other complex objects) after the config is fully read in.
func (cfg *Application) build() error { func (cfg *Application) parseConfigValues() error {
if cfg.Quiet { if cfg.Quiet {
// TODO: this is bad: quiet option trumps all other logging options // TODO: this is bad: quiet option trumps all other logging options
// we should be able to quiet the console logging and leave file logging alone... // we should be able to quiet the console logging and leave file logging alone...
@ -92,14 +121,18 @@ func (cfg *Application) build() error {
return fmt.Errorf("cannot provide dockerfile option without enabling upload") return fmt.Errorf("cannot provide dockerfile option without enabling upload")
} }
for _, builder := range []func() error{ // for each field in the configuration struct, see if the field implements the parser interface
cfg.Package.build, // note: the app config is a pointer, so we need to grab the elements explicitly (to traverse the address)
cfg.FileMetadata.build, value := reflect.ValueOf(cfg).Elem()
} { for i := 0; i < value.NumField(); i++ {
if err := builder(); err != nil { // note: since the interface method of parser is a pointer receiver we need to get the value of the field as a pointer.
if parsable, ok := value.Field(i).Addr().Interface().(parser); ok {
// the field implements parser, call it
if err := parsable.parseConfigValues(); err != nil {
return err return err
} }
} }
}
return nil return nil
} }
@ -181,16 +214,3 @@ func readConfig(v *viper.Viper, configPath string) error {
return ErrApplicationConfigNotFound return ErrApplicationConfigNotFound
} }
// setNonCliDefaultAppConfigValues ensures that there are sane defaults for values that do not have CLI equivalent options (where there would already be a default value)
func setNonCliDefaultAppConfigValues(v *viper.Viper) {
v.SetDefault("anchore.path", "")
v.SetDefault("log.structured", false)
v.SetDefault("check-for-app-update", true)
v.SetDefault("dev.profile-cpu", false)
v.SetDefault("dev.profile-mem", false)
v.SetDefault("package.cataloger.enabled", true)
v.SetDefault("file-metadata.cataloger.enabled", true)
v.SetDefault("file-metadata.cataloger.scope", source.SquashedScope)
v.SetDefault("file-metadata.digests", []string{"sha256"})
}

View File

@ -12,7 +12,7 @@ type catalogerOptions struct {
ScopeOpt source.Scope `yaml:"-" json:"-"` ScopeOpt source.Scope `yaml:"-" json:"-"`
} }
func (cfg *catalogerOptions) build() error { func (cfg *catalogerOptions) parseConfigValues() error {
scopeOption := source.ParseScope(cfg.Scope) scopeOption := source.ParseScope(cfg.Scope)
if scopeOption == source.UnknownScope { if scopeOption == source.UnknownScope {
return fmt.Errorf("bad scope value %q", cfg.Scope) return fmt.Errorf("bad scope value %q", cfg.Scope)

View File

@ -1,6 +1,13 @@
package config package config
import "github.com/spf13/viper"
type Development struct { type Development struct {
ProfileCPU bool `yaml:"profile-cpu" json:"profile-cpu" mapstructure:"profile-cpu"` ProfileCPU bool `yaml:"profile-cpu" json:"profile-cpu" mapstructure:"profile-cpu"`
ProfileMem bool `yaml:"profile-mem" json:"profile-mem" mapstructure:"profile-mem"` ProfileMem bool `yaml:"profile-mem" json:"profile-mem" mapstructure:"profile-mem"`
} }
// loadDefaultValues registers the default values for the dev config section
// on the given viper instance (invoked via Application.loadDefaultValues).
func (cfg Development) loadDefaultValues(v *viper.Viper) {
	v.SetDefault("dev.profile-cpu", false)
	v.SetDefault("dev.profile-mem", false)
}

View File

@ -1,10 +1,21 @@
package config package config
import (
"github.com/anchore/syft/syft/source"
"github.com/spf13/viper"
)
type FileMetadata struct { type FileMetadata struct {
Cataloger catalogerOptions `yaml:"cataloger" json:"cataloger" mapstructure:"cataloger"` Cataloger catalogerOptions `yaml:"cataloger" json:"cataloger" mapstructure:"cataloger"`
Digests []string `yaml:"digests" json:"digests" mapstructure:"digests"` Digests []string `yaml:"digests" json:"digests" mapstructure:"digests"`
} }
func (cfg *FileMetadata) build() error { func (cfg FileMetadata) loadDefaultValues(v *viper.Viper) {
return cfg.Cataloger.build() v.SetDefault("file-metadata.cataloger.enabled", true)
v.SetDefault("file-metadata.cataloger.scope", source.SquashedScope)
v.SetDefault("file-metadata.digests", []string{"sha256"})
}
func (cfg *FileMetadata) parseConfigValues() error {
return cfg.Cataloger.parseConfigValues()
} }

View File

@ -1,6 +1,9 @@
package config package config
import "github.com/sirupsen/logrus" import (
"github.com/sirupsen/logrus"
"github.com/spf13/viper"
)
// logging contains all logging-related configuration options available to the user via the application config. // logging contains all logging-related configuration options available to the user via the application config.
type logging struct { type logging struct {
@ -9,3 +12,7 @@ type logging struct {
Level string `yaml:"level" json:"level" mapstructure:"level"` // the log level string hint Level string `yaml:"level" json:"level" mapstructure:"level"` // the log level string hint
FileLocation string `yaml:"file" json:"file-location" mapstructure:"file"` // the file path to write logs to FileLocation string `yaml:"file" json:"file-location" mapstructure:"file"` // the file path to write logs to
} }
// loadDefaultValues registers the default values for the log config section
// on the given viper instance (invoked via Application.loadDefaultValues).
func (cfg logging) loadDefaultValues(v *viper.Viper) {
	v.SetDefault("log.structured", false)
}

View File

@ -1,9 +1,15 @@
package config package config
import "github.com/spf13/viper"
type Packages struct { type Packages struct {
Cataloger catalogerOptions `yaml:"cataloger" json:"cataloger" mapstructure:"cataloger"` Cataloger catalogerOptions `yaml:"cataloger" json:"cataloger" mapstructure:"cataloger"`
} }
func (cfg *Packages) build() error { func (cfg Packages) loadDefaultValues(v *viper.Viper) {
return cfg.Cataloger.build() v.SetDefault("package.cataloger.enabled", true)
}
func (cfg *Packages) parseConfigValues() error {
return cfg.Cataloger.parseConfigValues()
} }

View File

@ -0,0 +1,28 @@
package config
import (
"github.com/anchore/syft/internal/file"
"github.com/anchore/syft/syft/source"
"github.com/spf13/viper"
)
// Secrets holds the user configuration for the secrets cataloger (exposed via
// the power-user subcommand).
type Secrets struct {
	Cataloger           catalogerOptions  `yaml:"cataloger" json:"cataloger" mapstructure:"cataloger"`
	AdditionalPatterns  map[string]string `yaml:"additional-patterns" json:"additional-patterns" mapstructure:"additional-patterns"`     // user-supplied name->regex pairs searched in addition to the defaults
	ExcludePatternNames []string          `yaml:"exclude-pattern-names" json:"exclude-pattern-names" mapstructure:"exclude-pattern-names"` // names of default patterns to skip
	RevealValues        bool              `yaml:"reveal-values" json:"reveal-values" mapstructure:"reveal-values"`                       // include the matched secret value in the final report
	SkipFilesAboveSize  int64             `yaml:"skip-files-above-size" json:"skip-files-above-size" mapstructure:"skip-files-above-size"` // in bytes; files larger than this are not searched
}
// loadDefaultValues registers the default values for the secrets config section
// on the given viper instance (invoked via Application.loadDefaultValues).
func (cfg Secrets) loadDefaultValues(v *viper.Viper) {
	v.SetDefault("secrets.cataloger.enabled", true)
	// note: defaults to all-layers (unlike the other catalogers, which default
	// to squashed) — presumably so secrets hidden by later layers are still
	// found; confirm against the README/cataloger docs.
	v.SetDefault("secrets.cataloger.scope", source.AllLayersScope)
	v.SetDefault("secrets.reveal-values", false)
	v.SetDefault("secrets.skip-files-above-size", 1*file.MB)
	v.SetDefault("secrets.additional-patterns", map[string]string{})
	v.SetDefault("secrets.exclude-pattern-names", []string{})
}

// parseConfigValues inflates simple config values (e.g. the scope string) after
// the raw config has been unmarshalled.
func (cfg *Secrets) parseConfigValues() error {
	return cfg.Cataloger.parseConfigValues()
}

View File

@ -6,5 +6,5 @@ const (
// JSONSchemaVersion is the current schema version output by the JSON presenter // JSONSchemaVersion is the current schema version output by the JSON presenter
// This is roughly following the "SchemaVer" guidelines for versioning the JSON schema. Please see schema/json/README.md for details on how to increment. // This is roughly following the "SchemaVer" guidelines for versioning the JSON schema. Please see schema/json/README.md for details on how to increment.
JSONSchemaVersion = "1.0.4" JSONSchemaVersion = "1.0.5"
) )

View File

@ -75,7 +75,7 @@
"version": "[not provided]" "version": "[not provided]"
}, },
"schema": { "schema": {
"version": "1.0.4", "version": "1.0.5",
"url": "https://raw.githubusercontent.com/anchore/syft/main/schema/json/schema-1.0.4.json" "url": "https://raw.githubusercontent.com/anchore/syft/main/schema/json/schema-1.0.5.json"
} }
} }

View File

@ -9,7 +9,7 @@
"locations": [ "locations": [
{ {
"path": "/somefile-1.txt", "path": "/somefile-1.txt",
"layerID": "sha256:3de16c5b8659a2e8d888b8ded8427be7a5686a3c8c4e4dd30de20f362827285b" "layerID": "sha256:6c376352c0537f4483e4033e332d7a4ab9433db68c54c297a834d36719aeb6c9"
} }
], ],
"licenses": [ "licenses": [
@ -40,7 +40,7 @@
"locations": [ "locations": [
{ {
"path": "/somefile-2.txt", "path": "/somefile-2.txt",
"layerID": "sha256:366a3f5653e34673b875891b021647440d0127c2ef041e3b1a22da2a7d4f3703" "layerID": "sha256:fc8218a8142ee4952bb8d9b96b3e9838322e9e6eae6477136bcad8fd768949b7"
} }
], ],
"licenses": [], "licenses": [],
@ -67,7 +67,7 @@
"type": "image", "type": "image",
"target": { "target": {
"userInput": "user-image-input", "userInput": "user-image-input",
"imageID": "sha256:c2b46b4eb06296933b7cf0722683964e9ecbd93265b9ef6ae9642e3952afbba0", "imageID": "sha256:1f9cb9dc477f7482856f88ed40c38e260db0526d7a0dad5a0be566bfedde929b",
"manifestDigest": "sha256:2731251dc34951c0e50fcc643b4c5f74922dad1a5d98f302b504cf46cd5d9368", "manifestDigest": "sha256:2731251dc34951c0e50fcc643b4c5f74922dad1a5d98f302b504cf46cd5d9368",
"mediaType": "application/vnd.docker.distribution.manifest.v2+json", "mediaType": "application/vnd.docker.distribution.manifest.v2+json",
"tags": [ "tags": [
@ -77,17 +77,17 @@
"layers": [ "layers": [
{ {
"mediaType": "application/vnd.docker.image.rootfs.diff.tar.gzip", "mediaType": "application/vnd.docker.image.rootfs.diff.tar.gzip",
"digest": "sha256:3de16c5b8659a2e8d888b8ded8427be7a5686a3c8c4e4dd30de20f362827285b", "digest": "sha256:6c376352c0537f4483e4033e332d7a4ab9433db68c54c297a834d36719aeb6c9",
"size": 22 "size": 22
}, },
{ {
"mediaType": "application/vnd.docker.image.rootfs.diff.tar.gzip", "mediaType": "application/vnd.docker.image.rootfs.diff.tar.gzip",
"digest": "sha256:366a3f5653e34673b875891b021647440d0127c2ef041e3b1a22da2a7d4f3703", "digest": "sha256:fc8218a8142ee4952bb8d9b96b3e9838322e9e6eae6477136bcad8fd768949b7",
"size": 16 "size": 16
} }
], ],
"manifest": "eyJzY2hlbWFWZXJzaW9uIjoyLCJtZWRpYVR5cGUiOiJhcHBsaWNhdGlvbi92bmQuZG9ja2VyLmRpc3RyaWJ1dGlvbi5tYW5pZmVzdC52Mitqc29uIiwiY29uZmlnIjp7Im1lZGlhVHlwZSI6ImFwcGxpY2F0aW9uL3ZuZC5kb2NrZXIuY29udGFpbmVyLmltYWdlLnYxK2pzb24iLCJzaXplIjoxNTg2LCJkaWdlc3QiOiJzaGEyNTY6YzJiNDZiNGViMDYyOTY5MzNiN2NmMDcyMjY4Mzk2NGU5ZWNiZDkzMjY1YjllZjZhZTk2NDJlMzk1MmFmYmJhMCJ9LCJsYXllcnMiOlt7Im1lZGlhVHlwZSI6ImFwcGxpY2F0aW9uL3ZuZC5kb2NrZXIuaW1hZ2Uucm9vdGZzLmRpZmYudGFyLmd6aXAiLCJzaXplIjoyMDQ4LCJkaWdlc3QiOiJzaGEyNTY6M2RlMTZjNWI4NjU5YTJlOGQ4ODhiOGRlZDg0MjdiZTdhNTY4NmEzYzhjNGU0ZGQzMGRlMjBmMzYyODI3Mjg1YiJ9LHsibWVkaWFUeXBlIjoiYXBwbGljYXRpb24vdm5kLmRvY2tlci5pbWFnZS5yb290ZnMuZGlmZi50YXIuZ3ppcCIsInNpemUiOjIwNDgsImRpZ2VzdCI6InNoYTI1NjozNjZhM2Y1NjUzZTM0NjczYjg3NTg5MWIwMjE2NDc0NDBkMDEyN2MyZWYwNDFlM2IxYTIyZGEyYTdkNGYzNzAzIn1dfQ==", "manifest": "eyJzY2hlbWFWZXJzaW9uIjoyLCJtZWRpYVR5cGUiOiJhcHBsaWNhdGlvbi92bmQuZG9ja2VyLmRpc3RyaWJ1dGlvbi5tYW5pZmVzdC52Mitqc29uIiwiY29uZmlnIjp7Im1lZGlhVHlwZSI6ImFwcGxpY2F0aW9uL3ZuZC5kb2NrZXIuY29udGFpbmVyLmltYWdlLnYxK2pzb24iLCJzaXplIjoxNTg2LCJkaWdlc3QiOiJzaGEyNTY6MWY5Y2I5ZGM0NzdmNzQ4Mjg1NmY4OGVkNDBjMzhlMjYwZGIwNTI2ZDdhMGRhZDVhMGJlNTY2YmZlZGRlOTI5YiJ9LCJsYXllcnMiOlt7Im1lZGlhVHlwZSI6ImFwcGxpY2F0aW9uL3ZuZC5kb2NrZXIuaW1hZ2Uucm9vdGZzLmRpZmYudGFyLmd6aXAiLCJzaXplIjoyMDQ4LCJkaWdlc3QiOiJzaGEyNTY6NmMzNzYzNTJjMDUzN2Y0NDgzZTQwMzNlMzMyZDdhNGFiOTQzM2RiNjhjNTRjMjk3YTgzNGQzNjcxOWFlYjZjOSJ9LHsibWVkaWFUeXBlIjoiYXBwbGljYXRpb24vdm5kLmRvY2tlci5pbWFnZS5yb290ZnMuZGlmZi50YXIuZ3ppcCIsInNpemUiOjIwNDgsImRpZ2VzdCI6InNoYTI1NjpmYzgyMThhODE0MmVlNDk1MmJiOGQ5Yjk2YjNlOTgzODMyMmU5ZTZlYWU2NDc3MTM2YmNhZDhmZDc2ODk0OWI3In1dfQ==",
"config": "eyJhcmNoaXRlY3R1cmUiOiJhbWQ2NCIsImNvbmZpZyI6eyJIb3N0bmFtZSI6IiIsIkRvbWFpbm5hbWUiOiIiLCJVc2VyIjoiIiwiQXR0YWNoU3RkaW4iOmZhbHNlLCJBdHRhY2hTdGRvdXQiOmZhbHNlLCJBdHRhY2hTdGRlcnIiOmZhbHNlLCJUdHkiOmZhbHNlLCJPcGVuU3RkaW4iOmZhbHNlLCJTdGRpbk9uY2UiOmZhbHNlLCJFbnYiOlsiUEFUSD0vdXNyL2xvY2FsL3NiaW46L3Vzci9sb2NhbC9iaW46L3Vzci9zYmluOi91c3IvYmluOi9zYmluOi9iaW4iXSwiQ21kIjpudWxsLCJJbWFnZSI6InNoYTI1NjpkYWMyMTUwMzhjMDUwZTM1NzMwNTVlZmU4YTkwM2NkMWY5YmJkZmU0ZjlhZTlkODk5OTFjNTljY2M2OTA1MmU1IiwiVm9sdW1lcyI6bnVsbCwiV29ya2luZ0RpciI6IiIsIkVudHJ5cG9pbnQiOm51bGwsIk9uQnVpbGQiOm51bGwsIkxhYmVscyI6bnVsbH0sImNvbnRhaW5lcl9jb25maWciOnsiSG9zdG5hbWUiOiIiLCJEb21haW5uYW1lIjoiIiwiVXNlciI6IiIsIkF0dGFjaFN0ZGluIjpmYWxzZSwiQXR0YWNoU3Rkb3V0IjpmYWxzZSwiQXR0YWNoU3RkZXJyIjpmYWxzZSwiVHR5IjpmYWxzZSwiT3BlblN0ZGluIjpmYWxzZSwiU3RkaW5PbmNlIjpmYWxzZSwiRW52IjpbIlBBVEg9L3Vzci9sb2NhbC9zYmluOi91c3IvbG9jYWwvYmluOi91c3Ivc2JpbjovdXNyL2Jpbjovc2JpbjovYmluIl0sIkNtZCI6WyIvYmluL3NoIiwiLWMiLCIjKG5vcCkgQUREIGZpbGU6ZGYzYjc0NGY1NGE5YjE2YjliOWFlZDQwZTNlOThkOWNhMmI0OWY1YTc3ZDlmYThhOTc2OTBkN2JhZjU4ODgyMCBpbiAvc29tZWZpbGUtMi50eHQgIl0sIkltYWdlIjoic2hhMjU2OmRhYzIxNTAzOGMwNTBlMzU3MzA1NWVmZThhOTAzY2QxZjliYmRmZTRmOWFlOWQ4OTk5MWM1OWNjYzY5MDUyZTUiLCJWb2x1bWVzIjpudWxsLCJXb3JraW5nRGlyIjoiIiwiRW50cnlwb2ludCI6bnVsbCwiT25CdWlsZCI6bnVsbCwiTGFiZWxzIjpudWxsfSwiY3JlYXRlZCI6IjIwMjEtMDMtMjNUMTg6MTU6NTguODcyMjg5OFoiLCJkb2NrZXJfdmVyc2lvbiI6IjIwLjEwLjIiLCJoaXN0b3J5IjpbeyJjcmVhdGVkIjoiMjAyMS0wMy0yM1QxODoxNTo1OC42MTc3OTU2WiIsImNyZWF0ZWRfYnkiOiIvYmluL3NoIC1jICMobm9wKSBBREQgZmlsZTphYzMyZGEyM2Q1MWU4MDFmMDJmOTI0MTIzZWQzMDk5MGViM2YwZmVjMWI5ZWQ0ZjBiMDZjMjRlODhiOWMzNjk1IGluIC9zb21lZmlsZS0xLnR4dCAifSx7ImNyZWF0ZWQiOiIyMDIxLTAzLTIzVDE4OjE1OjU4Ljg3MjI4OThaIiwiY3JlYXRlZF9ieSI6Ii9iaW4vc2ggLWMgIyhub3ApIEFERCBmaWxlOmRmM2I3NDRmNTRhOWIxNmI5YjlhZWQ0MGUzZTk4ZDljYTJiNDlmNWE3N2Q5ZmE4YTk3NjkwZDdiYWY1ODg4MjAgaW4gL3NvbWVmaWxlLTIudHh0ICJ9XSwib3MiOiJsaW51eCIsInJvb3RmcyI6eyJ0eXBlIjoibGF5ZXJzIiwiZGlmZl9pZHMiOlsic2hhMjU2OjNkZTE2YzViODY1OWEyZThkODg4YjhkZWQ4NDI3YmU3YTU2ODZhM2M4YzRlNGRkM
zBkZTIwZjM2MjgyNzI4NWIiLCJzaGEyNTY6MzY2YTNmNTY1M2UzNDY3M2I4NzU4OTFiMDIxNjQ3NDQwZDAxMjdjMmVmMDQxZTNiMWEyMmRhMmE3ZDRmMzcwMyJdfX0=", "config": "eyJhcmNoaXRlY3R1cmUiOiJhbWQ2NCIsImNvbmZpZyI6eyJIb3N0bmFtZSI6IiIsIkRvbWFpbm5hbWUiOiIiLCJVc2VyIjoiIiwiQXR0YWNoU3RkaW4iOmZhbHNlLCJBdHRhY2hTdGRvdXQiOmZhbHNlLCJBdHRhY2hTdGRlcnIiOmZhbHNlLCJUdHkiOmZhbHNlLCJPcGVuU3RkaW4iOmZhbHNlLCJTdGRpbk9uY2UiOmZhbHNlLCJFbnYiOlsiUEFUSD0vdXNyL2xvY2FsL3NiaW46L3Vzci9sb2NhbC9iaW46L3Vzci9zYmluOi91c3IvYmluOi9zYmluOi9iaW4iXSwiQ21kIjpudWxsLCJJbWFnZSI6InNoYTI1NjoyOWQ1YjFjOTkyNjg0MzgwYjQ3NTEyMjliMmNjN2E4MzdkOTBmOWQ1OTJhYmIxZjAyZGYzZGRkMGQ3OWFjMDkxIiwiVm9sdW1lcyI6bnVsbCwiV29ya2luZ0RpciI6IiIsIkVudHJ5cG9pbnQiOm51bGwsIk9uQnVpbGQiOm51bGwsIkxhYmVscyI6bnVsbH0sImNvbnRhaW5lcl9jb25maWciOnsiSG9zdG5hbWUiOiIiLCJEb21haW5uYW1lIjoiIiwiVXNlciI6IiIsIkF0dGFjaFN0ZGluIjpmYWxzZSwiQXR0YWNoU3Rkb3V0IjpmYWxzZSwiQXR0YWNoU3RkZXJyIjpmYWxzZSwiVHR5IjpmYWxzZSwiT3BlblN0ZGluIjpmYWxzZSwiU3RkaW5PbmNlIjpmYWxzZSwiRW52IjpbIlBBVEg9L3Vzci9sb2NhbC9zYmluOi91c3IvbG9jYWwvYmluOi91c3Ivc2JpbjovdXNyL2Jpbjovc2JpbjovYmluIl0sIkNtZCI6WyIvYmluL3NoIiwiLWMiLCIjKG5vcCkgQUREIGZpbGU6ZGYzYjc0NGY1NGE5YjE2YjliOWFlZDQwZTNlOThkOWNhMmI0OWY1YTc3ZDlmYThhOTc2OTBkN2JhZjU4ODgyMCBpbiAvc29tZWZpbGUtMi50eHQgIl0sIkltYWdlIjoic2hhMjU2OjI5ZDViMWM5OTI2ODQzODBiNDc1MTIyOWIyY2M3YTgzN2Q5MGY5ZDU5MmFiYjFmMDJkZjNkZGQwZDc5YWMwOTEiLCJWb2x1bWVzIjpudWxsLCJXb3JraW5nRGlyIjoiIiwiRW50cnlwb2ludCI6bnVsbCwiT25CdWlsZCI6bnVsbCwiTGFiZWxzIjpudWxsfSwiY3JlYXRlZCI6IjIwMjEtMDQtMDFUMTI6NDg6MzIuMjYzNjAzMVoiLCJkb2NrZXJfdmVyc2lvbiI6IjIwLjEwLjIiLCJoaXN0b3J5IjpbeyJjcmVhdGVkIjoiMjAyMS0wNC0wMVQxMjo0ODozMi4wODY3MTY2WiIsImNyZWF0ZWRfYnkiOiIvYmluL3NoIC1jICMobm9wKSBBREQgZmlsZTphYzMyZGEyM2Q1MWU4MDFmMDJmOTI0MTIzZWQzMDk5MGViM2YwZmVjMWI5ZWQ0ZjBiMDZjMjRlODhiOWMzNjk1IGluIC9zb21lZmlsZS0xLnR4dCAifSx7ImNyZWF0ZWQiOiIyMDIxLTA0LTAxVDEyOjQ4OjMyLjI2MzYwMzFaIiwiY3JlYXRlZF9ieSI6Ii9iaW4vc2ggLWMgIyhub3ApIEFERCBmaWxlOmRmM2I3NDRmNTRhOWIxNmI5YjlhZWQ0MGUzZTk4ZDljYTJiNDlmNWE3N2Q5ZmE4YTk3NjkwZDdiYWY1ODg4MjAgaW4gL3NvbWVmaWxlLTIudHh0ICJ9XSwib3MiOiJsaW5
1eCIsInJvb3RmcyI6eyJ0eXBlIjoibGF5ZXJzIiwiZGlmZl9pZHMiOlsic2hhMjU2OjZjMzc2MzUyYzA1MzdmNDQ4M2U0MDMzZTMzMmQ3YTRhYjk0MzNkYjY4YzU0YzI5N2E4MzRkMzY3MTlhZWI2YzkiLCJzaGEyNTY6ZmM4MjE4YTgxNDJlZTQ5NTJiYjhkOWI5NmIzZTk4MzgzMjJlOWU2ZWFlNjQ3NzEzNmJjYWQ4ZmQ3Njg5NDliNyJdfX0=",
"repoDigests": [], "repoDigests": [],
"scope": "Squashed" "scope": "Squashed"
} }
@ -102,7 +102,7 @@
"version": "[not provided]" "version": "[not provided]"
}, },
"schema": { "schema": {
"version": "1.0.4", "version": "1.0.5",
"url": "https://raw.githubusercontent.com/anchore/syft/main/schema/json/schema-1.0.4.json" "url": "https://raw.githubusercontent.com/anchore/syft/main/schema/json/schema-1.0.5.json"
} }
} }

View File

@ -9,7 +9,8 @@ type JSONDocument struct {
// here should be optional by supplying "omitempty" on these fields hint to the jsonschema generator to not // here should be optional by supplying "omitempty" on these fields hint to the jsonschema generator to not
// require these fields. As an accepted rule in this repo all collections should still be initialized in the // require these fields. As an accepted rule in this repo all collections should still be initialized in the
// context of being used in a JSON document. // context of being used in a JSON document.
FileMetadata []JSONFileMetadata `json:"fileMetadata,omitempty"` FileMetadata []JSONFileMetadata `json:"fileMetadata,omitempty"` // note: must have omitempty
Secrets []JSONSecrets `json:"secrets,omitempty"` // note: must have omitempty
packages.JSONDocument packages.JSONDocument
} }
@ -27,6 +28,7 @@ func NewJSONDocument(config JSONDocumentConfig) (JSONDocument, error) {
return JSONDocument{ return JSONDocument{
FileMetadata: fileMetadata, FileMetadata: fileMetadata,
Secrets: NewJSONSecrets(config.Secrets),
JSONDocument: pkgsDoc, JSONDocument: pkgsDoc,
}, nil }, nil
} }

View File

@ -13,6 +13,7 @@ type JSONDocumentConfig struct {
PackageCatalog *pkg.Catalog PackageCatalog *pkg.Catalog
FileMetadata map[source.Location]source.FileMetadata FileMetadata map[source.Location]source.FileMetadata
FileDigests map[source.Location][]file.Digest FileDigests map[source.Location][]file.Digest
Secrets map[source.Location][]file.SearchResult
Distro *distro.Distro Distro *distro.Distro
SourceMetadata source.Metadata SourceMetadata source.Metadata
} }

View File

@ -0,0 +1,32 @@
package poweruser
import (
"sort"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/source"
)
// JSONSecrets pairs a single file location with the secret search results
// discovered within it.
type JSONSecrets struct {
	Location source.Location     `json:"location"`
	Secrets  []file.SearchResult `json:"secrets"`
}
// NewJSONSecrets transforms the raw secrets-cataloger results into a
// JSON-friendly slice of per-location entries with a deterministic order.
//
// Bug fix: the previous comparator compared VirtualPath when the RealPaths
// DIFFERED and never compared RealPath at all, so the output was neither
// "sorted by real path then virtual path" (as the comment claimed) nor stable
// across runs, since map iteration order is random.
func NewJSONSecrets(data map[source.Location][]file.SearchResult) []JSONSecrets {
	results := make([]JSONSecrets, 0, len(data))
	for location, secrets := range data {
		results = append(results, JSONSecrets{
			Location: location,
			Secrets:  secrets,
		})
	}

	// sort by real path then virtual path to ensure the result is stable across multiple runs
	sort.SliceStable(results, func(i, j int) bool {
		if results[i].Location.RealPath == results[j].Location.RealPath {
			return results[i].Location.VirtualPath < results[j].Location.VirtualPath
		}
		return results[i].Location.RealPath < results[j].Location.RealPath
	})
	return results
}

View File

@ -191,11 +191,21 @@
"digests": [ "digests": [
"sha256" "sha256"
] ]
},
"secrets": {
"cataloger": {
"enabled": false,
"scope": ""
},
"additional-patterns": null,
"exclude-pattern-names": null,
"reveal-values": false,
"skip-files-above-size": 0
} }
} }
}, },
"schema": { "schema": {
"version": "1.0.4", "version": "1.0.5",
"url": "https://raw.githubusercontent.com/anchore/syft/main/schema/json/schema-1.0.4.json" "url": "https://raw.githubusercontent.com/anchore/syft/main/schema/json/schema-1.0.5.json"
} }
} }

View File

@ -0,0 +1,890 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"$ref": "#/definitions/Document",
"definitions": {
"ApkFileRecord": {
"required": [
"path"
],
"properties": {
"path": {
"type": "string"
},
"ownerUid": {
"type": "string"
},
"ownerGid": {
"type": "string"
},
"permissions": {
"type": "string"
},
"checksum": {
"type": "string"
}
},
"additionalProperties": true,
"type": "object"
},
"ApkMetadata": {
"required": [
"package",
"originPackage",
"maintainer",
"version",
"license",
"architecture",
"url",
"description",
"size",
"installedSize",
"pullDependencies",
"pullChecksum",
"gitCommitOfApkPort",
"files"
],
"properties": {
"package": {
"type": "string"
},
"originPackage": {
"type": "string"
},
"maintainer": {
"type": "string"
},
"version": {
"type": "string"
},
"license": {
"type": "string"
},
"architecture": {
"type": "string"
},
"url": {
"type": "string"
},
"description": {
"type": "string"
},
"size": {
"type": "integer"
},
"installedSize": {
"type": "integer"
},
"pullDependencies": {
"type": "string"
},
"pullChecksum": {
"type": "string"
},
"gitCommitOfApkPort": {
"type": "string"
},
"files": {
"items": {
"$schema": "http://json-schema.org/draft-04/schema#",
"$ref": "#/definitions/ApkFileRecord"
},
"type": "array"
}
},
"additionalProperties": true,
"type": "object"
},
"CargoPackageMetadata": {
"required": [
"name",
"version",
"source",
"checksum",
"dependencies"
],
"properties": {
"name": {
"type": "string"
},
"version": {
"type": "string"
},
"source": {
"type": "string"
},
"checksum": {
"type": "string"
},
"dependencies": {
"items": {
"type": "string"
},
"type": "array"
}
},
"additionalProperties": true,
"type": "object"
},
"Descriptor": {
"required": [
"name",
"version"
],
"properties": {
"name": {
"type": "string"
},
"version": {
"type": "string"
},
"configuration": {
"additionalProperties": true
}
},
"additionalProperties": true,
"type": "object"
},
"Digest": {
"required": [
"algorithm",
"value"
],
"properties": {
"algorithm": {
"type": "string"
},
"value": {
"type": "string"
}
},
"additionalProperties": true,
"type": "object"
},
"Distribution": {
"required": [
"name",
"version",
"idLike"
],
"properties": {
"name": {
"type": "string"
},
"version": {
"type": "string"
},
"idLike": {
"type": "string"
}
},
"additionalProperties": true,
"type": "object"
},
"Document": {
"required": [
"artifacts",
"artifactRelationships",
"source",
"distro",
"descriptor",
"schema"
],
"properties": {
"fileMetadata": {
"items": {
"$schema": "http://json-schema.org/draft-04/schema#",
"$ref": "#/definitions/FileMetadata"
},
"type": "array"
},
"secrets": {
"items": {
"$schema": "http://json-schema.org/draft-04/schema#",
"$ref": "#/definitions/Secrets"
},
"type": "array"
},
"artifacts": {
"items": {
"$schema": "http://json-schema.org/draft-04/schema#",
"$ref": "#/definitions/Package"
},
"type": "array"
},
"artifactRelationships": {
"items": {
"$schema": "http://json-schema.org/draft-04/schema#",
"$ref": "#/definitions/Relationship"
},
"type": "array"
},
"source": {
"$schema": "http://json-schema.org/draft-04/schema#",
"$ref": "#/definitions/Source"
},
"distro": {
"$schema": "http://json-schema.org/draft-04/schema#",
"$ref": "#/definitions/Distribution"
},
"descriptor": {
"$schema": "http://json-schema.org/draft-04/schema#",
"$ref": "#/definitions/Descriptor"
},
"schema": {
"$schema": "http://json-schema.org/draft-04/schema#",
"$ref": "#/definitions/Schema"
},
"artifacts.metadata": {
"anyOf": [
{
"type": "null"
},
{
"$ref": "#/definitions/ApkMetadata"
},
{
"$ref": "#/definitions/CargoPackageMetadata"
},
{
"$ref": "#/definitions/DpkgMetadata"
},
{
"$ref": "#/definitions/GemMetadata"
},
{
"$ref": "#/definitions/JavaMetadata"
},
{
"$ref": "#/definitions/NpmPackageJSONMetadata"
},
{
"$ref": "#/definitions/PythonPackageMetadata"
},
{
"$ref": "#/definitions/RpmdbMetadata"
}
]
}
},
"additionalProperties": true,
"type": "object"
},
"DpkgFileRecord": {
"required": [
"path",
"md5"
],
"properties": {
"path": {
"type": "string"
},
"md5": {
"type": "string"
}
},
"additionalProperties": true,
"type": "object"
},
"DpkgMetadata": {
"required": [
"package",
"source",
"version",
"sourceVersion",
"architecture",
"maintainer",
"installedSize",
"files"
],
"properties": {
"package": {
"type": "string"
},
"source": {
"type": "string"
},
"version": {
"type": "string"
},
"sourceVersion": {
"type": "string"
},
"architecture": {
"type": "string"
},
"maintainer": {
"type": "string"
},
"installedSize": {
"type": "integer"
},
"files": {
"items": {
"$schema": "http://json-schema.org/draft-04/schema#",
"$ref": "#/definitions/DpkgFileRecord"
},
"type": "array"
}
},
"additionalProperties": true,
"type": "object"
},
"FileMetadata": {
"required": [
"location",
"metadata"
],
"properties": {
"location": {
"$schema": "http://json-schema.org/draft-04/schema#",
"$ref": "#/definitions/Location"
},
"metadata": {
"$schema": "http://json-schema.org/draft-04/schema#",
"$ref": "#/definitions/FileMetadataEntry"
}
},
"additionalProperties": true,
"type": "object"
},
"FileMetadataEntry": {
"required": [
"mode",
"type",
"userID",
"groupID"
],
"properties": {
"mode": {
"type": "integer"
},
"type": {
"type": "string"
},
"linkDestination": {
"type": "string"
},
"userID": {
"type": "integer"
},
"groupID": {
"type": "integer"
},
"digests": {
"items": {
"$schema": "http://json-schema.org/draft-04/schema#",
"$ref": "#/definitions/Digest"
},
"type": "array"
}
},
"additionalProperties": true,
"type": "object"
},
"GemMetadata": {
"required": [
"name",
"version"
],
"properties": {
"name": {
"type": "string"
},
"version": {
"type": "string"
},
"files": {
"items": {
"type": "string"
},
"type": "array"
},
"authors": {
"items": {
"type": "string"
},
"type": "array"
},
"licenses": {
"items": {
"type": "string"
},
"type": "array"
},
"homepage": {
"type": "string"
}
},
"additionalProperties": true,
"type": "object"
},
"JavaManifest": {
"properties": {
"main": {
"patternProperties": {
".*": {
"type": "string"
}
},
"type": "object"
},
"namedSections": {
"patternProperties": {
".*": {
"patternProperties": {
".*": {
"type": "string"
}
},
"type": "object"
}
},
"type": "object"
}
},
"additionalProperties": true,
"type": "object"
},
"JavaMetadata": {
"required": [
"virtualPath"
],
"properties": {
"virtualPath": {
"type": "string"
},
"manifest": {
"$schema": "http://json-schema.org/draft-04/schema#",
"$ref": "#/definitions/JavaManifest"
},
"pomProperties": {
"$schema": "http://json-schema.org/draft-04/schema#",
"$ref": "#/definitions/PomProperties"
}
},
"additionalProperties": true,
"type": "object"
},
"Location": {
"required": [
"path"
],
"properties": {
"path": {
"type": "string"
},
"layerID": {
"type": "string"
}
},
"additionalProperties": true,
"type": "object"
},
"NpmPackageJSONMetadata": {
"required": [
"author",
"licenses",
"homepage",
"description",
"url"
],
"properties": {
"files": {
"items": {
"type": "string"
},
"type": "array"
},
"author": {
"type": "string"
},
"licenses": {
"items": {
"type": "string"
},
"type": "array"
},
"homepage": {
"type": "string"
},
"description": {
"type": "string"
},
"url": {
"type": "string"
}
},
"additionalProperties": true,
"type": "object"
},
"Package": {
"required": [
"id",
"name",
"version",
"type",
"foundBy",
"locations",
"licenses",
"language",
"cpes",
"purl",
"metadataType",
"metadata"
],
"properties": {
"id": {
"type": "string"
},
"name": {
"type": "string"
},
"version": {
"type": "string"
},
"type": {
"type": "string"
},
"foundBy": {
"type": "string"
},
"locations": {
"items": {
"$ref": "#/definitions/Location"
},
"type": "array"
},
"licenses": {
"items": {
"type": "string"
},
"type": "array"
},
"language": {
"type": "string"
},
"cpes": {
"items": {
"type": "string"
},
"type": "array"
},
"purl": {
"type": "string"
},
"metadataType": {
"type": "string"
},
"metadata": {
"additionalProperties": true
}
},
"additionalProperties": true,
"type": "object"
},
"PomProperties": {
"required": [
"path",
"name",
"groupId",
"artifactId",
"version",
"extraFields"
],
"properties": {
"path": {
"type": "string"
},
"name": {
"type": "string"
},
"groupId": {
"type": "string"
},
"artifactId": {
"type": "string"
},
"version": {
"type": "string"
},
"extraFields": {
"patternProperties": {
".*": {
"type": "string"
}
},
"type": "object"
}
},
"additionalProperties": true,
"type": "object"
},
"PythonFileDigest": {
"required": [
"algorithm",
"value"
],
"properties": {
"algorithm": {
"type": "string"
},
"value": {
"type": "string"
}
},
"additionalProperties": true,
"type": "object"
},
"PythonFileRecord": {
"required": [
"path"
],
"properties": {
"path": {
"type": "string"
},
"digest": {
"$schema": "http://json-schema.org/draft-04/schema#",
"$ref": "#/definitions/PythonFileDigest"
},
"size": {
"type": "string"
}
},
"additionalProperties": true,
"type": "object"
},
"PythonPackageMetadata": {
"required": [
"name",
"version",
"license",
"author",
"authorEmail",
"platform",
"sitePackagesRootPath"
],
"properties": {
"name": {
"type": "string"
},
"version": {
"type": "string"
},
"license": {
"type": "string"
},
"author": {
"type": "string"
},
"authorEmail": {
"type": "string"
},
"platform": {
"type": "string"
},
"files": {
"items": {
"$schema": "http://json-schema.org/draft-04/schema#",
"$ref": "#/definitions/PythonFileRecord"
},
"type": "array"
},
"sitePackagesRootPath": {
"type": "string"
},
"topLevelPackages": {
"items": {
"type": "string"
},
"type": "array"
}
},
"additionalProperties": true,
"type": "object"
},
"Relationship": {
"required": [
"parent",
"child",
"type",
"metadata"
],
"properties": {
"parent": {
"type": "string"
},
"child": {
"type": "string"
},
"type": {
"type": "string"
},
"metadata": {
"additionalProperties": true
}
},
"additionalProperties": true,
"type": "object"
},
"RpmdbFileRecord": {
"required": [
"path",
"mode",
"size",
"sha256"
],
"properties": {
"path": {
"type": "string"
},
"mode": {
"type": "integer"
},
"size": {
"type": "integer"
},
"sha256": {
"type": "string"
}
},
"additionalProperties": true,
"type": "object"
},
"RpmdbMetadata": {
"required": [
"name",
"version",
"epoch",
"architecture",
"release",
"sourceRpm",
"size",
"license",
"vendor",
"files"
],
"properties": {
"name": {
"type": "string"
},
"version": {
"type": "string"
},
"epoch": {
"type": "integer"
},
"architecture": {
"type": "string"
},
"release": {
"type": "string"
},
"sourceRpm": {
"type": "string"
},
"size": {
"type": "integer"
},
"license": {
"type": "string"
},
"vendor": {
"type": "string"
},
"files": {
"items": {
"$schema": "http://json-schema.org/draft-04/schema#",
"$ref": "#/definitions/RpmdbFileRecord"
},
"type": "array"
}
},
"additionalProperties": true,
"type": "object"
},
"Schema": {
"required": [
"version",
"url"
],
"properties": {
"version": {
"type": "string"
},
"url": {
"type": "string"
}
},
"additionalProperties": true,
"type": "object"
},
"SearchResult": {
"required": [
"classification",
"lineNumber",
"lineOffset",
"seekPosition",
"length"
],
"properties": {
"classification": {
"type": "string"
},
"lineNumber": {
"type": "integer"
},
"lineOffset": {
"type": "integer"
},
"seekPosition": {
"type": "integer"
},
"length": {
"type": "integer"
},
"value": {
"type": "string"
}
},
"additionalProperties": true,
"type": "object"
},
"Secrets": {
"required": [
"location",
"secrets"
],
"properties": {
"location": {
"$ref": "#/definitions/Location"
},
"secrets": {
"items": {
"$schema": "http://json-schema.org/draft-04/schema#",
"$ref": "#/definitions/SearchResult"
},
"type": "array"
}
},
"additionalProperties": true,
"type": "object"
},
"Source": {
"required": [
"type",
"target"
],
"properties": {
"type": {
"type": "string"
},
"target": {
"additionalProperties": true
}
},
"additionalProperties": true,
"type": "object"
}
}
}

View File

@ -11,7 +11,17 @@ const (
AppUpdateAvailable partybus.EventType = "syft-app-update-available" AppUpdateAvailable partybus.EventType = "syft-app-update-available"
// PackageCatalogerStarted is a partybus event that occurs when the package cataloging has begun // PackageCatalogerStarted is a partybus event that occurs when the package cataloging has begun
PackageCatalogerStarted partybus.EventType = "syft-cataloger-started-event" PackageCatalogerStarted partybus.EventType = "syft-package-cataloger-started-event"
// nolint:gosec
// SecretsCatalogerStarted is a partybus event that occurs when the secrets cataloging has begun
SecretsCatalogerStarted partybus.EventType = "syft-secrets-cataloger-started-event"
// FileMetadataCatalogerStarted is a partybus event that occurs when the file metadata cataloging has begun
FileMetadataCatalogerStarted partybus.EventType = "syft-file-metadata-cataloger-started-event"
// FileDigestsCatalogerStarted is a partybus event that occurs when the file digests cataloging has begun
FileDigestsCatalogerStarted partybus.EventType = "syft-file-digests-cataloger-started-event"
// PresenterReady is a partybus event that occurs when an analysis result is ready for final presentation // PresenterReady is a partybus event that occurs when an analysis result is ready for final presentation
PresenterReady partybus.EventType = "syft-presenter-ready-event" PresenterReady partybus.EventType = "syft-presenter-ready-event"

View File

@ -6,6 +6,8 @@ package parsers
import ( import (
"fmt" "fmt"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/internal/presenter" "github.com/anchore/syft/internal/presenter"
"github.com/wagoodman/go-progress" "github.com/wagoodman/go-progress"
@ -40,7 +42,7 @@ func checkEventType(actual, expected partybus.EventType) error {
return nil return nil
} }
func ParseCatalogerStarted(e partybus.Event) (*cataloger.Monitor, error) { func ParsePackageCatalogerStarted(e partybus.Event) (*cataloger.Monitor, error) {
if err := checkEventType(e.Type, event.PackageCatalogerStarted); err != nil { if err := checkEventType(e.Type, event.PackageCatalogerStarted); err != nil {
return nil, err return nil, err
} }
@ -53,6 +55,45 @@ func ParseCatalogerStarted(e partybus.Event) (*cataloger.Monitor, error) {
return &monitor, nil return &monitor, nil
} }
// ParseSecretsCatalogingStarted extracts the SecretsMonitor payload from a
// SecretsCatalogerStarted event, erroring if the event type or payload shape is unexpected.
func ParseSecretsCatalogingStarted(e partybus.Event) (*file.SecretsMonitor, error) {
	if err := checkEventType(e.Type, event.SecretsCatalogerStarted); err != nil {
		return nil, err
	}

	// the payload is published by value; return a pointer to the local copy
	monitor, ok := e.Value.(file.SecretsMonitor)
	if !ok {
		return nil, newPayloadErr(e.Type, "Value", e.Value)
	}

	return &monitor, nil
}
// ParseFileMetadataCatalogingStarted extracts the staged-progress payload from a
// FileMetadataCatalogerStarted event, erroring if the event type or payload shape is unexpected.
func ParseFileMetadataCatalogingStarted(e partybus.Event) (progress.StagedProgressable, error) {
	if err := checkEventType(e.Type, event.FileMetadataCatalogerStarted); err != nil {
		return nil, err
	}

	if stagedProg, ok := e.Value.(progress.StagedProgressable); ok {
		return stagedProg, nil
	}
	return nil, newPayloadErr(e.Type, "Value", e.Value)
}
// ParseFileDigestsCatalogingStarted extracts the staged-progress payload from a
// FileDigestsCatalogerStarted event, erroring if the event type or payload shape is unexpected.
func ParseFileDigestsCatalogingStarted(e partybus.Event) (progress.StagedProgressable, error) {
	if err := checkEventType(e.Type, event.FileDigestsCatalogerStarted); err != nil {
		return nil, err
	}

	stagedProg, ok := e.Value.(progress.StagedProgressable)
	if !ok {
		return nil, newPayloadErr(e.Type, "Value", e.Value)
	}

	return stagedProg, nil
}
func ParsePresenterReady(e partybus.Event) (presenter.Presenter, error) { func ParsePresenterReady(e partybus.Event) (presenter.Presenter, error) {
if err := checkEventType(e.Type, event.PresenterReady); err != nil { if err := checkEventType(e.Type, event.PresenterReady); err != nil {
return nil, err return nil, err

View File

@ -7,6 +7,13 @@ import (
"io" "io"
"strings" "strings"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/internal/bus"
"github.com/anchore/syft/syft/event"
"github.com/wagoodman/go-partybus"
"github.com/wagoodman/go-progress"
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
) )
@ -22,13 +29,22 @@ func NewDigestsCataloger(hashes []crypto.Hash) (*DigestsCataloger, error) {
func (i *DigestsCataloger) Catalog(resolver source.FileResolver) (map[source.Location][]Digest, error) { func (i *DigestsCataloger) Catalog(resolver source.FileResolver) (map[source.Location][]Digest, error) {
results := make(map[source.Location][]Digest) results := make(map[source.Location][]Digest)
var locations []source.Location
for location := range resolver.AllLocations() { for location := range resolver.AllLocations() {
locations = append(locations, location)
}
stage, prog := digestsCatalogingProgress(int64(len(locations)))
for _, location := range locations {
stage.Current = location.RealPath
result, err := i.catalogLocation(resolver, location) result, err := i.catalogLocation(resolver, location)
if err != nil { if err != nil {
return nil, err return nil, err
} }
prog.N++
results[location] = result results[location] = result
} }
log.Debugf("file digests cataloger processed %d files", prog.N)
prog.SetCompleted()
return results, nil return results, nil
} }
@ -78,3 +94,23 @@ func CleanDigestAlgorithmName(name string) string {
lower := strings.ToLower(name) lower := strings.ToLower(name)
return strings.Replace(lower, "-", "", -1) return strings.Replace(lower, "-", "", -1)
} }
// digestsCatalogingProgress publishes a FileDigestsCatalogerStarted event on the bus and
// returns the stage and progress trackers (sized to the number of locations) that the
// digests cataloger should update as it works.
func digestsCatalogingProgress(locations int64) (*progress.Stage, *progress.Manual) {
	stage := &progress.Stage{}
	prog := &progress.Manual{
		Total: locations,
	}

	// the anonymous struct lets UI consumers treat the payload as a StagedProgressable
	bus.Publish(partybus.Event{
		Type: event.FileDigestsCatalogerStarted,
		Value: struct {
			progress.Stager
			progress.Progressable
		}{
			Stager:       progress.Stager(stage),
			Progressable: prog,
		},
	})

	return stage, prog
}

View File

@ -0,0 +1,56 @@
package file
import (
"fmt"
"regexp"
"github.com/bmatcuk/doublestar/v2"
"github.com/hashicorp/go-multierror"
)
// GenerateSearchPatterns takes a set of named base patterns, a set of additional named patterns, and a name
// exclusion list and generates a final set of compiled regular expressions (indexed by name). The sets are
// aggregated roughly as such: (base - excluded) + additional. If any pattern fails to compile, all
// accumulated compilation errors are returned and the result map is nil.
func GenerateSearchPatterns(basePatterns map[string]string, additionalPatterns map[string]string, excludePatternNames []string) (map[string]*regexp.Regexp, error) {
	var regexObjs = make(map[string]*regexp.Regexp, len(basePatterns)+len(additionalPatterns))
	var errs error

	addFn := func(name, pattern string) {
		// always enable multiline search option for extracting secrets with multiline values
		obj, err := regexp.Compile(`(?m)` + pattern)
		if err != nil {
			errs = multierror.Append(errs, fmt.Errorf("unable to parse %q regular expression: %w", name, err))
			// don't record a nil regex object for a pattern that failed to compile
			return
		}
		regexObjs[name] = obj
	}

	// add all base cases... unless that base case was asked to be excluded
	for name, pattern := range basePatterns {
		if !matchesExclusion(excludePatternNames, name) {
			addFn(name, pattern)
		}
	}

	// add all additional cases (these may override a base pattern of the same name)
	for name, pattern := range additionalPatterns {
		addFn(name, pattern)
	}

	if errs != nil {
		return nil, errs
	}

	return regexObjs, nil
}
// matchesExclusion indicates if the given pattern name is covered by any of the provided
// exclusion globs (doublestar syntax). A malformed glob aborts matching and reports false.
func matchesExclusion(excludePatternNames []string, name string) bool {
	for _, exclusionGlob := range excludePatternNames {
		matched, err := doublestar.Match(exclusionGlob, name)
		switch {
		case err != nil:
			return false
		case matched:
			return true
		}
	}
	return false
}

View File

@ -0,0 +1,125 @@
package file
import (
"testing"
"github.com/stretchr/testify/assert"
)
// TestGenerateSearchPatterns exercises the (base - excluded) + additional aggregation logic,
// verifying both the resulting pattern set and that the multiline flag `(?m)` is prepended
// to every compiled expression.
func TestGenerateSearchPatterns(t *testing.T) {
	tests := []struct {
		name       string
		base       map[string]string // the built-in pattern set
		additional map[string]string // user-supplied patterns (may override base entries by name)
		exclude    []string          // glob-style names to remove from the base set
		expected   map[string]string // expected compiled pattern strings, indexed by name
	}{
		{
			name: "use-base-set",
			base: map[string]string{
				"in-default": `^secret_key=.*`,
			},
			expected: map[string]string{
				"in-default": `(?m)^secret_key=.*`,
			},
		},
		{
			name: "exclude-from-base-set",
			base: map[string]string{
				"in-default":      `^secret_key=.*`,
				"also-in-default": `^also-in-default=.*`,
			},
			exclude: []string{"also-in-default"},
			expected: map[string]string{
				"in-default": `(?m)^secret_key=.*`,
			},
		},
		{
			name: "exclude-multiple-from-base-set",
			base: map[string]string{
				"in-default":             `^secret_key=.*`,
				"also-in-default":        `^also-in-default=.*`,
				"furthermore-in-default": `^furthermore-in-default=.*`,
			},
			exclude: []string{"also-in-default", "furthermore-in-default"},
			expected: map[string]string{
				"in-default": `(?m)^secret_key=.*`,
			},
		},
		{
			// a bare "*" glob should remove every base pattern
			name: "exclude-all",
			base: map[string]string{
				"in-default":      `^secret_key=.*`,
				"also-in-default": `^also-in-default=.*`,
			},
			exclude:  []string{"*"},
			expected: map[string]string{},
		},
		{
			// glob exclusion may match multiple names by suffix
			name: "exclude-some",
			base: map[string]string{
				"real":            `^real=.*`,
				"in-default":      `^secret_key=.*`,
				"also-in-default": `^also-in-default=.*`,
			},
			exclude: []string{"*-default"},
			expected: map[string]string{
				"real": `(?m)^real=.*`,
			},
		},
		{
			name: "additional-pattern-unison",
			base: map[string]string{
				"in-default": `^secret_key=.*`,
			},
			additional: map[string]string{
				"additional": `^additional=.*`,
			},
			expected: map[string]string{
				"in-default": `(?m)^secret_key=.*`,
				"additional": `(?m)^additional=.*`,
			},
		},
		{
			// an additional pattern with the same name replaces the base pattern
			name: "override",
			base: map[string]string{
				"in-default": `^secret_key=.*`,
			},
			additional: map[string]string{
				"in-default": `^additional=.*`,
			},
			expected: map[string]string{
				"in-default": `(?m)^additional=.*`,
			},
		},
		{
			// exclusion applies only to the base set; additional patterns always win
			name: "exclude-and-override",
			base: map[string]string{
				"in-default": `^secret_key=.*`,
			},
			exclude: []string{"in-default"},
			additional: map[string]string{
				"in-default": `^additional=.*`,
			},
			expected: map[string]string{
				"in-default": `(?m)^additional=.*`,
			},
		},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			actualObj, err := GenerateSearchPatterns(test.base, test.additional, test.exclude)
			if err != nil {
				t.Fatalf("unable to combine: %+v", err)
			}

			// compare the string form of the compiled regexes for readable failure output
			actual := make(map[string]string)
			for n, v := range actualObj {
				actual[n] = v.String()
			}

			assert.Equal(t, test.expected, actual, "mismatched combination")
		})
	}
}

View File

@ -1,7 +1,12 @@
package file package file
import ( import (
"github.com/anchore/syft/internal/bus"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/event"
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
"github.com/wagoodman/go-partybus"
"github.com/wagoodman/go-progress"
) )
type MetadataCataloger struct { type MetadataCataloger struct {
@ -13,13 +18,42 @@ func NewMetadataCataloger() *MetadataCataloger {
func (i *MetadataCataloger) Catalog(resolver source.FileResolver) (map[source.Location]source.FileMetadata, error) { func (i *MetadataCataloger) Catalog(resolver source.FileResolver) (map[source.Location]source.FileMetadata, error) {
results := make(map[source.Location]source.FileMetadata) results := make(map[source.Location]source.FileMetadata)
var locations []source.Location
for location := range resolver.AllLocations() { for location := range resolver.AllLocations() {
locations = append(locations, location)
}
stage, prog := metadataCatalogingProgress(int64(len(locations)))
for _, location := range locations {
stage.Current = location.RealPath
metadata, err := resolver.FileMetadataByLocation(location) metadata, err := resolver.FileMetadataByLocation(location)
if err != nil { if err != nil {
return nil, err return nil, err
} }
results[location] = metadata results[location] = metadata
prog.N++
} }
log.Debugf("file metadata cataloger processed %d files", prog.N)
prog.SetCompleted()
return results, nil return results, nil
} }
// metadataCatalogingProgress publishes a FileMetadataCatalogerStarted event on the bus and
// returns the stage and progress trackers (sized to the number of locations) that the
// metadata cataloger should update as it works.
func metadataCatalogingProgress(locations int64) (*progress.Stage, *progress.Manual) {
	stage := &progress.Stage{}
	prog := &progress.Manual{
		Total: locations,
	}

	// the anonymous struct lets UI consumers treat the payload as a StagedProgressable
	bus.Publish(partybus.Event{
		Type: event.FileMetadataCatalogerStarted,
		Value: struct {
			progress.Stager
			progress.Progressable
		}{
			Stager:       progress.Stager(stage),
			Progressable: prog,
		},
	})

	return stage, prog
}

View File

@ -53,6 +53,7 @@ func TestFileMetadataCataloger(t *testing.T) {
Type: "RegularFile", Type: "RegularFile",
UserID: 1, UserID: 1,
GroupID: 2, GroupID: 2,
Size: 7,
}, },
}, },
{ {
@ -128,7 +129,7 @@ func TestFileMetadataCataloger(t *testing.T) {
l := source.NewLocationFromImage(test.path, *ref, img) l := source.NewLocationFromImage(test.path, *ref, img)
assert.Equal(t, actual[l], test.expected, "mismatched metadata") assert.Equal(t, test.expected, actual[l], "mismatched metadata")
}) })
} }

View File

@ -0,0 +1,39 @@
package file
import "io"
type newlineCounter struct {
io.RuneReader
numBytes int64
newLines []int64
}
func (c *newlineCounter) ReadRune() (r rune, size int, err error) {
r, size, err = c.RuneReader.ReadRune()
c.numBytes += int64(size)
if r == '\n' {
c.newLines = append(c.newLines, c.numBytes)
}
return
}
func (c *newlineCounter) newlinesBefore(pos int64) int {
var result int
for _, nlPos := range c.newLines {
if nlPos <= pos {
result++
}
}
return result
}
func (c *newlineCounter) newlinePositionBefore(pos int64) int64 {
var last int64
for _, nlPos := range c.newLines {
if nlPos > pos {
break
}
last = nlPos
}
return last
}

View File

@ -0,0 +1,35 @@
package file
import (
"bufio"
"io"
"strings"
"testing"
"github.com/stretchr/testify/assert"
)
// TestLineCounter_ReadRune verifies that draining a reader through the counter records the
// byte position immediately following every newline.
func TestLineCounter_ReadRune(t *testing.T) {
	counter := &newlineCounter{RuneReader: bufio.NewReader(strings.NewReader("hi\nwhat's the weather like today?\ndunno...\n"))}

	// consume the full stream so every newline position is recorded
	var err error
	for {
		if _, _, err = counter.ReadRune(); err != nil {
			break
		}
	}
	if err != io.EOF {
		t.Fatalf("should have gotten an eof, got %+v", err)
	}

	assert.Equal(t, 3, len(counter.newLines), "bad line count")
	assert.Equal(t, []int64{3, 34, 43}, counter.newLines, "bad line positions")
}
// TestLineCounter_newlinesBefore verifies the count of newlines at-or-before a byte offset.
func TestLineCounter_newlinesBefore(t *testing.T) {
	counter := &newlineCounter{RuneReader: bufio.NewReader(strings.NewReader("hi\nwhat's the weather like today?\ndunno...\n"))}

	// consume the full stream so every newline position is recorded
	var err error
	for {
		if _, _, err = counter.ReadRune(); err != nil {
			break
		}
	}
	if err != io.EOF {
		t.Fatalf("should have gotten an eof, got %+v", err)
	}

	assert.Equal(t, 1, counter.newlinesBefore(10), "bad line count")
}

View File

@ -0,0 +1,18 @@
package file
import (
"fmt"
)
// SearchResult describes a single match of a named classification pattern within a file,
// locating the match by line number, offset within that line, and absolute byte seek
// position. Value is only populated when the cataloger is configured to reveal values.
type SearchResult struct {
	Classification string `json:"classification"`
	LineNumber     int64  `json:"lineNumber"`
	LineOffset     int64  `json:"lineOffset"`
	SeekPosition   int64  `json:"seekPosition"`
	Length         int64  `json:"length"`
	Value          string `json:"value,omitempty"`
}

// String returns a human-readable summary of the match (never the secret value itself).
func (s SearchResult) String() string {
	// note: %d (not %q) for the integer fields -- %q on an int renders a quoted rune, not the number
	return fmt.Sprintf("SearchResult(classification=%q seek=%d length=%d)", s.Classification, s.SeekPosition, s.Length)
}

View File

@ -0,0 +1,150 @@
package file
import (
"bytes"
"fmt"
"io"
"io/ioutil"
"regexp"
"sort"
"github.com/anchore/syft/internal/bus"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/event"
"github.com/anchore/syft/syft/source"
"github.com/wagoodman/go-partybus"
"github.com/wagoodman/go-progress"
)
// DefaultSecretsPatterns is the built-in set of named regular expressions used to classify
// secret material within file contents. Each pattern may use a named capture group "value"
// to isolate the secret itself from its surrounding context.
var DefaultSecretsPatterns = map[string]string{
	"aws-access-key":     `(?i)aws_access_key_id["'=:\s]*?(?P<value>(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16})`,
	"aws-secret-key":     `(?i)aws_secret_access_key["'=:\s]*?(?P<value>[0-9a-zA-Z/+]{40})`,
	"pem-private-key":    `-----BEGIN (\S+ )?PRIVATE KEY(\sBLOCK)?-----((?P<value>(\n.*?)+)-----END (\S+ )?PRIVATE KEY(\sBLOCK)?-----)?`,
	"docker-config-auth": `"auths"((.*\n)*.*?"auth"\s*:\s*"(?P<value>[^"]+)")?`,
	"generic-api-key":    `(?i)api(-|_)?key["'=:\s]*?(?P<value>[A-Z0-9]{20,60})["']?(\s|$)`,
}
type SecretsCataloger struct {
patterns map[string]*regexp.Regexp
revealValues bool
skipFilesAboveSize int64
}
func NewSecretsCataloger(patterns map[string]*regexp.Regexp, revealValues bool, maxFileSize int64) (*SecretsCataloger, error) {
return &SecretsCataloger{
patterns: patterns,
revealValues: revealValues,
skipFilesAboveSize: maxFileSize,
}, nil
}
// Catalog scans every location known to the resolver for secrets, returning the discovered
// search results indexed by the location they were found at. Locations with no findings are
// omitted from the result map.
func (i *SecretsCataloger) Catalog(resolver source.FileResolver) (map[source.Location][]SearchResult, error) {
	results := make(map[source.Location][]SearchResult)

	// collect all locations up front so the total is known for progress reporting
	// (and so AllLocations is consumed exactly once)
	var locations []source.Location
	for location := range resolver.AllLocations() {
		locations = append(locations, location)
	}
	stage, prog, secretsDiscovered := secretsCatalogingProgress(int64(len(locations)))
	for _, location := range locations {
		stage.Current = location.RealPath
		result, err := i.catalogLocation(resolver, location)
		if err != nil {
			return nil, err
		}
		if len(result) > 0 {
			secretsDiscovered.N += int64(len(result))
			results[location] = result
		}
		prog.N++
	}
	log.Debugf("secrets cataloger discovered %d secrets", secretsDiscovered.N)
	prog.SetCompleted()
	return results, nil
}
// catalogLocation searches a single file for secrets, returning results ordered by their
// byte position within the file. Returns (nil, nil) when the file exceeds the configured
// size limit (a skip, not an error).
func (i *SecretsCataloger) catalogLocation(resolver source.FileResolver, location source.Location) ([]SearchResult, error) {
	metadata, err := resolver.FileMetadataByLocation(location)
	if err != nil {
		return nil, err
	}

	if i.skipFilesAboveSize > 0 && metadata.Size > i.skipFilesAboveSize {
		// file too large to scan; silently skip it
		return nil, nil
	}

	// TODO: in the future we can swap out search strategies here
	secrets, err := catalogLocationByLine(resolver, location, i.patterns)
	if err != nil {
		return nil, err
	}

	if i.revealValues {
		// re-read the file to extract the raw matched value for each result
		for idx, secret := range secrets {
			value, err := extractValue(resolver, location, secret.SeekPosition, secret.Length)
			if err != nil {
				return nil, err
			}
			secrets[idx].Value = value
		}
	}

	// sort by the start location of each secret as it appears in the location
	sort.SliceStable(secrets, func(i, j int) bool {
		return secrets[i].SeekPosition < secrets[j].SeekPosition
	})

	return secrets, nil
}
// extractValue re-reads the file at the given location and returns the length bytes of
// content beginning at the start offset (i.e. the raw secret value).
func extractValue(resolver source.FileResolver, location source.Location, start, length int64) (string, error) {
	readCloser, err := resolver.FileContentsByLocation(location)
	if err != nil {
		return "", fmt.Errorf("unable to fetch reader for location=%q : %w", location, err)
	}
	defer readCloser.Close()

	// the reader is not seekable, so discard bytes up to the start of the secret
	skipped, err := io.CopyN(ioutil.Discard, readCloser, start)
	if err != nil {
		return "", fmt.Errorf("unable to read contents for location=%q : %w", location, err)
	}
	if skipped != start {
		return "", fmt.Errorf("unexpected seek location for location=%q : %d != %d", location, skipped, start)
	}

	var value bytes.Buffer
	copied, err := io.CopyN(&value, readCloser, length)
	if err != nil {
		return "", fmt.Errorf("unable to read secret value for location=%q : %w", location, err)
	}
	if copied != length {
		return "", fmt.Errorf("unexpected secret length for location=%q : %d != %d", location, copied, length)
	}

	return value.String(), nil
}
// SecretsMonitor is the event payload that lets UI consumers track secrets cataloging:
// the file currently being processed (Stager), the number of secrets found so far
// (SecretsDiscovered), and overall file progress (Progressable).
type SecretsMonitor struct {
	progress.Stager
	SecretsDiscovered progress.Monitorable
	progress.Progressable
}

// secretsCatalogingProgress publishes a SecretsCatalogerStarted event on the bus and returns
// the stage, file-progress, and secrets-discovered trackers the cataloger should update.
func secretsCatalogingProgress(locations int64) (*progress.Stage, *progress.Manual, *progress.Manual) {
	stage := &progress.Stage{}
	// secretsDiscovered has no Total: it counts findings, not work remaining
	secretsDiscovered := &progress.Manual{}
	prog := &progress.Manual{
		Total: locations,
	}

	bus.Publish(partybus.Event{
		Type:   event.SecretsCatalogerStarted,
		Source: secretsDiscovered,
		Value: SecretsMonitor{
			Stager:            progress.Stager(stage),
			SecretsDiscovered: secretsDiscovered,
			Progressable:      prog,
		},
	})

	return stage, prog, secretsDiscovered
}

View File

@ -0,0 +1,444 @@
package file
import (
"regexp"
"testing"
"github.com/anchore/syft/internal/file"
"github.com/anchore/syft/syft/source"
"github.com/stretchr/testify/assert"
)
// TestSecretsCataloger exercises the secrets cataloger against small fixture files with
// user-supplied regex patterns, covering: revealing vs. redacting the matched value,
// named "value" capture groups, and multiple matches within a single file.
// Note: the expected SeekPosition/LineOffset/Length values are byte offsets coupled to the
// exact contents of the fixture files — do not edit the fixtures without updating these.
func TestSecretsCataloger(t *testing.T) {
	tests := []struct {
		name           string
		fixture        string            // path of the file to scan
		reveal         bool              // when true, the matched secret value is recorded in the result
		maxSize        int64             // max file size the cataloger will process (zero value used in all cases here)
		patterns       map[string]string // classification name -> regex pattern
		expected       []SearchResult    // expected search results for the fixture
		constructorErr bool              // expect NewSecretsCataloger to fail
		catalogErr     bool              // expect Catalog to fail
	}{
		{
			name:    "go-case-find-and-reveal",
			fixture: "test-fixtures/secrets/simple.txt",
			reveal:  true,
			patterns: map[string]string{
				"simple-secret-key": `^secret_key=.*`,
			},
			expected: []SearchResult{
				{
					Classification: "simple-secret-key",
					LineNumber:     2,
					LineOffset:     0,
					SeekPosition:   34,
					Length:         21,
					Value:          "secret_key=clear_text",
				},
			},
		},
		{
			name:    "dont-reveal-secret-value",
			fixture: "test-fixtures/secrets/simple.txt",
			reveal:  false,
			patterns: map[string]string{
				"simple-secret-key": `^secret_key=.*`,
			},
			expected: []SearchResult{
				{
					Classification: "simple-secret-key",
					LineNumber:     2,
					LineOffset:     0,
					SeekPosition:   34,
					Length:         21,
					// reveal=false: position data is kept but the value itself is redacted
					Value: "",
				},
			},
		},
		{
			name:    "reveal-named-capture-group",
			fixture: "test-fixtures/secrets/simple.txt",
			reveal:  true,
			patterns: map[string]string{
				"simple-secret-key": `^secret_key=(?P<value>.*)`,
			},
			expected: []SearchResult{
				{
					Classification: "simple-secret-key",
					// the "value" capture group narrows the result to just the secret portion
					LineNumber:   2,
					LineOffset:   11,
					SeekPosition: 45,
					Length:       10,
					Value:        "clear_text",
				},
			},
		},
		{
			name:    "multiple-secret-instances",
			fixture: "test-fixtures/secrets/multiple.txt",
			reveal:  true,
			patterns: map[string]string{
				"simple-secret-key": `secret_key=.*`,
			},
			expected: []SearchResult{
				{
					Classification: "simple-secret-key",
					LineNumber:     1,
					LineOffset:     0,
					SeekPosition:   0,
					Length:         22,
					Value:          "secret_key=clear_text1",
				},
				{
					Classification: "simple-secret-key",
					LineNumber:     3,
					LineOffset:     0,
					SeekPosition:   57,
					Length:         22,
					Value:          "secret_key=clear_text2",
				},
				{
					Classification: "simple-secret-key",
					LineNumber:     4,
					// note: this test captures a line offset case
					LineOffset:   1,
					SeekPosition: 81,
					Length:       22,
					Value:        "secret_key=clear_text3",
				},
				{
					Classification: "simple-secret-key",
					LineNumber:     6,
					LineOffset:     0,
					SeekPosition:   139,
					Length:         22,
					Value:          "secret_key=clear_text4",
				},
			},
		},
		{
			name:    "multiple-secret-instances-with-capture-group",
			fixture: "test-fixtures/secrets/multiple.txt",
			reveal:  true,
			patterns: map[string]string{
				"simple-secret-key": `secret_key=(?P<value>.*)`,
			},
			expected: []SearchResult{
				{
					Classification: "simple-secret-key",
					LineNumber:     1,
					// note: value capture group location
					LineOffset:   11,
					SeekPosition: 11,
					Length:       11,
					Value:        "clear_text1",
				},
				{
					Classification: "simple-secret-key",
					LineNumber:     3,
					LineOffset:     11,
					SeekPosition:   68,
					Length:         11,
					Value:          "clear_text2",
				},
				{
					Classification: "simple-secret-key",
					LineNumber:     4,
					// note: value capture group location + offset
					LineOffset:   12,
					SeekPosition: 92,
					Length:       11,
					Value:        "clear_text3",
				},
				{
					Classification: "simple-secret-key",
					LineNumber:     6,
					LineOffset:     11,
					SeekPosition:   150,
					Length:         11,
					Value:          "clear_text4",
				},
			},
		},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			// compile the raw pattern strings into the regex objects the cataloger consumes
			regexObjs := make(map[string]*regexp.Regexp)
			for name, pattern := range test.patterns {
				// always assume given patterns should be multiline
				// NOTE(review): the prefix here is an empty raw string, so no multiline flag
				// is actually applied — confirm whether a `(?m)` prefix was intended.
				obj, err := regexp.Compile(`` + pattern)
				if err != nil {
					t.Fatalf("unable to parse regex: %+v", err)
				}
				regexObjs[name] = obj
			}

			c, err := NewSecretsCataloger(regexObjs, test.reveal, test.maxSize)
			if err != nil && !test.constructorErr {
				t.Fatalf("could not create cataloger (but should have been able to): %+v", err)
			} else if err == nil && test.constructorErr {
				t.Fatalf("expected constructor error but did not get one")
			} else if test.constructorErr && err != nil {
				// expected failure observed; nothing further to check
				return
			}

			resolver := source.NewMockResolverForPaths(test.fixture)

			actualResults, err := c.Catalog(resolver)
			if err != nil && !test.catalogErr {
				t.Fatalf("could not catalog (but should have been able to): %+v", err)
			} else if err == nil && test.catalogErr {
				t.Fatalf("expected catalog error but did not get one")
			} else if test.catalogErr && err != nil {
				// expected failure observed; nothing further to check
				return
			}

			loc := source.NewLocation(test.fixture)
			if _, exists := actualResults[loc]; !exists {
				t.Fatalf("could not find location=%q in results", loc)
			}

			assert.Equal(t, test.expected, actualResults[loc], "mismatched secrets")
		})
	}
}
// TestSecretsCataloger_DefaultSecrets runs the cataloger with the built-in default secret
// patterns (AWS keys, PEM private keys, docker config auth, generic API keys) against a set
// of positive and negative fixtures. Expected offsets/lengths are byte-coupled to the
// fixture contents — do not edit the fixtures without updating these expectations.
func TestSecretsCataloger_DefaultSecrets(t *testing.T) {
	regexObjs, err := GenerateSearchPatterns(DefaultSecretsPatterns, nil, nil)
	if err != nil {
		t.Fatalf("unable to get patterns: %+v", err)
	}

	tests := []struct {
		fixture  string
		expected []SearchResult // nil means "no secrets should be found in this fixture"
	}{
		{
			fixture: "test-fixtures/secrets/default/aws.env",
			expected: []SearchResult{
				{
					Classification: "aws-access-key",
					LineNumber:     2,
					LineOffset:     25,
					SeekPosition:   64,
					Length:         20,
					Value:          "AKIAIOSFODNN7EXAMPLE",
				},
				{
					Classification: "aws-secret-key",
					LineNumber:     3,
					LineOffset:     29,
					SeekPosition:   114,
					Length:         40,
					Value:          "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY",
				},
			},
		},
		{
			fixture: "test-fixtures/secrets/default/aws.ini",
			expected: []SearchResult{
				{
					Classification: "aws-access-key",
					LineNumber:     3,
					LineOffset:     18,
					SeekPosition:   67,
					Length:         20,
					Value:          "AKIAIOSFODNN7EXAMPLE",
				},
				{
					Classification: "aws-secret-key",
					LineNumber:     4,
					LineOffset:     22,
					SeekPosition:   110,
					Length:         40,
					Value:          "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY",
				},
			},
		},
		{
			fixture: "test-fixtures/secrets/default/private-key.pem",
			expected: []SearchResult{
				{
					Classification: "pem-private-key",
					LineNumber:     2,
					LineOffset:     27,
					SeekPosition:   66,
					Length:         351,
					// the captured value spans multiple lines of the PEM body
					Value: `
MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDBj08sp5++4anG
cmQxJjAkBgNVBAoTHVByb2dyZXNzIFNvZnR3YXJlIENvcnBvcmF0aW9uMSAwHgYD
VQQDDBcqLmF3cy10ZXN0LnByb2dyZXNzLmNvbTCCASIwDQYJKoZIhvcNAQEBBQAD
bml6YXRpb252YWxzaGEyZzIuY3JsMIGgBggrBgEFBQcBAQSBkzCBkDBNBggrBgEF
BQcwAoZBaHR0cDovL3NlY3VyZS5nbG9iYWxzaWduLmNvbS9jYWNlcnQvZ3Nvcmdh
z3P668YfhUbKdRF6S42Cg6zn
`,
				},
			},
		},
		{
			fixture: "test-fixtures/secrets/default/private-key-openssl.pem",
			expected: []SearchResult{
				{
					Classification: "pem-private-key",
					LineNumber:     2,
					LineOffset:     35,
					SeekPosition:   74,
					Length:         351,
					Value: `
MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDBj08sp5++4anG
cmQxJjAkBgNVBAoTHVByb2dyZXNzIFNvZnR3YXJlIENvcnBvcmF0aW9uMSAwHgYD
VQQDDBcqLmF3cy10ZXN0LnByb2dyZXNzLmNvbTCCASIwDQYJKoZIhvcNAQEBBQAD
bml6YXRpb252YWxzaGEyZzIuY3JsMIGgBggrBgEFBQcBAQSBkzCBkDBNBggrBgEF
BQcwAoZBaHR0cDovL3NlY3VyZS5nbG9iYWxzaWduLmNvbS9jYWNlcnQvZ3Nvcmdh
z3P668YfhUbKdRF6S42Cg6zn
`,
				},
			},
		},
		{
			// note: this test proves that the PEM regex matches the smallest possible match
			// since the test catches two adjacent secrets
			fixture: "test-fixtures/secrets/default/private-keys.pem",
			expected: []SearchResult{
				{
					Classification: "pem-private-key",
					LineNumber:     1,
					LineOffset:     35,
					SeekPosition:   35,
					Length:         351,
					Value: `
MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDBj08sp5++4anG
cmQxJjAkBgNVBAoTHVByb2dyZXNzIFNvZnR3YXJlIENvcnBvcmF0aW9uMSAwHgYD
VQQDDBcqLmF3cy10ZXN0LnByb2dyZXNzLmNvbTCCASIwDQYJKoZIhvcNAQEBBQAD
bml6YXRpb252YWxzaGEyZzIuY3JsMIGgBggrBgEFBQcBAQSBkzCBkDBNBggrBgEF
BQcwAoZBaHR0cDovL3NlY3VyZS5nbG9iYWxzaWduLmNvbS9jYWNlcnQvZ3Nvcmdh
z3P668YfhUbKdRF6S42Cg6zn
`,
				},
				{
					Classification: "pem-private-key",
					LineNumber:     9,
					LineOffset:     35,
					SeekPosition:   455,
					Length:         351,
					Value: `
MIIEvgTHISISNOTAREALKEYoIBAQDBj08DBj08DBj08DBj08DBj08DBsp5++4an3
cmQxJjAkBgNVBAoTHVByb2dyZXNzIFNvZnR3YXJlIENvcnBvcmF0aW9uMSAwHgY5
VQQDDBcqLmF3cy10ZXN0SISNOTAREALKEYoIBAQDBj08DfffKoZIhvcNAQEBBQA7
bml6SISNOTAREALKEYoIBAQDBj08DdssBggrBgEFBQcBAQSBkzCBkDBNBggrBgE8
BQcwAoZBaHR0cDovL3NlY3VyZS5nbG9iYWxzaWduLmNvbS9jYWNlcnQvZ3Nvcmd1
j4f668YfhUbKdRF6S6734856
`,
				},
			},
		},
		{
			fixture:  "test-fixtures/secrets/default/private-key-false-positive.pem",
			expected: nil,
		},
		{
			// this test represents:
			// 1. a docker config
			// 2. a named capture group with the correct line number and line offset case
			// 3. the named capture group is in a different line than the match start, and both the match start and the capture group have different line offsets
			fixture: "test-fixtures/secrets/default/docker-config.json",
			expected: []SearchResult{
				{
					Classification: "docker-config-auth",
					LineNumber:     5,
					LineOffset:     15,
					SeekPosition:   100,
					Length:         10,
					Value:          "tOpsyKreTz",
				},
			},
		},
		{
			fixture:  "test-fixtures/secrets/default/not-docker-config.json",
			expected: nil,
		},
		{
			fixture: "test-fixtures/secrets/default/api-key.txt",
			expected: []SearchResult{
				{
					Classification: "generic-api-key",
					LineNumber:     2,
					LineOffset:     7,
					SeekPosition:   33,
					Length:         20,
					Value:          "12345A7a901b34567890",
				},
				{
					Classification: "generic-api-key",
					LineNumber:     3,
					LineOffset:     9,
					SeekPosition:   63,
					Length:         30,
					Value:          "12345A7a901b345678901234567890",
				},
				{
					Classification: "generic-api-key",
					LineNumber:     4,
					LineOffset:     10,
					SeekPosition:   104,
					Length:         40,
					Value:          "12345A7a901b3456789012345678901234567890",
				},
				{
					Classification: "generic-api-key",
					LineNumber:     5,
					LineOffset:     10,
					SeekPosition:   156,
					Length:         50,
					Value:          "12345A7a901b34567890123456789012345678901234567890",
				},
				{
					Classification: "generic-api-key",
					LineNumber:     6,
					LineOffset:     16,
					SeekPosition:   224,
					Length:         60,
					Value:          "12345A7a901b345678901234567890123456789012345678901234567890",
				},
				{
					Classification: "generic-api-key",
					LineNumber:     14,
					LineOffset:     8,
					SeekPosition:   502,
					Length:         20,
					Value:          "11111111111111111111",
				},
			},
		},
	}

	for _, test := range tests {
		t.Run(test.fixture, func(t *testing.T) {
			c, err := NewSecretsCataloger(regexObjs, true, 10*file.MB)
			if err != nil {
				t.Fatalf("could not create cataloger: %+v", err)
			}

			resolver := source.NewMockResolverForPaths(test.fixture)

			actualResults, err := c.Catalog(resolver)
			if err != nil {
				t.Fatalf("could not catalog: %+v", err)
			}

			loc := source.NewLocation(test.fixture)
			// a missing location together with a nil expectation means "no secrets found",
			// which is the desired outcome for the negative fixtures above
			if _, exists := actualResults[loc]; !exists && test.expected != nil {
				t.Fatalf("could not find location=%q in results", loc)
			} else if !exists && test.expected == nil {
				return
			}

			assert.Equal(t, test.expected, actualResults[loc], "mismatched secrets")
		})
	}
}

View File

@ -0,0 +1,134 @@
package file
import (
"bufio"
"errors"
"fmt"
"io"
"io/ioutil"
"regexp"
"github.com/anchore/syft/syft/source"
)
// catalogLocationByLine scans the file at the given location one line at a time, applying
// every configured pattern to each line and returning all secrets found. The byte position
// of each line within the file is tracked so results carry absolute seek positions.
func catalogLocationByLine(resolver source.FileResolver, location source.Location, patterns map[string]*regexp.Regexp) ([]SearchResult, error) {
	contentReader, err := resolver.FileContentsByLocation(location)
	if err != nil {
		return nil, fmt.Errorf("unable to fetch reader for location=%q : %w", location, err)
	}
	defer contentReader.Close()

	buffered := bufio.NewReader(contentReader)

	var (
		results      []SearchResult
		filePosition int64 // byte offset of the start of the current line within the file
		currentLine  int64 // 1-based line counter
	)

	for {
		currentLine++

		// TODO: we're at risk of large memory usage for very long lines
		line, readErr := buffered.ReadBytes('\n')
		if readErr != nil && readErr != io.EOF {
			return nil, readErr
		}

		// a read that hit EOF may still have returned trailing data, so always search
		// the line before deciding whether to stop
		lineResults, searchErr := searchForSecretsWithinLine(resolver, location, patterns, line, currentLine, filePosition)
		if searchErr != nil {
			return nil, searchErr
		}

		filePosition += int64(len(line))
		results = append(results, lineResults...)

		if errors.Is(readErr, io.EOF) {
			return results, nil
		}
	}
}
// searchForSecretsWithinLine applies every pattern against the given line and, for each match,
// re-reads the file from the match's absolute position to extract a full SearchResult (this
// second read allows multi-line values, such as PEM bodies, to be captured).
//
// line is the raw bytes of the current line, lineNo its 1-based line number, and position the
// byte offset of the start of the line within the file.
func searchForSecretsWithinLine(resolver source.FileResolver, location source.Location, patterns map[string]*regexp.Regexp, line []byte, lineNo int64, position int64) ([]SearchResult, error) {
	var secrets []SearchResult
	for name, pattern := range patterns {
		// FindAllIndex returns one [start, end] pair per match; every element is a distinct
		// match on this line and must be processed. (The previous implementation skipped
		// every odd-indexed element, silently dropping every other match on a line.)
		for _, match := range pattern.FindAllIndex(line, -1) {
			lineOffset := int64(match[0])
			seekLocation := position + lineOffset
			reader, err := readerAtPosition(resolver, location, seekLocation)
			if err != nil {
				return nil, err
			}

			secret := extractSecretFromPosition(reader, name, pattern, lineNo, lineOffset, seekLocation)
			if secret != nil {
				secrets = append(secrets, *secret)
			}

			// a fresh reader is opened for each match; close it promptly (not deferred)
			// so files with many matches do not accumulate open handles.
			if err := reader.Close(); err != nil {
				return nil, fmt.Errorf("unable to close contents reader for location=%q: %w", location, err)
			}
		}
	}

	return secrets, nil
}
// readerAtPosition opens a new reader for the file at the given location and advances it to
// seekPosition by discarding bytes (resolvers do not expose random access). The caller owns
// the returned reader and must close it.
func readerAtPosition(resolver source.FileResolver, location source.Location, seekPosition int64) (io.ReadCloser, error) {
	readCloser, err := resolver.FileContentsByLocation(location)
	if err != nil {
		return nil, fmt.Errorf("unable to fetch reader for location=%q : %w", location, err)
	}
	if seekPosition > 0 {
		n, err := io.CopyN(ioutil.Discard, readCloser, seekPosition)
		if err != nil {
			// close the reader on the error path so the handle is not leaked
			readCloser.Close()
			return nil, fmt.Errorf("unable to read contents for location=%q while searching for secrets: %w", location, err)
		}
		if n != seekPosition {
			readCloser.Close()
			return nil, fmt.Errorf("unexpected seek location for location=%q while searching for secrets: %d != %d", location, n, seekPosition)
		}
	}
	return readCloser, nil
}
// extractSecretFromPosition applies the pattern against a reader that has already been advanced
// to seekPosition within the file and builds a SearchResult for the match — either the whole
// match or, when the pattern defines a named "value" capture group, just that group. Returns nil
// when nothing matched (or the optional capture group did not participate in the match).
//
// lineNo/lineOffset identify where the overall match started, so that positions reported for a
// capture group landing on a later line can be translated back into file-relative coordinates.
// note: the caller retains ownership of readCloser; it is not closed here.
func extractSecretFromPosition(readCloser io.ReadCloser, name string, pattern *regexp.Regexp, lineNo, lineOffset, seekPosition int64) *SearchResult {
	// newlineCounter records newline positions as the regex engine consumes the reader,
	// which lets us convert match byte offsets into line numbers / line offsets below.
	reader := &newlineCounter{RuneReader: bufio.NewReader(readCloser)}
	positions := pattern.FindReaderSubmatchIndex(reader)
	if len(positions) == 0 {
		// no matches found
		return nil
	}

	index := pattern.SubexpIndex("value")
	var indexOffset int
	if index != -1 {
		// there is a capture group, use the capture group selection as the secret value. To do this we want to
		// use the position at the discovered offset. Note: all positions come in pairs, so you will need to adjust
		// the offset accordingly (multiply by 2).
		indexOffset = index * 2
	}
	// get the start and stop of the secret value. Note: this covers both when there is a capture group
	// and when there is not a capture group (full value match)
	start, stop := int64(positions[indexOffset]), int64(positions[indexOffset+1])
	if start < 0 || stop < 0 {
		// no match location found. This can happen when there is a value capture group specified by the user
		// and there was a match on the overall regex, but not for the capture group (which is possible if the capture
		// group is optional).
		return nil
	}

	// lineNoOfSecret are the number of lines which occur before the start of the secret value
	var lineNoOfSecret = lineNo + int64(reader.newlinesBefore(start))
	// lineOffsetOfSecret are the number of bytes that occur after the last newline but before the secret value.
	var lineOffsetOfSecret = start - reader.newlinePositionBefore(start)
	if lineNoOfSecret == lineNo {
		// the secret value starts in the same line as the overall match, so we must consider that line offset
		lineOffsetOfSecret += lineOffset
	}

	return &SearchResult{
		Classification: name,
		SeekPosition:   start + seekPosition,
		Length:         stop - start,
		LineNumber:     lineNoOfSecret,
		LineOffset:     lineOffsetOfSecret,
	}
}

View File

@ -0,0 +1,14 @@
# these should be matches
apikey=12345A7a901b34567890
api_key =12345A7a901b345678901234567890
API-KEY= '12345A7a901b3456789012345678901234567890'
API-key: "12345A7a901b34567890123456789012345678901234567890"
some_ApI-kEy = "12345A7a901b345678901234567890123456789012345678901234567890"
# these should be non matches
api_key = "toolong12345A7a901b345678901234567890123456789012345678901234567890"
api_key = "tooshort"
not_api_k3y = "badkeyname12345A7a901b34567890"
# value at EOF should match
api_key=11111111111111111111

View File

@ -0,0 +1,3 @@
# note: these are NOT real credentials
export AWS_ACCESS_KEY_ID=AKIAIOSFODNN7EXAMPLE
export AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY

View File

@ -0,0 +1,4 @@
# note: these are NOT real credentials
[default]
aws_access_key_id=AKIAIOSFODNN7EXAMPLE
aws_secret_access_key=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY

View File

@ -0,0 +1,10 @@
{
"experimental" : "disabled",
"auths" : {
"https://index.docker.io/v1/" : {
"auth": "tOpsyKreTz"
}
},
"stackOrchestrator" : "swarm",
"credsStore" : "desktop"
}

View File

@ -0,0 +1,4 @@
{
"endpoint" : "http://somewhere",
"auth" : "basic"
}

View File

@ -0,0 +1 @@
-----BEGIN OPENSSL PRIVATE KEY-----

View File

@ -0,0 +1,9 @@
# note: this is NOT a real private key
-----BEGIN OPENSSL PRIVATE KEY-----
MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDBj08sp5++4anG
cmQxJjAkBgNVBAoTHVByb2dyZXNzIFNvZnR3YXJlIENvcnBvcmF0aW9uMSAwHgYD
VQQDDBcqLmF3cy10ZXN0LnByb2dyZXNzLmNvbTCCASIwDQYJKoZIhvcNAQEBBQAD
bml6YXRpb252YWxzaGEyZzIuY3JsMIGgBggrBgEFBQcBAQSBkzCBkDBNBggrBgEF
BQcwAoZBaHR0cDovL3NlY3VyZS5nbG9iYWxzaWduLmNvbS9jYWNlcnQvZ3Nvcmdh
z3P668YfhUbKdRF6S42Cg6zn
-----END OPENSSL PRIVATE KEY-----

View File

@ -0,0 +1,10 @@
# note: this is NOT a real private key
-----BEGIN PRIVATE KEY-----
MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDBj08sp5++4anG
cmQxJjAkBgNVBAoTHVByb2dyZXNzIFNvZnR3YXJlIENvcnBvcmF0aW9uMSAwHgYD
VQQDDBcqLmF3cy10ZXN0LnByb2dyZXNzLmNvbTCCASIwDQYJKoZIhvcNAQEBBQAD
bml6YXRpb252YWxzaGEyZzIuY3JsMIGgBggrBgEFBQcBAQSBkzCBkDBNBggrBgEF
BQcwAoZBaHR0cDovL3NlY3VyZS5nbG9iYWxzaWduLmNvbS9jYWNlcnQvZ3Nvcmdh
z3P668YfhUbKdRF6S42Cg6zn
-----END PRIVATE KEY-----
other embedded text

View File

@ -0,0 +1,16 @@
-----BEGIN OPENSSL PRIVATE KEY-----
MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDBj08sp5++4anG
cmQxJjAkBgNVBAoTHVByb2dyZXNzIFNvZnR3YXJlIENvcnBvcmF0aW9uMSAwHgYD
VQQDDBcqLmF3cy10ZXN0LnByb2dyZXNzLmNvbTCCASIwDQYJKoZIhvcNAQEBBQAD
bml6YXRpb252YWxzaGEyZzIuY3JsMIGgBggrBgEFBQcBAQSBkzCBkDBNBggrBgEF
BQcwAoZBaHR0cDovL3NlY3VyZS5nbG9iYWxzaWduLmNvbS9jYWNlcnQvZ3Nvcmdh
z3P668YfhUbKdRF6S42Cg6zn
-----END OPENSSL PRIVATE KEY-----
-----BEGIN OPENSSL PRIVATE KEY-----
MIIEvgTHISISNOTAREALKEYoIBAQDBj08DBj08DBj08DBj08DBj08DBsp5++4an3
cmQxJjAkBgNVBAoTHVByb2dyZXNzIFNvZnR3YXJlIENvcnBvcmF0aW9uMSAwHgY5
VQQDDBcqLmF3cy10ZXN0SISNOTAREALKEYoIBAQDBj08DfffKoZIhvcNAQEBBQA7
bml6SISNOTAREALKEYoIBAQDBj08DdssBggrBgEFBQcBAQSBkzCBkDBNBggrBgE8
BQcwAoZBaHR0cDovL3NlY3VyZS5nbG9iYWxzaWduLmNvbS9jYWNlcnQvZ3Nvcmd1
j4f668YfhUbKdRF6S6734856
-----END OPENSSL PRIVATE KEY-----

View File

@ -0,0 +1,6 @@
secret_key=clear_text1
other text that should be ignored
secret_key=clear_text2
secret_key=clear_text3
also things that should be ignored
secret_key=clear_text4

View File

@ -0,0 +1,4 @@
other text that should be ignored
secret_key=clear_text
---secret_key=clear_text
also things that should be ignored

View File

@ -35,7 +35,7 @@ import (
func CatalogPackages(src source.Source, scope source.Scope) (*pkg.Catalog, *distro.Distro, error) { func CatalogPackages(src source.Source, scope source.Scope) (*pkg.Catalog, *distro.Distro, error) {
resolver, err := src.FileResolver(scope) resolver, err := src.FileResolver(scope)
if err != nil { if err != nil {
return nil, nil, fmt.Errorf("unable to determine FileResolver while cataloging packages: %w", err) return nil, nil, fmt.Errorf("unable to determine resolver while cataloging packages: %w", err)
} }
// find the distro // find the distro

View File

@ -54,7 +54,7 @@ func Catalog(resolver source.FileResolver, theDistro *distro.Distro, catalogers
catalogedPackages := len(packages) catalogedPackages := len(packages)
log.Debugf("cataloger '%s' discovered '%d' packages", theCataloger.Name(), catalogedPackages) log.Debugf("package cataloger %q discovered %d packages", theCataloger.Name(), catalogedPackages)
packagesDiscovered.N += int64(catalogedPackages) packagesDiscovered.N += int64(catalogedPackages)
for _, p := range packages { for _, p := range packages {

View File

@ -12,6 +12,7 @@ type FileMetadata struct {
UserID int UserID int
GroupID int GroupID int
LinkDestination string LinkDestination string
Size int64
} }
func fileMetadataByLocation(img *image.Image, location Location) (FileMetadata, error) { func fileMetadataByLocation(img *image.Image, location Location) (FileMetadata, error) {
@ -26,5 +27,6 @@ func fileMetadataByLocation(img *image.Image, location Location) (FileMetadata,
UserID: entry.Metadata.UserID, UserID: entry.Metadata.UserID,
GroupID: entry.Metadata.GroupID, GroupID: entry.Metadata.GroupID,
LinkDestination: entry.Metadata.Linkname, LinkDestination: entry.Metadata.Linkname,
Size: entry.Metadata.Size,
}, nil }, nil
} }

View File

@ -118,6 +118,23 @@ func (r MockResolver) AllLocations() <-chan Location {
return results return results
} }
func (r MockResolver) FileMetadataByLocation(Location) (FileMetadata, error) { func (r MockResolver) FileMetadataByLocation(l Location) (FileMetadata, error) {
panic("not implemented") info, err := os.Stat(l.RealPath)
if err != nil {
return FileMetadata{}, err
}
// other types not supported
ty := RegularFile
if info.IsDir() {
ty = Directory
}
return FileMetadata{
Mode: info.Mode(),
Type: ty,
UserID: 0, // not supported
GroupID: 0, // not supported
Size: info.Size(),
}, nil
} }

View File

@ -6,8 +6,6 @@ import (
) )
func TestPowerUserCmdFlags(t *testing.T) { func TestPowerUserCmdFlags(t *testing.T) {
request := "docker-archive:" + getFixtureImage(t, "image-pkg-coverage")
tests := []struct { tests := []struct {
name string name string
args []string args []string
@ -16,14 +14,14 @@ func TestPowerUserCmdFlags(t *testing.T) {
}{ }{
{ {
name: "json-output-flag-fails", name: "json-output-flag-fails",
args: []string{"power-user", "-o", "json", request}, args: []string{"power-user", "-o", "json", "docker-archive:" + getFixtureImage(t, "image-pkg-coverage")},
assertions: []traitAssertion{ assertions: []traitAssertion{
assertFailingReturnCode, assertFailingReturnCode,
}, },
}, },
{ {
name: "default-results", name: "default-results-w-pkg-coverage",
args: []string{"power-user", request}, args: []string{"power-user", "docker-archive:" + getFixtureImage(t, "image-pkg-coverage")},
assertions: []traitAssertion{ assertions: []traitAssertion{
assertNotInOutput(" command is deprecated"), // only the root command should be deprecated assertNotInOutput(" command is deprecated"), // only the root command should be deprecated
assertInOutput(`"type": "RegularFile"`), // proof of file-metadata data assertInOutput(`"type": "RegularFile"`), // proof of file-metadata data
@ -32,6 +30,27 @@ func TestPowerUserCmdFlags(t *testing.T) {
assertSuccessfulReturnCode, assertSuccessfulReturnCode,
}, },
}, },
{
name: "defaut-secrets-results-w-reveal-values",
env: map[string]string{
"SYFT_SECRETS_REVEAL_VALUES": "true",
},
args: []string{"power-user", "docker-archive:" + getFixtureImage(t, "image-secrets")},
assertions: []traitAssertion{
assertInOutput(`"classification": "generic-api-key"`), // proof of the secrets cataloger finding something
assertInOutput(`"12345A7a901b345678901234567890123456789012345678901234567890"`), // proof of the secrets cataloger finding the api key
assertSuccessfulReturnCode,
},
},
{
name: "default-secret-results-dont-reveal-values",
args: []string{"power-user", "docker-archive:" + getFixtureImage(t, "image-secrets")},
assertions: []traitAssertion{
assertInOutput(`"classification": "generic-api-key"`), // proof of the secrets cataloger finding something
assertNotInOutput(`"12345A7a901b345678901234567890123456789012345678901234567890"`), // proof of the secrets cataloger finding the api key
assertSuccessfulReturnCode,
},
},
} }
for _, test := range tests { for _, test := range tests {

View File

@ -0,0 +1,2 @@
FROM scratch
ADD api-key.txt .

View File

@ -0,0 +1 @@
some_ApI-kEy = "12345A7a901b345678901234567890123456789012345678901234567890"

View File

@ -25,7 +25,7 @@ const maxBarWidth = 50
const statusSet = common.SpinnerDotSet // SpinnerCircleOutlineSet const statusSet = common.SpinnerDotSet // SpinnerCircleOutlineSet
const completedStatus = "✔" // "●" const completedStatus = "✔" // "●"
const tileFormat = color.Bold const tileFormat = color.Bold
const statusTitleTemplate = " %s %-28s " const statusTitleTemplate = " %s %-31s "
const interval = 150 * time.Millisecond const interval = 150 * time.Millisecond
var ( var (
@ -270,7 +270,7 @@ func ReadImageHandler(ctx context.Context, fr *frame.Frame, event partybus.Event
// PackageCatalogerStartedHandler periodically writes catalog statistics to a single line. // PackageCatalogerStartedHandler periodically writes catalog statistics to a single line.
func PackageCatalogerStartedHandler(ctx context.Context, fr *frame.Frame, event partybus.Event, wg *sync.WaitGroup) error { func PackageCatalogerStartedHandler(ctx context.Context, fr *frame.Frame, event partybus.Event, wg *sync.WaitGroup) error {
monitor, err := syftEventParsers.ParseCatalogerStarted(event) monitor, err := syftEventParsers.ParsePackageCatalogerStarted(event)
if err != nil { if err != nil {
return fmt.Errorf("bad %s event: %w", event.Type, err) return fmt.Errorf("bad %s event: %w", event.Type, err)
} }
@ -284,7 +284,7 @@ func PackageCatalogerStartedHandler(ctx context.Context, fr *frame.Frame, event
_, spinner := startProcess() _, spinner := startProcess()
stream := progress.StreamMonitors(ctx, []progress.Monitorable{monitor.FilesProcessed, monitor.PackagesDiscovered}, interval) stream := progress.StreamMonitors(ctx, []progress.Monitorable{monitor.FilesProcessed, monitor.PackagesDiscovered}, interval)
title := tileFormat.Sprint("Cataloging image") title := tileFormat.Sprint("Cataloging packages")
formatFn := func(p int64) { formatFn := func(p int64) {
spin := color.Magenta.Sprint(spinner.Next()) spin := color.Magenta.Sprint(spinner.Next())
@ -301,7 +301,7 @@ func PackageCatalogerStartedHandler(ctx context.Context, fr *frame.Frame, event
} }
spin := color.Green.Sprint(completedStatus) spin := color.Green.Sprint(completedStatus)
title = tileFormat.Sprint("Cataloged image") title = tileFormat.Sprint("Cataloged packages")
auxInfo := auxInfoFormat.Sprintf("[%d packages]", monitor.PackagesDiscovered.Current()) auxInfo := auxInfoFormat.Sprintf("[%d packages]", monitor.PackagesDiscovered.Current())
_, _ = io.WriteString(line, fmt.Sprintf(statusTitleTemplate+"%s", spin, title, auxInfo)) _, _ = io.WriteString(line, fmt.Sprintf(statusTitleTemplate+"%s", spin, title, auxInfo))
}() }()
@ -309,6 +309,137 @@ func PackageCatalogerStartedHandler(ctx context.Context, fr *frame.Frame, event
return nil return nil
} }
// SecretsCatalogerStartedHandler shows the intermittent secrets searching progress.
// nolint:dupl
func SecretsCatalogerStartedHandler(ctx context.Context, fr *frame.Frame, event partybus.Event, wg *sync.WaitGroup) error {
	prog, err := syftEventParsers.ParseSecretsCatalogingStarted(event)
	if err != nil {
		return fmt.Errorf("bad %s event: %w", event.Type, err)
	}

	line, err := fr.Append()
	if err != nil {
		return err
	}
	wg.Add(1)

	formatter, spinner := startProcess()
	stream := progress.Stream(ctx, prog, interval)
	title := tileFormat.Sprint("Cataloging secrets")

	// formatFn renders one status line: spinner + title + progress + running secrets count
	formatFn := func(p progress.Progress) {
		progStr, err := formatter.Format(p)
		spin := color.Magenta.Sprint(spinner.Next())
		if err != nil {
			_, _ = io.WriteString(line, fmt.Sprintf("Error: %+v", err))
		} else {
			auxInfo := auxInfoFormat.Sprintf("[%d secrets]", prog.SecretsDiscovered.Current())
			_, _ = io.WriteString(line, fmt.Sprintf(statusTitleTemplate+"%s %s", spin, title, progStr, auxInfo))
		}
	}

	go func() {
		defer wg.Done()

		formatFn(progress.Progress{})
		// the stream channel is closed when the underlying progress completes or ctx is canceled,
		// which terminates this goroutine
		for p := range stream {
			formatFn(p)
		}

		// write the final "completed" status line
		spin := color.Green.Sprint(completedStatus)
		title = tileFormat.Sprint("Cataloged secrets")
		auxInfo := auxInfoFormat.Sprintf("[%d secrets]", prog.SecretsDiscovered.Current())
		_, _ = io.WriteString(line, fmt.Sprintf(statusTitleTemplate+"%s", spin, title, auxInfo))
	}()

	// err is necessarily nil at this point (both error paths returned above)
	return err
}
// FileMetadataCatalogerStartedHandler shows the intermittent file metadata cataloging progress.
// nolint:dupl
func FileMetadataCatalogerStartedHandler(ctx context.Context, fr *frame.Frame, event partybus.Event, wg *sync.WaitGroup) error {
	prog, err := syftEventParsers.ParseFileMetadataCatalogingStarted(event)
	if err != nil {
		return fmt.Errorf("bad %s event: %w", event.Type, err)
	}

	line, err := fr.Append()
	if err != nil {
		return err
	}
	wg.Add(1)

	formatter, spinner := startProcess()
	stream := progress.Stream(ctx, prog, interval)
	title := tileFormat.Sprint("Cataloging file metadata")

	// formatFn renders one status line: spinner + title + progress
	formatFn := func(p progress.Progress) {
		progStr, err := formatter.Format(p)
		spin := color.Magenta.Sprint(spinner.Next())
		if err != nil {
			_, _ = io.WriteString(line, fmt.Sprintf("Error: %+v", err))
		} else {
			_, _ = io.WriteString(line, fmt.Sprintf(statusTitleTemplate+"%s", spin, title, progStr))
		}
	}

	go func() {
		defer wg.Done()

		formatFn(progress.Progress{})
		// the stream channel is closed when the underlying progress completes or ctx is canceled,
		// which terminates this goroutine
		for p := range stream {
			formatFn(p)
		}

		// write the final "completed" status line
		spin := color.Green.Sprint(completedStatus)
		title = tileFormat.Sprint("Cataloged file metadata")
		_, _ = io.WriteString(line, fmt.Sprintf(statusTitleTemplate, spin, title))
	}()

	// err is necessarily nil at this point (both error paths returned above)
	return err
}
// FileDigestsCatalogerStartedHandler shows the intermittent file digest cataloging progress.
// nolint:dupl
func FileDigestsCatalogerStartedHandler(ctx context.Context, fr *frame.Frame, event partybus.Event, wg *sync.WaitGroup) error {
	prog, err := syftEventParsers.ParseFileDigestsCatalogingStarted(event)
	if err != nil {
		return fmt.Errorf("bad %s event: %w", event.Type, err)
	}

	line, err := fr.Append()
	if err != nil {
		return err
	}
	wg.Add(1)

	formatter, spinner := startProcess()
	stream := progress.Stream(ctx, prog, interval)
	title := tileFormat.Sprint("Cataloging file digests")

	// formatFn renders one status line: spinner + title + progress
	formatFn := func(p progress.Progress) {
		progStr, err := formatter.Format(p)
		spin := color.Magenta.Sprint(spinner.Next())
		if err != nil {
			_, _ = io.WriteString(line, fmt.Sprintf("Error: %+v", err))
		} else {
			_, _ = io.WriteString(line, fmt.Sprintf(statusTitleTemplate+"%s", spin, title, progStr))
		}
	}

	go func() {
		defer wg.Done()

		formatFn(progress.Progress{})
		// the stream channel is closed when the underlying progress completes or ctx is canceled,
		// which terminates this goroutine
		for p := range stream {
			formatFn(p)
		}

		// write the final "completed" status line
		spin := color.Green.Sprint(completedStatus)
		title = tileFormat.Sprint("Cataloged file digests")
		_, _ = io.WriteString(line, fmt.Sprintf(statusTitleTemplate, spin, title))
	}()

	// err is necessarily nil at this point (both error paths returned above)
	return err
}
// ImportStartedHandler shows the intermittent upload progress to Anchore Enterprise. // ImportStartedHandler shows the intermittent upload progress to Anchore Enterprise.
// nolint:dupl // nolint:dupl
func ImportStartedHandler(ctx context.Context, fr *frame.Frame, event partybus.Event, wg *sync.WaitGroup) error { func ImportStartedHandler(ctx context.Context, fr *frame.Frame, event partybus.Event, wg *sync.WaitGroup) error {

View File

@ -27,7 +27,7 @@ func NewHandler() *Handler {
// RespondsTo indicates if the handler is capable of handling the given event. // RespondsTo indicates if the handler is capable of handling the given event.
func (r *Handler) RespondsTo(event partybus.Event) bool { func (r *Handler) RespondsTo(event partybus.Event) bool {
switch event.Type { switch event.Type {
case stereoscopeEvent.PullDockerImage, stereoscopeEvent.ReadImage, stereoscopeEvent.FetchImage, syftEvent.PackageCatalogerStarted, syftEvent.ImportStarted: case stereoscopeEvent.PullDockerImage, stereoscopeEvent.ReadImage, stereoscopeEvent.FetchImage, syftEvent.PackageCatalogerStarted, syftEvent.SecretsCatalogerStarted, syftEvent.FileDigestsCatalogerStarted, syftEvent.FileMetadataCatalogerStarted, syftEvent.ImportStarted:
return true return true
default: default:
return false return false
@ -49,6 +49,15 @@ func (r *Handler) Handle(ctx context.Context, fr *frame.Frame, event partybus.Ev
case syftEvent.PackageCatalogerStarted: case syftEvent.PackageCatalogerStarted:
return PackageCatalogerStartedHandler(ctx, fr, event, wg) return PackageCatalogerStartedHandler(ctx, fr, event, wg)
case syftEvent.SecretsCatalogerStarted:
return SecretsCatalogerStartedHandler(ctx, fr, event, wg)
case syftEvent.FileDigestsCatalogerStarted:
return FileDigestsCatalogerStartedHandler(ctx, fr, event, wg)
case syftEvent.FileMetadataCatalogerStarted:
return FileMetadataCatalogerStartedHandler(ctx, fr, event, wg)
case syftEvent.ImportStarted: case syftEvent.ImportStarted:
return ImportStartedHandler(ctx, fr, event, wg) return ImportStartedHandler(ctx, fr, event, wg)
} }