Remove the power-user command and related catalogers (#2306)

* remove the power-user command

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

* remove secrets + classifier catalogers

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

* bump json schema

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

* regenerate json schema

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

---------

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>
This commit is contained in:
Alex Goodman 2023-11-20 10:44:28 -05:00 committed by GitHub
parent 1676934c63
commit 5565bdef0c
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
41 changed files with 2182 additions and 1677 deletions

View File

@ -84,8 +84,6 @@ They are registered in `syft/cli/commands/go`.
│ ├── options/ │ ├── options/
│ ├── packages/ │ ├── packages/
│ ├── packages.go │ ├── packages.go
│ ├── poweruser/
│ ├── poweruser.go
│ └── version.go │ └── version.go
└── main.go └── main.go
``` ```

View File

@ -656,14 +656,13 @@ python:
# when given an arbitrary constraint will be used (even if that version may not be available/published). # when given an arbitrary constraint will be used (even if that version may not be available/published).
guess-unpinned-requirements: false guess-unpinned-requirements: false
# cataloging file contents is exposed through the power-user subcommand
file-contents: file-contents:
cataloger: cataloger:
# enable/disable cataloging of secrets # enable/disable cataloging of file contents
# SYFT_FILE_CONTENTS_CATALOGER_ENABLED env var # SYFT_FILE_CONTENTS_CATALOGER_ENABLED env var
enabled: true enabled: true
# the search space to look for secrets (options: all-layers, squashed) # the search space to look for file contents (options: all-layers, squashed)
# SYFT_FILE_CONTENTS_CATALOGER_SCOPE env var # SYFT_FILE_CONTENTS_CATALOGER_SCOPE env var
scope: "squashed" scope: "squashed"
@ -675,7 +674,6 @@ file-contents:
# SYFT_FILE_CONTENTS_GLOBS env var # SYFT_FILE_CONTENTS_GLOBS env var
globs: [] globs: []
# cataloging file metadata is exposed through the power-user subcommand
file-metadata: file-metadata:
cataloger: cataloger:
# enable/disable cataloging of file metadata # enable/disable cataloging of file metadata
@ -693,37 +691,6 @@ file-metadata:
# maximum number of workers used to process the list of package catalogers in parallel # maximum number of workers used to process the list of package catalogers in parallel
parallelism: 1 parallelism: 1
# cataloging secrets is exposed through the power-user subcommand
secrets:
cataloger:
# enable/disable cataloging of secrets
# SYFT_SECRETS_CATALOGER_ENABLED env var
enabled: true
# the search space to look for secrets (options: all-layers, squashed)
# SYFT_SECRETS_CATALOGER_SCOPE env var
scope: "all-layers"
# show extracted secret values in the final JSON report
# SYFT_SECRETS_REVEAL_VALUES env var
reveal-values: false
# skip searching a file entirely if it is above the given size (default = 1MB; unit = bytes)
# SYFT_SECRETS_SKIP_FILES_ABOVE_SIZE env var
skip-files-above-size: 1048576
# name-regex pairs to consider when searching files for secrets. Note: the regex must match single line patterns
# but may also have OPTIONAL multiline capture groups. Regexes with a named capture group of "value" will
# use the entire regex to match, but the secret value will be assumed to be entirely contained within the
# "value" named capture group.
additional-patterns: {}
# names to exclude from the secrets search, valid values are: "aws-access-key", "aws-secret-key", "pem-private-key",
# "docker-config-auth", and "generic-api-key". Note: this does not consider any names introduced in the
# "secrets.additional-patterns" config option.
# SYFT_SECRETS_EXCLUDE_PATTERN_NAMES env var
exclude-pattern-names: []
# options that apply to all scan sources # options that apply to all scan sources
source: source:
# alias name for the source # alias name for the source

View File

@ -17,9 +17,8 @@ import (
"github.com/anchore/syft/internal/redact" "github.com/anchore/syft/internal/redact"
) )
// Application constructs the `syft packages` command, aliases the root command to `syft packages`, // Application constructs the `syft packages` command and aliases the root command to `syft packages`.
// and constructs the `syft power-user` command. It is also responsible for // It is also responsible for organizing flag usage and injecting the application config for each command.
// organizing flag usage and injecting the application config for each command.
// It also constructs the syft attest command and the syft version command. // It also constructs the syft attest command and the syft version command.
// `RunE` is the earliest that the complete application configuration can be loaded. // `RunE` is the earliest that the complete application configuration can be loaded.
func Application(id clio.Identification) clio.Application { func Application(id clio.Identification) clio.Application {
@ -86,7 +85,6 @@ func create(id clio.Identification, out io.Writer) (clio.Application, *cobra.Com
// add sub-commands // add sub-commands
rootCmd.AddCommand( rootCmd.AddCommand(
packagesCmd, packagesCmd,
commands.PowerUser(app),
commands.Attest(app), commands.Attest(app),
commands.Convert(app), commands.Convert(app),
clio.VersionCommand(id), clio.VersionCommand(id),

View File

@ -12,6 +12,7 @@ import (
"github.com/anchore/syft/cmd/syft/cli/options" "github.com/anchore/syft/cmd/syft/cli/options"
"github.com/anchore/syft/cmd/syft/internal/ui" "github.com/anchore/syft/cmd/syft/internal/ui"
"github.com/anchore/syft/internal" "github.com/anchore/syft/internal"
"github.com/anchore/syft/internal/bus"
"github.com/anchore/syft/internal/file" "github.com/anchore/syft/internal/file"
"github.com/anchore/syft/internal/log" "github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/artifact"
@ -157,6 +158,9 @@ func runPackages(id clio.Identification, opts *packagesOptions, userInput string
) )
if err != nil { if err != nil {
if userInput == "power-user" {
bus.Notify("Note: the 'power-user' command has been removed.")
}
return fmt.Errorf("failed to construct source from user input %q: %w", userInput, err) return fmt.Errorf("failed to construct source from user input %q: %w", userInput, err)
} }

View File

@ -1,161 +0,0 @@
package commands
import (
"fmt"
"os"
"github.com/gookit/color"
"github.com/hashicorp/go-multierror"
"github.com/spf13/cobra"
"github.com/anchore/clio"
"github.com/anchore/stereoscope/pkg/image"
"github.com/anchore/syft/cmd/syft/cli/eventloop"
"github.com/anchore/syft/cmd/syft/cli/options"
"github.com/anchore/syft/cmd/syft/internal/ui"
"github.com/anchore/syft/internal"
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/format/syftjson"
"github.com/anchore/syft/syft/sbom"
"github.com/anchore/syft/syft/source"
)
// powerUserExample is the templated help/example text for the power-user
// command; {{.appName}} and {{.command}} are filled in via internal.Tprintf.
const powerUserExample = ` {{.appName}} {{.command}} <image>
DEPRECATED - THIS COMMAND WILL BE REMOVED in v1.0.0
Template outputs are not supported.
All behavior is controlled via application configuration and environment variables (see https://github.com/anchore/syft#configuration)
`
// powerUserOptions aggregates every CLI option group the power-user command
// uses: application config, output-file selection, update checking, and the
// cataloging options shared with `syft packages`.
type powerUserOptions struct {
	options.Config      `yaml:",inline" mapstructure:",squash"`
	options.OutputFile  `yaml:",inline" mapstructure:",squash"`
	options.UpdateCheck `yaml:",inline" mapstructure:",squash"`
	options.Catalog     `yaml:",inline" mapstructure:",squash"`
}
// PowerUser constructs the hidden, deprecated `power-user` cobra command. It
// starts from the default catalog configuration and force-enables the
// secrets, file-metadata, file-contents, and file-classification catalogers,
// then writes a syft-json SBOM for the given image/source.
func PowerUser(app clio.Application) *cobra.Command {
	id := app.ID()
	pkgs := options.DefaultCatalog()
	// these catalogers are off by default for `syft packages`; power-user
	// turns them all on
	pkgs.Secrets.Cataloger.Enabled = true
	pkgs.FileMetadata.Cataloger.Enabled = true
	pkgs.FileContents.Cataloger.Enabled = true
	pkgs.FileClassification.Cataloger.Enabled = true
	opts := &powerUserOptions{
		Catalog: pkgs,
		OutputFile: options.OutputFile{ // nolint:staticcheck
			Enabled: true,
		},
	}
	return app.SetupCommand(&cobra.Command{
		Use:   "power-user [IMAGE]",
		Short: "Run bulk operations on container images",
		Example: internal.Tprintf(powerUserExample, map[string]interface{}{
			"appName": id.Name,
			"command": "power-user",
		}),
		Args:    validatePackagesArgs,
		Hidden:  true,
		PreRunE: applicationUpdateCheck(id, &opts.UpdateCheck),
		RunE: func(cmd *cobra.Command, args []string) error {
			// capture stray stdout writes for the duration of the run so only
			// the SBOM report reaches stdout; restored on return
			restoreStdout := ui.CaptureStdoutToTraceLog()
			defer restoreStdout()
			return runPowerUser(id, opts, args[0])
		},
	}, opts)
}
//nolint:funlen
// runPowerUser resolves the user-input source, runs all configured catalog
// tasks concurrently, and writes the resulting syft-json SBOM to the
// configured output. A deprecation notice is always printed to stderr when
// the run ends.
//
// Fixes over the previous revision:
//   - "deteremine" typo in the source-detection error message
//   - data race: each task goroutine appended to a shared `errs` via
//     multierror.Append without synchronization, and `errs` was inspected
//     before the tasks had necessarily finished; errors are now collected
//     through a buffered channel and drained after the relationship merge.
func runPowerUser(id clio.Identification, opts *powerUserOptions, userInput string) error {
	writer, err := opts.SBOMWriter(syftjson.NewFormatEncoder())
	if err != nil {
		return err
	}
	defer func() {
		// inform user at end of run that command will be removed
		deprecated := color.Style{color.Red, color.OpBold}.Sprint("DEPRECATED: This command will be removed in v1.0.0")
		fmt.Fprintln(os.Stderr, deprecated)
	}()
	tasks, err := eventloop.Tasks(&opts.Catalog)
	if err != nil {
		return err
	}
	detection, err := source.Detect(
		userInput,
		source.DetectConfig{
			DefaultImageSource: opts.DefaultImagePullSource,
		},
	)
	if err != nil {
		return fmt.Errorf("could not determine source: %w", err)
	}
	var platform *image.Platform
	if opts.Platform != "" {
		platform, err = image.NewPlatform(opts.Platform)
		if err != nil {
			return fmt.Errorf("invalid platform: %w", err)
		}
	}
	src, err := detection.NewSource(
		source.DetectionSourceConfig{
			Alias: source.Alias{
				Name:    opts.Source.Name,
				Version: opts.Source.Version,
			},
			RegistryOptions: opts.Registry.ToOptions(),
			Platform:        platform,
			Exclude: source.ExcludeConfig{
				Paths: opts.Exclusions,
			},
			DigestAlgorithms: nil,
			BasePath:         opts.BasePath,
		},
	)
	if src != nil {
		defer src.Close()
	}
	if err != nil {
		return fmt.Errorf("failed to construct source from user input %q: %w", userInput, err)
	}
	s := sbom.SBOM{
		Source: src.Describe(),
		Descriptor: sbom.Descriptor{
			Name:          id.Name,
			Version:       id.Version,
			Configuration: opts,
		},
	}
	// run all tasks concurrently; each goroutine reports exactly one error
	// (possibly nil) on errCh, so no shared mutable error state is needed
	errCh := make(chan error, len(tasks))
	var relationships []<-chan artifact.Relationship
	for _, task := range tasks {
		c := make(chan artifact.Relationship)
		relationships = append(relationships, c)
		go func(task eventloop.Task) {
			errCh <- eventloop.RunTask(task, &s.Artifacts, src, c)
		}(task)
	}
	// merging drains each task's relationship channel (presumably closed by
	// RunTask on completion — see eventloop.RunTask)
	s.Relationships = append(s.Relationships, mergeRelationships(relationships...)...)
	// collect task errors only after all tasks have reported
	var errs error
	for range tasks {
		if err := <-errCh; err != nil {
			errs = multierror.Append(errs, err)
		}
	}
	if errs != nil {
		return errs
	}
	if err := writer.Write(s); err != nil {
		return fmt.Errorf("failed to write sbom: %w", err)
	}
	return nil
}

View File

@ -8,7 +8,6 @@ import (
"github.com/anchore/syft/syft/file/cataloger/filecontent" "github.com/anchore/syft/syft/file/cataloger/filecontent"
"github.com/anchore/syft/syft/file/cataloger/filedigest" "github.com/anchore/syft/syft/file/cataloger/filedigest"
"github.com/anchore/syft/syft/file/cataloger/filemetadata" "github.com/anchore/syft/syft/file/cataloger/filemetadata"
"github.com/anchore/syft/syft/file/cataloger/secrets"
"github.com/anchore/syft/syft/sbom" "github.com/anchore/syft/syft/sbom"
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
) )
@ -22,7 +21,6 @@ func Tasks(opts *options.Catalog) ([]Task, error) {
generateCatalogPackagesTask, generateCatalogPackagesTask,
generateCatalogFileMetadataTask, generateCatalogFileMetadataTask,
generateCatalogFileDigestsTask, generateCatalogFileDigestsTask,
generateCatalogSecretsTask,
generateCatalogContentsTask, generateCatalogContentsTask,
} }
@ -110,38 +108,6 @@ func generateCatalogFileDigestsTask(opts *options.Catalog) (Task, error) {
return task, nil return task, nil
} }
// generateCatalogSecretsTask builds the eventloop task that runs the secrets
// cataloger, or returns (nil, nil) when the cataloger is disabled in config.
// Results are stored on sbom.Artifacts.Secrets; no relationships are emitted.
func generateCatalogSecretsTask(opts *options.Catalog) (Task, error) {
	if !opts.Secrets.Cataloger.Enabled {
		return nil, nil
	}
	// combine the built-in patterns with user-supplied additions/exclusions
	patterns, err := secrets.GenerateSearchPatterns(secrets.DefaultSecretsPatterns, opts.Secrets.AdditionalPatterns, opts.Secrets.ExcludePatternNames)
	if err != nil {
		return nil, err
	}
	secretsCataloger, err := secrets.NewCataloger(patterns, opts.Secrets.RevealValues, opts.Secrets.SkipFilesAboveSize) //nolint:staticcheck
	if err != nil {
		return nil, err
	}
	task := func(results *sbom.Artifacts, src source.Source) ([]artifact.Relationship, error) {
		// resolve files at the scope configured for the secrets cataloger
		resolver, err := src.FileResolver(opts.Secrets.Cataloger.GetScope())
		if err != nil {
			return nil, err
		}
		result, err := secretsCataloger.Catalog(resolver)
		if err != nil {
			return nil, err
		}
		results.Secrets = result
		return nil, nil
	}
	return task, nil
}
func generateCatalogContentsTask(opts *options.Catalog) (Task, error) { func generateCatalogContentsTask(opts *options.Catalog) (Task, error) {
if !opts.FileContents.Cataloger.Enabled { if !opts.FileContents.Cataloger.Enabled {
return nil, nil return nil, nil

View File

@ -28,9 +28,7 @@ type Catalog struct {
LinuxKernel linuxKernel `yaml:"linux-kernel" json:"linux-kernel" mapstructure:"linux-kernel"` LinuxKernel linuxKernel `yaml:"linux-kernel" json:"linux-kernel" mapstructure:"linux-kernel"`
Python python `yaml:"python" json:"python" mapstructure:"python"` Python python `yaml:"python" json:"python" mapstructure:"python"`
FileMetadata fileMetadata `yaml:"file-metadata" json:"file-metadata" mapstructure:"file-metadata"` FileMetadata fileMetadata `yaml:"file-metadata" json:"file-metadata" mapstructure:"file-metadata"`
FileClassification fileClassification `yaml:"file-classification" json:"file-classification" mapstructure:"file-classification"`
FileContents fileContents `yaml:"file-contents" json:"file-contents" mapstructure:"file-contents"` FileContents fileContents `yaml:"file-contents" json:"file-contents" mapstructure:"file-contents"`
Secrets secrets `yaml:"secrets" json:"secrets" mapstructure:"secrets"`
Registry registry `yaml:"registry" json:"registry" mapstructure:"registry"` Registry registry `yaml:"registry" json:"registry" mapstructure:"registry"`
Exclusions []string `yaml:"exclude" json:"exclude" mapstructure:"exclude"` Exclusions []string `yaml:"exclude" json:"exclude" mapstructure:"exclude"`
Platform string `yaml:"platform" json:"platform" mapstructure:"platform"` Platform string `yaml:"platform" json:"platform" mapstructure:"platform"`
@ -52,9 +50,7 @@ func DefaultCatalog() Catalog {
Package: defaultPkg(), Package: defaultPkg(),
LinuxKernel: defaultLinuxKernel(), LinuxKernel: defaultLinuxKernel(),
FileMetadata: defaultFileMetadata(), FileMetadata: defaultFileMetadata(),
FileClassification: defaultFileClassification(),
FileContents: defaultFileContents(), FileContents: defaultFileContents(),
Secrets: defaultSecrets(),
Source: defaultSourceCfg(), Source: defaultSourceCfg(),
Parallelism: 1, Parallelism: 1,
ExcludeBinaryOverlapByOwnership: true, ExcludeBinaryOverlapByOwnership: true,

View File

@ -1,17 +0,0 @@
package options
import (
"github.com/anchore/syft/syft/source"
)
// fileClassification holds configuration for the (deprecated) file
// classification cataloger.
type fileClassification struct {
	Cataloger scope `yaml:"cataloger" json:"cataloger" mapstructure:"cataloger"`
}
// defaultFileClassification returns the default file classification
// configuration, selecting the squashed scope.
func defaultFileClassification() fileClassification {
	var cfg fileClassification
	cfg.Cataloger = scope{
		Scope: source.SquashedScope.String(),
	}
	return cfg
}

View File

@ -1,23 +0,0 @@
package options
import (
"github.com/anchore/syft/internal/file"
"github.com/anchore/syft/syft/source"
)
// secrets holds configuration for the (deprecated) secrets cataloger.
type secrets struct {
	Cataloger           scope             `yaml:"cataloger" json:"cataloger" mapstructure:"cataloger"`
	AdditionalPatterns  map[string]string `yaml:"additional-patterns" json:"additional-patterns" mapstructure:"additional-patterns"`       // extra name -> regex search patterns
	ExcludePatternNames []string          `yaml:"exclude-pattern-names" json:"exclude-pattern-names" mapstructure:"exclude-pattern-names"` // built-in pattern names to disable
	RevealValues        bool              `yaml:"reveal-values" json:"reveal-values" mapstructure:"reveal-values"`                         // include matched secret values in the report
	SkipFilesAboveSize  int64             `yaml:"skip-files-above-size" json:"skip-files-above-size" mapstructure:"skip-files-above-size"` // per-file size limit in bytes
}
// defaultSecrets returns the default secrets cataloger configuration:
// all-layers scope and a 1 MB per-file size limit.
func defaultSecrets() secrets {
	var cfg secrets
	cfg.Cataloger = scope{
		Scope: source.AllLayersScope.String(),
	}
	cfg.SkipFilesAboveSize = 1 * file.MB
	return cfg
}

View File

@ -1,57 +0,0 @@
package ui
import (
"fmt"
tea "github.com/charmbracelet/bubbletea"
"github.com/wagoodman/go-partybus"
"github.com/wagoodman/go-progress"
"github.com/anchore/bubbly/bubbles/taskprogress"
"github.com/anchore/syft/internal/log"
syftEventParsers "github.com/anchore/syft/syft/event/parsers"
"github.com/anchore/syft/syft/file/cataloger/secrets"
)
// compile-time proof that the adapter satisfies progress.StagedProgressable
var _ progress.StagedProgressable = (*secretsCatalogerProgressAdapter)(nil)
// secretsCatalogerProgressAdapter wraps a secrets.Monitor so the generic
// task-progress bubble can render it (adds a Stage() string).
// Deprecated: will be removed in syft 1.0
type secretsCatalogerProgressAdapter struct {
	*secrets.Monitor
}
// newSecretsCatalogerProgressAdapter wraps the given monitor in an adapter
// that exposes staged progress for the UI.
// Deprecated: will be removed in syft 1.0
func newSecretsCatalogerProgressAdapter(monitor *secrets.Monitor) secretsCatalogerProgressAdapter {
	var adapter secretsCatalogerProgressAdapter
	adapter.Monitor = monitor
	return adapter
}
// Stage reports the running count of discovered secrets, e.g. "12 secrets".
func (s secretsCatalogerProgressAdapter) Stage() string {
	return fmt.Sprintf("%d secrets", s.Monitor.SecretsDiscovered.Current())
}
// handleSecretsCatalogerStarted reacts to a SecretsCatalogerStarted event by
// building a task-progress model that tracks the secrets cataloger's monitor.
// Deprecated: will be removed in syft 1.0
func (m *Handler) handleSecretsCatalogerStarted(e partybus.Event) []tea.Model {
	mon, err := syftEventParsers.ParseSecretsCatalogingStarted(e)
	if err != nil {
		// an unparseable event is logged and ignored rather than crashing the UI
		log.WithFields("error", err).Warn("unable to parse event")
		return nil
	}
	tsk := m.newTaskProgress(
		taskprogress.Title{
			Default: "Catalog secrets",
			Running: "Cataloging secrets",
			Success: "Cataloged secrets",
		},
		taskprogress.WithStagedProgressable(
			newSecretsCatalogerProgressAdapter(mon),
		),
	)
	// keep showing the "N secrets" stage text after the task succeeds
	tsk.HideStageOnSuccess = false
	return []tea.Model{tsk}
}

View File

@ -1,96 +0,0 @@
package ui
import (
"testing"
"time"
tea "github.com/charmbracelet/bubbletea"
"github.com/gkampitakis/go-snaps/snaps"
"github.com/stretchr/testify/require"
"github.com/wagoodman/go-partybus"
"github.com/wagoodman/go-progress"
"github.com/anchore/bubbly/bubbles/taskprogress"
syftEvent "github.com/anchore/syft/syft/event"
"github.com/anchore/syft/syft/file/cataloger/secrets"
)
// TestHandler_handleSecretsCatalogerStarted snapshots the rendered
// task-progress model for the secrets cataloger in both in-progress and
// completed states.
func TestHandler_handleSecretsCatalogerStarted(t *testing.T) {
	tests := []struct {
		name       string
		eventFn    func(*testing.T) partybus.Event
		iterations int
	}{
		{
			name: "cataloging in progress",
			eventFn: func(t *testing.T) partybus.Event {
				stage := &progress.Stage{
					Current: "current",
				}
				secretsDiscovered := progress.NewManual(-1)
				secretsDiscovered.Set(64)
				prog := progress.NewManual(72)
				prog.Set(50)
				return partybus.Event{
					Type:   syftEvent.SecretsCatalogerStarted,
					Source: secretsDiscovered,
					Value: secrets.Monitor{
						Stager:            progress.Stager(stage),
						SecretsDiscovered: secretsDiscovered,
						Progressable:      prog,
					},
				}
			},
		},
		{
			name: "cataloging complete",
			eventFn: func(t *testing.T) partybus.Event {
				stage := &progress.Stage{
					Current: "current",
				}
				secretsDiscovered := progress.NewManual(-1)
				secretsDiscovered.Set(64)
				prog := progress.NewManual(72)
				prog.Set(72)
				prog.SetCompleted()
				return partybus.Event{
					Type:   syftEvent.SecretsCatalogerStarted,
					Source: secretsDiscovered,
					Value: secrets.Monitor{
						Stager:            progress.Stager(stage),
						SecretsDiscovered: secretsDiscovered,
						Progressable:      prog,
					},
				}
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			event := tt.eventFn(t)
			handler := New(DefaultHandlerConfig())
			handler.WindowSize = tea.WindowSizeMsg{
				Width:  100,
				Height: 80,
			}
			models := handler.Handle(event)
			require.Len(t, models, 1)
			model := models[0]
			tsk, ok := model.(taskprogress.Model)
			require.True(t, ok)
			// drive the model for the configured ticks and snapshot the output
			got := runModel(t, tsk, tt.iterations, taskprogress.TickMsg{
				Time:     time.Now(),
				Sequence: tsk.Sequence(),
				ID:       tsk.ID(),
			})
			t.Log(got)
			snaps.MatchSnapshot(t, got)
		})
	}
}

View File

@ -58,9 +58,6 @@ func New(cfg HandlerConfig) *Handler {
syftEvent.FileIndexingStarted: h.handleFileIndexingStarted, syftEvent.FileIndexingStarted: h.handleFileIndexingStarted,
syftEvent.AttestationStarted: h.handleAttestationStarted, syftEvent.AttestationStarted: h.handleAttestationStarted,
syftEvent.CatalogerTaskStarted: h.handleCatalogerTaskStarted, syftEvent.CatalogerTaskStarted: h.handleCatalogerTaskStarted,
// deprecated
syftEvent.SecretsCatalogerStarted: h.handleSecretsCatalogerStarted,
}) })
return h return h

View File

@ -3,5 +3,5 @@ package internal
const ( const (
// JSONSchemaVersion is the current schema version output by the JSON encoder // JSONSchemaVersion is the current schema version output by the JSON encoder
// This is roughly following the "SchemaVer" guidelines for versioning the JSON schema. Please see schema/json/README.md for details on how to increment. // This is roughly following the "SchemaVer" guidelines for versioning the JSON schema. Please see schema/json/README.md for details on how to increment.
JSONSchemaVersion = "12.0.1" JSONSchemaVersion = "13.0.0"
) )

View File

@ -1,6 +1,6 @@
# JSON Schema # JSON Schema
This is the JSON schema for output from the JSON presenters (`syft packages <img> -o json` and `syft power-user <img>`). The required inputs for defining the JSON schema are as follows: This is the JSON schema for output from the JSON presenters (`syft packages <img> -o json`). The required inputs for defining the JSON schema are as follows:
- the value of `internal.JSONSchemaVersion` that governs the schema filename - the value of `internal.JSONSchemaVersion` that governs the schema filename
- the `Document` struct definition within `github.com/anchore/syft/syft/formats/syftjson/model/document.go` that governs the overall document shape - the `Document` struct definition within `github.com/anchore/syft/syft/formats/syftjson/model/document.go` that governs the overall document shape

File diff suppressed because it is too large Load Diff

View File

@ -17,10 +17,6 @@ const (
// PackageCatalogerStarted is a partybus event that occurs when the package cataloging has begun // PackageCatalogerStarted is a partybus event that occurs when the package cataloging has begun
PackageCatalogerStarted partybus.EventType = typePrefix + "-package-cataloger-started-event" PackageCatalogerStarted partybus.EventType = typePrefix + "-package-cataloger-started-event"
//nolint:gosec
// SecretsCatalogerStarted is a partybus event that occurs when the secrets cataloging has begun
SecretsCatalogerStarted partybus.EventType = typePrefix + "-secrets-cataloger-started-event"
// FileMetadataCatalogerStarted is a partybus event that occurs when the file metadata cataloging has begun // FileMetadataCatalogerStarted is a partybus event that occurs when the file metadata cataloging has begun
FileMetadataCatalogerStarted partybus.EventType = typePrefix + "-file-metadata-cataloger-started-event" FileMetadataCatalogerStarted partybus.EventType = typePrefix + "-file-metadata-cataloger-started-event"

View File

@ -12,7 +12,6 @@ import (
"github.com/anchore/syft/syft/event" "github.com/anchore/syft/syft/event"
"github.com/anchore/syft/syft/event/monitor" "github.com/anchore/syft/syft/event/monitor"
"github.com/anchore/syft/syft/file/cataloger/secrets"
"github.com/anchore/syft/syft/pkg/cataloger" "github.com/anchore/syft/syft/pkg/cataloger"
) )
@ -54,19 +53,6 @@ func ParsePackageCatalogerStarted(e partybus.Event) (*cataloger.Monitor, error)
return &monitor, nil return &monitor, nil
} }
// ParseSecretsCatalogingStarted validates that the event is a
// SecretsCatalogerStarted event and extracts its secrets.Monitor payload.
func ParseSecretsCatalogingStarted(e partybus.Event) (*secrets.Monitor, error) {
	if err := checkEventType(e.Type, event.SecretsCatalogerStarted); err != nil {
		return nil, err
	}
	monitor, ok := e.Value.(secrets.Monitor)
	if !ok {
		return nil, newPayloadErr(e.Type, "Value", e.Value)
	}
	return &monitor, nil
}
func ParseFileMetadataCatalogingStarted(e partybus.Event) (progress.StagedProgressable, error) { func ParseFileMetadataCatalogingStarted(e partybus.Event) (progress.StagedProgressable, error) {
if err := checkEventType(e.Type, event.FileMetadataCatalogerStarted); err != nil { if err := checkEventType(e.Type, event.FileMetadataCatalogerStarted); err != nil {
return nil, err return nil, err

View File

@ -11,13 +11,11 @@ import (
"github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/file"
) )
// Deprecated: will be removed in syft v1.0.0
type Cataloger struct { type Cataloger struct {
globs []string globs []string
skipFilesAboveSizeInBytes int64 skipFilesAboveSizeInBytes int64
} }
// Deprecated: will be removed in syft v1.0.0
func NewCataloger(globs []string, skipFilesAboveSize int64) (*Cataloger, error) { func NewCataloger(globs []string, skipFilesAboveSize int64) (*Cataloger, error) {
return &Cataloger{ return &Cataloger{
globs: globs, globs: globs,

View File

@ -1,158 +0,0 @@
package secrets
import (
"bytes"
"fmt"
"io"
"regexp"
"sort"
"github.com/wagoodman/go-partybus"
"github.com/wagoodman/go-progress"
"github.com/anchore/syft/internal"
"github.com/anchore/syft/internal/bus"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/event"
"github.com/anchore/syft/syft/file"
internal2 "github.com/anchore/syft/syft/file/cataloger/internal"
)
// DefaultSecretsPatterns are the built-in named search patterns for
// well-known secret formats. Where present, a named capture group "value"
// isolates the secret value within the overall match.
var DefaultSecretsPatterns = map[string]string{
	"aws-access-key":     `(?i)aws_access_key_id["'=:\s]*?(?P<value>(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16})`,
	"aws-secret-key":     `(?i)aws_secret_access_key["'=:\s]*?(?P<value>[0-9a-zA-Z/+]{40})`,
	"pem-private-key":    `-----BEGIN (\S+ )?PRIVATE KEY(\sBLOCK)?-----((?P<value>(\n.*?)+)-----END (\S+ )?PRIVATE KEY(\sBLOCK)?-----)?`,
	"docker-config-auth": `"auths"((.*\n)*.*?"auth"\s*:\s*"(?P<value>[^"]+)")?`,
	"generic-api-key":    `(?i)api(-|_)?key["'=:\s]*?(?P<value>[A-Z0-9]{20,60})["']?(\s|$)`,
}
// Cataloger searches file contents for secrets matching a set of named
// regular expressions.
// Deprecated: will be removed in syft v1.0.0
type Cataloger struct {
	patterns           map[string]*regexp.Regexp // named search patterns (name -> compiled regex)
	revealValues       bool                      // when true, populate the matched secret value in results
	skipFilesAboveSize int64                     // skip files larger than this many bytes (<= 0 disables the limit)
}
// NewCataloger returns a secrets Cataloger for the given compiled patterns.
// The error return is always nil in the current implementation.
// Deprecated: will be removed in syft v1.0.0
func NewCataloger(patterns map[string]*regexp.Regexp, revealValues bool, maxFileSize int64) (*Cataloger, error) {
	c := Cataloger{
		patterns:           patterns,
		revealValues:       revealValues,
		skipFilesAboveSize: maxFileSize,
	}
	return &c, nil
}
// Catalog searches every regular file reachable through the resolver for
// secrets, returning matches keyed by file coordinates. Progress (files
// processed and secrets discovered) is published for UI consumption.
func (i *Cataloger) Catalog(resolver file.Resolver) (map[file.Coordinates][]file.SearchResult, error) {
	results := make(map[file.Coordinates][]file.SearchResult)
	locations := internal2.AllRegularFiles(resolver)
	stage, prog, secretsDiscovered := secretsCatalogingProgress(int64(len(locations)))
	for _, location := range locations {
		stage.Current = location.RealPath
		result, err := i.catalogLocation(resolver, location)
		if internal.IsErrPathPermission(err) {
			// permission failures on individual paths are skipped rather than
			// failing the whole run
			log.Debugf("secrets cataloger skipping - %+v", err)
			continue
		}
		if err != nil {
			return nil, err
		}
		if len(result) > 0 {
			secretsDiscovered.Add(int64(len(result)))
			results[location.Coordinates] = result
		}
		prog.Increment()
	}
	log.Debugf("secrets cataloger discovered %d secrets", secretsDiscovered.Current())
	prog.SetCompleted()
	return results, nil
}
// catalogLocation searches a single file for secrets. Empty files and files
// above the configured size limit are skipped (nil result, nil error). When
// revealValues is enabled, each match's Value is filled by re-reading the
// matched byte range from the file.
func (i *Cataloger) catalogLocation(resolver file.Resolver, location file.Location) ([]file.SearchResult, error) {
	metadata, err := resolver.FileMetadataByLocation(location)
	if err != nil {
		return nil, err
	}
	if metadata.Size() == 0 {
		return nil, nil
	}
	// a non-positive skipFilesAboveSize disables the size limit
	if i.skipFilesAboveSize > 0 && metadata.Size() > i.skipFilesAboveSize {
		return nil, nil
	}
	// TODO: in the future we can swap out search strategies here
	secrets, err := catalogLocationByLine(resolver, location, i.patterns)
	if err != nil {
		return nil, internal.ErrPath{Context: "secrets-cataloger", Path: location.RealPath, Err: err}
	}
	if i.revealValues {
		for idx, secret := range secrets {
			value, err := extractValue(resolver, location, secret.SeekPosition, secret.Length)
			if err != nil {
				return nil, err
			}
			secrets[idx].Value = value
		}
	}
	// sort by the start location of each secret as it appears in the location
	sort.SliceStable(secrets, func(i, j int) bool {
		return secrets[i].SeekPosition < secrets[j].SeekPosition
	})
	return secrets, nil
}
// extractValue re-reads the file at the given location and returns exactly
// `length` bytes starting at byte offset `start` (the matched secret value).
func extractValue(resolver file.Resolver, location file.Location, start, length int64) (string, error) {
	readCloser, err := resolver.FileContentsByLocation(location)
	if err != nil {
		return "", fmt.Errorf("unable to fetch reader for location=%q : %w", location, err)
	}
	defer internal.CloseAndLogError(readCloser, location.AccessPath)
	// advance the reader to the start offset of the match by discarding bytes
	n, err := io.CopyN(io.Discard, readCloser, start)
	if err != nil {
		return "", fmt.Errorf("unable to read contents for location=%q : %w", location, err)
	}
	if n != start {
		return "", fmt.Errorf("unexpected seek location for location=%q : %d != %d", location, n, start)
	}
	var buf bytes.Buffer
	n, err = io.CopyN(&buf, readCloser, length)
	if err != nil {
		return "", fmt.Errorf("unable to read secret value for location=%q : %w", location, err)
	}
	if n != length {
		return "", fmt.Errorf("unexpected secret length for location=%q : %d != %d", location, n, length)
	}
	return buf.String(), nil
}
// Monitor exposes live progress for a secrets cataloging run: the current
// stage (file path being searched), a count of secrets discovered, and
// overall file progress.
type Monitor struct {
	progress.Stager
	SecretsDiscovered progress.Monitorable
	progress.Progressable
}
// secretsCatalogingProgress publishes a SecretsCatalogerStarted event carrying
// a Monitor sized for `locations` files, and returns the stage, progress, and
// discovered-count handles for the cataloger to update as it runs.
func secretsCatalogingProgress(locations int64) (*progress.Stage, *progress.Manual, *progress.Manual) {
	stage := &progress.Stage{}
	secretsDiscovered := &progress.Manual{}
	prog := progress.NewManual(locations)
	bus.Publish(partybus.Event{
		Type:   event.SecretsCatalogerStarted,
		Source: secretsDiscovered,
		Value: Monitor{
			Stager:            progress.Stager(stage),
			SecretsDiscovered: secretsDiscovered,
			Progressable:      prog,
		},
	})
	return stage, prog, secretsDiscovered
}

View File

@ -1,443 +0,0 @@
package secrets
import (
"regexp"
"testing"
"github.com/stretchr/testify/assert"
intFile "github.com/anchore/syft/internal/file"
"github.com/anchore/syft/syft/file"
)
// TestSecretsCataloger exercises the secrets cataloger against small fixture
// files, covering value revealing, named "value" capture groups, and multiple
// matches per file (including line-offset bookkeeping).
func TestSecretsCataloger(t *testing.T) {
	tests := []struct {
		name           string
		fixture        string
		reveal         bool
		maxSize        int64
		patterns       map[string]string
		expected       []file.SearchResult
		constructorErr bool
		catalogErr     bool
	}{
		{
			name:    "go-case-find-and-reveal",
			fixture: "test-fixtures/secrets/simple.txt",
			reveal:  true,
			patterns: map[string]string{
				"simple-secret-key": `^secret_key=.*`,
			},
			expected: []file.SearchResult{
				{
					Classification: "simple-secret-key",
					LineNumber:     2,
					LineOffset:     0,
					SeekPosition:   34,
					Length:         21,
					Value:          "secret_key=clear_text",
				},
			},
		},
		{
			name:    "dont-reveal-secret-value",
			fixture: "test-fixtures/secrets/simple.txt",
			reveal:  false,
			patterns: map[string]string{
				"simple-secret-key": `^secret_key=.*`,
			},
			expected: []file.SearchResult{
				{
					Classification: "simple-secret-key",
					LineNumber:     2,
					LineOffset:     0,
					SeekPosition:   34,
					Length:         21,
					Value:          "",
				},
			},
		},
		{
			name:    "reveal-named-capture-group",
			fixture: "test-fixtures/secrets/simple.txt",
			reveal:  true,
			patterns: map[string]string{
				"simple-secret-key": `^secret_key=(?P<value>.*)`,
			},
			expected: []file.SearchResult{
				{
					Classification: "simple-secret-key",
					LineNumber:     2,
					LineOffset:     11,
					SeekPosition:   45,
					Length:         10,
					Value:          "clear_text",
				},
			},
		},
		{
			name:    "multiple-secret-instances",
			fixture: "test-fixtures/secrets/multiple.txt",
			reveal:  true,
			patterns: map[string]string{
				"simple-secret-key": `secret_key=.*`,
			},
			expected: []file.SearchResult{
				{
					Classification: "simple-secret-key",
					LineNumber:     1,
					LineOffset:     0,
					SeekPosition:   0,
					Length:         22,
					Value:          "secret_key=clear_text1",
				},
				{
					Classification: "simple-secret-key",
					LineNumber:     3,
					LineOffset:     0,
					SeekPosition:   57,
					Length:         22,
					Value:          "secret_key=clear_text2",
				},
				{
					Classification: "simple-secret-key",
					LineNumber:     4,
					// note: this test captures a line offset case
					LineOffset:   1,
					SeekPosition: 81,
					Length:       22,
					Value:        "secret_key=clear_text3",
				},
				{
					Classification: "simple-secret-key",
					LineNumber:     6,
					LineOffset:     0,
					SeekPosition:   139,
					Length:         22,
					Value:          "secret_key=clear_text4",
				},
			},
		},
		{
			name:    "multiple-secret-instances-with-capture-group",
			fixture: "test-fixtures/secrets/multiple.txt",
			reveal:  true,
			patterns: map[string]string{
				"simple-secret-key": `secret_key=(?P<value>.*)`,
			},
			expected: []file.SearchResult{
				{
					Classification: "simple-secret-key",
					LineNumber:     1,
					// note: value capture group location
					LineOffset:   11,
					SeekPosition: 11,
					Length:       11,
					Value:        "clear_text1",
				},
				{
					Classification: "simple-secret-key",
					LineNumber:     3,
					LineOffset:     11,
					SeekPosition:   68,
					Length:         11,
					Value:          "clear_text2",
				},
				{
					Classification: "simple-secret-key",
					LineNumber:     4,
					// note: value capture group location + offset
					LineOffset:   12,
					SeekPosition: 92,
					Length:       11,
					Value:        "clear_text3",
				},
				{
					Classification: "simple-secret-key",
					LineNumber:     6,
					LineOffset:     11,
					SeekPosition:   150,
					Length:         11,
					Value:          "clear_text4",
				},
			},
		},
	}
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			regexObjs := make(map[string]*regexp.Regexp)
			for name, pattern := range test.patterns {
				// always assume given patterns should be multiline
				// NOTE(review): no (?m) flag is actually prepended here — the
				// empty-string prefix is a no-op; confirm intent
				obj, err := regexp.Compile(`` + pattern)
				if err != nil {
					t.Fatalf("unable to parse regex: %+v", err)
				}
				regexObjs[name] = obj
			}
			c, err := NewCataloger(regexObjs, test.reveal, test.maxSize)
			if err != nil && !test.constructorErr {
				t.Fatalf("could not create cataloger (but should have been able to): %+v", err)
			} else if err == nil && test.constructorErr {
				t.Fatalf("expected constructor error but did not get one")
			} else if test.constructorErr && err != nil {
				return
			}
			resolver := file.NewMockResolverForPaths(test.fixture)
			actualResults, err := c.Catalog(resolver)
			if err != nil && !test.catalogErr {
				t.Fatalf("could not catalog (but should have been able to): %+v", err)
			} else if err == nil && test.catalogErr {
				t.Fatalf("expected catalog error but did not get one")
			} else if test.catalogErr && err != nil {
				return
			}
			loc := file.NewLocation(test.fixture)
			if _, exists := actualResults[loc.Coordinates]; !exists {
				t.Fatalf("could not find location=%q in results", loc)
			}
			assert.Equal(t, test.expected, actualResults[loc.Coordinates], "mismatched secrets")
		})
	}
}
// TestSecretsCataloger_DefaultSecrets runs the cataloger with the stock
// DefaultSecretsPatterns (no additions, no exclusions) against one fixture per
// supported secret class: AWS credentials, PEM private keys, docker auth
// config, and generic API keys. The expected LineNumber/LineOffset/SeekPosition
// values are tied to the exact byte layout of each fixture file; a nil
// `expected` means the fixture must produce no results at all.
func TestSecretsCataloger_DefaultSecrets(t *testing.T) {
    // compile the default pattern set (each pattern gets the (?m) flag prepended)
    regexObjs, err := GenerateSearchPatterns(DefaultSecretsPatterns, nil, nil)
    if err != nil {
        t.Fatalf("unable to get patterns: %+v", err)
    }
    tests := []struct {
        fixture  string
        expected []file.SearchResult
    }{
        {
            // shell-style env file with exported AWS credentials
            fixture: "test-fixtures/secrets/default/aws.env",
            expected: []file.SearchResult{
                {
                    Classification: "aws-access-key",
                    LineNumber:     2,
                    LineOffset:     25,
                    SeekPosition:   64,
                    Length:         20,
                    Value:          "AKIAIOSFODNN7EXAMPLE",
                },
                {
                    Classification: "aws-secret-key",
                    LineNumber:     3,
                    LineOffset:     29,
                    SeekPosition:   114,
                    Length:         40,
                    Value:          "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY",
                },
            },
        },
        {
            // ini-style AWS credentials file
            fixture: "test-fixtures/secrets/default/aws.ini",
            expected: []file.SearchResult{
                {
                    Classification: "aws-access-key",
                    LineNumber:     3,
                    LineOffset:     18,
                    SeekPosition:   67,
                    Length:         20,
                    Value:          "AKIAIOSFODNN7EXAMPLE",
                },
                {
                    Classification: "aws-secret-key",
                    LineNumber:     4,
                    LineOffset:     22,
                    SeekPosition:   110,
                    Length:         40,
                    Value:          "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY",
                },
            },
        },
        {
            // multiline secret: the captured value spans the PEM body lines
            fixture: "test-fixtures/secrets/default/private-key.pem",
            expected: []file.SearchResult{
                {
                    Classification: "pem-private-key",
                    LineNumber:     2,
                    LineOffset:     27,
                    SeekPosition:   66,
                    Length:         351,
                    Value: `
MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDBj08sp5++4anG
cmQxJjAkBgNVBAoTHVByb2dyZXNzIFNvZnR3YXJlIENvcnBvcmF0aW9uMSAwHgYD
VQQDDBcqLmF3cy10ZXN0LnByb2dyZXNzLmNvbTCCASIwDQYJKoZIhvcNAQEBBQAD
bml6YXRpb252YWxzaGEyZzIuY3JsMIGgBggrBgEFBQcBAQSBkzCBkDBNBggrBgEF
BQcwAoZBaHR0cDovL3NlY3VyZS5nbG9iYWxzaWduLmNvbS9jYWNlcnQvZ3Nvcmdh
z3P668YfhUbKdRF6S42Cg6zn
`,
                },
            },
        },
        {
            // same key material but with the "OPENSSL" header variant
            fixture: "test-fixtures/secrets/default/private-key-openssl.pem",
            expected: []file.SearchResult{
                {
                    Classification: "pem-private-key",
                    LineNumber:     2,
                    LineOffset:     35,
                    SeekPosition:   74,
                    Length:         351,
                    Value: `
MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDBj08sp5++4anG
cmQxJjAkBgNVBAoTHVByb2dyZXNzIFNvZnR3YXJlIENvcnBvcmF0aW9uMSAwHgYD
VQQDDBcqLmF3cy10ZXN0LnByb2dyZXNzLmNvbTCCASIwDQYJKoZIhvcNAQEBBQAD
bml6YXRpb252YWxzaGEyZzIuY3JsMIGgBggrBgEFBQcBAQSBkzCBkDBNBggrBgEF
BQcwAoZBaHR0cDovL3NlY3VyZS5nbG9iYWxzaWduLmNvbS9jYWNlcnQvZ3Nvcmdh
z3P668YfhUbKdRF6S42Cg6zn
`,
                },
            },
        },
        {
            // note: this test proves that the PEM regex matches the smallest possible match
            // since the test catches two adjacent secrets
            fixture: "test-fixtures/secrets/default/private-keys.pem",
            expected: []file.SearchResult{
                {
                    Classification: "pem-private-key",
                    LineNumber:     1,
                    LineOffset:     35,
                    SeekPosition:   35,
                    Length:         351,
                    Value: `
MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDBj08sp5++4anG
cmQxJjAkBgNVBAoTHVByb2dyZXNzIFNvZnR3YXJlIENvcnBvcmF0aW9uMSAwHgYD
VQQDDBcqLmF3cy10ZXN0LnByb2dyZXNzLmNvbTCCASIwDQYJKoZIhvcNAQEBBQAD
bml6YXRpb252YWxzaGEyZzIuY3JsMIGgBggrBgEFBQcBAQSBkzCBkDBNBggrBgEF
BQcwAoZBaHR0cDovL3NlY3VyZS5nbG9iYWxzaWduLmNvbS9jYWNlcnQvZ3Nvcmdh
z3P668YfhUbKdRF6S42Cg6zn
`,
                },
                {
                    Classification: "pem-private-key",
                    LineNumber:     9,
                    LineOffset:     35,
                    SeekPosition:   455,
                    Length:         351,
                    Value: `
MIIEvgTHISISNOTAREALKEYoIBAQDBj08DBj08DBj08DBj08DBj08DBsp5++4an3
cmQxJjAkBgNVBAoTHVByb2dyZXNzIFNvZnR3YXJlIENvcnBvcmF0aW9uMSAwHgY5
VQQDDBcqLmF3cy10ZXN0SISNOTAREALKEYoIBAQDBj08DfffKoZIhvcNAQEBBQA7
bml6SISNOTAREALKEYoIBAQDBj08DdssBggrBgEFBQcBAQSBkzCBkDBNBggrBgE8
BQcwAoZBaHR0cDovL3NlY3VyZS5nbG9iYWxzaWduLmNvbS9jYWNlcnQvZ3Nvcmd1
j4f668YfhUbKdRF6S6734856
`,
                },
            },
        },
        {
            // a BEGIN header with no matching END must not match
            fixture: "test-fixtures/secrets/default/private-key-false-positive.pem",
            expected: nil,
        },
        {
            // this test represents:
            // 1. a docker config
            // 2. a named capture group with the correct line number and line offset case
            // 3. the named capture group is in a different line than the match start, and both the match start and the capture group have different line offsets
            fixture: "test-fixtures/secrets/default/docker-config.json",
            expected: []file.SearchResult{
                {
                    Classification: "docker-config-auth",
                    LineNumber:     5,
                    LineOffset:     15,
                    SeekPosition:   100,
                    Length:         10,
                    Value:          "tOpsyKreTz",
                },
            },
        },
        {
            // JSON with an "auth" key but no "auths" object must not match
            fixture: "test-fixtures/secrets/default/not-docker-config.json",
            expected: nil,
        },
        {
            // generic API keys of varying lengths (20-60 chars), including one at EOF
            fixture: "test-fixtures/secrets/default/api-key.txt",
            expected: []file.SearchResult{
                {
                    Classification: "generic-api-key",
                    LineNumber:     2,
                    LineOffset:     7,
                    SeekPosition:   33,
                    Length:         20,
                    Value:          "12345A7a901b34567890",
                },
                {
                    Classification: "generic-api-key",
                    LineNumber:     3,
                    LineOffset:     9,
                    SeekPosition:   63,
                    Length:         30,
                    Value:          "12345A7a901b345678901234567890",
                },
                {
                    Classification: "generic-api-key",
                    LineNumber:     4,
                    LineOffset:     10,
                    SeekPosition:   104,
                    Length:         40,
                    Value:          "12345A7a901b3456789012345678901234567890",
                },
                {
                    Classification: "generic-api-key",
                    LineNumber:     5,
                    LineOffset:     10,
                    SeekPosition:   156,
                    Length:         50,
                    Value:          "12345A7a901b34567890123456789012345678901234567890",
                },
                {
                    Classification: "generic-api-key",
                    LineNumber:     6,
                    LineOffset:     16,
                    SeekPosition:   224,
                    Length:         60,
                    Value:          "12345A7a901b345678901234567890123456789012345678901234567890",
                },
                {
                    Classification: "generic-api-key",
                    LineNumber:     14,
                    LineOffset:     8,
                    SeekPosition:   502,
                    Length:         20,
                    Value:          "11111111111111111111",
                },
            },
        },
    }
    for _, test := range tests {
        t.Run(test.fixture, func(t *testing.T) {
            // reveal=true so Value fields are populated; 10MB max file size
            c, err := NewCataloger(regexObjs, true, 10*intFile.MB)
            if err != nil {
                t.Fatalf("could not create cataloger: %+v", err)
            }
            resolver := file.NewMockResolverForPaths(test.fixture)
            actualResults, err := c.Catalog(resolver)
            if err != nil {
                t.Fatalf("could not catalog: %+v", err)
            }
            loc := file.NewLocation(test.fixture)
            // a missing entry is only acceptable when no results are expected
            if _, exists := actualResults[loc.Coordinates]; !exists && test.expected != nil {
                t.Fatalf("could not find location=%q in results", loc)
            } else if !exists && test.expected == nil {
                return
            }
            assert.Equal(t, test.expected, actualResults[loc.Coordinates], "mismatched secrets")
        })
    }
}

View File

@ -1,56 +0,0 @@
package secrets
import (
"fmt"
"regexp"
"github.com/bmatcuk/doublestar/v4"
"github.com/hashicorp/go-multierror"
)
// GenerateSearchPatterns takes a set of named base patterns, a set of additional named patterns and a name exclusion list and generates a final
// set of regular expressions (indexed by name). The sets are aggregated roughly as such: (base - excluded) + additional.
// Exclusion entries are doublestar globs matched against pattern names; additional patterns are never excluded and
// override base patterns that share the same name. If any pattern fails to compile, all errors are aggregated and
// a nil map is returned.
func GenerateSearchPatterns(basePatterns map[string]string, additionalPatterns map[string]string, excludePatternNames []string) (map[string]*regexp.Regexp, error) {
    var regexObjs = make(map[string]*regexp.Regexp)
    var errs error

    addFn := func(name, pattern string) {
        // always enable multiline search option for extracting secrets with multiline values
        obj, err := regexp.Compile(`(?m)` + pattern)
        if err != nil {
            errs = multierror.Append(errs, fmt.Errorf("unable to parse %q regular expression: %w", name, err))
            // bug fix: don't store a nil *regexp.Regexp for patterns that failed to compile
            return
        }
        regexObjs[name] = obj
    }

    // add all base cases... unless that base case was asked to be excluded
    for name, pattern := range basePatterns {
        if !matchesExclusion(excludePatternNames, name) {
            addFn(name, pattern)
        }
    }

    // add all additional cases (these may override base patterns by name)
    for name, pattern := range additionalPatterns {
        addFn(name, pattern)
    }

    if errs != nil {
        return nil, errs
    }

    return regexObjs, nil
}
// matchesExclusion reports whether the given pattern name is covered by any
// glob in the exclusion list. A malformed glob aborts the scan immediately and
// is treated as "no match".
func matchesExclusion(excludePatternNames []string, name string) bool {
    for _, glob := range excludePatternNames {
        ok, err := doublestar.Match(glob, name)
        switch {
        case err != nil:
            return false
        case ok:
            return true
        }
    }
    return false
}

View File

@ -1,125 +0,0 @@
package secrets
import (
"testing"
"github.com/stretchr/testify/assert"
)
// TestGenerateSearchPatterns verifies how base patterns, additional patterns,
// and exclusion globs are combined: (base - excluded) + additional. Every
// surviving pattern is expected to come back compiled with the multiline
// flag `(?m)` prepended to its source.
func TestGenerateSearchPatterns(t *testing.T) {
    tests := []struct {
        name       string
        base       map[string]string
        additional map[string]string
        exclude    []string
        expected   map[string]string
    }{
        {
            // base patterns pass through untouched (other than the (?m) prefix)
            name: "use-base-set",
            base: map[string]string{
                "in-default": `^secret_key=.*`,
            },
            expected: map[string]string{
                "in-default": `(?m)^secret_key=.*`,
            },
        },
        {
            // an exact-name exclusion removes a single base pattern
            name: "exclude-from-base-set",
            base: map[string]string{
                "in-default":      `^secret_key=.*`,
                "also-in-default": `^also-in-default=.*`,
            },
            exclude: []string{"also-in-default"},
            expected: map[string]string{
                "in-default": `(?m)^secret_key=.*`,
            },
        },
        {
            // multiple exclusion entries may each remove a base pattern
            name: "exclude-multiple-from-base-set",
            base: map[string]string{
                "in-default":             `^secret_key=.*`,
                "also-in-default":        `^also-in-default=.*`,
                "furthermore-in-default": `^furthermore-in-default=.*`,
            },
            exclude: []string{"also-in-default", "furthermore-in-default"},
            expected: map[string]string{
                "in-default": `(?m)^secret_key=.*`,
            },
        },
        {
            // a bare "*" glob excludes every base pattern
            name: "exclude-all",
            base: map[string]string{
                "in-default":      `^secret_key=.*`,
                "also-in-default": `^also-in-default=.*`,
            },
            exclude:  []string{"*"},
            expected: map[string]string{},
        },
        {
            // glob exclusions match by name suffix/prefix patterns
            name: "exclude-some",
            base: map[string]string{
                "real":            `^real=.*`,
                "in-default":      `^secret_key=.*`,
                "also-in-default": `^also-in-default=.*`,
            },
            exclude: []string{"*-default"},
            expected: map[string]string{
                "real": `(?m)^real=.*`,
            },
        },
        {
            // additional patterns are unioned with the base set
            name: "additional-pattern-unison",
            base: map[string]string{
                "in-default": `^secret_key=.*`,
            },
            additional: map[string]string{
                "additional": `^additional=.*`,
            },
            expected: map[string]string{
                "in-default": `(?m)^secret_key=.*`,
                "additional": `(?m)^additional=.*`,
            },
        },
        {
            // an additional pattern with the same name replaces the base pattern
            name: "override",
            base: map[string]string{
                "in-default": `^secret_key=.*`,
            },
            additional: map[string]string{
                "in-default": `^additional=.*`,
            },
            expected: map[string]string{
                "in-default": `(?m)^additional=.*`,
            },
        },
        {
            // exclusions only apply to base patterns, never to additional ones
            name: "exclude-and-override",
            base: map[string]string{
                "in-default": `^secret_key=.*`,
            },
            exclude: []string{"in-default"},
            additional: map[string]string{
                "in-default": `^additional=.*`,
            },
            expected: map[string]string{
                "in-default": `(?m)^additional=.*`,
            },
        },
    }
    for _, test := range tests {
        t.Run(test.name, func(t *testing.T) {
            actualObj, err := GenerateSearchPatterns(test.base, test.additional, test.exclude)
            if err != nil {
                t.Fatalf("unable to combine: %+v", err)
            }
            // compare the compiled regex sources against the expected strings
            actual := make(map[string]string)
            for n, v := range actualObj {
                actual[n] = v.String()
            }
            assert.Equal(t, test.expected, actual, "mismatched combination")
        })
    }
}

View File

@ -1,39 +0,0 @@
package secrets
import "io"
// newlineCounter wraps an io.RuneReader and records the byte position
// immediately after every '\n' it reads, so callers can later translate an
// absolute byte offset into a line number and a within-line offset.
type newlineCounter struct {
    io.RuneReader
    numBytes int64   // total bytes consumed so far
    newLines []int64 // byte positions just past each newline, in read order (ascending)
}

// ReadRune delegates to the wrapped reader while maintaining the running byte
// count and remembering the position just past each newline.
func (c *newlineCounter) ReadRune() (rune, int, error) {
    ch, n, err := c.RuneReader.ReadRune()
    c.numBytes += int64(n)
    if ch == '\n' {
        c.newLines = append(c.newLines, c.numBytes)
    }
    return ch, n, err
}

// newlinesBefore returns how many newlines were seen at or before the given
// byte position.
func (c *newlineCounter) newlinesBefore(pos int64) int {
    count := 0
    for _, p := range c.newLines {
        if p > pos {
            continue
        }
        count++
    }
    return count
}

// newlinePositionBefore returns the byte position just past the last newline
// at or before pos, or 0 if no newline precedes it.
func (c *newlineCounter) newlinePositionBefore(pos int64) int64 {
    var best int64
    for _, p := range c.newLines {
        if p <= pos {
            best = p
        }
    }
    return best
}

View File

@ -1,35 +0,0 @@
package secrets
import (
"bufio"
"io"
"strings"
"testing"
"github.com/stretchr/testify/assert"
)
// TestLineCounter_ReadRune drains a fixed input through the counter and checks
// both how many newlines were seen and their recorded byte offsets.
func TestLineCounter_ReadRune(t *testing.T) {
    input := "hi\nwhat's the weather like today?\ndunno...\n"
    counter := &newlineCounter{RuneReader: bufio.NewReader(strings.NewReader(input))}

    // consume the entire stream
    var readErr error
    for readErr == nil {
        _, _, readErr = counter.ReadRune()
    }
    if readErr != io.EOF {
        t.Fatalf("should have gotten an eof, got %+v", readErr)
    }

    assert.Equal(t, 3, len(counter.newLines), "bad line count")
    assert.Equal(t, []int64{3, 34, 43}, counter.newLines, "bad line positions")
}
// TestLineCounter_newlinesBefore verifies counting of newlines at-or-before a
// given byte offset once the whole stream has been consumed (offset 10 falls
// within the second line, after a single newline).
func TestLineCounter_newlinesBefore(t *testing.T) {
    counter := &newlineCounter{RuneReader: bufio.NewReader(strings.NewReader("hi\nwhat's the weather like today?\ndunno...\n"))}
    for {
        _, _, err := counter.ReadRune()
        if err == nil {
            continue
        }
        if err != io.EOF {
            t.Fatalf("should have gotten an eof, got %+v", err)
        }
        break
    }
    assert.Equal(t, 1, counter.newlinesBefore(10), "bad line count")
}

View File

@ -1,135 +0,0 @@
package secrets
import (
"bufio"
"errors"
"fmt"
"io"
"regexp"
"github.com/anchore/syft/internal"
"github.com/anchore/syft/syft/file"
)
// catalogLocationByLine scans the contents of a single location line by line,
// applying every named pattern to each line and accumulating all matches.
// Byte offsets are tracked so each match can be reported with an absolute
// seek position within the file.
func catalogLocationByLine(resolver file.Resolver, location file.Location, patterns map[string]*regexp.Regexp) ([]file.SearchResult, error) {
    contents, err := resolver.FileContentsByLocation(location)
    if err != nil {
        return nil, fmt.Errorf("unable to fetch reader for location=%q : %w", location, err)
    }
    defer internal.CloseAndLogError(contents, location.AccessPath)

    reader := bufio.NewReader(contents)

    var (
        results []file.SearchResult
        offset  int64 // byte offset of the start of the current line
        lineNo  int64
    )
    for {
        lineNo++
        // TODO: we're at risk of large memory usage for very long lines
        line, readErr := reader.ReadBytes('\n')
        if readErr != nil && readErr != io.EOF {
            return nil, readErr
        }
        found, searchErr := searchForSecretsWithinLine(resolver, location, patterns, line, lineNo, offset)
        if searchErr != nil {
            return nil, searchErr
        }
        offset += int64(len(line))
        results = append(results, found...)
        if errors.Is(readErr, io.EOF) {
            break
        }
    }
    return results, nil
}
// searchForSecretsWithinLine applies each named pattern to a single line of the
// file. For every match start it re-opens the file seeked to that absolute
// offset and extracts the full (possibly multiline) secret value from there.
// position is the absolute byte offset of the start of this line within the file.
func searchForSecretsWithinLine(resolver file.Resolver, location file.Location, patterns map[string]*regexp.Regexp, line []byte, lineNo int64, position int64) ([]file.SearchResult, error) {
    var secrets []file.SearchResult
    for name, pattern := range patterns {
        // FindAllIndex returns one [start, end] pair per match; every element is a
        // distinct match on this line and must be processed.
        // (bug fix: the previous code skipped every other match with an i%2 check,
        // mistaking the per-match [start, end] slices for a flat list of numbers,
        // which silently dropped alternating matches on the same line.)
        for _, match := range pattern.FindAllIndex(line, -1) {
            lineOffset := int64(match[0])
            seekLocation := position + lineOffset
            reader, err := readerAtPosition(resolver, location, seekLocation)
            if err != nil {
                return nil, err
            }

            secret := extractSecretFromPosition(reader, name, pattern, lineNo, lineOffset, seekLocation)
            if secret != nil {
                secrets = append(secrets, *secret)
            }
            internal.CloseAndLogError(reader, location.AccessPath)
        }
    }

    return secrets, nil
}
// readerAtPosition returns a reader over the location's contents that has been
// advanced (by discarding bytes) to the given absolute byte offset. On success
// the caller owns the returned reader and must close it; on error the reader
// is closed here.
func readerAtPosition(resolver file.Resolver, location file.Location, seekPosition int64) (io.ReadCloser, error) {
    readCloser, err := resolver.FileContentsByLocation(location)
    if err != nil {
        return nil, fmt.Errorf("unable to fetch reader for location=%q : %w", location, err)
    }
    if seekPosition > 0 {
        n, err := io.CopyN(io.Discard, readCloser, seekPosition)
        if err != nil {
            // bug fix: close the open reader instead of leaking it on a failed seek
            internal.CloseAndLogError(readCloser, location.AccessPath)
            return nil, fmt.Errorf("unable to read contents for location=%q while searching for secrets: %w", location, err)
        }
        if n != seekPosition {
            // bug fix: close the open reader instead of leaking it on a short seek
            internal.CloseAndLogError(readCloser, location.AccessPath)
            return nil, fmt.Errorf("unexpected seek location for location=%q while searching for secrets: %d != %d", location, n, seekPosition)
        }
    }
    return readCloser, nil
}
// extractSecretFromPosition re-applies the pattern to a reader that has already
// been seeked to the match start (seekPosition) and builds a SearchResult
// describing where the secret value lives in the file. If the pattern defines
// a named capture group "value", only that group's span is reported; otherwise
// the full match is used. lineNo and lineOffset describe where the overall
// match began; the reported line number/offset are adjusted when the captured
// value starts on a later line (possible for multiline secrets).
// Returns nil when no match (or no capture-group match) is found at this position.
func extractSecretFromPosition(readCloser io.ReadCloser, name string, pattern *regexp.Regexp, lineNo, lineOffset, seekPosition int64) *file.SearchResult {
    // wrap the reader so we can count newlines between the match start and the value start
    reader := &newlineCounter{RuneReader: bufio.NewReader(readCloser)}
    // positions are byte offsets relative to seekPosition (the reader's start)
    positions := pattern.FindReaderSubmatchIndex(reader)
    if len(positions) == 0 {
        // no matches found
        return nil
    }

    index := pattern.SubexpIndex("value")
    var indexOffset int
    if index != -1 {
        // there is a capture group, use the capture group selection as the secret value. To do this we want to
        // use the position at the discovered offset. Note: all positions come in pairs, so you will need to adjust
        // the offset accordingly (multiply by 2).
        indexOffset = index * 2
    }
    // get the start and stop of the secret value. Note: this covers both when there is a capture group
    // and when there is not a capture group (full value match)
    start, stop := int64(positions[indexOffset]), int64(positions[indexOffset+1])
    if start < 0 || stop < 0 {
        // no match location found. This can happen when there is a value capture group specified by the user
        // and there was a match on the overall regex, but not for the capture group (which is possible if the capture
        // group is optional).
        return nil
    }

    // lineNoOfSecret are the number of lines which occur before the start of the secret value
    var lineNoOfSecret = lineNo + int64(reader.newlinesBefore(start))

    // lineOffsetOfSecret are the number of bytes that occur after the last newline but before the secret value.
    var lineOffsetOfSecret = start - reader.newlinePositionBefore(start)
    if lineNoOfSecret == lineNo {
        // the secret value starts in the same line as the overall match, so we must consider that line offset
        lineOffsetOfSecret += lineOffset
    }

    return &file.SearchResult{
        Classification: name,
        // convert the reader-relative start back to an absolute file offset
        SeekPosition: start + seekPosition,
        Length:       stop - start,
        LineNumber:   lineNoOfSecret,
        LineOffset:   lineOffsetOfSecret,
    }
}

View File

@ -1,14 +0,0 @@
# these should be matches
apikey=12345A7a901b34567890
api_key =12345A7a901b345678901234567890
API-KEY= '12345A7a901b3456789012345678901234567890'
API-key: "12345A7a901b34567890123456789012345678901234567890"
some_ApI-kEy = "12345A7a901b345678901234567890123456789012345678901234567890"
# these should be non matches
api_key = "toolong12345A7a901b345678901234567890123456789012345678901234567890"
api_key = "tooshort"
not_api_k3y = "badkeyname12345A7a901b34567890"
# value at EOF should match
api_key=11111111111111111111

View File

@ -1,3 +0,0 @@
# note: these are NOT real credentials
export AWS_ACCESS_KEY_ID=AKIAIOSFODNN7EXAMPLE
export AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY

View File

@ -1,4 +0,0 @@
# note: these are NOT real credentials
[default]
aws_access_key_id=AKIAIOSFODNN7EXAMPLE
aws_secret_access_key=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY

View File

@ -1,10 +0,0 @@
{
"experimental" : "disabled",
"auths" : {
"https://index.docker.io/v1/" : {
"auth": "tOpsyKreTz"
}
},
"stackOrchestrator" : "swarm",
"credsStore" : "desktop"
}

View File

@ -1,4 +0,0 @@
{
"endpoint" : "http://somewhere",
"auth" : "basic"
}

View File

@ -1 +0,0 @@
-----BEGIN OPENSSL PRIVATE KEY-----

View File

@ -1,9 +0,0 @@
# note: this is NOT a real private key
-----BEGIN OPENSSL PRIVATE KEY-----
MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDBj08sp5++4anG
cmQxJjAkBgNVBAoTHVByb2dyZXNzIFNvZnR3YXJlIENvcnBvcmF0aW9uMSAwHgYD
VQQDDBcqLmF3cy10ZXN0LnByb2dyZXNzLmNvbTCCASIwDQYJKoZIhvcNAQEBBQAD
bml6YXRpb252YWxzaGEyZzIuY3JsMIGgBggrBgEFBQcBAQSBkzCBkDBNBggrBgEF
BQcwAoZBaHR0cDovL3NlY3VyZS5nbG9iYWxzaWduLmNvbS9jYWNlcnQvZ3Nvcmdh
z3P668YfhUbKdRF6S42Cg6zn
-----END OPENSSL PRIVATE KEY-----

View File

@ -1,10 +0,0 @@
# note: this is NOT a real private key
-----BEGIN PRIVATE KEY-----
MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDBj08sp5++4anG
cmQxJjAkBgNVBAoTHVByb2dyZXNzIFNvZnR3YXJlIENvcnBvcmF0aW9uMSAwHgYD
VQQDDBcqLmF3cy10ZXN0LnByb2dyZXNzLmNvbTCCASIwDQYJKoZIhvcNAQEBBQAD
bml6YXRpb252YWxzaGEyZzIuY3JsMIGgBggrBgEFBQcBAQSBkzCBkDBNBggrBgEF
BQcwAoZBaHR0cDovL3NlY3VyZS5nbG9iYWxzaWduLmNvbS9jYWNlcnQvZ3Nvcmdh
z3P668YfhUbKdRF6S42Cg6zn
-----END PRIVATE KEY-----
other embedded text

View File

@ -1,16 +0,0 @@
-----BEGIN OPENSSL PRIVATE KEY-----
MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDBj08sp5++4anG
cmQxJjAkBgNVBAoTHVByb2dyZXNzIFNvZnR3YXJlIENvcnBvcmF0aW9uMSAwHgYD
VQQDDBcqLmF3cy10ZXN0LnByb2dyZXNzLmNvbTCCASIwDQYJKoZIhvcNAQEBBQAD
bml6YXRpb252YWxzaGEyZzIuY3JsMIGgBggrBgEFBQcBAQSBkzCBkDBNBggrBgEF
BQcwAoZBaHR0cDovL3NlY3VyZS5nbG9iYWxzaWduLmNvbS9jYWNlcnQvZ3Nvcmdh
z3P668YfhUbKdRF6S42Cg6zn
-----END OPENSSL PRIVATE KEY-----
-----BEGIN OPENSSL PRIVATE KEY-----
MIIEvgTHISISNOTAREALKEYoIBAQDBj08DBj08DBj08DBj08DBj08DBsp5++4an3
cmQxJjAkBgNVBAoTHVByb2dyZXNzIFNvZnR3YXJlIENvcnBvcmF0aW9uMSAwHgY5
VQQDDBcqLmF3cy10ZXN0SISNOTAREALKEYoIBAQDBj08DfffKoZIhvcNAQEBBQA7
bml6SISNOTAREALKEYoIBAQDBj08DdssBggrBgEFBQcBAQSBkzCBkDBNBggrBgE8
BQcwAoZBaHR0cDovL3NlY3VyZS5nbG9iYWxzaWduLmNvbS9jYWNlcnQvZ3Nvcmd1
j4f668YfhUbKdRF6S6734856
-----END OPENSSL PRIVATE KEY-----

View File

@ -1,6 +0,0 @@
secret_key=clear_text1
other text that should be ignored
secret_key=clear_text2
secret_key=clear_text3
also things that should be ignored
secret_key=clear_text4

View File

@ -1,4 +0,0 @@
other text that should be ignored
secret_key=clear_text
---secret_key=clear_text
also things that should be ignored

View File

@ -5,7 +5,6 @@ type Document struct {
Artifacts []Package `json:"artifacts"` // Artifacts is the list of packages discovered and placed into the catalog Artifacts []Package `json:"artifacts"` // Artifacts is the list of packages discovered and placed into the catalog
ArtifactRelationships []Relationship `json:"artifactRelationships"` ArtifactRelationships []Relationship `json:"artifactRelationships"`
Files []File `json:"files,omitempty"` // note: must have omitempty Files []File `json:"files,omitempty"` // note: must have omitempty
Secrets []Secrets `json:"secrets,omitempty"` // note: must have omitempty
Source Source `json:"source"` // Source represents the original object that was cataloged Source Source `json:"source"` // Source represents the original object that was cataloged
Distro LinuxRelease `json:"distro"` // Distro represents the Linux distribution that was detected from the source Distro LinuxRelease `json:"distro"` // Distro represents the Linux distribution that was detected from the source
Descriptor Descriptor `json:"descriptor"` // Descriptor is a block containing self-describing information about syft Descriptor Descriptor `json:"descriptor"` // Descriptor is a block containing self-describing information about syft

View File

@ -38,7 +38,6 @@ func ToFormatModel(s sbom.SBOM, cfg EncoderConfig) model.Document {
Artifacts: toPackageModels(s.Artifacts.Packages, cfg), Artifacts: toPackageModels(s.Artifacts.Packages, cfg),
ArtifactRelationships: toRelationshipModel(s.Relationships), ArtifactRelationships: toRelationshipModel(s.Relationships),
Files: toFile(s), Files: toFile(s),
Secrets: toSecrets(s.Artifacts.Secrets),
Source: toSourceModel(s.Source), Source: toSourceModel(s.Source),
Distro: toLinuxReleaser(s.Artifacts.LinuxDistribution), Distro: toLinuxReleaser(s.Artifacts.LinuxDistribution),
Descriptor: toDescriptor(s.Descriptor), Descriptor: toDescriptor(s.Descriptor),
@ -83,22 +82,6 @@ func toDescriptor(d sbom.Descriptor) model.Descriptor {
} }
} }
func toSecrets(data map[file.Coordinates][]file.SearchResult) []model.Secrets {
results := make([]model.Secrets, 0)
for coordinates, secrets := range data {
results = append(results, model.Secrets{
Location: coordinates,
Secrets: secrets,
})
}
// sort by real path then virtual path to ensure the result is stable across multiple runs
sort.SliceStable(results, func(i, j int) bool {
return results[i].Location.RealPath < results[j].Location.RealPath
})
return results
}
func toFile(s sbom.SBOM) []model.File { func toFile(s sbom.SBOM) []model.File {
results := make([]model.File, 0) results := make([]model.File, 0)
artifacts := s.Artifacts artifacts := s.Artifacts

View File

@ -25,7 +25,6 @@ type Artifacts struct {
FileDigests map[file.Coordinates][]file.Digest FileDigests map[file.Coordinates][]file.Digest
FileContents map[file.Coordinates]string FileContents map[file.Coordinates]string
FileLicenses map[file.Coordinates][]file.License FileLicenses map[file.Coordinates][]file.License
Secrets map[file.Coordinates][]file.SearchResult
LinuxDistribution *linux.Release LinuxDistribution *linux.Release
} }

View File

@ -36,11 +36,6 @@ func TestJSONSchema(t *testing.T) {
args: []string{"-o", "json"}, args: []string{"-o", "json"},
fixture: imageFixture, fixture: imageFixture,
}, },
{
name: "power-user:image:docker-archive:pkg-coverage",
subcommand: "power-user",
fixture: imageFixture,
},
{ {
name: "packages:dir:pkg-coverage", name: "packages:dir:pkg-coverage",
subcommand: "packages", subcommand: "packages",

View File

@ -1,101 +0,0 @@
package cli
import (
"testing"
)
// TestPowerUserCmdFlags exercises the power-user subcommand end-to-end against
// fixture images and directories, asserting on traits of the JSON output:
// file metadata, package data, file contents, and secrets (with and without
// revealed values). Env vars drive cataloger configuration per case.
func TestPowerUserCmdFlags(t *testing.T) {
    secretsFixture := getFixtureImage(t, "image-secrets")
    tests := []struct {
        name       string
        args       []string
        env        map[string]string
        assertions []traitAssertion
    }{
        {
            // invoking with no source argument must fail with usage help
            name: "no-args-shows-help",
            args: []string{"power-user"},
            assertions: []traitAssertion{
                assertInOutput("an image/directory argument is required"), // specific error that should be shown
                assertInOutput("Run bulk operations on container images"), // excerpt from help description
                assertFailingReturnCode,
            },
        },
        {
            // image source: file metadata + package catalogers run by default
            name: "default-results-w-pkg-coverage",
            args: []string{"power-user", "docker-archive:" + getFixtureImage(t, "image-pkg-coverage")},
            assertions: []traitAssertion{
                assertNotInOutput(" command is deprecated"),        // only the root command should be deprecated
                assertInOutput(`"type":"RegularFile"`),             // proof of file-metadata data
                assertInOutput(`"algorithm":"sha256"`),             // proof of file-metadata default digest algorithm of sha256
                assertInOutput(`"metadataType":"apk-db-entry"`),    // proof of package artifacts data
                assertSuccessfulReturnCode,
            },
        },
        {
            // SYFT_FILE_CONTENTS_GLOBS should enable base64 content capture
            name: "content-cataloger-wired-up",
            args: []string{"power-user", "docker-archive:" + secretsFixture},
            env: map[string]string{
                "SYFT_FILE_CONTENTS_GLOBS": "/api-key.txt",
            },
            assertions: []traitAssertion{
                assertInOutput(`"contents":"c29tZV9BcEkta0V5ID0gIjEyMzQ1QTdhOTAxYjM0NTY3ODkwMTIzNDU2Nzg5MDEyMzQ1Njc4OTAxMjM0NTY3ODkwMTIzNDU2Nzg5MCIK"`), // proof of the content cataloger
                assertSuccessfulReturnCode,
            },
        },
        {
            // directory source: same default catalogers as the image case
            name: "default-dir-results-w-pkg-coverage",
            args: []string{"power-user", "dir:test-fixtures/image-pkg-coverage"},
            assertions: []traitAssertion{
                assertNotInOutput(" command is deprecated"),        // only the root command should be deprecated
                assertInOutput(`"type":"RegularFile"`),             // proof of file-metadata data
                assertInOutput(`"algorithm":"sha256"`),             // proof of file-metadata default digest algorithm of sha256
                assertInOutput(`"metadataType":"apk-db-entry"`),    // proof of package artifacts data
                assertSuccessfulReturnCode,
            },
        },
        {
            // with reveal enabled, the raw secret value appears in the output
            name: "default-secrets-results-w-reveal-values",
            env: map[string]string{
                "SYFT_SECRETS_REVEAL_VALUES": "true",
            },
            args: []string{"power-user", "docker-archive:" + secretsFixture},
            assertions: []traitAssertion{
                assertInOutput(`"classification":"generic-api-key"`),                              // proof of the secrets cataloger finding something
                assertInOutput(`"12345A7a901b345678901234567890123456789012345678901234567890"`), // proof of the secrets cataloger finding the api key
                assertSuccessfulReturnCode,
            },
        },
        {
            // without reveal, the classification shows but the value must not leak
            name: "default-secret-results-dont-reveal-values",
            args: []string{"power-user", "docker-archive:" + secretsFixture},
            assertions: []traitAssertion{
                assertInOutput(`"classification":"generic-api-key"`),                                 // proof of the secrets cataloger finding something
                assertNotInOutput(`"12345A7a901b345678901234567890123456789012345678901234567890"`), // proof of the secrets cataloger finding the api key
                assertSuccessfulReturnCode,
            },
        },
        {
            // reveal also works when scanning a directory source
            name: "default-secrets-dir-results-w-reveal-values",
            env: map[string]string{
                "SYFT_SECRETS_REVEAL_VALUES": "true",
            },
            args: []string{"power-user", "dir:test-fixtures/image-secrets-dir"},
            assertions: []traitAssertion{
                assertInOutput(`"classification":"generic-api-key"`),                              // proof of the secrets cataloger finding something
                assertInOutput(`"12345A7a901b345678901234567890123456789012345678901234567890"`), // proof of the secrets cataloger finding the api key
                assertSuccessfulReturnCode,
            },
        },
    }
    for _, test := range tests {
        t.Run(test.name, func(t *testing.T) {
            cmd, stdout, stderr := runSyftSafe(t, test.env, test.args...)
            // each trait assertion inspects stdout/stderr/exit-code independently
            for _, traitFn := range test.assertions {
                traitFn(t, stdout, stderr, cmd.ProcessState.ExitCode())
            }
            logOutputOnFailure(t, cmd, stdout, stderr)
        })
    }
}