improve doc comments

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>
Alex Goodman 2020-11-17 08:25:01 -05:00
parent 62b03f3a91
commit c892c3609e
46 changed files with 191 additions and 83 deletions

View File

@ -4,6 +4,8 @@ import (
"fmt" "fmt"
"os" "os"
"github.com/gookit/color"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/anchore/syft/syft/presenter" "github.com/anchore/syft/syft/presenter"
@ -11,7 +13,6 @@ import (
"github.com/anchore/stereoscope" "github.com/anchore/stereoscope"
"github.com/anchore/syft/internal/config" "github.com/anchore/syft/internal/config"
"github.com/anchore/syft/internal/format"
"github.com/anchore/syft/internal/log" "github.com/anchore/syft/internal/log"
"github.com/anchore/syft/internal/logger" "github.com/anchore/syft/internal/logger"
"github.com/anchore/syft/syft" "github.com/anchore/syft/syft"
@ -111,7 +112,7 @@ func logAppConfig() {
if err != nil { if err != nil {
log.Debugf("Could not display application config: %+v", err) log.Debugf("Could not display application config: %+v", err)
} else { } else {
log.Debugf("Application config:\n%+v", format.Magenta.Format(string(appCfgStr))) log.Debugf("Application config:\n%+v", color.Magenta.Sprint(appCfgStr))
} }
} }

View File

@ -1,4 +1,4 @@
package internal package internal
// note: do not change this // ApplicationName is the non-capitalized name of the application (do not change this)
const ApplicationName = "syft" const ApplicationName = "syft"

internal/docs.go (new file, +4 lines)
View File

@ -0,0 +1,4 @@
/*
Package internal contains miscellaneous functions and objects that are useful within syft but should not be used externally.
*/
package internal

View File

@ -1,5 +1,6 @@
package file package file
// GlobMatch evaluates the given glob pattern against the given "name" string, indicating if there is a match or not.
// Source: https://research.swtch.com/glob.go // Source: https://research.swtch.com/glob.go
func GlobMatch(pattern, name string) bool { func GlobMatch(pattern, name string) bool {
px := 0 px := 0
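For illustration, a minimal sketch of how GlobMatch might be called (inputs are hypothetical; since this is an internal package, it only compiles from within the syft module):

    package main

    import (
        "fmt"

        "github.com/anchore/syft/internal/file"
    )

    func main() {
        // "*" matches any run of characters, "?" matches exactly one
        fmt.Println(file.GlobMatch("/etc/*.conf", "/etc/resolv.conf")) // true
        fmt.Println(file.GlobMatch("/etc/?osts", "/etc/hosts"))        // true
        fmt.Println(file.GlobMatch("/etc/*.conf", "/etc/hosts"))       // false
    }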

View File

@ -5,10 +5,12 @@ import (
"os" "os"
) )
// Opener is an object that stores a path to later be opened as a file.
type Opener struct { type Opener struct {
path string path string
} }
// Open the stored path as an io.ReadCloser.
func (o Opener) Open() (io.ReadCloser, error) { func (o Opener) Open() (io.ReadCloser, error) {
return os.Open(o.path) return os.Open(o.path)
} }

View File

@ -12,16 +12,20 @@ import (
"github.com/anchore/syft/internal/log" "github.com/anchore/syft/internal/log"
) )
// ZipFileManifest is a collection of paths and their file metadata.
type ZipFileManifest map[string]os.FileInfo type ZipFileManifest map[string]os.FileInfo
// newZipManifest creates an empty ZipFileManifest.
func newZipManifest() ZipFileManifest { func newZipManifest() ZipFileManifest {
return make(ZipFileManifest) return make(ZipFileManifest)
} }
// Add a new path and its file metadata to the collection.
func (z ZipFileManifest) Add(entry string, info os.FileInfo) { func (z ZipFileManifest) Add(entry string, info os.FileInfo) {
z[entry] = info z[entry] = info
} }
// GlobMatch returns the path keys that match the given glob pattern(s).
func (z ZipFileManifest) GlobMatch(patterns ...string) []string { func (z ZipFileManifest) GlobMatch(patterns ...string) []string {
uniqueMatches := internal.NewStringSet() uniqueMatches := internal.NewStringSet()
@ -43,6 +47,7 @@ func (z ZipFileManifest) GlobMatch(patterns ...string) []string {
return results return results
} }
// NewZipFileManifest creates and returns a new ZipFileManifest populated with the paths and metadata from the given zip archive.
func NewZipFileManifest(archivePath string) (ZipFileManifest, error) { func NewZipFileManifest(archivePath string) (ZipFileManifest, error) {
zipReader, err := zip.OpenReader(archivePath) zipReader, err := zip.OpenReader(archivePath)
manifest := newZipManifest() manifest := newZipManifest()
@ -62,6 +67,7 @@ func NewZipFileManifest(archivePath string) (ZipFileManifest, error) {
return manifest, nil return manifest, nil
} }
// normalizeZipEntryName takes the given path entry and ensures it is prefixed with "/".
func normalizeZipEntryName(entry string) string { func normalizeZipEntryName(entry string) string {
if !strings.HasPrefix(entry, "/") { if !strings.HasPrefix(entry, "/") {
return "/" + entry return "/" + entry

View File

@ -15,6 +15,7 @@ import (
) )
const ( const (
// binary byte-size units (KB, MB, ...)
_ = iota _ = iota
KB = 1 << (10 * iota) KB = 1 << (10 * iota)
MB MB
@ -33,6 +34,7 @@ func newZipTraverseRequest(paths ...string) zipTraversalRequest {
return results return results
} }
// TraverseFilesInZip enumerates all paths stored within a zip archive using the visitor pattern.
func TraverseFilesInZip(archivePath string, visitor func(*zip.File) error, paths ...string) error { func TraverseFilesInZip(archivePath string, visitor func(*zip.File) error, paths ...string) error {
request := newZipTraverseRequest(paths...) request := newZipTraverseRequest(paths...)
@ -63,6 +65,7 @@ func TraverseFilesInZip(archivePath string, visitor func(*zip.File) error, paths
return nil return nil
} }
// ExtractFromZipToUniqueTempFile extracts select paths from the given archive to a temporary directory, returning a file opener for each file extracted.
func ExtractFromZipToUniqueTempFile(archivePath, dir string, paths ...string) (map[string]Opener, error) { func ExtractFromZipToUniqueTempFile(archivePath, dir string, paths ...string) (map[string]Opener, error) {
results := make(map[string]Opener) results := make(map[string]Opener)
@ -121,6 +124,7 @@ func ExtractFromZipToUniqueTempFile(archivePath, dir string, paths ...string) (m
return results, TraverseFilesInZip(archivePath, visitor, paths...) return results, TraverseFilesInZip(archivePath, visitor, paths...)
} }
// ContentsFromZip extracts select paths from the given archive and returns the string contents for each path.
func ContentsFromZip(archivePath string, paths ...string) (map[string]string, error) { func ContentsFromZip(archivePath string, paths ...string) (map[string]string, error) {
results := make(map[string]string) results := make(map[string]string)
@ -162,6 +166,7 @@ func ContentsFromZip(archivePath string, paths ...string) (map[string]string, er
return results, TraverseFilesInZip(archivePath, visitor, paths...) return results, TraverseFilesInZip(archivePath, visitor, paths...)
} }
// UnzipToDir extracts a zip archive to a target directory.
func UnzipToDir(archivePath, targetDir string) error { func UnzipToDir(archivePath, targetDir string) error {
visitor := func(file *zip.File) error { visitor := func(file *zip.File) error {
// the zip-slip attack protection is still being erroneously detected // the zip-slip attack protection is still being erroneously detected
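Two hedged sketches of how these helpers might be used (archive path and entry names are hypothetical; assumes archive/zip, fmt, and the internal file package are imported). TraverseFilesInZip drives a caller-supplied visitor; ContentsFromZip is a convenience wrapper that collects string contents for selected entries.

    // visit a selected entry and report its metadata
    err := file.TraverseFilesInZip("/tmp/example.zip", func(f *zip.File) error {
        fmt.Printf("%s (%d bytes)\n", f.Name, f.UncompressedSize64)
        return nil
    }, "META-INF/MANIFEST.MF")
    if err != nil {
        return err
    }

    // or pull back the contents of a couple of specific entries directly
    contents, err := file.ContentsFromZip("/tmp/example.zip", "META-INF/MANIFEST.MF", "config.yml")
    if err != nil {
        return err
    }
    fmt.Println(len(contents["META-INF/MANIFEST.MF"]))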

View File

@ -1,21 +0,0 @@
package format
import "fmt"
const (
DefaultColor Color = iota + 30
Red
Green
Yellow
Blue
Magenta
Cyan
White
)
type Color uint8
// TODO: not cross platform (windows...)
func (c Color) Format(s string) string {
return fmt.Sprintf("\x1b[%dm%s\x1b[0m", c, s)
}
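The removed format package wrote raw ANSI escapes directly (hence the Windows TODO); the call site in cmd now goes through gookit/color instead, as seen in the logAppConfig hunk above. A minimal sketch of the equivalent with the new dependency (the message text is illustrative):

    package main

    import (
        "fmt"

        "github.com/gookit/color"
    )

    func main() {
        // roughly equivalent to the removed format.Magenta.Format(...), but cross-platform
        fmt.Println(color.Magenta.Sprint("Application config loaded"))
    }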

View File

@ -1,37 +1,49 @@
/*
Package log contains the singleton object and helper functions for facilitating logging within the syft library.
*/
package log package log
import "github.com/anchore/syft/syft/logger" import "github.com/anchore/syft/syft/logger"
// Log is the singleton used to facilitate logging internally within syft
var Log logger.Logger = &nopLogger{} var Log logger.Logger = &nopLogger{}
// Errorf takes a formatted template string and template arguments for the error logging level.
func Errorf(format string, args ...interface{}) { func Errorf(format string, args ...interface{}) {
Log.Errorf(format, args...) Log.Errorf(format, args...)
} }
// Error logs the given arguments at the error logging level.
func Error(args ...interface{}) { func Error(args ...interface{}) {
Log.Error(args...) Log.Error(args...)
} }
// Warnf takes a formatted template string and template arguments for the warning logging level.
func Warnf(format string, args ...interface{}) { func Warnf(format string, args ...interface{}) {
Log.Warnf(format, args...) Log.Warnf(format, args...)
} }
// Warn logs the given arguments at the warning logging level.
func Warn(args ...interface{}) { func Warn(args ...interface{}) {
Log.Warn(args...) Log.Warn(args...)
} }
// Infof takes a formatted template string and template arguments for the info logging level.
func Infof(format string, args ...interface{}) { func Infof(format string, args ...interface{}) {
Log.Infof(format, args...) Log.Infof(format, args...)
} }
// Info logs the given arguments at the info logging level.
func Info(args ...interface{}) { func Info(args ...interface{}) {
Log.Info(args...) Log.Info(args...)
} }
// Debugf takes a formatted template string and template arguments for the debug logging level.
func Debugf(format string, args ...interface{}) { func Debugf(format string, args ...interface{}) {
Log.Debugf(format, args...) Log.Debugf(format, args...)
} }
// Debug logs the given arguments at the debug logging level.
func Debug(args ...interface{}) { func Debug(args ...interface{}) {
Log.Debug(args...) Log.Debug(args...)
} }
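A hedged sketch of how the singleton is expected to be wired (assumes github.com/anchore/syft/internal/log and github.com/anchore/syft/internal/logger are imported; config fields beyond EnableConsole are omitted): an application entry point assigns a concrete logger once, and library code logs through the package-level helpers.

    // during application startup (e.g. in the CLI layer):
    log.Log = logger.NewLogrusLogger(logger.LogrusConfig{EnableConsole: true})

    // anywhere inside the library afterwards:
    log.Debugf("discovered %d packages", 42)
    log.Warn("no distro detected")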

internal/logger/doc.go (new file, +4 lines)
View File

@ -0,0 +1,4 @@
/*
Package logger contains implementations for the syft.logger.Logger interface.
*/
package logger

View File

@ -10,6 +10,7 @@ import (
prefixed "github.com/x-cray/logrus-prefixed-formatter" prefixed "github.com/x-cray/logrus-prefixed-formatter"
) )
// LogrusConfig contains all configurable values for the Logrus logger
type LogrusConfig struct { type LogrusConfig struct {
EnableConsole bool EnableConsole bool
EnableFile bool EnableFile bool
@ -18,16 +19,19 @@ type LogrusConfig struct {
FileLocation string FileLocation string
} }
// LogrusLogger contains all runtime values for using Logrus with the configured output target and input configuration values.
type LogrusLogger struct { type LogrusLogger struct {
Config LogrusConfig Config LogrusConfig
Logger *logrus.Logger Logger *logrus.Logger
Output io.Writer Output io.Writer
} }
// LogrusNestedLogger is a wrapper for Logrus to enable nested logging configuration (loggers that always attach key-value pairs to all log entries)
type LogrusNestedLogger struct { type LogrusNestedLogger struct {
Logger *logrus.Entry Logger *logrus.Entry
} }
// NewLogrusLogger creates a new LogrusLogger with the given configuration
func NewLogrusLogger(cfg LogrusConfig) *LogrusLogger { func NewLogrusLogger(cfg LogrusConfig) *LogrusLogger {
appLogger := logrus.New() appLogger := logrus.New()
@ -76,66 +80,82 @@ func NewLogrusLogger(cfg LogrusConfig) *LogrusLogger {
} }
} }
// Debugf takes a formatted template string and template arguments for the debug logging level.
func (l *LogrusLogger) Debugf(format string, args ...interface{}) { func (l *LogrusLogger) Debugf(format string, args ...interface{}) {
l.Logger.Debugf(format, args...) l.Logger.Debugf(format, args...)
} }
// Infof takes a formatted template string and template arguments for the info logging level.
func (l *LogrusLogger) Infof(format string, args ...interface{}) { func (l *LogrusLogger) Infof(format string, args ...interface{}) {
l.Logger.Infof(format, args...) l.Logger.Infof(format, args...)
} }
// Warnf takes a formatted template string and template arguments for the warning logging level.
func (l *LogrusLogger) Warnf(format string, args ...interface{}) { func (l *LogrusLogger) Warnf(format string, args ...interface{}) {
l.Logger.Warnf(format, args...) l.Logger.Warnf(format, args...)
} }
// Errorf takes a formatted template string and template arguments for the error logging level.
func (l *LogrusLogger) Errorf(format string, args ...interface{}) { func (l *LogrusLogger) Errorf(format string, args ...interface{}) {
l.Logger.Errorf(format, args...) l.Logger.Errorf(format, args...)
} }
// Debug logs the given arguments at the debug logging level.
func (l *LogrusLogger) Debug(args ...interface{}) { func (l *LogrusLogger) Debug(args ...interface{}) {
l.Logger.Debug(args...) l.Logger.Debug(args...)
} }
// Info logs the given arguments at the info logging level.
func (l *LogrusLogger) Info(args ...interface{}) { func (l *LogrusLogger) Info(args ...interface{}) {
l.Logger.Info(args...) l.Logger.Info(args...)
} }
// Warn logs the given arguments at the warning logging level.
func (l *LogrusLogger) Warn(args ...interface{}) { func (l *LogrusLogger) Warn(args ...interface{}) {
l.Logger.Warn(args...) l.Logger.Warn(args...)
} }
// Error logs the given arguments at the error logging level.
func (l *LogrusLogger) Error(args ...interface{}) { func (l *LogrusLogger) Error(args ...interface{}) {
l.Logger.Error(args...) l.Logger.Error(args...)
} }
// Debugf takes a formatted template string and template arguments for the debug logging level.
func (l *LogrusNestedLogger) Debugf(format string, args ...interface{}) { func (l *LogrusNestedLogger) Debugf(format string, args ...interface{}) {
l.Logger.Debugf(format, args...) l.Logger.Debugf(format, args...)
} }
// Infof takes a formatted template string and template arguments for the info logging level.
func (l *LogrusNestedLogger) Infof(format string, args ...interface{}) { func (l *LogrusNestedLogger) Infof(format string, args ...interface{}) {
l.Logger.Infof(format, args...) l.Logger.Infof(format, args...)
} }
// Warnf takes a formatted template string and template arguments for the warning logging level.
func (l *LogrusNestedLogger) Warnf(format string, args ...interface{}) { func (l *LogrusNestedLogger) Warnf(format string, args ...interface{}) {
l.Logger.Warnf(format, args...) l.Logger.Warnf(format, args...)
} }
// Errorf takes a formatted template string and template arguments for the error logging level.
func (l *LogrusNestedLogger) Errorf(format string, args ...interface{}) { func (l *LogrusNestedLogger) Errorf(format string, args ...interface{}) {
l.Logger.Errorf(format, args...) l.Logger.Errorf(format, args...)
} }
// Debug logs the given arguments at the debug logging level.
func (l *LogrusNestedLogger) Debug(args ...interface{}) { func (l *LogrusNestedLogger) Debug(args ...interface{}) {
l.Logger.Debug(args...) l.Logger.Debug(args...)
} }
// Info logs the given arguments at the info logging level.
func (l *LogrusNestedLogger) Info(args ...interface{}) { func (l *LogrusNestedLogger) Info(args ...interface{}) {
l.Logger.Info(args...) l.Logger.Info(args...)
} }
// Warn logs the given arguments at the warning logging level.
func (l *LogrusNestedLogger) Warn(args ...interface{}) { func (l *LogrusNestedLogger) Warn(args ...interface{}) {
l.Logger.Warn(args...) l.Logger.Warn(args...)
} }
// Error logs the given arguments at the error logging level.
func (l *LogrusNestedLogger) Error(args ...interface{}) { func (l *LogrusNestedLogger) Error(args ...interface{}) {
l.Logger.Error(args...) l.Logger.Error(args...)
} }
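A small sketch of the nested case (assumes github.com/sirupsen/logrus and the internal logger package are imported; the "component" field is a hypothetical example): wrapping a logrus.Entry means every message carries the same key-value pairs.

    base := logrus.New()
    nested := logger.LogrusNestedLogger{
        Logger: base.WithFields(logrus.Fields{"component": "presenter"}),
    }
    // each of these entries is emitted with component=presenter attached
    nested.Infof("wrote %d packages", 12)
    nested.Warn("no source metadata available")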

View File

@ -1,11 +1,16 @@
package internal package internal
import "sort"
// StringSet represents a set of string types.
type StringSet map[string]struct{} type StringSet map[string]struct{}
// NewStringSet creates a new empty StringSet.
func NewStringSet() StringSet { func NewStringSet() StringSet {
return make(StringSet) return make(StringSet)
} }
// NewStringSetFromSlice creates a StringSet populated with values from the given slice.
func NewStringSetFromSlice(start []string) StringSet { func NewStringSetFromSlice(start []string) StringSet {
ret := make(StringSet) ret := make(StringSet)
for _, s := range start { for _, s := range start {
@ -14,19 +19,23 @@ func NewStringSetFromSlice(start []string) StringSet {
return ret return ret
} }
// Add a string to the set.
func (s StringSet) Add(i string) { func (s StringSet) Add(i string) {
s[i] = struct{}{} s[i] = struct{}{}
} }
// Remove a string from the set.
func (s StringSet) Remove(i string) { func (s StringSet) Remove(i string) {
delete(s, i) delete(s, i)
} }
// Contains indicates if the given string is contained within the set.
func (s StringSet) Contains(i string) bool { func (s StringSet) Contains(i string) bool {
_, ok := s[i] _, ok := s[i]
return ok return ok
} }
// ToSlice returns a sorted slice of strings that are contained within the set.
func (s StringSet) ToSlice() []string { func (s StringSet) ToSlice() []string {
ret := make([]string, len(s)) ret := make([]string, len(s))
idx := 0 idx := 0
@ -34,5 +43,6 @@ func (s StringSet) ToSlice() []string {
ret[idx] = v ret[idx] = v
idx++ idx++
} }
sort.Strings(ret)
return ret return ret
} }
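A quick sketch of the set behavior, including the newly sorted ToSlice (assumes fmt and the internal package are imported; usable only from within the syft module):

    set := internal.NewStringSet()
    set.Add("b")
    set.Add("a")
    set.Add("a") // duplicate entries collapse
    fmt.Println(set.Contains("a")) // true
    fmt.Println(set.ToSlice())     // [a b] -- deterministic now that sort.Strings is applied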

View File

@ -1,3 +1,6 @@
/*
Package version contains all build time metadata (version, build time, git commit, etc).
*/
package version package version
import ( import (

View File

@ -18,6 +18,7 @@ var latestAppVersionURL = struct {
path: fmt.Sprintf("/%s/releases/latest/VERSION", internal.ApplicationName), path: fmt.Sprintf("/%s/releases/latest/VERSION", internal.ApplicationName),
} }
// IsUpdateAvailable indicates if there is a newer application version available, and if so, what the new version is.
func IsUpdateAvailable() (bool, string, error) { func IsUpdateAvailable() (bool, string, error) {
currentVersionStr := FromBuild().Version currentVersionStr := FromBuild().Version
currentVersion, err := hashiVersion.NewVersion(currentVersionStr) currentVersion, err := hashiVersion.NewVersion(currentVersionStr)
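A hedged sketch of how a caller might consume this check (the log calls are illustrative; assumes the internal, internal/log, and internal/version packages are imported):

    isAvailable, newVersion, err := version.IsUpdateAvailable()
    if err != nil {
        log.Errorf("unable to check for application update: %+v", err)
    } else if isAvailable {
        log.Infof("a newer version of %s is available: %s", internal.ApplicationName, newVersion)
    }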

View File

@ -142,12 +142,12 @@ func TestPythonPackageWheelCataloger(t *testing.T) {
AuthorEmail: "me@kennethreitz.org", AuthorEmail: "me@kennethreitz.org",
SitePackagesRootPath: "test-fixtures", SitePackagesRootPath: "test-fixtures",
Files: []pkg.PythonFileRecord{ Files: []pkg.PythonFileRecord{
{Path: "requests-2.22.0.dist-info/INSTALLER", Digest: &pkg.Digest{"sha256", "zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg"}, Size: "4"}, {Path: "requests-2.22.0.dist-info/INSTALLER", Digest: &pkg.PythonFileDigest{"sha256", "zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg"}, Size: "4"},
{Path: "requests/__init__.py", Digest: &pkg.Digest{"sha256", "PnKCgjcTq44LaAMzB-7--B2FdewRrE8F_vjZeaG9NhA"}, Size: "3921"}, {Path: "requests/__init__.py", Digest: &pkg.PythonFileDigest{"sha256", "PnKCgjcTq44LaAMzB-7--B2FdewRrE8F_vjZeaG9NhA"}, Size: "3921"},
{Path: "requests/__pycache__/__version__.cpython-38.pyc"}, {Path: "requests/__pycache__/__version__.cpython-38.pyc"},
{Path: "requests/__pycache__/utils.cpython-38.pyc"}, {Path: "requests/__pycache__/utils.cpython-38.pyc"},
{Path: "requests/__version__.py", Digest: &pkg.Digest{"sha256", "Bm-GFstQaFezsFlnmEMrJDe8JNROz9n2XXYtODdvjjc"}, Size: "436"}, {Path: "requests/__version__.py", Digest: &pkg.PythonFileDigest{"sha256", "Bm-GFstQaFezsFlnmEMrJDe8JNROz9n2XXYtODdvjjc"}, Size: "436"},
{Path: "requests/utils.py", Digest: &pkg.Digest{"sha256", "LtPJ1db6mJff2TJSJWKi7rBpzjPS3mSOrjC9zRhoD3A"}, Size: "30049"}, {Path: "requests/utils.py", Digest: &pkg.PythonFileDigest{"sha256", "LtPJ1db6mJff2TJSJWKi7rBpzjPS3mSOrjC9zRhoD3A"}, Size: "30049"},
}, },
TopLevelPackages: []string{"requests"}, TopLevelPackages: []string{"requests"},
}, },
@ -174,11 +174,11 @@ func TestPythonPackageWheelCataloger(t *testing.T) {
AuthorEmail: "georg@python.org", AuthorEmail: "georg@python.org",
SitePackagesRootPath: "test-fixtures", SitePackagesRootPath: "test-fixtures",
Files: []pkg.PythonFileRecord{ Files: []pkg.PythonFileRecord{
{Path: "../../../bin/pygmentize", Digest: &pkg.Digest{"sha256", "dDhv_U2jiCpmFQwIRHpFRLAHUO4R1jIJPEvT_QYTFp8"}, Size: "220"}, {Path: "../../../bin/pygmentize", Digest: &pkg.PythonFileDigest{"sha256", "dDhv_U2jiCpmFQwIRHpFRLAHUO4R1jIJPEvT_QYTFp8"}, Size: "220"},
{Path: "Pygments-2.6.1.dist-info/AUTHORS", Digest: &pkg.Digest{"sha256", "PVpa2_Oku6BGuiUvutvuPnWGpzxqFy2I8-NIrqCvqUY"}, Size: "8449"}, {Path: "Pygments-2.6.1.dist-info/AUTHORS", Digest: &pkg.PythonFileDigest{"sha256", "PVpa2_Oku6BGuiUvutvuPnWGpzxqFy2I8-NIrqCvqUY"}, Size: "8449"},
{Path: "Pygments-2.6.1.dist-info/RECORD"}, {Path: "Pygments-2.6.1.dist-info/RECORD"},
{Path: "pygments/__pycache__/__init__.cpython-38.pyc"}, {Path: "pygments/__pycache__/__init__.cpython-38.pyc"},
{Path: "pygments/util.py", Digest: &pkg.Digest{"sha256", "586xXHiJGGZxqk5PMBu3vBhE68DLuAe5MBARWrSPGxA"}, Size: "10778"}, {Path: "pygments/util.py", Digest: &pkg.PythonFileDigest{"sha256", "586xXHiJGGZxqk5PMBu3vBhE68DLuAe5MBARWrSPGxA"}, Size: "10778"},
}, },
TopLevelPackages: []string{"pygments", "something_else"}, TopLevelPackages: []string{"pygments", "something_else"},
}, },

View File

@ -44,7 +44,7 @@ func parseWheelOrEggRecord(reader io.Reader) ([]pkg.PythonFileRecord, error) {
return nil, fmt.Errorf("unexpected python record digest: %q", item) return nil, fmt.Errorf("unexpected python record digest: %q", item)
} }
record.Digest = &pkg.Digest{ record.Digest = &pkg.PythonFileDigest{
Algorithm: fields[0], Algorithm: fields[0],
Value: fields[1], Value: fields[1],
} }

View File

@ -16,22 +16,22 @@ func TestParseWheelEggRecord(t *testing.T) {
{ {
Fixture: "test-fixtures/egg-info/RECORD", Fixture: "test-fixtures/egg-info/RECORD",
ExpectedMetadata: []pkg.PythonFileRecord{ ExpectedMetadata: []pkg.PythonFileRecord{
{Path: "requests-2.22.0.dist-info/INSTALLER", Digest: &pkg.Digest{"sha256", "zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg"}, Size: "4"}, {Path: "requests-2.22.0.dist-info/INSTALLER", Digest: &pkg.PythonFileDigest{"sha256", "zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg"}, Size: "4"},
{Path: "requests/__init__.py", Digest: &pkg.Digest{"sha256", "PnKCgjcTq44LaAMzB-7--B2FdewRrE8F_vjZeaG9NhA"}, Size: "3921"}, {Path: "requests/__init__.py", Digest: &pkg.PythonFileDigest{"sha256", "PnKCgjcTq44LaAMzB-7--B2FdewRrE8F_vjZeaG9NhA"}, Size: "3921"},
{Path: "requests/__pycache__/__version__.cpython-38.pyc"}, {Path: "requests/__pycache__/__version__.cpython-38.pyc"},
{Path: "requests/__pycache__/utils.cpython-38.pyc"}, {Path: "requests/__pycache__/utils.cpython-38.pyc"},
{Path: "requests/__version__.py", Digest: &pkg.Digest{"sha256", "Bm-GFstQaFezsFlnmEMrJDe8JNROz9n2XXYtODdvjjc"}, Size: "436"}, {Path: "requests/__version__.py", Digest: &pkg.PythonFileDigest{"sha256", "Bm-GFstQaFezsFlnmEMrJDe8JNROz9n2XXYtODdvjjc"}, Size: "436"},
{Path: "requests/utils.py", Digest: &pkg.Digest{"sha256", "LtPJ1db6mJff2TJSJWKi7rBpzjPS3mSOrjC9zRhoD3A"}, Size: "30049"}, {Path: "requests/utils.py", Digest: &pkg.PythonFileDigest{"sha256", "LtPJ1db6mJff2TJSJWKi7rBpzjPS3mSOrjC9zRhoD3A"}, Size: "30049"},
}, },
}, },
{ {
Fixture: "test-fixtures/dist-info/RECORD", Fixture: "test-fixtures/dist-info/RECORD",
ExpectedMetadata: []pkg.PythonFileRecord{ ExpectedMetadata: []pkg.PythonFileRecord{
{Path: "../../../bin/pygmentize", Digest: &pkg.Digest{"sha256", "dDhv_U2jiCpmFQwIRHpFRLAHUO4R1jIJPEvT_QYTFp8"}, Size: "220"}, {Path: "../../../bin/pygmentize", Digest: &pkg.PythonFileDigest{"sha256", "dDhv_U2jiCpmFQwIRHpFRLAHUO4R1jIJPEvT_QYTFp8"}, Size: "220"},
{Path: "Pygments-2.6.1.dist-info/AUTHORS", Digest: &pkg.Digest{"sha256", "PVpa2_Oku6BGuiUvutvuPnWGpzxqFy2I8-NIrqCvqUY"}, Size: "8449"}, {Path: "Pygments-2.6.1.dist-info/AUTHORS", Digest: &pkg.PythonFileDigest{"sha256", "PVpa2_Oku6BGuiUvutvuPnWGpzxqFy2I8-NIrqCvqUY"}, Size: "8449"},
{Path: "Pygments-2.6.1.dist-info/RECORD"}, {Path: "Pygments-2.6.1.dist-info/RECORD"},
{Path: "pygments/__pycache__/__init__.cpython-38.pyc"}, {Path: "pygments/__pycache__/__init__.cpython-38.pyc"},
{Path: "pygments/util.py", Digest: &pkg.Digest{"sha256", "586xXHiJGGZxqk5PMBu3vBhE68DLuAe5MBARWrSPGxA"}, Size: "10778"}, {Path: "pygments/util.py", Digest: &pkg.PythonFileDigest{"sha256", "586xXHiJGGZxqk5PMBu3vBhE68DLuAe5MBARWrSPGxA"}, Size: "10778"},
}, },
}, },
} }

View File

@ -51,9 +51,8 @@ func parseRpmDB(resolver source.FileResolver, dbLocation source.Location, reader
} }
p := pkg.Package{ p := pkg.Package{
Name: entry.Name, Name: entry.Name,
Version: fmt.Sprintf("%s-%s", entry.Version, entry.Release), // this is what engine does Version: fmt.Sprintf("%s-%s", entry.Version, entry.Release), // this is what engine does, instead of fmt.Sprintf("%d:%s-%s.%s", entry.Epoch, entry.Version, entry.Release, entry.Arch)
//Version: fmt.Sprintf("%d:%s-%s.%s", entry.Epoch, entry.Version, entry.Release, entry.Arch),
Locations: []source.Location{dbLocation}, Locations: []source.Location{dbLocation},
FoundBy: catalogerName, FoundBy: catalogerName,
Type: pkg.RpmPkg, Type: pkg.RpmPkg,

View File

@ -6,6 +6,7 @@ import (
hashiVer "github.com/hashicorp/go-version" hashiVer "github.com/hashicorp/go-version"
) )
// Distro represents a Linux Distribution.
type Distro struct { type Distro struct {
Type Type Type Type
Version *hashiVer.Version Version *hashiVer.Version
@ -20,6 +21,7 @@ func NewUnknownDistro() Distro {
} }
} }
// NewDistro creates a new Distro object populated with the given values.
func NewDistro(t Type, ver, like string) (Distro, error) { func NewDistro(t Type, ver, like string) (Distro, error) {
if ver == "" { if ver == "" {
return Distro{Type: t}, nil return Distro{Type: t}, nil
@ -36,6 +38,12 @@ func NewDistro(t Type, ver, like string) (Distro, error) {
}, nil }, nil
} }
// Name returns the string representation of the distro
func (d Distro) Name() string {
return string(d.Type)
}
// MajorVersion returns the major version value from the pseudo-semantically versioned distro version value.
func (d Distro) MajorVersion() string { func (d Distro) MajorVersion() string {
if d.Version == nil { if d.Version == nil {
return fmt.Sprint("(version unknown)") return fmt.Sprint("(version unknown)")
@ -43,10 +51,12 @@ func (d Distro) MajorVersion() string {
return fmt.Sprintf("%d", d.Version.Segments()[0]) return fmt.Sprintf("%d", d.Version.Segments()[0])
} }
// FullVersion returns the original user version value.
func (d Distro) FullVersion() string { func (d Distro) FullVersion() string {
return d.RawVersion return d.RawVersion
} }
// String returns a human-friendly representation of the Linux distribution.
func (d Distro) String() string { func (d Distro) String() string {
versionStr := "(version unknown)" versionStr := "(version unknown)"
if d.RawVersion != "" { if d.RawVersion != "" {
@ -54,8 +64,3 @@ func (d Distro) String() string {
} }
return fmt.Sprintf("%s %s", d.Type, versionStr) return fmt.Sprintf("%s %s", d.Type, versionStr)
} }
// Name provides a string repr of the distro
func (d Distro) Name() string {
return string(d.Type)
}
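A minimal sketch of the Distro API touched here (values and printed results are hypothetical/illustrative; assumes fmt and the distro package are imported):

    d, err := distro.NewDistro(distro.Ubuntu, "20.04", "debian")
    if err != nil {
        return err
    }
    fmt.Println(d.Name())         // "ubuntu"
    fmt.Println(d.MajorVersion()) // "20"
    fmt.Println(d.FullVersion())  // "20.04"
    fmt.Println(d.String())       // "ubuntu 20.04"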

View File

@ -1,8 +1,10 @@
package distro package distro
// Type represents the different Linux distribution options
type Type string type Type string
const ( const (
// represents the set of valid/supported Linux Distributions
UnknownDistroType Type = "UnknownDistroType" UnknownDistroType Type = "UnknownDistroType"
Debian Type = "debian" Debian Type = "debian"
Ubuntu Type = "ubuntu" Ubuntu Type = "ubuntu"
@ -17,6 +19,7 @@ const (
OpenSuseLeap Type = "opensuseleap" OpenSuseLeap Type = "opensuseleap"
) )
// All contains all Linux distribution options
var All = []Type{ var All = []Type{
Debian, Debian,
Ubuntu, Ubuntu,
@ -46,6 +49,7 @@ var IDMapping = map[string]Type{
"opensuse-leap": OpenSuseLeap, "opensuse-leap": OpenSuseLeap,
} }
// String returns the string representation of the given Linux distribution.
func (t Type) String() string { func (t Type) String() string {
return string(t) return string(t)
} }

View File

@ -7,7 +7,12 @@ package event
import "github.com/wagoodman/go-partybus" import "github.com/wagoodman/go-partybus"
const ( const (
// AppUpdateAvailable is a partybus event that occurs when an application update is available
AppUpdateAvailable partybus.EventType = "syft-app-update-available" AppUpdateAvailable partybus.EventType = "syft-app-update-available"
CatalogerStarted partybus.EventType = "syft-cataloger-started-event"
CatalogerFinished partybus.EventType = "syft-cataloger-finished-event" // CatalogerStarted is a partybus event that occurs when the package cataloging has begun
CatalogerStarted partybus.EventType = "syft-cataloger-started-event"
// CatalogerFinished is a partybus event that occurs when the package cataloging has completed
CatalogerFinished partybus.EventType = "syft-cataloger-finished-event"
) )

View File

@ -1,8 +1,9 @@
/* /*
Defines the logging interface which is used throughout the syft library. Package logger defines the logging interface which is used throughout the syft library.
*/ */
package logger package logger
// Logger represents the behavior for logging within the syft library.
type Logger interface { type Logger interface {
Errorf(format string, args ...interface{}) Errorf(format string, args ...interface{})
Error(args ...interface{}) Error(args ...interface{})

View File

@ -35,6 +35,7 @@ type ApkFileRecord struct {
Checksum string `json:"checksum,omitempty"` Checksum string `json:"checksum,omitempty"`
} }
// PackageURL returns the PURL for the specific Alpine package (see https://github.com/package-url/purl-spec)
func (m ApkMetadata) PackageURL() string { func (m ApkMetadata) PackageURL() string {
pURL := packageurl.NewPackageURL( pURL := packageurl.NewPackageURL(
// note: this is currently a candidate and not technically within spec // note: this is currently a candidate and not technically within spec

View File

@ -17,11 +17,13 @@ type DpkgMetadata struct {
Files []DpkgFileRecord `json:"files"` Files []DpkgFileRecord `json:"files"`
} }
// DpkgFileRecord represents a single file attributed to a debian package.
type DpkgFileRecord struct { type DpkgFileRecord struct {
Path string `json:"path"` Path string `json:"path"`
MD5 string `json:"md5"` MD5 string `json:"md5"`
} }
// PackageURL returns the PURL for the specific Debian package (see https://github.com/package-url/purl-spec)
func (m DpkgMetadata) PackageURL(d distro.Distro) string { func (m DpkgMetadata) PackageURL(d distro.Distro) string {
pURL := packageurl.NewPackageURL( pURL := packageurl.NewPackageURL(
// TODO: replace with `packageurl.TypeDebian` upon merge of https://github.com/package-url/packageurl-go/pull/21 // TODO: replace with `packageurl.TypeDebian` upon merge of https://github.com/package-url/packageurl-go/pull/21
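For context, a hedged sketch of what these PackageURL helpers assemble under the hood with packageurl-go (all values are hypothetical and the rendered purl comment is illustrative, not taken from syft output):

    pURL := packageurl.NewPackageURL(
        "deb",            // per the TODO above, packageurl.TypeDebian is not yet available upstream
        "ubuntu",         // namespace: the distro name
        "tzdata",         // package name
        "2020a-0ubuntu1", // package version
        nil,              // qualifiers
        "",               // subpath
    )
    fmt.Println(pURL.ToString()) // pkg:deb/ubuntu/tzdata@2020a-0ubuntu1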

View File

@ -1,5 +1,6 @@
package pkg package pkg
// GemMetadata represents all metadata parsed from the gemspec file
type GemMetadata struct { type GemMetadata struct {
Name string `mapstructure:"name" json:"name"` Name string `mapstructure:"name" json:"name"`
Version string `mapstructure:"version" json:"version"` Version string `mapstructure:"version" json:"version"`

View File

@ -26,6 +26,7 @@ type JavaManifest struct {
NamedSections map[string]map[string]string `json:"namedSections,omitempty"` NamedSections map[string]map[string]string `json:"namedSections,omitempty"`
} }
// PackageURL returns the PURL for the specific Java package (see https://github.com/package-url/purl-spec)
func (m JavaMetadata) PackageURL() string { func (m JavaMetadata) PackageURL() string {
if m.PomProperties != nil { if m.PomProperties != nil {
pURL := packageurl.NewPackageURL( pURL := packageurl.NewPackageURL(

View File

@ -1,8 +1,10 @@
package pkg package pkg
// Language represents a single programming language.
type Language string type Language string
const ( const (
// the full set of supported programming languages
UnknownLanguage Language = "UnknownLanguage" UnknownLanguage Language = "UnknownLanguage"
Java Language = "java" Java Language = "java"
JavaScript Language = "javascript" JavaScript Language = "javascript"
@ -11,6 +13,7 @@ const (
Go Language = "go" Go Language = "go"
) )
// AllLanguages is a set of all programming languages detected by syft.
var AllLanguages = []Language{ var AllLanguages = []Language{
Java, Java,
JavaScript, JavaScript,
@ -19,6 +22,7 @@ var AllLanguages = []Language{
Go, Go,
} }
// String returns the string representation of the language.
func (l Language) String() string { func (l Language) String() string {
return string(l) return string(l)
} }

View File

@ -1,14 +1,16 @@
package pkg package pkg
// MetadataType represents the data shape stored within pkg.Package.Metadata.
type MetadataType string type MetadataType string
const ( const (
// this is the full set of data shapes that can be represented within the pkg.Package.Metadata field
UnknownMetadataType MetadataType = "UnknownMetadata" UnknownMetadataType MetadataType = "UnknownMetadata"
ApkMetadataType MetadataType = "apk-metadata" ApkMetadataType MetadataType = "ApkMetadata"
DpkgMetadataType MetadataType = "dpkg-metadata" DpkgMetadataType MetadataType = "DpkgMetadata"
GemMetadataType MetadataType = "gem-metadata" GemMetadataType MetadataType = "GemMetadata"
JavaMetadataType MetadataType = "java-metadata" JavaMetadataType MetadataType = "JavaMetadata"
NpmPackageJSONMetadataType MetadataType = "npm-package-json-metadata" NpmPackageJSONMetadataType MetadataType = "NpmPackageJsonMetadata"
RpmdbMetadataType MetadataType = "rpmdb-metadata" RpmdbMetadataType MetadataType = "RpmdbMetadata"
PythonPackageMetadataType MetadataType = "python-package-metadata" PythonPackageMetadataType MetadataType = "PythonPackageMetadata"
) )
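Since these constants are what get serialized into the presenter output (see the JSON fixtures further below), consumers matching on metadataType need the new CamelCase spellings. A hedged consumer-side sketch (the artifact variable is hypothetical):

    switch artifact.MetadataType {
    case pkg.PythonPackageMetadataType: // now "PythonPackageMetadata", previously "python-package-metadata"
        // decode artifact.Metadata into pkg.PythonPackageMetadata
    case pkg.DpkgMetadataType: // now "DpkgMetadata", previously "dpkg-metadata"
        // decode artifact.Metadata into pkg.DpkgMetadata
    }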

View File

@ -14,6 +14,7 @@ import (
"github.com/package-url/packageurl-go" "github.com/package-url/packageurl-go"
) )
// ID represents a unique value for each package added to a package catalog.
type ID int64 type ID int64
// Package represents an application or library that has been bundled into a distributable format. // Package represents an application or library that has been bundled into a distributable format.

View File

@ -1,15 +1,16 @@
package pkg package pkg
type Digest struct { // PythonFileDigest represents a digest (algorithm and value) for a single file attributed to a python package.
type PythonFileDigest struct {
Algorithm string `json:"algorithm"` Algorithm string `json:"algorithm"`
Value string `json:"value"` Value string `json:"value"`
} }
// PythonFileRecord represents a single entry within a RECORD file for a python wheel or egg package // PythonFileRecord represents a single entry within a RECORD file for a python wheel or egg package
type PythonFileRecord struct { type PythonFileRecord struct {
Path string `json:"path"` Path string `json:"path"`
Digest *Digest `json:"digest,omitempty"` Digest *PythonFileDigest `json:"digest,omitempty"`
Size string `json:"size,omitempty"` Size string `json:"size,omitempty"`
} }
// PythonPackageMetadata represents all captured data for a python egg or wheel package. // PythonPackageMetadata represents all captured data for a python egg or wheel package.

View File

@ -21,6 +21,7 @@ type RpmdbMetadata struct {
Files []RpmdbFileRecord `json:"files"` Files []RpmdbFileRecord `json:"files"`
} }
// RpmdbFileRecord represents the file metadata for a single file attributed to a RPM package.
type RpmdbFileRecord struct { type RpmdbFileRecord struct {
Path string `json:"path"` Path string `json:"path"`
Mode RpmdbFileMode `json:"mode"` Mode RpmdbFileMode `json:"mode"`
@ -28,8 +29,10 @@ type RpmdbFileRecord struct {
SHA256 string `json:"sha256"` SHA256 string `json:"sha256"`
} }
// RpmdbFileMode is the raw file mode for a single file. This can be interpreted as the linux stat.h mode (see https://pubs.opengroup.org/onlinepubs/007908799/xsh/sysstat.h.html)
type RpmdbFileMode uint16 type RpmdbFileMode uint16
// PackageURL returns the PURL for the specific RPM package (see https://github.com/package-url/purl-spec)
func (m RpmdbMetadata) PackageURL(d distro.Distro) string { func (m RpmdbMetadata) PackageURL(d distro.Distro) string {
pURL := packageurl.NewPackageURL( pURL := packageurl.NewPackageURL(
packageurl.TypeRPM, packageurl.TypeRPM,

View File

@ -6,6 +6,7 @@ import "github.com/package-url/packageurl-go"
type Type string type Type string
const ( const (
// the full set of supported package types
UnknownPkg Type = "UnknownPackage" UnknownPkg Type = "UnknownPackage"
ApkPkg Type = "apk" ApkPkg Type = "apk"
GemPkg Type = "gem" GemPkg Type = "gem"
@ -18,6 +19,7 @@ const (
GoModulePkg Type = "go-module" GoModulePkg Type = "go-module"
) )
// AllPkgs represents all supported package types
var AllPkgs = []Type{ var AllPkgs = []Type{
ApkPkg, ApkPkg,
GemPkg, GemPkg,
@ -30,6 +32,7 @@ var AllPkgs = []Type{
GoModulePkg, GoModulePkg,
} }
// PackageURLType returns the PURL package type for the current package.
func (t Type) PackageURLType() string { func (t Type) PackageURLType() string {
switch t { switch t {
case ApkPkg: case ApkPkg:

View File

@ -34,7 +34,6 @@ type artifactMetadataUnpacker struct {
} }
func NewArtifact(p *pkg.Package) (Artifact, error) { func NewArtifact(p *pkg.Package) (Artifact, error) {
return Artifact{ return Artifact{
artifactBasicMetadata: artifactBasicMetadata{ artifactBasicMetadata: artifactBasicMetadata{
Name: p.Name, Name: p.Name,
@ -67,6 +66,7 @@ func (a Artifact) ToPackage() pkg.Package {
} }
} }
// nolint:funlen
func (a *Artifact) UnmarshalJSON(b []byte) error { func (a *Artifact) UnmarshalJSON(b []byte) error {
var basic artifactBasicMetadata var basic artifactBasicMetadata
if err := json.Unmarshal(b, &basic); err != nil { if err := json.Unmarshal(b, &basic); err != nil {
@ -128,7 +128,6 @@ func (a *Artifact) UnmarshalJSON(b []byte) error {
// there may be packages with no metadata, which is OK // there may be packages with no metadata, which is OK
default: default:
return fmt.Errorf("unsupported package metadata type: %+v", a.MetadataType) return fmt.Errorf("unsupported package metadata type: %+v", a.MetadataType)
} }
return nil return nil

View File

@ -55,7 +55,6 @@ func (s *Source) UnmarshalJSON(b []byte) error {
s.Target = payload s.Target = payload
default: default:
return fmt.Errorf("unsupported package metadata type: %+v", s.Type) return fmt.Errorf("unsupported package metadata type: %+v", s.Type)
} }
return nil return nil

View File

@ -14,7 +14,7 @@
"MIT" "MIT"
], ],
"language": "python", "language": "python",
"metadataType": "python-package-metadata", "metadataType": "PythonPackageMetadata",
"metadata": { "metadata": {
"name": "package-1", "name": "package-1",
"version": "1.0.1", "version": "1.0.1",
@ -37,7 +37,7 @@
], ],
"licenses": null, "licenses": null,
"language": "", "language": "",
"metadataType": "dpkg-metadata", "metadataType": "DpkgMetadata",
"metadata": { "metadata": {
"package": "package-2", "package": "package-2",
"source": "", "source": "",

View File

@ -15,7 +15,7 @@
"MIT" "MIT"
], ],
"language": "python", "language": "python",
"metadataType": "python-package-metadata", "metadataType": "PythonPackageMetadata",
"metadata": { "metadata": {
"name": "package-1", "name": "package-1",
"version": "1.0.1", "version": "1.0.1",
@ -39,7 +39,7 @@
], ],
"licenses": null, "licenses": null,
"language": "", "language": "",
"metadataType": "dpkg-metadata", "metadataType": "DpkgMetadata",
"metadata": { "metadata": {
"package": "package-2", "package": "package-2",
"source": "", "source": "",

View File

@ -10,11 +10,13 @@ import (
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
) )
// Presenter is a human-friendly text presenter to represent package and source data.
type Presenter struct { type Presenter struct {
catalog *pkg.Catalog catalog *pkg.Catalog
srcMetadata source.Metadata srcMetadata source.Metadata
} }
// NewPresenter creates a new presenter for the given set of catalog and image data.
func NewPresenter(catalog *pkg.Catalog, srcMetadata source.Metadata) *Presenter { func NewPresenter(catalog *pkg.Catalog, srcMetadata source.Metadata) *Presenter {
return &Presenter{ return &Presenter{
catalog: catalog, catalog: catalog,

View File

@ -102,6 +102,7 @@ func (r *AllLayersResolver) FilesByPath(paths ...string) ([]Location, error) {
} }
// FilesByGlob returns all file.References that match the given path glob pattern from any layer in the image. // FilesByGlob returns all file.References that match the given path glob pattern from any layer in the image.
// nolint:gocognit
func (r *AllLayersResolver) FilesByGlob(patterns ...string) ([]Location, error) { func (r *AllLayersResolver) FilesByGlob(patterns ...string) ([]Location, error) {
uniqueFileIDs := file.NewFileReferenceSet() uniqueFileIDs := file.NewFileReferenceSet()
uniqueLocations := make([]Location, 0) uniqueLocations := make([]Location, 0)
@ -141,6 +142,8 @@ func (r *AllLayersResolver) FilesByGlob(patterns ...string) ([]Location, error)
return uniqueLocations, nil return uniqueLocations, nil
} }
// RelativeFileByPath fetches a single file at the given path relative to the layer squash of the given reference.
// This is helpful when attempting to find a file that is in the same layer or lower as another file.
func (r *AllLayersResolver) RelativeFileByPath(location Location, path string) *Location { func (r *AllLayersResolver) RelativeFileByPath(location Location, path string) *Location {
entry, err := r.img.FileCatalog.Get(location.ref) entry, err := r.img.FileCatalog.Get(location.ref)
if err != nil { if err != nil {
@ -157,13 +160,13 @@ func (r *AllLayersResolver) RelativeFileByPath(location Location, path string) *
return &relativeLocation return &relativeLocation
} }
// MultipleFileContentsByRef returns the file contents for all file.References relative to the image. Note that a // MultipleFileContentsByLocation returns the file contents for all file.References relative to the image. Note that a
// file.Reference is a path relative to a particular layer. // file.Reference is a path relative to a particular layer.
func (r *AllLayersResolver) MultipleFileContentsByLocation(locations []Location) (map[Location]string, error) { func (r *AllLayersResolver) MultipleFileContentsByLocation(locations []Location) (map[Location]string, error) {
return mapLocationRefs(r.img.MultipleFileContentsByRef, locations) return mapLocationRefs(r.img.MultipleFileContentsByRef, locations)
} }
// FileContentsByRef fetches file contents for a single file reference, regardless of the source layer. // FileContentsByLocation fetches file contents for a single file reference, regardless of the source layer.
// If the path does not exist an error is returned. // If the path does not exist an error is returned.
func (r *AllLayersResolver) FileContentsByLocation(location Location) (string, error) { func (r *AllLayersResolver) FileContentsByLocation(location Location) (string, error) {
return r.img.FileContentsByRef(location.ref) return r.img.FileContentsByRef(location.ref)

View File

@ -80,6 +80,9 @@ func (s DirectoryResolver) FilesByGlob(patterns ...string) ([]Location, error) {
return result, nil return result, nil
} }
// RelativeFileByPath fetches a single file at the given path relative to the layer squash of the given reference.
// This is helpful when attempting to find a file that is in the same layer or lower as another file. For the
// DirectoryResolver, this is a simple path lookup.
func (s *DirectoryResolver) RelativeFileByPath(_ Location, path string) *Location { func (s *DirectoryResolver) RelativeFileByPath(_ Location, path string) *Location {
paths, err := s.FilesByPath(path) paths, err := s.FilesByPath(path)
if err != nil { if err != nil {
@ -92,7 +95,7 @@ func (s *DirectoryResolver) RelativeFileByPath(_ Location, path string) *Locatio
return &paths[0] return &paths[0]
} }
// MultipleFileContentsByRef returns the file contents for all file.References relative to a directory. // MultipleFileContentsByLocation returns the file contents for all file.References relative to a directory.
func (s DirectoryResolver) MultipleFileContentsByLocation(locations []Location) (map[Location]string, error) { func (s DirectoryResolver) MultipleFileContentsByLocation(locations []Location) (map[Location]string, error) {
refContents := make(map[Location]string) refContents := make(map[Location]string)
for _, location := range locations { for _, location := range locations {
@ -106,7 +109,7 @@ func (s DirectoryResolver) MultipleFileContentsByLocation(locations []Location)
return refContents, nil return refContents, nil
} }
// FileContentsByRef fetches file contents for a single file reference relative to a directory. // FileContentsByLocation fetches file contents for a single file reference relative to a directory.
// If the path does not exist an error is returned. // If the path does not exist an error is returned.
func (s DirectoryResolver) FileContentsByLocation(location Location) (string, error) { func (s DirectoryResolver) FileContentsByLocation(location Location) (string, error) {
contents, err := fileContents(location.Path) contents, err := fileContents(location.Path)

View File

@ -2,6 +2,8 @@ package source
import "github.com/anchore/stereoscope/pkg/image" import "github.com/anchore/stereoscope/pkg/image"
// ImageMetadata represents all static metadata that defines what a container image is. This is useful to later describe
// "what" was cataloged without needing the more complicated stereoscope Image objects or Resolver objects.
type ImageMetadata struct { type ImageMetadata struct {
UserInput string `json:"userInput"` UserInput string `json:"userInput"`
Scope Scope `json:"scope"` // specific perspective to catalog Scope Scope `json:"scope"` // specific perspective to catalog
@ -12,12 +14,14 @@ type ImageMetadata struct {
Tags []string `json:"tags"` Tags []string `json:"tags"`
} }
// LayerMetadata represents all static metadata that defines what a container image layer is.
type LayerMetadata struct { type LayerMetadata struct {
MediaType string `json:"mediaType"` MediaType string `json:"mediaType"`
Digest string `json:"digest"` Digest string `json:"digest"`
Size int64 `json:"size"` Size int64 `json:"size"`
} }
// NewImageMetadata creates a new ImageMetadata object populated from the given stereoscope Image object and user configuration.
func NewImageMetadata(img *image.Image, userInput string, scope Scope) ImageMetadata { func NewImageMetadata(img *image.Image, userInput string, scope Scope) ImageMetadata {
// populate artifacts... // populate artifacts...
tags := make([]string, len(img.Metadata.Tags)) tags := make([]string, len(img.Metadata.Tags))

View File

@ -104,6 +104,9 @@ func (r *ImageSquashResolver) FilesByGlob(patterns ...string) ([]Location, error
return uniqueLocations, nil return uniqueLocations, nil
} }
// RelativeFileByPath fetches a single file at the given path relative to the layer squash of the given reference.
// This is helpful when attempting to find a file that is in the same layer or lower as another file. For the
// ImageSquashResolver, this is a simple path lookup.
func (r *ImageSquashResolver) RelativeFileByPath(_ Location, path string) *Location { func (r *ImageSquashResolver) RelativeFileByPath(_ Location, path string) *Location {
paths, err := r.FilesByPath(path) paths, err := r.FilesByPath(path)
if err != nil { if err != nil {
@ -116,13 +119,13 @@ func (r *ImageSquashResolver) RelativeFileByPath(_ Location, path string) *Locat
return &paths[0] return &paths[0]
} }
// MultipleFileContentsByRef returns the file contents for all file.References relative to the image. Note that a // MultipleFileContentsByLocation returns the file contents for all file.References relative to the image. Note that a
// file.Reference is a path relative to a particular layer, in this case only from the squashed representation. // file.Reference is a path relative to a particular layer, in this case only from the squashed representation.
func (r *ImageSquashResolver) MultipleFileContentsByLocation(locations []Location) (map[Location]string, error) { func (r *ImageSquashResolver) MultipleFileContentsByLocation(locations []Location) (map[Location]string, error) {
return mapLocationRefs(r.img.MultipleFileContentsByRef, locations) return mapLocationRefs(r.img.MultipleFileContentsByRef, locations)
} }
// FileContentsByRef fetches file contents for a single file reference, regardless of the source layer. // FileContentsByLocation fetches file contents for a single file reference, regardless of the source layer.
// If the path does not exist an error is returned. // If the path does not exist an error is returned.
func (r *ImageSquashResolver) FileContentsByLocation(location Location) (string, error) { func (r *ImageSquashResolver) FileContentsByLocation(location Location) (string, error) {
return r.img.FileContentsByRef(location.ref) return r.img.FileContentsByRef(location.ref)

View File

@ -7,18 +7,21 @@ import (
"github.com/anchore/stereoscope/pkg/image" "github.com/anchore/stereoscope/pkg/image"
) )
// Location represents a path relative to a particular filesystem.
type Location struct { type Location struct {
Path string `json:"path"` Path string `json:"path"` // The string path of the location (e.g. /etc/hosts)
FileSystemID string `json:"layerID,omitempty"` // TODO: comment FileSystemID string `json:"layerID,omitempty"` // An ID representing the filesystem. For container images this is a layer digest; for directories or a root filesystem this is blank.
ref file.Reference ref file.Reference // The file reference relative to the stereoscope.FileCatalog that has more information about this location.
} }
// NewLocation creates a new Location representing a path without denoting a filesystem or FileCatalog reference.
func NewLocation(path string) Location { func NewLocation(path string) Location {
return Location{ return Location{
Path: path, Path: path,
} }
} }
// NewLocationFromImage creates a new Location representing the given path (extracted from the ref) relative to the given image.
func NewLocationFromImage(ref file.Reference, img *image.Image) Location { func NewLocationFromImage(ref file.Reference, img *image.Image) Location {
entry, err := img.FileCatalog.Get(ref) entry, err := img.FileCatalog.Get(ref)
if err != nil { if err != nil {

View File

@ -1,5 +1,6 @@
package source package source
// Metadata represents any static source data that helps describe "what" was cataloged.
type Metadata struct { type Metadata struct {
Scheme Scheme // the source data scheme type (directory or image) Scheme Scheme // the source data scheme type (directory or image)
ImageMetadata ImageMetadata // all image info (image only) ImageMetadata ImageMetadata // all image info (image only)

View File

@ -19,7 +19,7 @@ type ContentResolver interface {
// TODO: we should consider refactoring to return a set of io.Readers or file.Openers instead of the full contents themselves (allow for optional buffering). // TODO: we should consider refactoring to return a set of io.Readers or file.Openers instead of the full contents themselves (allow for optional buffering).
} }
// FileResolver knows how to get file.References for given string paths and globs // FileResolver knows how to get file.References for given string paths and globs
type FileResolver interface { type FileResolver interface {
// FilesByPath fetches a set of file references which have the given path (for an image, there may be multiple matches) // FilesByPath fetches a set of file references which have the given path (for an image, there may be multiple matches)
FilesByPath(paths ...string) ([]Location, error) FilesByPath(paths ...string) ([]Location, error)
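A hedged sketch of how a cataloger typically drives these resolver methods: glob for candidate locations, then fetch contents by location (the resolver variable, glob pattern, and parsing step are hypothetical):

    locations, err := resolver.FilesByGlob("**/os-release")
    if err != nil {
        return err
    }
    for _, location := range locations {
        contents, err := resolver.FileContentsByLocation(location)
        if err != nil {
            return err
        }
        // parse contents and record findings for this location...
        _ = contents
    }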

View File

@ -9,12 +9,16 @@ import (
"github.com/spf13/afero" "github.com/spf13/afero"
) )
// Scheme represents the optional prefix string at the beginning of a user request (e.g. "docker:").
type Scheme string type Scheme string
const ( const (
UnknownScheme Scheme = "unknown-scheme" // UnknownScheme is the default scheme
DirectoryScheme Scheme = "directory-scheme" UnknownScheme Scheme = "UnknownScheme"
ImageScheme Scheme = "image-scheme" // DirectoryScheme indicates the source being cataloged is a directory on the root filesystem
DirectoryScheme Scheme = "DirectoryScheme"
// ImageScheme indicates the source being cataloged is a container image
ImageScheme Scheme = "ImageScheme"
) )
func detectScheme(fs afero.Fs, imageDetector sourceDetector, userInput string) (Scheme, string, error) { func detectScheme(fs afero.Fs, imageDetector sourceDetector, userInput string) (Scheme, string, error) {

View File

@ -2,19 +2,25 @@ package source
import "strings" import "strings"
// Scope indicates "how" or from "which perspectives" the source object should be cataloged.
type Scope string type Scope string
const ( const (
UnknownScope Scope = "UnknownScope" // UnknownScope is the default scope
SquashedScope Scope = "Squashed" UnknownScope Scope = "UnknownScope"
// SquashedScope indicates to only catalog content visible from the squashed filesystem representation (what can be seen only within the container at runtime)
SquashedScope Scope = "Squashed"
// AllLayersScope indicates to catalog content on all layers, regardless of whether it is visible from the container at runtime.
AllLayersScope Scope = "AllLayers" AllLayersScope Scope = "AllLayers"
) )
// AllScopes is a slice containing all possible scope options
var AllScopes = []Scope{ var AllScopes = []Scope{
SquashedScope, SquashedScope,
AllLayersScope, AllLayersScope,
} }
// ParseScope returns a scope as indicated from the given string.
func ParseScope(userStr string) Scope { func ParseScope(userStr string) Scope {
switch strings.ToLower(userStr) { switch strings.ToLower(userStr) {
case strings.ToLower(SquashedScope.String()): case strings.ToLower(SquashedScope.String()):
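A small sketch of the parsing behavior (assumes fmt and the source package are imported; the fallback comment is an assumption based on the truncated default case):

    fmt.Println(source.ParseScope("squashed"))    // SquashedScope -- matching is case-insensitive
    fmt.Println(source.ParseScope("AllLayers"))   // AllLayersScope
    fmt.Println(source.ParseScope("bogus-scope")) // assumed to fall back to UnknownScope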