make updates due to linter update

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>
This commit is contained in:
Alex Goodman 2021-10-05 22:51:27 -04:00
parent 053768c6c6
commit 821210006d
No known key found for this signature in database
GPG Key ID: 5CB45AE22BAB7EA7
74 changed files with 191 additions and 225 deletions

View File

@ -4,14 +4,13 @@ import (
"fmt"
"os"
"github.com/spf13/cobra"
"github.com/anchore/stereoscope"
"github.com/anchore/syft/internal/config"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/internal/logger"
"github.com/anchore/syft/syft"
"github.com/gookit/color"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"github.com/wagoodman/go-partybus"
)
@ -67,6 +66,7 @@ func initCmdAliasBindings() {
}
}
// nolint:forbidigo
func initAppConfig() {
cfg, err := config.LoadApplicationConfig(viper.GetViper(), persistentOpts)
if err != nil {

View File

@ -8,7 +8,6 @@ import (
"github.com/docker/docker/api/types"
"github.com/docker/docker/api/types/filters"
"github.com/docker/docker/client"
"github.com/spf13/cobra"
)
@ -87,7 +86,7 @@ func dockerImageValidArgsFunction(cmd *cobra.Command, args []string, toComplete
}
func listLocalDockerImages(prefix string) ([]string, error) {
var repoTags = make([]string, 0)
repoTags := make([]string, 0)
ctx := context.Background()
cli, err := client.NewClientWithOpts(client.FromEnv, client.WithAPIVersionNegotiation())
if err != nil {

View File

@ -102,7 +102,7 @@ func init() {
}
func setPackageFlags(flags *pflag.FlagSet) {
///////// Formatting & Input options //////////////////////////////////////////////
// Formatting & Input options //////////////////////////////////////////////
flags.StringP(
"scope", "s", source.SquashedScope.String(),
@ -118,7 +118,7 @@ func setPackageFlags(flags *pflag.FlagSet) {
"file to write the report output to (default is STDOUT)",
)
///////// Upload options //////////////////////////////////////////////////////////
// Upload options //////////////////////////////////////////////////////////
flags.StringP(
"host", "H", "",
"the hostname or URL of the Anchore Enterprise instance to upload to",
@ -151,7 +151,7 @@ func setPackageFlags(flags *pflag.FlagSet) {
}
func bindPackagesConfigOptions(flags *pflag.FlagSet) error {
///////// Formatting & Input options //////////////////////////////////////////////
// Formatting & Input options //////////////////////////////////////////////
if err := viper.BindPFlag("package.cataloger.scope", flags.Lookup("scope")); err != nil {
return err
@ -165,7 +165,7 @@ func bindPackagesConfigOptions(flags *pflag.FlagSet) error {
return err
}
///////// Upload options //////////////////////////////////////////////////////////
// Upload options //////////////////////////////////////////////////////////
if err := viper.BindPFlag("anchore.host", flags.Lookup("host")); err != nil {
return err
@ -303,7 +303,7 @@ func runPackageSbomUpload(src *source.Source, s source.Metadata, catalog *pkg.Ca
}
if err := c.Import(context.Background(), importCfg); err != nil {
return fmt.Errorf("failed to upload results to host=%s: %+v", appConfig.Anchore.Host, err)
return fmt.Errorf("failed to upload results to host=%s: %w", appConfig.Anchore.Host, err)
}
return nil
}

View File

@ -17,7 +17,7 @@ func reportWriter() (io.Writer, func() error, error) {
case 0:
return os.Stdout, nop, nil
default:
reportFile, err := os.OpenFile(path, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0644)
reportFile, err := os.OpenFile(path, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0o644)
if err != nil {
return nil, nop, fmt.Errorf("unable to create report file: %w", err)
}

View File

@ -24,6 +24,7 @@ var rootCmd = &cobra.Command{
ValidArgsFunction: packagesCmd.ValidArgsFunction,
}
// nolint:forbidigo
func init() {
// set universal flags
rootCmd.PersistentFlags().StringVarP(&persistentOpts.ConfigPath, "config", "c", "", "application config file")

View File

@ -6,7 +6,6 @@ import (
"os"
"github.com/anchore/syft/internal"
"github.com/anchore/syft/internal/version"
"github.com/spf13/cobra"
)
@ -24,6 +23,7 @@ func init() {
rootCmd.AddCommand(versionCmd)
}
// nolint:forbidigo
func printVersion(_ *cobra.Command, _ []string) {
versionInfo := version.FromBuild()

View File

@ -6,8 +6,6 @@ import (
"fmt"
"time"
"github.com/antihax/optional"
"github.com/anchore/client-go/pkg/external"
"github.com/anchore/stereoscope/pkg/image"
"github.com/anchore/syft/internal/bus"
@ -15,6 +13,7 @@ import (
"github.com/anchore/syft/syft/event"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/source"
"github.com/antihax/optional"
"github.com/wagoodman/go-partybus"
"github.com/wagoodman/go-progress"
)
@ -64,7 +63,7 @@ func (c *Client) Import(ctx context.Context, cfg ImportConfig) error {
stage.Current = "starting session"
startOperation, _, err := c.client.ImportsApi.CreateOperation(authedCtx)
if err != nil {
var detail = "no details given"
detail := "no details given"
var openAPIErr external.GenericOpenAPIError
if errors.As(err, &openAPIErr) {
detail = string(openAPIErr.Body())
@ -106,7 +105,7 @@ func (c *Client) Import(ctx context.Context, cfg ImportConfig) error {
_, _, err = c.client.ImagesApi.AddImage(authedCtx, imageModel, &opts)
if err != nil {
var detail = "no details given"
detail := "no details given"
var openAPIErr external.GenericOpenAPIError
if errors.As(err, &openAPIErr) {
detail = string(openAPIErr.Body())
@ -122,7 +121,7 @@ func (c *Client) Import(ctx context.Context, cfg ImportConfig) error {
}
func addImageModel(imageMetadata image.Metadata, packageDigest, manifestDigest, dockerfileDigest, configDigest, sessionID string) external.ImageAnalysisRequest {
var tags = make([]string, len(imageMetadata.Tags))
tags := make([]string, len(imageMetadata.Tags))
for i, t := range imageMetadata.Tags {
tags[i] = t.String()
}

View File

@ -8,10 +8,9 @@ import (
"fmt"
"net/http"
"github.com/wagoodman/go-progress"
"github.com/anchore/client-go/pkg/external"
"github.com/anchore/syft/internal/log"
"github.com/wagoodman/go-progress"
)
type configImportAPI interface {

View File

@ -6,11 +6,9 @@ import (
"fmt"
"net/http"
"github.com/wagoodman/go-progress"
"github.com/anchore/syft/internal/log"
"github.com/anchore/client-go/pkg/external"
"github.com/anchore/syft/internal/log"
"github.com/wagoodman/go-progress"
)
type dockerfileImportAPI interface {

View File

@ -8,10 +8,9 @@ import (
"fmt"
"net/http"
"github.com/wagoodman/go-progress"
"github.com/anchore/client-go/pkg/external"
"github.com/anchore/syft/internal/log"
"github.com/wagoodman/go-progress"
)
type manifestImportAPI interface {

View File

@ -8,16 +8,13 @@ import (
"fmt"
"net/http"
"github.com/anchore/syft/internal/presenter/packages"
"github.com/wagoodman/go-progress"
"github.com/anchore/syft/syft/distro"
"github.com/anchore/syft/syft/source"
"github.com/anchore/client-go/pkg/external"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/internal/presenter/packages"
"github.com/anchore/syft/syft/distro"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/source"
"github.com/wagoodman/go-progress"
)
type packageSBOMImportAPI interface {

View File

@ -15,8 +15,10 @@ package bus
import "github.com/wagoodman/go-partybus"
var publisher partybus.Publisher
var active bool
var (
publisher partybus.Publisher
active bool
)
// SetPublisher sets the singleton event bus publisher. This is optional; if no bus is provided, the library will
// behave no differently than if a bus had been provided.

View File

@ -144,7 +144,6 @@ func (cfg *Application) parseConfigValues() error {
func (cfg Application) String() string {
// yaml is pretty human friendly (at least when compared to json)
appCfgStr, err := yaml.Marshal(&cfg)
if err != nil {
return err.Error()
}

View File

@ -4,7 +4,6 @@ import (
"os"
"github.com/anchore/stereoscope/pkg/image"
"github.com/spf13/viper"
)
@ -58,7 +57,7 @@ func hasNonEmptyCredentials(username, password, token string) bool {
}
func (cfg *registry) ToOptions() *image.RegistryOptions {
var auth = make([]image.RegistryCredentials, len(cfg.Auth))
auth := make([]image.RegistryCredentials, len(cfg.Auth))
for i, a := range cfg.Auth {
auth[i] = image.RegistryCredentials{
Authority: a.Authority,

View File

@ -1,6 +1,7 @@
package internal
import (
"errors"
"fmt"
"io"
"os"
@ -15,24 +16,23 @@ func CloseAndLogError(closer io.Closer, location string) {
}
}
type ErrPath struct {
type PathError struct {
Path string
Err error
}
func (e ErrPath) Error() string {
func (e PathError) Error() string {
return fmt.Sprintf("unable to observe contents of %+v: %v", e.Path, e.Err)
}
func IsErrPath(err error) bool {
_, ok := err.(ErrPath)
return ok
func IsPathError(err error) bool {
return errors.As(err, &PathError{})
}
func IsErrPathPermission(err error) bool {
pathErr, ok := err.(ErrPath)
if ok {
var pathErr *PathError
if errors.As(err, pathErr) {
return os.IsPermission(pathErr.Err)
}
return ok
return false
}

View File

@ -24,12 +24,12 @@ const (
const perFileReadLimit = 2 * GB
type errZipSlipDetected struct {
type zipSlipDetectedError struct {
Prefix string
JoinArgs []string
}
func (e *errZipSlipDetected) Error() string {
func (e *zipSlipDetectedError) Error() string {
return fmt.Sprintf("paths are not allowed to resolve outside of the root prefix (%q). Destination: %q", e.Prefix, e.JoinArgs)
}
@ -197,7 +197,7 @@ func safeJoin(prefix string, dest ...string) (string, error) {
joinResult := filepath.Join(append([]string{prefix}, dest...)...)
cleanJoinResult := filepath.Clean(joinResult)
if !strings.HasPrefix(cleanJoinResult, filepath.Clean(prefix)) {
return "", &errZipSlipDetected{
return "", &zipSlipDetectedError{
Prefix: prefix,
JoinArgs: dest,
}

View File

@ -299,7 +299,7 @@ func TestSafeJoin(t *testing.T) {
"../../../etc/passwd",
},
expected: "",
errAssertion: assertErrorAs(&errZipSlipDetected{}),
errAssertion: assertErrorAs(&zipSlipDetectedError{}),
},
{
prefix: "/a/place",
@ -308,7 +308,7 @@ func TestSafeJoin(t *testing.T) {
"../",
},
expected: "",
errAssertion: assertErrorAs(&errZipSlipDetected{}),
errAssertion: assertErrorAs(&zipSlipDetectedError{}),
},
{
prefix: "/a/place",
@ -316,7 +316,7 @@ func TestSafeJoin(t *testing.T) {
"../",
},
expected: "",
errAssertion: assertErrorAs(&errZipSlipDetected{}),
errAssertion: assertErrorAs(&zipSlipDetectedError{}),
},
}

View File

@ -3,6 +3,7 @@ package file
import (
"archive/zip"
"encoding/binary"
"errors"
"fmt"
"io"
"os"
@ -91,7 +92,7 @@ func findArchiveStartOffset(r io.ReaderAt, size int64) (startOfArchive uint64, e
bLen = size
}
buf = make([]byte, int(bLen))
if _, err := r.ReadAt(buf, size-bLen); err != nil && err != io.EOF {
if _, err := r.ReadAt(buf, size-bLen); err != nil && !errors.Is(err, io.EOF) {
return 0, err
}
if p := findSignatureInBlock(buf); p >= 0 {

View File

@ -11,7 +11,7 @@ import (
prefixed "github.com/x-cray/logrus-prefixed-formatter"
)
const defaultLogFilePermissions fs.FileMode = 0644
const defaultLogFilePermissions fs.FileMode = 0o644
// LogrusConfig contains all configurable values for the Logrus logger
type LogrusConfig struct {

View File

@ -38,18 +38,18 @@ func NewJSONPackages(catalog *pkg.Catalog) ([]JSONPackage, error) {
// NewJSONPackage creates a new JSONPackage from the given pkg.Package.
func NewJSONPackage(p *pkg.Package) (JSONPackage, error) {
var cpes = make([]string, len(p.CPEs))
cpes := make([]string, len(p.CPEs))
for i, c := range p.CPEs {
cpes[i] = c.BindToFmtString()
}
// ensure collections are never nil for presentation reasons
var locations = make([]source.Location, 0)
locations := make([]source.Location, 0)
if p.Locations != nil {
locations = p.Locations
}
var licenses = make([]string, 0)
licenses := make([]string, 0)
if p.Licenses != nil {
licenses = p.Licenses
}

View File

@ -10,7 +10,7 @@ type CreationInfo struct {
Created time.Time `json:"created"`
// Identify who (or what, in the case of a tool) created the SPDX file. If the SPDX file was created by an
// individual, indicate the person's name. If the SPDX file was created on behalf of a company or organization,
//indicate the entity name. If the SPDX file was created using a software tool, indicate the name and version
// indicate the entity name. If the SPDX file was created using a software tool, indicate the name and version
// for that tool. If multiple participants or tools were involved, use multiple instances of this field. Person
// name or organization name may be designated as “anonymous” if appropriate.
Creators []string `json:"creators"`

View File

@ -135,7 +135,7 @@ func getSPDXHomepage(p *pkg.Package) string {
}
func getSPDXSourceInfo(p *pkg.Package) string {
answer := ""
var answer string
switch p.Type {
case pkg.RpmPkg:
answer = "acquired package info from RPM DB"

View File

@ -132,10 +132,10 @@ func newSPDXJsonElements(catalog *pkg.Catalog) ([]spdx22.Package, []spdx22.File,
func cleanSPDXName(name string) string {
// remove # according to specification
name = strings.Replace(name, "#", "-", -1)
name = strings.ReplaceAll(name, "#", "-")
// remove : for url construction
name = strings.Replace(name, ":", "-", -1)
name = strings.ReplaceAll(name, ":", "-")
// clean relative pathing
return path.Clean(name)

View File

@ -5,9 +5,8 @@ import (
"io"
"time"
"github.com/anchore/syft/internal/spdxlicense"
"github.com/anchore/syft/internal"
"github.com/anchore/syft/internal/spdxlicense"
"github.com/anchore/syft/internal/version"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/source"

View File

@ -6,9 +6,8 @@ import (
"sort"
"strings"
"github.com/olekukonko/tablewriter"
"github.com/anchore/syft/syft/pkg"
"github.com/olekukonko/tablewriter"
)
type TablePresenter struct {

View File

@ -2,7 +2,6 @@ package packages
import (
"fmt"
"io"
"text/tabwriter"
@ -32,7 +31,7 @@ func (pres *TextPresenter) Present(output io.Writer) error {
switch pres.srcMetadata.Scheme {
case source.DirectoryScheme:
fmt.Fprintln(w, fmt.Sprintf("[Path: %s]", pres.srcMetadata.Path))
fmt.Fprintf(w, "[Path: %s]\n", pres.srcMetadata.Path)
case source.ImageScheme:
fmt.Fprintln(w, "[Image]")
@ -51,7 +50,7 @@ func (pres *TextPresenter) Present(output io.Writer) error {
// populate artifacts...
rows := 0
for _, p := range pres.catalog.Sorted() {
fmt.Fprintln(w, fmt.Sprintf("[%s]", p.Name))
fmt.Fprintf(w, "[%s]\n", p.Name)
fmt.Fprintln(w, " Version:\t", p.Version)
fmt.Fprintln(w, " Type:\t", string(p.Type))
fmt.Fprintln(w, " Found by:\t", p.FoundBy)

View File

@ -6,7 +6,6 @@ import (
"strconv"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/source"
)

View File

@ -2,6 +2,7 @@ package main
import (
"encoding/json"
"fmt"
"log"
"net/http"
"os"
@ -49,14 +50,20 @@ type LicenseList struct {
}
func main() {
if err := run(); err != nil {
os.Exit(1)
}
}
func run() error {
resp, err := http.Get(url)
if err != nil {
log.Fatalf("unable to get licenses list: %+v", err)
return fmt.Errorf("unable to get licenses list: %w", err)
}
var result LicenseList
if err = json.NewDecoder(resp.Body).Decode(&result); err != nil {
log.Fatalf("unable to decode license list: %+v", err)
return fmt.Errorf("unable to decode license list: %w", err)
}
defer func() {
if err := resp.Body.Close(); err != nil {
@ -66,7 +73,7 @@ func main() {
f, err := os.Create(source)
if err != nil {
log.Fatalf("unable to create %q: %+v", source, err)
return fmt.Errorf("unable to create %q: %w", source, err)
}
defer func() {
if err := f.Close(); err != nil {
@ -89,8 +96,9 @@ func main() {
})
if err != nil {
log.Fatalf("unable to generate template: %+v", err)
return fmt.Errorf("unable to generate template: %w", err)
}
return nil
}
// Parsing the provided SPDX license list necessitates a two pass approach.
@ -104,7 +112,7 @@ func main() {
// We also sort the licenses for the second pass so that cases like `GPL-1` associate to `GPL-1.0` and not `GPL-1.1`.
func processSPDXLicense(result LicenseList) map[string]string {
// first pass build map
var licenseIDs = make(map[string]string)
licenseIDs := make(map[string]string)
for _, l := range result.Licenses {
cleanID := strings.ToLower(l.ID)
if _, exists := licenseIDs[cleanID]; exists {

View File

@ -7,11 +7,10 @@ import (
"sync"
"github.com/anchore/syft/internal"
"github.com/gookit/color"
"github.com/wagoodman/jotframe/pkg/frame"
syftEventParsers "github.com/anchore/syft/syft/event/parsers"
"github.com/gookit/color"
"github.com/wagoodman/go-partybus"
"github.com/wagoodman/jotframe/pkg/frame"
)
// handleCatalogerPresenterReady is a UI function for processing the CatalogerFinished bus event, displaying the catalog

View File

@ -12,11 +12,13 @@ import (
const valueNotProvided = "[not provided]"
// all variables here are provided as build-time arguments, with clear default values
var version = valueNotProvided
var gitCommit = valueNotProvided
var gitTreeState = valueNotProvided
var buildDate = valueNotProvided
var platform = fmt.Sprintf("%s/%s", runtime.GOOS, runtime.GOARCH)
var (
version = valueNotProvided
gitCommit = valueNotProvided
gitTreeState = valueNotProvided
buildDate = valueNotProvided
platform = fmt.Sprintf("%s/%s", runtime.GOOS, runtime.GOARCH)
)
// Version defines the application version details (generally from build information)
type Version struct {

View File

@ -1,3 +1,4 @@
// nolint:forbidigo
package main
import (
@ -45,11 +46,7 @@ func build() *jsonschema.Schema {
reflector := &jsonschema.Reflector{
AllowAdditionalProperties: true,
TypeNamer: func(r reflect.Type) string {
name := r.Name()
if strings.HasPrefix(name, "JSON") {
name = strings.TrimPrefix(name, "JSON")
}
return name
return strings.TrimPrefix(r.Name(), "JSON")
},
}
documentSchema := reflector.ReflectFromType(reflect.TypeOf(&poweruser.JSONDocument{}))
@ -74,7 +71,7 @@ func build() *jsonschema.Schema {
// ensure the generated list of names is stable between runs
sort.Strings(metadataNames)
var metadataTypes = []map[string]string{
metadataTypes := []map[string]string{
// allow for no metadata to be provided
{"type": "null"},
}
@ -93,13 +90,13 @@ func build() *jsonschema.Schema {
}
func encode(schema *jsonschema.Schema) []byte {
var newSchemaBuffer = new(bytes.Buffer)
newSchemaBuffer := new(bytes.Buffer)
enc := json.NewEncoder(newSchemaBuffer)
// prevent > and < from being escaped in the payload
enc.SetEscapeHTML(false)
enc.SetIndent("", " ")
err := enc.Encode(&schema)
if err != nil {
if err := enc.Encode(&schema); err != nil {
panic(err)
}

View File

@ -6,7 +6,6 @@ import (
"strings"
"github.com/anchore/syft/internal"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/source"
)

View File

@ -6,29 +6,26 @@ package parsers
import (
"fmt"
"github.com/anchore/syft/syft/presenter"
"github.com/anchore/syft/syft/file"
"github.com/wagoodman/go-progress"
"github.com/anchore/syft/syft/event"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/pkg/cataloger"
"github.com/anchore/syft/syft/presenter"
"github.com/wagoodman/go-partybus"
"github.com/wagoodman/go-progress"
)
type ErrBadPayload struct {
type badPayloadError struct {
Type partybus.EventType
Field string
Value interface{}
}
func (e *ErrBadPayload) Error() string {
func (e *badPayloadError) Error() string {
return fmt.Sprintf("event='%s' has bad event payload field='%v': '%+v'", string(e.Type), e.Field, e.Value)
}
func newPayloadErr(t partybus.EventType, field string, value interface{}) error {
return &ErrBadPayload{
return &badPayloadError{
Type: t,
Field: field,
Value: value,

View File

@ -6,7 +6,6 @@ import (
"io"
"github.com/anchore/syft/internal"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/source"
)
@ -65,7 +64,7 @@ func (i *ContentsCataloger) catalogLocation(resolver source.FileResolver, locati
buf := &bytes.Buffer{}
if _, err = io.Copy(base64.NewEncoder(base64.StdEncoding, buf), contentReader); err != nil {
return "", internal.ErrPath{Path: location.RealPath, Err: err}
return "", internal.PathError{Path: location.RealPath, Err: err}
}
return buf.String(), nil

View File

@ -8,15 +8,12 @@ import (
"strings"
"github.com/anchore/syft/internal"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/internal/bus"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/event"
"github.com/anchore/syft/syft/source"
"github.com/wagoodman/go-partybus"
"github.com/wagoodman/go-progress"
"github.com/anchore/syft/syft/source"
)
type DigestsCataloger struct {
@ -72,7 +69,7 @@ func (i *DigestsCataloger) catalogLocation(resolver source.FileResolver, locatio
size, err := io.Copy(io.MultiWriter(writers...), contentReader)
if err != nil {
return nil, internal.ErrPath{Path: location.RealPath, Err: err}
return nil, internal.PathError{Path: location.RealPath, Err: err}
}
if size == 0 {
@ -99,7 +96,7 @@ func DigestAlgorithmName(hash crypto.Hash) string {
func CleanDigestAlgorithmName(name string) string {
lower := strings.ToLower(name)
return strings.Replace(lower, "-", "", -1)
return strings.ReplaceAll(lower, "-", "")
}
func digestsCatalogingProgress(locations int64) (*progress.Stage, *progress.Manual) {

View File

@ -11,7 +11,7 @@ import (
// GenerateSearchPatterns takes a set of named base patterns, a set of additional named patterns and a name exclusion list and generates a final
// set of regular expressions (indexed by name). The sets are aggregated roughly as such: (base - excluded) + additional.
func GenerateSearchPatterns(basePatterns map[string]string, additionalPatterns map[string]string, excludePatternNames []string) (map[string]*regexp.Regexp, error) {
var regexObjs = make(map[string]*regexp.Regexp)
regexObjs := make(map[string]*regexp.Regexp)
var errs error
addFn := func(name, pattern string) {

View File

@ -9,8 +9,7 @@ import (
"github.com/wagoodman/go-progress"
)
type MetadataCataloger struct {
}
type MetadataCataloger struct{}
func NewMetadataCataloger() *MetadataCataloger {
return &MetadataCataloger{}

View File

@ -9,7 +9,6 @@ import (
"sort"
"github.com/anchore/syft/internal"
"github.com/anchore/syft/internal/bus"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/event"
@ -82,7 +81,7 @@ func (i *SecretsCataloger) catalogLocation(resolver source.FileResolver, locatio
// TODO: in the future we can swap out search strategies here
secrets, err := catalogLocationByLine(resolver, location, i.patterns)
if err != nil {
return nil, internal.ErrPath{Path: location.RealPath, Err: err}
return nil, internal.PathError{Path: location.RealPath, Err: err}
}
if i.revealValues {

View File

@ -9,7 +9,6 @@ import (
"regexp"
"github.com/anchore/syft/internal"
"github.com/anchore/syft/syft/source"
)
@ -20,7 +19,7 @@ func catalogLocationByLine(resolver source.FileResolver, location source.Locatio
}
defer internal.CloseAndLogError(readCloser, location.VirtualPath)
var scanner = bufio.NewReader(readCloser)
scanner := bufio.NewReader(readCloser)
var position int64
var allSecrets []SearchResult
var lineNo int64
@ -30,7 +29,7 @@ func catalogLocationByLine(resolver source.FileResolver, location source.Locatio
var line []byte
// TODO: we're at risk of large memory usage for very long lines
line, readErr = scanner.ReadBytes('\n')
if readErr != nil && readErr != io.EOF {
if readErr != nil && !errors.Is(readErr, io.EOF) {
return nil, readErr
}
@ -119,9 +118,9 @@ func extractSecretFromPosition(readCloser io.ReadCloser, name string, pattern *r
}
// lineNoOfSecret are the number of lines which occur before the start of the secret value
var lineNoOfSecret = lineNo + int64(reader.newlinesBefore(start))
lineNoOfSecret := lineNo + int64(reader.newlinesBefore(start))
// lineOffsetOfSecret are the number of bytes that occur after the last newline but before the secret value.
var lineOffsetOfSecret = start - reader.newlinePositionBefore(start)
lineOffsetOfSecret := start - reader.newlinePositionBefore(start)
if lineNoOfSecret == lineNo {
// the secret value starts in the same line as the overall match, so we must consider that line offset
lineOffsetOfSecret += lineOffset

View File

@ -3,13 +3,12 @@ package pkg
import (
"sort"
"github.com/anchore/syft/syft/file"
"github.com/anchore/packageurl-go"
"github.com/anchore/syft/syft/file"
"github.com/scylladb/go-set/strset"
)
const ApkDbGlob = "**/lib/apk/db/installed"
const ApkDBGlob = "**/lib/apk/db/installed"
var _ FileOwner = (*ApkMetadata)(nil)

View File

@ -5,7 +5,6 @@ import (
"sync"
"github.com/anchore/syft/internal"
"github.com/anchore/syft/internal/log"
)
@ -67,8 +66,7 @@ func (c *Catalog) Add(p Package) {
c.lock.Lock()
defer c.lock.Unlock()
_, exists := c.byID[p.ID]
if exists {
if _, exists := c.byID[p.ID]; exists {
log.Errorf("package ID already exists in the catalog : id=%+v %+v", p.ID, p)
return
}

View File

@ -11,7 +11,7 @@ import (
// NewApkdbCataloger returns a new Alpine DB cataloger object.
func NewApkdbCataloger() *common.GenericCataloger {
globParsers := map[string]common.ParserFn{
pkg.ApkDbGlob: parseApkDB,
pkg.ApkDBGlob: parseApkDB,
}
return common.NewGenericCataloger(nil, globParsers, "apkdb-cataloger")

View File

@ -8,9 +8,8 @@ import (
"strconv"
"strings"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/common"
"github.com/mitchellh/mapstructure"

View File

@ -29,20 +29,20 @@ var defaultCandidateAdditions = buildCandidateLookup(
{
// example image: docker.io/nuxeo:latest
pkg.JavaPkg,
candidateKey{PkgName: "elasticsearch"}, //, Vendor: "elasticsearch"},
candidateKey{PkgName: "elasticsearch"}, // , Vendor: "elasticsearch"},
candidateAddition{AdditionalVendors: []string{"elastic"}},
},
{
// example image: docker.io/kaazing-gateway:latest
pkg.JavaPkg,
candidateKey{PkgName: "log4j"}, //, Vendor: "apache-software-foundation"},
candidateKey{PkgName: "log4j"}, // , Vendor: "apache-software-foundation"},
candidateAddition{AdditionalVendors: []string{"apache"}},
},
{
// example image: cassandra:latest
pkg.JavaPkg,
candidateKey{PkgName: "apache-cassandra"}, //, Vendor: "apache"},
candidateKey{PkgName: "apache-cassandra"}, // , Vendor: "apache"},
candidateAddition{AdditionalProducts: []string{"cassandra"}},
},

View File

@ -66,7 +66,7 @@ func (c *GenericCataloger) Catalog(resolver source.FileResolver) ([]pkg.Package,
// SelectFiles takes a set of file trees and resolves any file references of interest for future cataloging
func (c *GenericCataloger) selectFiles(resolver source.FilePathResolver) map[source.Location]ParserFn {
var parserByLocation = make(map[source.Location]ParserFn)
parserByLocation := make(map[source.Location]ParserFn)
// select by exact path
for path, parser := range c.pathParsers {

View File

@ -11,7 +11,6 @@ import (
"sort"
"github.com/anchore/syft/internal"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/source"
@ -36,9 +35,9 @@ func (c *Cataloger) Name() string {
}
// Catalog is given an object to resolve file references and content, this function returns any discovered Packages after analyzing dpkg support files.
// nolint:funlen
func (c *Cataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, error) {
dbFileMatches, err := resolver.FilesByGlob(pkg.DpkgDbGlob)
dbFileMatches, err := resolver.FilesByGlob(pkg.DpkgDBGlob)
if err != nil {
return nil, fmt.Errorf("failed to find dpkg status files's by glob: %w", err)
}
@ -80,20 +79,21 @@ func addLicenses(resolver source.FileResolver, dbLocation source.Location, p *pk
// get license information from the copyright file
copyrightReader, copyrightLocation := fetchCopyrightContents(resolver, dbLocation, p)
if copyrightReader != nil {
if copyrightReader != nil && copyrightLocation != nil {
defer internal.CloseAndLogError(copyrightReader, copyrightLocation.VirtualPath)
// attach the licenses
p.Licenses = parseLicensesFromCopyright(copyrightReader)
// keep a record of the file where this was discovered
if copyrightLocation != nil {
p.Locations = append(p.Locations, *copyrightLocation)
}
}
}
func mergeFileListing(resolver source.FileResolver, dbLocation source.Location, p *pkg.Package) {
metadata := p.Metadata.(pkg.DpkgMetadata)
metadata, ok := p.Metadata.(pkg.DpkgMetadata)
if !ok {
log.Warnf("unable to get DPKG metadata while merging file info")
}
// get file listing (package files + additional config files)
files, infoLocations := getAdditionalFileListing(resolver, dbLocation, p)
@ -122,34 +122,30 @@ loopNewFiles:
func getAdditionalFileListing(resolver source.FileResolver, dbLocation source.Location, p *pkg.Package) ([]pkg.DpkgFileRecord, []source.Location) {
// ensure the default value for a collection is never nil since this may be shown as JSON
var files = make([]pkg.DpkgFileRecord, 0)
files := make([]pkg.DpkgFileRecord, 0)
var locations []source.Location
md5Reader, md5Location := fetchMd5Contents(resolver, dbLocation, p)
if md5Reader != nil {
if md5Reader != nil && md5Location != nil {
defer internal.CloseAndLogError(md5Reader, md5Location.VirtualPath)
// attach the file list
files = append(files, parseDpkgMD5Info(md5Reader)...)
// keep a record of the file where this was discovered
if md5Location != nil {
locations = append(locations, *md5Location)
}
}
conffilesReader, conffilesLocation := fetchConffileContents(resolver, dbLocation, p)
if conffilesReader != nil {
if conffilesReader != nil && conffilesLocation != nil {
defer internal.CloseAndLogError(conffilesReader, conffilesLocation.VirtualPath)
// attach the file list
files = append(files, parseDpkgConffileInfo(md5Reader)...)
// keep a record of the file where this was discovered
if conffilesLocation != nil {
locations = append(locations, *conffilesLocation)
}
}
return files, locations
}
@ -228,7 +224,10 @@ func fetchCopyrightContents(resolver source.FileResolver, dbLocation source.Loca
}
func md5Key(p *pkg.Package) string {
metadata := p.Metadata.(pkg.DpkgMetadata)
metadata, ok := p.Metadata.(pkg.DpkgMetadata)
if !ok {
log.Warnf("unable to get DPKG metadata while fetching md5 key")
}
contentKey := p.Name
if metadata.Architecture != "" && metadata.Architecture != "all" {

View File

@ -10,7 +10,6 @@ import (
"strings"
"github.com/anchore/syft/internal"
"github.com/anchore/syft/syft/pkg"
"github.com/mitchellh/mapstructure"
)
@ -23,7 +22,7 @@ var (
// parseDpkgStatus is a parser function for Debian DB status contents, returning all Debian packages listed.
func parseDpkgStatus(reader io.Reader) ([]pkg.Package, error) {
buffedReader := bufio.NewReader(reader)
var packages = make([]pkg.Package, 0)
packages := make([]pkg.Package, 0)
continueProcessing := true
for continueProcessing {
@ -152,7 +151,7 @@ func extractSourceVersion(source string) (string, string) {
// handleNewKeyValue parse a new key-value pair from the given unprocessed line
func handleNewKeyValue(line string) (string, interface{}, error) {
if i := strings.Index(line, ":"); i > 0 {
var key = strings.TrimSpace(line[0:i])
key := strings.TrimSpace(line[0:i])
// mapstruct cant handle "-"
key = strings.ReplaceAll(key, "-", "")
val := strings.TrimSpace(line[i+1:])

View File

@ -6,7 +6,6 @@ import (
"strings"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/pkg"
)

View File

@ -6,9 +6,8 @@ import (
"path"
"strings"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/internal/file"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/common"
)
@ -81,7 +80,7 @@ func newJavaArchiveParser(virtualPath string, reader io.Reader, detectNested boo
// parse the loaded archive and return all packages found.
func (j *archiveParser) parse() ([]pkg.Package, error) {
var pkgs = make([]pkg.Package, 0)
pkgs := make([]pkg.Package, 0)
// find the parent package from the java manifest
parentPkg, err := j.discoverMainPackage()
@ -190,7 +189,7 @@ func (j *archiveParser) discoverPkgsFromAllMavenFiles(parentPkg *pkg.Package) ([
// discoverPkgsFromNestedArchives finds Java archives within Java archives, returning all listed Java packages found and
// associating each discovered package to the given parent package.
func (j *archiveParser) discoverPkgsFromNestedArchives(parentPkg *pkg.Package) ([]pkg.Package, error) {
var pkgs = make([]pkg.Package, 0)
pkgs := make([]pkg.Package, 0)
// search and parse pom.properties files & fetch the contents
openers, err := file.ExtractFromZipToUniqueTempFile(j.archivePath, j.contentPath, j.fileManifest.GlobMatch(archiveFormatGlobs...)...)
@ -326,7 +325,10 @@ func packageIdentitiesMatch(p pkg.Package, parentPkg *pkg.Package) bool {
return true
}
metadata := p.Metadata.(pkg.JavaMetadata)
metadata, ok := p.Metadata.(pkg.JavaMetadata)
if !ok {
log.Warnf("unable to get java metadata while determining package identities")
}
// the virtual path matches...
if parentPkg.Metadata.(pkg.JavaMetadata).VirtualPath == metadata.VirtualPath {

View File

@ -8,7 +8,6 @@ import (
"strings"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/pkg"
)

View File

@ -26,7 +26,7 @@ func saveArchiveToTmp(reader io.Reader) (string, string, func(), error) {
archivePath := filepath.Join(tempDir, "archive")
contentDir := filepath.Join(tempDir, "contents")
err = os.Mkdir(contentDir, 0755)
err = os.Mkdir(contentDir, 0o755)
if err != nil {
return contentDir, "", cleanupFn, fmt.Errorf("unable to create processing tempdir: %w", err)
}

View File

@ -7,14 +7,11 @@ import (
"io"
"regexp"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/internal"
"github.com/mitchellh/mapstructure"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/common"
"github.com/mitchellh/mapstructure"
)
// integrity check
@ -168,7 +165,7 @@ func parsePackageJSON(_ string, reader io.Reader) ([]pkg.Package, error) {
for {
var p PackageJSON
if err := dec.Decode(&p); err == io.EOF {
if err := dec.Decode(&p); errors.Is(err, io.EOF) {
break
} else if err != nil {
return nil, fmt.Errorf("failed to parse package.json file: %w", err)

View File

@ -2,6 +2,7 @@ package javascript
import (
"encoding/json"
"errors"
"fmt"
"io"
@ -40,7 +41,7 @@ func parsePackageLock(path string, reader io.Reader) ([]pkg.Package, error) {
for {
var lock PackageLock
if err := dec.Decode(&lock); err == io.EOF {
if err := dec.Decode(&lock); errors.Is(err, io.EOF) {
break
} else if err != nil {
return nil, fmt.Errorf("failed to parse package-lock.json file: %w", err)

View File

@ -20,9 +20,9 @@ func generatePackageURL(p pkg.Package, d *distro.Distro) string {
}
}
var purlType = p.Type.PackageURLType()
var name = p.Name
var namespace = ""
purlType := p.Type.PackageURLType()
name := p.Name
namespace := ""
switch {
case purlType == "":

View File

@ -6,9 +6,7 @@ import (
"path/filepath"
"github.com/anchore/syft/internal"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/source"
)
@ -149,7 +147,7 @@ func (c *PackageCataloger) fetchTopLevelPackages(resolver source.FileResolver, m
// assembleEggOrWheelMetadata discovers and accumulates python package metadata from multiple file sources and returns a single metadata object as well as a list of files where the metadata was derived from.
func (c *PackageCataloger) assembleEggOrWheelMetadata(resolver source.FileResolver, metadataLocation source.Location) (*pkg.PythonPackageMetadata, []source.Location, error) {
var sources = []source.Location{metadataLocation}
sources := []source.Location{metadataLocation}
metadataContents, err := resolver.FileContentsByLocation(metadataLocation)
if err != nil {

View File

@ -2,6 +2,7 @@ package python
import (
"encoding/json"
"errors"
"fmt"
"io"
"strings"
@ -43,7 +44,7 @@ func parsePipfileLock(_ string, reader io.Reader) ([]pkg.Package, error) {
for {
var lock PipfileLock
if err := dec.Decode(&lock); err == io.EOF {
if err := dec.Decode(&lock); errors.Is(err, io.EOF) {
break
} else if err != nil {
return nil, fmt.Errorf("failed to parse Pipfile.lock file: %w", err)

View File

@ -16,13 +16,13 @@ var _ common.ParserFn = parsePoetryLock
func parsePoetryLock(_ string, reader io.Reader) ([]pkg.Package, error) {
tree, err := toml.LoadReader(reader)
if err != nil {
return nil, fmt.Errorf("unable to load poetry.lock for parsing: %v", err)
return nil, fmt.Errorf("unable to load poetry.lock for parsing: %w", err)
}
metadata := PoetryMetadata{}
err = tree.Unmarshal(&metadata)
if err != nil {
return nil, fmt.Errorf("unable to parse poetry.lock: %v", err)
return nil, fmt.Errorf("unable to parse poetry.lock: %w", err)
}
return metadata.Pkgs(), nil

View File

@ -9,10 +9,8 @@ import (
"github.com/anchore/syft/internal/file"
"github.com/anchore/syft/internal/log"
"github.com/mitchellh/mapstructure"
"github.com/anchore/syft/syft/pkg"
"github.com/mitchellh/mapstructure"
)
// parseWheelOrEggMetadata takes a Python Egg or Wheel (which share the same format and values for our purposes),

View File

@ -2,6 +2,7 @@ package python
import (
"encoding/csv"
"errors"
"fmt"
"io"
"strings"
@ -17,7 +18,7 @@ func parseWheelOrEggRecord(reader io.Reader) ([]pkg.PythonFileRecord, error) {
for {
recordList, err := r.Read()
if err == io.EOF {
if errors.Is(err, io.EOF) {
break
}
if err != nil {

View File

@ -7,7 +7,6 @@ import (
"fmt"
"github.com/anchore/syft/internal"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/source"
)
@ -28,7 +27,7 @@ func (c *Cataloger) Name() string {
// Catalog is given an object to resolve file references and content, this function returns any discovered Packages after analyzing rpm db installation.
func (c *Cataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, error) {
fileMatches, err := resolver.FilesByGlob(pkg.RpmDbGlob)
fileMatches, err := resolver.FilesByGlob(pkg.RpmDBGlob)
if err != nil {
return nil, fmt.Errorf("failed to find rpmdb's by glob: %w", err)
}

View File

@ -6,11 +6,10 @@ import (
"io/ioutil"
"os"
"github.com/anchore/syft/syft/file"
rpmdb "github.com/anchore/go-rpmdb/pkg"
"github.com/anchore/syft/internal"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/source"
)
@ -90,10 +89,10 @@ func toELVersion(metadata pkg.RpmdbMetadata) string {
}
func extractRpmdbFileRecords(resolver source.FilePathResolver, entry *rpmdb.PackageInfo) []pkg.RpmdbFileRecord {
var records = make([]pkg.RpmdbFileRecord, 0)
records := make([]pkg.RpmdbFileRecord, 0)
for _, record := range entry.Files {
//only persist RPMDB file records which exist in the image/directory, otherwise ignore them
// only persist RPMDB file records which exist in the image/directory, otherwise ignore them
if resolver.HasPath(record.Path) {
records = append(records, pkg.RpmdbFileRecord{
Path: record.Path,

View File

@ -9,11 +9,9 @@ import (
"strings"
"github.com/anchore/syft/internal"
"github.com/mitchellh/mapstructure"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/common"
"github.com/mitchellh/mapstructure"
)
// integrity check
@ -62,7 +60,7 @@ func processList(s string) []string {
func parseGemSpecEntries(_ string, reader io.Reader) ([]pkg.Package, error) {
var pkgs []pkg.Package
var fields = make(map[string]interface{})
fields := make(map[string]interface{})
scanner := bufio.NewScanner(reader)
for scanner.Scan() {

View File

@ -16,13 +16,13 @@ var _ common.ParserFn = parseCargoLock
func parseCargoLock(_ string, reader io.Reader) ([]pkg.Package, error) {
tree, err := toml.LoadReader(reader)
if err != nil {
return nil, fmt.Errorf("unable to load Cargo.lock for parsing: %v", err)
return nil, fmt.Errorf("unable to load Cargo.lock for parsing: %w", err)
}
metadata := CargoMetadata{}
err = tree.Unmarshal(&metadata)
if err != nil {
return nil, fmt.Errorf("unable to parse Cargo.lock: %v", err)
return nil, fmt.Errorf("unable to parse Cargo.lock: %w", err)
}
return metadata.Pkgs(), nil

View File

@ -3,14 +3,13 @@ package pkg
import (
"sort"
"github.com/anchore/syft/syft/file"
"github.com/anchore/packageurl-go"
"github.com/anchore/syft/syft/distro"
"github.com/anchore/syft/syft/file"
"github.com/scylladb/go-set/strset"
)
const DpkgDbGlob = "**/var/lib/dpkg/{status,status.d/**}"
const DpkgDBGlob = "**/var/lib/dpkg/{status,status.d/**}"
var _ FileOwner = (*DpkgMetadata)(nil)

View File

@ -9,9 +9,9 @@ import (
var globsForbiddenFromBeingOwned = []string{
// any OS DBs should automatically be ignored to prevent cyclic issues (e.g. the "rpm" RPM owns the path to the
// RPM DB, so if not ignored that package would own all other packages on the system).
ApkDbGlob,
DpkgDbGlob,
RpmDbGlob,
ApkDBGlob,
DpkgDBGlob,
RpmDBGlob,
// DEB packages share common copyright info between, this does not mean that sharing these paths implies ownership.
"/usr/share/doc/**/copyright",
}
@ -21,7 +21,7 @@ type ownershipByFilesMetadata struct {
}
func ownershipByFilesRelationships(catalog *Catalog) []Relationship {
var relationships = findOwnershipByFilesRelationships(catalog)
relationships := findOwnershipByFilesRelationships(catalog)
var edges []Relationship
for parent, children := range relationships {
@ -43,7 +43,7 @@ func ownershipByFilesRelationships(catalog *Catalog) []Relationship {
// findOwnershipByFilesRelationships find overlaps in file ownership with a file that defines another package. Specifically, a .Location.Path of
// a package is found to be owned by another (from the owner's .Metadata.Files[]).
func findOwnershipByFilesRelationships(catalog *Catalog) map[ID]map[ID]*strset.Set {
var relationships = make(map[ID]map[ID]*strset.Set)
relationships := make(map[ID]map[ID]*strset.Set)
if catalog == nil {
return relationships

View File

@ -5,15 +5,13 @@ import (
"sort"
"strconv"
"github.com/anchore/syft/syft/file"
"github.com/scylladb/go-set/strset"
"github.com/anchore/packageurl-go"
"github.com/anchore/syft/syft/distro"
"github.com/anchore/syft/syft/file"
"github.com/scylladb/go-set/strset"
)
const RpmDbGlob = "**/var/lib/rpm/Packages"
const RpmDBGlob = "**/var/lib/rpm/Packages"
var _ FileOwner = (*RpmdbMetadata)(nil)

View File

@ -25,7 +25,7 @@ func newAllLayersResolver(img *image.Image) (*allLayersResolver, error) {
return nil, fmt.Errorf("the image does not contain any layers")
}
var layers = make([]int, 0)
layers := make([]int, 0)
for idx := range img.Layers {
layers = append(layers, idx)
}
@ -120,7 +120,7 @@ func (r *allLayersResolver) FilesByPath(paths ...string) ([]Location, error) {
}
// FilesByGlob returns all file.References that match the given path glob pattern from any layer in the image.
// nolint:gocognit
func (r *allLayersResolver) FilesByGlob(patterns ...string) ([]Location, error) {
uniqueFileIDs := file.NewFileReferenceSet()
uniqueLocations := make([]Location, 0)

View File

@ -204,7 +204,7 @@ func (r directoryResolver) String() string {
// FilesByPath returns all file.References that match the given paths from the directory.
func (r directoryResolver) FilesByPath(userPaths ...string) ([]Location, error) {
var references = make([]Location, 0)
references := make([]Location, 0)
for _, userPath := range userPaths {
userStrPath, err := r.requestPath(userPath)

View File

@ -4,11 +4,9 @@ import (
"os"
"syscall"
"github.com/anchore/syft/internal/log"
"github.com/anchore/stereoscope/pkg/file"
"github.com/anchore/stereoscope/pkg/image"
"github.com/anchore/syft/internal/log"
)
type FileMetadata struct {

View File

@ -3,10 +3,9 @@ package source
import (
"fmt"
"github.com/anchore/syft/internal/log"
"github.com/anchore/stereoscope/pkg/file"
"github.com/anchore/stereoscope/pkg/image"
"github.com/anchore/syft/internal/log"
)
// Location represents a path relative to a particular filesystem resolved to a specific file.Reference. This struct is used as a key

View File

@ -35,7 +35,7 @@ func NewMockResolverForPaths(paths ...string) *MockResolver {
func NewMockResolverForPathsWithMetadata(metadata map[Location]FileMetadata) *MockResolver {
var locations []Location
var mimeTypeIndex = make(map[string][]Location)
mimeTypeIndex := make(map[string][]Location)
for l, m := range metadata {
locations = append(locations, l)
mimeTypeIndex[m.MIMEType] = append(mimeTypeIndex[m.MIMEType], l)

View File

@ -46,6 +46,7 @@ func detectScheme(fs afero.Fs, imageDetector sourceDetector, userInput string) (
fileMeta, err := fs.Stat(dirLocation)
if err != nil {
// nolint: nilerr
return UnknownScheme, source, "", nil
}

View File

@ -21,11 +21,13 @@ import (
"github.com/wagoodman/jotframe/pkg/frame"
)
const maxBarWidth = 50
const statusSet = components.SpinnerDotSet
const completedStatus = "✔"
const tileFormat = color.Bold
const interval = 150 * time.Millisecond
const (
maxBarWidth = 50
statusSet = components.SpinnerDotSet
completedStatus = "✔"
tileFormat = color.Bold
interval = 150 * time.Millisecond
)
// StatusTitleColumn is the column index in a given row where status text will be displayed.
const StatusTitleColumn = 31
@ -184,7 +186,7 @@ func PullDockerImageHandler(ctx context.Context, fr *frame.Frame, event partybus
}
// FetchImageHandler periodically writes a the image save and write-to-disk process in the form of a progress bar.
// nolint:dupl
func FetchImageHandler(ctx context.Context, fr *frame.Frame, event partybus.Event, wg *sync.WaitGroup) error {
_, prog, err := stereoEventParsers.ParseFetchImage(event)
if err != nil {
@ -313,7 +315,7 @@ func PackageCatalogerStartedHandler(ctx context.Context, fr *frame.Frame, event
}
// SecretsCatalogerStartedHandler shows the intermittent secrets searching progress.
// nolint:dupl
func SecretsCatalogerStartedHandler(ctx context.Context, fr *frame.Frame, event partybus.Event, wg *sync.WaitGroup) error {
prog, err := syftEventParsers.ParseSecretsCatalogingStarted(event)
if err != nil {
@ -401,7 +403,7 @@ func FileMetadataCatalogerStartedHandler(ctx context.Context, fr *frame.Frame, e
}
// FileIndexingStartedHandler shows the intermittent indexing progress from a directory resolver.
// nolint:dupl
func FileIndexingStartedHandler(ctx context.Context, fr *frame.Frame, event partybus.Event, wg *sync.WaitGroup) error {
path, prog, err := syftEventParsers.ParseFileIndexingStarted(event)
if err != nil {
@ -487,7 +489,7 @@ func FileDigestsCatalogerStartedHandler(ctx context.Context, fr *frame.Frame, ev
}
// ImportStartedHandler shows the intermittent upload progress to Anchore Enterprise.
// nolint:dupl
func ImportStartedHandler(ctx context.Context, fr *frame.Frame, event partybus.Event, wg *sync.WaitGroup) error {
host, prog, err := syftEventParsers.ParseImportStarted(event)
if err != nil {

View File

@ -16,8 +16,7 @@ import (
)
// Handler is an aggregated event handler for the set of supported events (PullDockerImage, ReadImage, FetchImage, PackageCatalogerStarted)
type Handler struct {
}
type Handler struct{}
// NewHandler returns an empty Handler
func NewHandler() *Handler {