account for repoDigests data in power-user json testing

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>
Alex Goodman 2021-03-29 13:12:55 -04:00
parent db35186c7d
commit cd925e5664
5 changed files with 64 additions and 67 deletions

View File

@@ -1,6 +1,9 @@
 package cmd
 
 import (
+	"crypto"
+	"fmt"
+
 	"github.com/anchore/syft/internal/presenter/poweruser"
 	"github.com/anchore/syft/syft"
 	"github.com/anchore/syft/syft/file"
@@ -80,7 +83,26 @@ func catalogFileDigestTask() (powerUserTask, error) {
 		return nil, nil
 	}
 
-	digestsCataloger, err := file.NewDigestsCataloger(appConfig.FileMetadata.Digests)
+	supportedHashAlgorithms := make(map[string]crypto.Hash)
+	for _, h := range []crypto.Hash{
+		crypto.MD5,
+		crypto.SHA1,
+		crypto.SHA256,
+	} {
+		supportedHashAlgorithms[file.DigestAlgorithmName(h)] = h
+	}
+
+	var hashes []crypto.Hash
+	for _, hashStr := range appConfig.FileMetadata.Digests {
+		name := file.CleanDigestAlgorithmName(hashStr)
+		hashObj, ok := supportedHashAlgorithms[name]
+		if !ok {
+			return nil, fmt.Errorf("unsupported hash algorithm: %s", hashStr)
+		}
+		hashes = append(hashes, hashObj)
+	}
+
+	digestsCataloger, err := file.NewDigestsCataloger(hashes)
 	if err != nil {
 		return nil, err
 	}
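
For reference, the string-to-crypto.Hash translation that now lives in the command layer behaves roughly as follows. This is a minimal standalone sketch: the `cleanName` helper stands in for the exported `file.CleanDigestAlgorithmName` shown later in this commit, and the snippet is illustrative rather than the actual syft code.

```go
package main

import (
	"crypto"
	"fmt"
	"strings"
)

// cleanName mirrors file.CleanDigestAlgorithmName: lower-case the name and
// drop dashes so "SHA-256", "sha-256", and "sha256" all resolve to the same key.
func cleanName(name string) string {
	return strings.ReplaceAll(strings.ToLower(name), "-", "")
}

func main() {
	// Build a lookup table of supported algorithms keyed by normalized name.
	supported := make(map[string]crypto.Hash)
	for _, h := range []crypto.Hash{crypto.MD5, crypto.SHA1, crypto.SHA256} {
		supported[cleanName(h.String())] = h // h.String() yields "MD5", "SHA-1", "SHA-256"
	}

	// Translate user-facing config strings into crypto.Hash values, rejecting unknowns.
	for _, cfg := range []string{"SHA-256", "md5", "sha512"} {
		if h, ok := supported[cleanName(cfg)]; ok {
			fmt.Printf("%-8s -> %v\n", cfg, h)
		} else {
			fmt.Printf("%-8s -> unsupported hash algorithm\n", cfg)
		}
	}
}
```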

View File

@@ -47,6 +47,7 @@ func TestJSONPresenter(t *testing.T) {
 			Metadata: pkg.PythonPackageMetadata{
 				Name: "package-1",
 				Version: "1.0.1",
+				Files: []pkg.PythonFileRecord{},
 			},
 			PURL: "a-purl-1",
 			CPEs: []pkg.CPE{
@@ -68,6 +69,7 @@ func TestJSONPresenter(t *testing.T) {
 			Metadata: pkg.DpkgMetadata{
 				Package: "package-2",
 				Version: "2.0.1",
+				Files: []pkg.DpkgFileRecord{},
 			},
 			PURL: "a-purl-2",
 			CPEs: []pkg.CPE{
@@ -76,8 +78,12 @@ func TestJSONPresenter(t *testing.T) {
 	})
 
 	cfg := JSONDocumentConfig{
-		ApplicationConfig: config.Application{},
-		PackageCatalog: catalog,
+		ApplicationConfig: config.Application{
+			FileMetadata: config.FileMetadata{
+				Digests: []string{"sha256"},
+			},
+		},
+		PackageCatalog: catalog,
 		FileMetadata: map[source.Location]source.FileMetadata{
 			source.NewLocation("/a/place"): {
 				Mode: 0775,
@@ -149,6 +155,7 @@ func TestJSONPresenter(t *testing.T) {
 			},
 			RawManifest: []byte("eyJzY2hlbWFWZXJzaW9uIjoyLCJtZWRpYVR5cGUiOiJh..."),
 			RawConfig: []byte("eyJhcmNoaXRlY3R1cmUiOiJhbWQ2NCIsImNvbmZp..."),
+			RepoDigests: []string{},
 		},
 	},
 }
@@ -162,7 +169,7 @@ func TestJSONPresenter(t *testing.T) {
 		testutils.UpdateGoldenFileContents(t, actual)
 	}
 
-	var expected = testutils.GetGoldenFileContents(t)
+	expected := testutils.GetGoldenFileContents(t)
 
 	if !bytes.Equal(expected, actual) {
 		dmp := diffmatchpatch.New()
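
The empty-slice initializations above (and the matching null → [] edits in the golden file below) follow from how encoding/json treats slices: a nil slice marshals to null, while an allocated empty slice marshals to []. A quick standalone illustration:

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	var nilSlice []string    // zero value, never allocated
	emptySlice := []string{} // allocated, zero length

	a, _ := json.Marshal(nilSlice)
	b, _ := json.Marshal(emptySlice)
	fmt.Println(string(a)) // null
	fmt.Println(string(b)) // []
}
```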

View File

@@ -115,7 +115,7 @@
      "architecture": "",
      "maintainer": "",
      "installedSize": 0,
-     "files": null
+     "files": []
     }
    }
   ],
@@ -145,6 +145,7 @@
    ],
    "manifest": "ZXlKelkyaGxiV0ZXWlhKemFXOXVJam95TENKdFpXUnBZVlI1Y0dVaU9pSmguLi4=",
    "config": "ZXlKaGNtTm9hWFJsWTNSMWNtVWlPaUpoYldRMk5DSXNJbU52Ym1acC4uLg==",
+   "repoDigests": [],
    "scope": ""
   }
  },
@@ -187,7 +188,9 @@
     "enabled": false,
     "scope": ""
    },
-   "digests": null
+   "digests": [
+    "sha256"
+   ]
   }
  }
 },

View File

@@ -10,33 +10,11 @@ import (
 	"github.com/anchore/syft/syft/source"
 )
 
-var supportedHashAlgorithms = make(map[string]crypto.Hash)
-
 type DigestsCataloger struct {
 	hashes []crypto.Hash
 }
 
-func init() {
-	for _, h := range []crypto.Hash{
-		crypto.MD5,
-		crypto.SHA1,
-		crypto.SHA256,
-	} {
-		supportedHashAlgorithms[cleanAlgorithmName(h.String())] = h
-	}
-}
-
-func NewDigestsCataloger(hashAlgorithms []string) (*DigestsCataloger, error) {
-	var hashes []crypto.Hash
-	for _, hashStr := range hashAlgorithms {
-		name := cleanAlgorithmName(hashStr)
-		hashObj, ok := supportedHashAlgorithms[name]
-		if !ok {
-			return nil, fmt.Errorf("unsupported hash algorithm: %s", hashStr)
-		}
-		hashes = append(hashes, hashObj)
-	}
-
+func NewDigestsCataloger(hashes []crypto.Hash) (*DigestsCataloger, error) {
 	return &DigestsCataloger{
 		hashes: hashes,
 	}, nil
@@ -84,7 +62,7 @@ func (i *DigestsCataloger) catalogLocation(resolver source.FileResolver, locatio
 	// file type but a body is still allowed.
 	for idx, hasher := range hashers {
 		result[idx] = Digest{
-			Algorithm: cleanAlgorithmName(i.hashes[idx].String()),
+			Algorithm: DigestAlgorithmName(i.hashes[idx]),
 			Value: fmt.Sprintf("%+x", hasher.Sum(nil)),
 		}
 	}
@@ -92,7 +70,11 @@ func (i *DigestsCataloger) catalogLocation(resolver source.FileResolver, locatio
 	return result, nil
 }
 
-func cleanAlgorithmName(name string) string {
+func DigestAlgorithmName(hash crypto.Hash) string {
+	return CleanDigestAlgorithmName(hash.String())
+}
+
+func CleanDigestAlgorithmName(name string) string {
 	lower := strings.ToLower(name)
 	return strings.Replace(lower, "-", "", -1)
 }
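
With this refactor the cataloger no longer parses or validates strings, so a caller supplies crypto.Hash values directly and uses the newly exported helpers when it needs the normalized algorithm name. A minimal usage sketch against the import path used in the cmd change above (illustrative only; note the constructor keeps its error return even though, as shown in this hunk, it can no longer fail on an unsupported algorithm):

```go
package main

import (
	"crypto"
	_ "crypto/sha256" // register SHA-256 so hash.New() can construct a hasher later
	"fmt"

	"github.com/anchore/syft/syft/file"
)

func main() {
	// Callers now pass resolved crypto.Hash values; string validation lives in cmd.
	cataloger, err := file.NewDigestsCataloger([]crypto.Hash{crypto.SHA256})
	if err != nil {
		panic(err)
	}
	_ = cataloger

	// The exported helpers produce the normalized names used in the JSON output.
	fmt.Println(file.DigestAlgorithmName(crypto.SHA256))  // sha256
	fmt.Println(file.CleanDigestAlgorithmName("SHA-256")) // sha256
}
```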

View File

@@ -33,7 +33,7 @@ func testDigests(t testing.TB, files []string, hashes ...crypto.Hash) map[source
 		h := hash.New()
 		h.Write(b)
 		digests[source.NewLocation(f)] = append(digests[source.NewLocation(f)], Digest{
-			Algorithm: cleanAlgorithmName(hash.String()),
+			Algorithm: CleanDigestAlgorithmName(hash.String()),
 			Value: fmt.Sprintf("%x", h.Sum(nil)),
 		})
 	}
@@ -46,40 +46,27 @@ func TestDigestsCataloger_SimpleContents(t *testing.T) {
 	regularFiles := []string{"test-fixtures/last/path.txt", "test-fixtures/another-path.txt", "test-fixtures/a-path.txt"}
 
 	tests := []struct {
 		name string
-		algorithms []string
+		digests []crypto.Hash
 		files []string
 		expected map[source.Location][]Digest
-		constructorErr bool
-		catalogErr bool
+		catalogErr bool
 	}{
 		{
-			name: "bad algorithm",
-			algorithms: []string{"sha-nothing"},
+			name: "md5",
+			digests: []crypto.Hash{crypto.MD5},
 			files: regularFiles,
-			constructorErr: true,
+			expected: testDigests(t, regularFiles, crypto.MD5),
 		},
 		{
-			name: "unsupported algorithm",
-			algorithms: []string{"sha512"},
+			name: "md5-sha1-sha256",
+			digests: []crypto.Hash{crypto.MD5, crypto.SHA1, crypto.SHA256},
 			files: regularFiles,
-			constructorErr: true,
-		},
-		{
-			name: "md5",
-			algorithms: []string{"md5"},
-			files: regularFiles,
-			expected: testDigests(t, regularFiles, crypto.MD5),
-		},
-		{
-			name: "md5-sha1-sha256",
-			algorithms: []string{"md5", "sha1", "sha256"},
-			files: regularFiles,
-			expected: testDigests(t, regularFiles, crypto.MD5, crypto.SHA1, crypto.SHA256),
+			expected: testDigests(t, regularFiles, crypto.MD5, crypto.SHA1, crypto.SHA256),
 		},
 		{
 			name: "directory returns error",
-			algorithms: []string{"md5"},
+			digests: []crypto.Hash{crypto.MD5},
 			files: []string{"test-fixtures/last"},
 			catalogErr: true,
 		},
@@ -87,13 +74,9 @@ func TestDigestsCataloger_SimpleContents(t *testing.T) {
 	for _, test := range tests {
 		t.Run(test.name, func(t *testing.T) {
 
-			c, err := NewDigestsCataloger(test.algorithms)
-			if err != nil && !test.constructorErr {
-				t.Fatalf("could not create cataloger (but should have been able to): %+v", err)
-			} else if err == nil && test.constructorErr {
-				t.Fatalf("expected constructor error but did not get one")
-			} else if test.constructorErr && err != nil {
-				return
+			c, err := NewDigestsCataloger(test.digests)
+			if err != nil {
+				t.Fatalf("could not create cataloger: %+v", err)
 			}
 
 			resolver := source.NewMockResolverForPaths(test.files...)
@@ -161,7 +144,7 @@ func TestDigestsCataloger_MixFileTypes(t *testing.T) {
 	for _, test := range tests {
 		t.Run(test.path, func(t *testing.T) {
 
-			c, err := NewDigestsCataloger([]string{"md5"})
+			c, err := NewDigestsCataloger([]crypto.Hash{crypto.MD5})
 			if err != nil {
 				t.Fatalf("unable to get cataloger: %+v", err)
 			}