Mirror of https://github.com/anchore/syft.git (synced 2025-11-17 16:33:21 +01:00)
Introduce a single SBOM document (#606)
* [wip] single sbom doc (Signed-off-by: Alex Goodman <alex.goodman@anchore.com>)
* fix tests (Signed-off-by: Alex Goodman <alex.goodman@anchore.com>)
* fix more tests (Signed-off-by: Alex Goodman <alex.goodman@anchore.com>)
* fix linting (Signed-off-by: Alex Goodman <alex.goodman@anchore.com>)
* update cli tests (Signed-off-by: Alex Goodman <alex.goodman@anchore.com>)
* remove scope in import path (Signed-off-by: Alex Goodman <alex.goodman@anchore.com>)
* swap SPDX tag-value formatter to single sbom document (Signed-off-by: Alex Goodman <alex.goodman@anchore.com>)
* bust CLI cache (Signed-off-by: Alex Goodman <alex.goodman@anchore.com>)
* update fixture to byte diff (Signed-off-by: Christopher Angelo Phillips <christopher.phillips@anchore.com>)
* byte for byte (Signed-off-by: Christopher Angelo Phillips <christopher.phillips@anchore.com>)
* bust the cache (Signed-off-by: Christopher Angelo Phillips <christopher.phillips@anchore.com>)
* who needs cache (Signed-off-by: Christopher Angelo Phillips <christopher.phillips@anchore.com>)
* add jar for testing (Signed-off-by: Christopher Angelo Phillips <christopher.phillips@anchore.com>)
* no more bit flips (Signed-off-by: Christopher Angelo Phillips <christopher.phillips@anchore.com>)
* update apk with the delta for image and directory cases (Signed-off-by: Christopher Angelo Phillips <christopher.phillips@anchore.com>)
* restore cache workflow (Signed-off-by: Christopher Angelo Phillips <christopher.phillips@anchore.com>)

Co-authored-by: Christopher Angelo Phillips <christopher.phillips@anchore.com>
This commit is contained in:
  parent 6d0ee326d8
  commit bb0f35bac4

Makefile | 2
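The rest of this page is the diff itself. Everything that used to be passed around as separate (catalog, source metadata, distro, scope) arguments now travels inside one sbom.SBOM value. Below is a minimal sketch of the resulting call pattern, using only names that appear in the hunks that follow (sbom.SBOM, sbom.Artifacts, pkg.NewCatalog, distro.NewDistro, source.NewFromDirectory, syftjson.Format().Presenter). It assumes it is compiled inside the syft module itself, since internal/formats/syftjson is not importable from other modules, and the fixture values are copied from the test helpers in this diff.

```go
package main

import (
    "os"

    "github.com/anchore/syft/internal/formats/syftjson"
    "github.com/anchore/syft/syft/distro"
    "github.com/anchore/syft/syft/pkg"
    "github.com/anchore/syft/syft/sbom"
    "github.com/anchore/syft/syft/source"
)

func main() {
    // Inputs that the catalogers would normally produce (fixture values taken
    // from the test helpers changed in this diff).
    catalog := pkg.NewCatalog()
    dist, _ := distro.NewDistro(distro.Debian, "1.2.3", "like!")
    src, _ := source.NewFromDirectory("/some/path")

    // One document now carries everything a presenter needs.
    s := sbom.SBOM{
        Artifacts: sbom.Artifacts{
            PackageCatalog: catalog,
            Distro:         &dist,
        },
        Source: src.Metadata,
    }

    // Presenters take the SBOM instead of (catalog, metadata, distro, scope).
    if err := syftjson.Format().Presenter(s).Present(os.Stdout); err != nil {
        panic(err)
    }
}
```

The presenter and encoder signatures shrink accordingly throughout the diff: each format receives the whole document and pulls what it needs from s.Source and s.Artifacts.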
@@ -18,7 +18,7 @@ SUCCESS := $(BOLD)$(GREEN)
 COVERAGE_THRESHOLD := 62
 # CI cache busting values; change these if you want CI to not use previous stored cache
 INTEGRATION_CACHE_BUSTER="88738d2f"
-CLI_CACHE_BUSTER="789bacdf"
+CLI_CACHE_BUSTER="9a2c03cf"
 BOOTSTRAP_CACHE="c7afb99ad"

 ## Build variables
@@ -18,6 +18,7 @@ import (
     "github.com/anchore/syft/syft/event"
     "github.com/anchore/syft/syft/format"
     "github.com/anchore/syft/syft/pkg"
+    "github.com/anchore/syft/syft/sbom"
     "github.com/anchore/syft/syft/source"
     "github.com/pkg/profile"
     "github.com/spf13/cobra"
@@ -261,21 +262,29 @@ func packagesExecWorker(userInput string) <-chan error {
         }

         if appConfig.Anchore.Host != "" {
-            if err := runPackageSbomUpload(src, src.Metadata, catalog, d, appConfig.Package.Cataloger.ScopeOpt); err != nil {
+            if err := runPackageSbomUpload(src, src.Metadata, catalog, d); err != nil {
                 errs <- err
                 return
             }
         }

+        sbomResult := sbom.SBOM{
+            Artifacts: sbom.Artifacts{
+                PackageCatalog: catalog,
+                Distro: d,
+            },
+            Source: src.Metadata,
+        }
+
         bus.Publish(partybus.Event{
             Type: event.PresenterReady,
-            Value: f.Presenter(catalog, &src.Metadata, d, appConfig.Package.Cataloger.ScopeOpt),
+            Value: f.Presenter(sbomResult),
         })
     }()
     return errs
 }

-func runPackageSbomUpload(src *source.Source, s source.Metadata, catalog *pkg.Catalog, d *distro.Distro, scope source.Scope) error {
+func runPackageSbomUpload(src *source.Source, s source.Metadata, catalog *pkg.Catalog, d *distro.Distro) error {
     log.Infof("uploading results to %s", appConfig.Anchore.Host)

     if src.Metadata.Scheme != source.ImageScheme {
@@ -315,7 +324,6 @@ func runPackageSbomUpload(src *source.Source, s source.Metadata, catalog *pkg.Ca
         Distro: d,
         Dockerfile: dockerfileContents,
         OverwriteExistingUpload: appConfig.Anchore.OverwriteExistingImage,
-        Scope: scope,
         Timeout: appConfig.Anchore.ImportTimeout,
     }

@@ -4,6 +4,8 @@ import (
     "fmt"
     "sync"

+    "github.com/anchore/syft/syft/sbom"
+
     "github.com/anchore/stereoscope"
     "github.com/anchore/syft/internal"
     "github.com/anchore/syft/internal/bus"
@@ -107,9 +109,8 @@ func powerUserExecWorker(userInput string) <-chan error {
         }
         defer cleanup()

-        analysisResults := poweruser.JSONDocumentConfig{
-            SourceMetadata: src.Metadata,
-            ApplicationConfig: *appConfig,
+        analysisResults := sbom.SBOM{
+            Source: src.Metadata,
         }

         wg := &sync.WaitGroup{}
@@ -117,7 +118,7 @@ func powerUserExecWorker(userInput string) <-chan error {
         wg.Add(1)
         go func(task powerUserTask) {
             defer wg.Done()
-            if err = task(&analysisResults, src); err != nil {
+            if err = task(&analysisResults.Artifacts, src); err != nil {
                 errs <- err
                 return
             }
@@ -128,7 +129,7 @@ func powerUserExecWorker(userInput string) <-chan error {

         bus.Publish(partybus.Event{
             Type: event.PresenterReady,
-            Value: poweruser.NewJSONPresenter(analysisResults),
+            Value: poweruser.NewJSONPresenter(analysisResults, *appConfig),
         })
     }()
     return errs
@@ -4,13 +4,14 @@ import (
     "crypto"
     "fmt"

-    "github.com/anchore/syft/internal/presenter/poweruser"
+    "github.com/anchore/syft/syft/sbom"

     "github.com/anchore/syft/syft"
     "github.com/anchore/syft/syft/file"
     "github.com/anchore/syft/syft/source"
 )

-type powerUserTask func(*poweruser.JSONDocumentConfig, *source.Source) error
+type powerUserTask func(*sbom.Artifacts, *source.Source) error

 func powerUserTasks() ([]powerUserTask, error) {
     var tasks []powerUserTask
@@ -42,7 +43,7 @@ func catalogPackagesTask() (powerUserTask, error) {
         return nil, nil
     }

-    task := func(results *poweruser.JSONDocumentConfig, src *source.Source) error {
+    task := func(results *sbom.Artifacts, src *source.Source) error {
         packageCatalog, theDistro, err := syft.CatalogPackages(src, appConfig.Package.Cataloger.ScopeOpt)
         if err != nil {
             return err
@@ -64,7 +65,7 @@ func catalogFileMetadataTask() (powerUserTask, error) {

     metadataCataloger := file.NewMetadataCataloger()

-    task := func(results *poweruser.JSONDocumentConfig, src *source.Source) error {
+    task := func(results *sbom.Artifacts, src *source.Source) error {
         resolver, err := src.FileResolver(appConfig.FileMetadata.Cataloger.ScopeOpt)
         if err != nil {
             return err
@@ -110,7 +111,7 @@ func catalogFileDigestsTask() (powerUserTask, error) {
         return nil, err
     }

-    task := func(results *poweruser.JSONDocumentConfig, src *source.Source) error {
+    task := func(results *sbom.Artifacts, src *source.Source) error {
         resolver, err := src.FileResolver(appConfig.FileMetadata.Cataloger.ScopeOpt)
         if err != nil {
             return err
@@ -142,7 +143,7 @@ func catalogSecretsTask() (powerUserTask, error) {
         return nil, err
     }

-    task := func(results *poweruser.JSONDocumentConfig, src *source.Source) error {
+    task := func(results *sbom.Artifacts, src *source.Source) error {
         resolver, err := src.FileResolver(appConfig.Secrets.Cataloger.ScopeOpt)
         if err != nil {
             return err
@@ -170,7 +171,7 @@ func catalogFileClassificationsTask() (powerUserTask, error) {
         return nil, err
     }

-    task := func(results *poweruser.JSONDocumentConfig, src *source.Source) error {
+    task := func(results *sbom.Artifacts, src *source.Source) error {
         resolver, err := src.FileResolver(appConfig.FileClassification.Cataloger.ScopeOpt)
         if err != nil {
             return err
@@ -197,7 +198,7 @@ func catalogContentsTask() (powerUserTask, error) {
         return nil, err
     }

-    task := func(results *poweruser.JSONDocumentConfig, src *source.Source) error {
+    task := func(results *sbom.Artifacts, src *source.Source) error {
         resolver, err := src.FileResolver(appConfig.FileContents.Cataloger.ScopeOpt)
         if err != nil {
             return err
@@ -26,7 +26,6 @@ type ImportConfig struct {
     Distro *distro.Distro
     Dockerfile []byte
     OverwriteExistingUpload bool
-    Scope source.Scope
     Timeout uint
 }

@@ -74,7 +73,7 @@ func (c *Client) Import(ctx context.Context, cfg ImportConfig) error {
     prog.N++
     sessionID := startOperation.Uuid

-    packageDigest, err := importPackageSBOM(authedCtx, c.client.ImportsApi, sessionID, cfg.SourceMetadata, cfg.Catalog, cfg.Distro, cfg.Scope, stage)
+    packageDigest, err := importPackageSBOM(authedCtx, c.client.ImportsApi, sessionID, cfg.SourceMetadata, cfg.Catalog, cfg.Distro, stage)
     if err != nil {
         return fmt.Errorf("failed to import Package SBOM: %w", err)
     }
@@ -8,6 +8,8 @@ import (
     "fmt"
     "net/http"

+    "github.com/anchore/syft/syft/sbom"
+
     "github.com/anchore/syft/internal/formats/syftjson"

     "github.com/wagoodman/go-progress"
@@ -24,10 +26,19 @@ type packageSBOMImportAPI interface {
     ImportImagePackages(context.Context, string, external.ImagePackageManifest) (external.ImageImportContentResponse, *http.Response, error)
 }

-func packageSbomModel(s source.Metadata, catalog *pkg.Catalog, d *distro.Distro, scope source.Scope) (*external.ImagePackageManifest, error) {
+func packageSbomModel(srcMetadata source.Metadata, catalog *pkg.Catalog, d *distro.Distro) (*external.ImagePackageManifest, error) {
     var buf bytes.Buffer

-    err := syftjson.Format().Presenter(catalog, &s, d, scope).Present(&buf)
+    // TODO: once the top-level API is refactored and SBOMs are the unit of work, then this function will be passed an SBOM and there would be no more need to create an SBOM object here.
+    s := sbom.SBOM{
+        Artifacts: sbom.Artifacts{
+            PackageCatalog: catalog,
+            Distro: d,
+        },
+        Source: srcMetadata,
+    }
+
+    err := syftjson.Format().Presenter(s).Present(&buf)
     if err != nil {
         return nil, fmt.Errorf("unable to serialize results: %w", err)
     }
@@ -41,11 +52,11 @@ func packageSbomModel(s source.Metadata, catalog *pkg.Catalog, d *distro.Distro,
     return &model, nil
 }

-func importPackageSBOM(ctx context.Context, api packageSBOMImportAPI, sessionID string, s source.Metadata, catalog *pkg.Catalog, d *distro.Distro, scope source.Scope, stage *progress.Stage) (string, error) {
+func importPackageSBOM(ctx context.Context, api packageSBOMImportAPI, sessionID string, s source.Metadata, catalog *pkg.Catalog, d *distro.Distro, stage *progress.Stage) (string, error) {
     log.Debug("importing package SBOM")
     stage.Current = "package SBOM"

-    model, err := packageSbomModel(s, catalog, d, scope)
+    model, err := packageSbomModel(s, catalog, d)
     if err != nil {
         return "", fmt.Errorf("unable to create PackageSBOM model: %w", err)
     }
@@ -9,6 +9,8 @@ import (
     "strings"
     "testing"

+    "github.com/anchore/syft/syft/sbom"
+
     "github.com/anchore/client-go/pkg/external"
     "github.com/anchore/syft/internal/formats/syftjson"
     syftjsonModel "github.com/anchore/syft/internal/formats/syftjson/model"
@@ -72,7 +74,7 @@ func TestPackageSbomToModel(t *testing.T) {

     c := pkg.NewCatalog(p)

-    model, err := packageSbomModel(m, c, &d, source.AllLayersScope)
+    model, err := packageSbomModel(m, c, &d)
     if err != nil {
         t.Fatalf("unable to generate model from source material: %+v", err)
     }
@@ -84,8 +86,16 @@ func TestPackageSbomToModel(t *testing.T) {
         t.Fatalf("unable to marshal model: %+v", err)
     }

+    s := sbom.SBOM{
+        Artifacts: sbom.Artifacts{
+            PackageCatalog: c,
+            Distro: &d,
+        },
+        Source: m,
+    }
+
     var buf bytes.Buffer
-    pres := syftjson.Format().Presenter(c, &m, &d, source.AllLayersScope)
+    pres := syftjson.Format().Presenter(s)
     if err := pres.Present(&buf); err != nil {
         t.Fatalf("unable to get expected json: %+v", err)
     }
@@ -187,7 +197,7 @@ func TestPackageSbomImport(t *testing.T) {

     d, _ := distro.NewDistro(distro.CentOS, "8.0", "")

-    theModel, err := packageSbomModel(m, catalog, &d, source.AllLayersScope)
+    theModel, err := packageSbomModel(m, catalog, &d)
     if err != nil {
         t.Fatalf("could not get sbom model: %+v", err)
     }
@@ -226,7 +236,7 @@ func TestPackageSbomImport(t *testing.T) {
     for _, test := range tests {
         t.Run(test.name, func(t *testing.T) {

-            digest, err := importPackageSBOM(context.TODO(), test.api, sessionID, m, catalog, &d, source.AllLayersScope, &progress.Stage{})
+            digest, err := importPackageSBOM(context.TODO(), test.api, sessionID, m, catalog, &d, &progress.Stage{})

             // validate error handling
             if err != nil && !test.expectsError {
@@ -11,6 +11,7 @@ import (
     "github.com/anchore/stereoscope/pkg/imagetest"
     "github.com/anchore/syft/syft/distro"
     "github.com/anchore/syft/syft/pkg"
+    "github.com/anchore/syft/syft/sbom"
     "github.com/anchore/syft/syft/source"
     "github.com/sergi/go-diff/diffmatchpatch"
     "github.com/stretchr/testify/assert"
@@ -90,7 +91,7 @@ func AssertPresenterAgainstGoldenSnapshot(t *testing.T, pres presenter.Presenter
     }
 }

-func ImageInput(t testing.TB, testImage string, options ...ImageOption) (*pkg.Catalog, source.Metadata, *distro.Distro) {
+func ImageInput(t testing.TB, testImage string, options ...ImageOption) sbom.SBOM {
     t.Helper()
     catalog := pkg.NewCatalog()
     var cfg imageCfg
@@ -117,7 +118,13 @@ func ImageInput(t testing.TB, testImage string, options ...ImageOption) (*pkg.Ca
     dist, err := distro.NewDistro(distro.Debian, "1.2.3", "like!")
     assert.NoError(t, err)

-    return catalog, src.Metadata, &dist
+    return sbom.SBOM{
+        Artifacts: sbom.Artifacts{
+            PackageCatalog: catalog,
+            Distro: &dist,
+        },
+        Source: src.Metadata,
+    }
 }

 func populateImageCatalog(catalog *pkg.Catalog, img *image.Image) {
@@ -167,7 +174,7 @@ func populateImageCatalog(catalog *pkg.Catalog, img *image.Image) {
     })
 }

-func DirectoryInput(t testing.TB) (*pkg.Catalog, source.Metadata, *distro.Distro) {
+func DirectoryInput(t testing.TB) sbom.SBOM {
     catalog := newDirectoryCatalog()

     dist, err := distro.NewDistro(distro.Debian, "1.2.3", "like!")
@@ -176,7 +183,13 @@ func DirectoryInput(t testing.TB) (*pkg.Catalog, source.Metadata, *distro.Distro
     src, err := source.NewFromDirectory("/some/path")
     assert.NoError(t, err)

-    return catalog, src.Metadata, &dist
+    return sbom.SBOM{
+        Artifacts: sbom.Artifacts{
+            PackageCatalog: catalog,
+            Distro: &dist,
+        },
+        Source: src.Metadata,
+    }
 }

 func newDirectoryCatalog() *pkg.Catalog {
@@ -4,13 +4,10 @@ import (
     "encoding/xml"
     "io"

-    "github.com/anchore/syft/syft/distro"
-
-    "github.com/anchore/syft/syft/pkg"
-    "github.com/anchore/syft/syft/source"
+    "github.com/anchore/syft/syft/sbom"
 )

-func encoder(output io.Writer, catalog *pkg.Catalog, srcMetadata *source.Metadata, d *distro.Distro, scope source.Scope) error {
+func encoder(output io.Writer, s sbom.SBOM) error {
     enc := xml.NewEncoder(output)
     enc.Indent("", " ")

@@ -19,7 +16,7 @@ func encoder(output io.Writer, catalog *pkg.Catalog, srcMetadata *source.Metadat
         return err
     }

-    err = enc.Encode(toFormatModel(catalog, srcMetadata, d, scope))
+    err = enc.Encode(toFormatModel(s))
     if err != nil {
         return err
     }
@@ -5,17 +5,14 @@ import (
     "regexp"
     "testing"

-    "github.com/anchore/syft/syft/source"
-
     "github.com/anchore/syft/internal/formats/common/testutils"
 )

 var updateCycloneDx = flag.Bool("update-cyclonedx", false, "update the *.golden files for cyclone-dx presenters")

 func TestCycloneDxDirectoryPresenter(t *testing.T) {
-    catalog, metadata, _ := testutils.DirectoryInput(t)
     testutils.AssertPresenterAgainstGoldenSnapshot(t,
-        Format().Presenter(catalog, &metadata, nil, source.SquashedScope),
+        Format().Presenter(testutils.DirectoryInput(t)),
         *updateCycloneDx,
         cycloneDxRedactor,
     )
@@ -23,9 +20,8 @@ func TestCycloneDxDirectoryPresenter(t *testing.T) {

 func TestCycloneDxImagePresenter(t *testing.T) {
     testImage := "image-simple"
-    catalog, metadata, _ := testutils.ImageInput(t, testImage)
     testutils.AssertPresenterAgainstGoldenImageSnapshot(t,
-        Format().Presenter(catalog, &metadata, nil, source.SquashedScope),
+        Format().Presenter(testutils.ImageInput(t, testImage)),
         testImage,
         *updateCycloneDx,
         cycloneDxRedactor,
@@ -4,28 +4,29 @@ import (
     "encoding/xml"
     "time"

+    "github.com/anchore/syft/syft/sbom"
+
     "github.com/anchore/syft/internal"
     "github.com/anchore/syft/internal/formats/cyclonedx12xml/model"
     "github.com/anchore/syft/internal/version"
-    "github.com/anchore/syft/syft/distro"
     "github.com/anchore/syft/syft/pkg"
     "github.com/anchore/syft/syft/source"
     "github.com/google/uuid"
 )

 // toFormatModel creates and populates a new in-memory representation of a CycloneDX 1.2 document
-func toFormatModel(catalog *pkg.Catalog, srcMetadata *source.Metadata, _ *distro.Distro, _ source.Scope) model.Document {
+func toFormatModel(s sbom.SBOM) model.Document {
     versionInfo := version.FromBuild()

     doc := model.Document{
         XMLNs: "http://cyclonedx.org/schema/bom/1.2",
         Version: 1,
         SerialNumber: uuid.New().URN(),
-        BomDescriptor: toBomDescriptor(internal.ApplicationName, versionInfo.Version, srcMetadata),
+        BomDescriptor: toBomDescriptor(internal.ApplicationName, versionInfo.Version, s.Source),
     }

     // attach components
-    for _, p := range catalog.Sorted() {
+    for _, p := range s.Artifacts.PackageCatalog.Sorted() {
         doc.Components = append(doc.Components, toComponent(p))
     }

@@ -43,7 +44,7 @@ func toComponent(p *pkg.Package) model.Component {
     }
 }

 // NewBomDescriptor returns a new BomDescriptor tailored for the current time and "syft" tool details.
-func toBomDescriptor(name, version string, srcMetadata *source.Metadata) *model.BomDescriptor {
+func toBomDescriptor(name, version string, srcMetadata source.Metadata) *model.BomDescriptor {
     return &model.BomDescriptor{
         XMLName: xml.Name{},
         Timestamp: time.Now().Format(time.RFC3339),
@@ -58,10 +59,7 @@ func toBomDescriptor(name, version string, srcMetadata *source.Metadata) *model.
     }
 }

-func toBomDescriptorComponent(srcMetadata *source.Metadata) *model.BomDescriptorComponent {
-    if srcMetadata == nil {
-        return nil
-    }
+func toBomDescriptorComponent(srcMetadata source.Metadata) *model.BomDescriptorComponent {
     switch srcMetadata.Scheme {
     case source.ImageScheme:
         return &model.BomDescriptorComponent{
@@ -4,16 +4,13 @@ import (
     "encoding/json"
     "io"

-    "github.com/anchore/syft/syft/distro"
-
-    "github.com/anchore/syft/syft/pkg"
-    "github.com/anchore/syft/syft/source"
+    "github.com/anchore/syft/syft/sbom"
 )

 const anchoreNamespace = "https://anchore.com/syft"

-func encoder(output io.Writer, catalog *pkg.Catalog, srcMetadata *source.Metadata, d *distro.Distro, scope source.Scope) error {
-    doc := toFormatModel(catalog, srcMetadata, d, scope)
+func encoder(output io.Writer, s sbom.SBOM) error {
+    doc := toFormatModel(s)

     enc := json.NewEncoder(output)
     // prevent > and < from being escaped in the payload
@@ -5,18 +5,14 @@ import (
     "regexp"
     "testing"

-    "github.com/anchore/syft/syft/source"
-
     "github.com/anchore/syft/internal/formats/common/testutils"
-    "github.com/anchore/syft/syft/format"
 )

 var updateSpdxJson = flag.Bool("update-spdx-json", false, "update the *.golden files for spdx-json presenters")

 func TestSPDXJSONDirectoryPresenter(t *testing.T) {
-    catalog, metadata, distro := testutils.DirectoryInput(t)
     testutils.AssertPresenterAgainstGoldenSnapshot(t,
-        format.NewPresenter(encoder, catalog, &metadata, distro, source.UnknownScope),
+        Format().Presenter(testutils.DirectoryInput(t)),
         *updateSpdxJson,
         spdxJsonRedactor,
     )
@@ -24,9 +20,8 @@ func TestSPDXJSONDirectoryPresenter(t *testing.T) {

 func TestSPDXJSONImagePresenter(t *testing.T) {
     testImage := "image-simple"
-    catalog, metadata, distro := testutils.ImageInput(t, testImage, testutils.FromSnapshot())
     testutils.AssertPresenterAgainstGoldenImageSnapshot(t,
-        format.NewPresenter(encoder, catalog, &metadata, distro, source.SquashedScope),
+        Format().Presenter(testutils.ImageInput(t, testImage, testutils.FromSnapshot())),
         testImage,
         *updateSpdxJson,
         spdxJsonRedactor,
@@ -1,7 +1,5 @@
 package model

-import "github.com/anchore/syft/syft/source"
-
 // derived from:
 // - https://spdx.github.io/spdx-spec/appendix-III-RDF-data-model-implementation-and-identifier-syntax/
 // - https://github.com/spdx/spdx-spec/blob/v2.2/schemas/spdx-schema.json
@@ -13,8 +11,6 @@ type Document struct {
     // One instance is required for each SPDX file produced. It provides the necessary information for forward
     // and backward compatibility for processing tools.
     CreationInfo CreationInfo `json:"creationInfo"`
-    // SyftSourceData contains information about what is being described in this SPDX document (e.g. a container image, a directory, etc)
-    SyftSourceData *source.Metadata `json:"syftSourceData,omitempty"`
     // 2.2: Data License; should be "CC0-1.0"
     // Cardinality: mandatory, one
     // License expression for dataLicense. Compliance with the SPDX specification includes populating the SPDX
@@ -3,31 +3,15 @@
  "name": "/some/path",
  "spdxVersion": "SPDX-2.2",
  "creationInfo": {
-  "created": "2021-10-22T19:25:38.33537Z",
+  "created": "2021-10-29T16:26:08.995826Z",
   "creators": [
    "Organization: Anchore, Inc",
    "Tool: syft-[not provided]"
   ],
   "licenseListVersion": "3.14"
  },
- "syftSourceData": {
-  "Scheme": "DirectoryScheme",
-  "ImageMetadata": {
-   "userInput": "",
-   "imageID": "",
-   "manifestDigest": "",
-   "mediaType": "",
-   "tags": null,
-   "imageSize": 0,
-   "layers": null,
-   "manifest": null,
-   "config": null,
-   "repoDigests": null
-  },
-  "Path": "/some/path"
- },
 "dataLicense": "CC0-1.0",
-"documentNamespace": "https:/anchore.com/syft/dir/some/path-a868c45f-e62b-473f-9dd3-b72994be6294",
+"documentNamespace": "https:/anchore.com/syft/dir/some/path-5362d380-914a-458f-b059-d8d27899574c",
 "packages": [
  {
   "SPDXID": "SPDXRef-Package-python-package-1-1.0.1",
@@ -3,44 +3,15 @@
  "name": "user-image-input",
  "spdxVersion": "SPDX-2.2",
  "creationInfo": {
-  "created": "2021-10-22T19:25:38.341582Z",
+  "created": "2021-10-29T16:26:09.001799Z",
   "creators": [
    "Organization: Anchore, Inc",
    "Tool: syft-[not provided]"
   ],
   "licenseListVersion": "3.14"
  },
- "syftSourceData": {
-  "Scheme": "ImageScheme",
-  "ImageMetadata": {
-   "userInput": "user-image-input",
-   "imageID": "sha256:2480160b55bec40c44d3b145c7b2c1c47160db8575c3dcae086d76b9370ae7ca",
-   "manifestDigest": "sha256:2731251dc34951c0e50fcc643b4c5f74922dad1a5d98f302b504cf46cd5d9368",
-   "mediaType": "application/vnd.docker.distribution.manifest.v2+json",
-   "tags": [
-    "stereoscope-fixture-image-simple:85066c51088bdd274f7a89e99e00490f666c49e72ffc955707cd6e18f0e22c5b"
-   ],
-   "imageSize": 38,
-   "layers": [
-    {
-     "mediaType": "application/vnd.docker.image.rootfs.diff.tar.gzip",
-     "digest": "sha256:fb6beecb75b39f4bb813dbf177e501edd5ddb3e69bb45cedeb78c676ee1b7a59",
-     "size": 22
-    },
-    {
-     "mediaType": "application/vnd.docker.image.rootfs.diff.tar.gzip",
-     "digest": "sha256:319b588ce64253a87b533c8ed01cf0025e0eac98e7b516e12532957e1244fdec",
-     "size": 16
-    }
-   ],
-   "manifest": "eyJzY2hlbWFWZXJzaW9uIjoyLCJtZWRpYVR5cGUiOiJhcHBsaWNhdGlvbi92bmQuZG9ja2VyLmRpc3RyaWJ1dGlvbi5tYW5pZmVzdC52Mitqc29uIiwiY29uZmlnIjp7Im1lZGlhVHlwZSI6ImFwcGxpY2F0aW9uL3ZuZC5kb2NrZXIuY29udGFpbmVyLmltYWdlLnYxK2pzb24iLCJzaXplIjo2NjcsImRpZ2VzdCI6InNoYTI1NjoyNDgwMTYwYjU1YmVjNDBjNDRkM2IxNDVjN2IyYzFjNDcxNjBkYjg1NzVjM2RjYWUwODZkNzZiOTM3MGFlN2NhIn0sImxheWVycyI6W3sibWVkaWFUeXBlIjoiYXBwbGljYXRpb24vdm5kLmRvY2tlci5pbWFnZS5yb290ZnMuZGlmZi50YXIuZ3ppcCIsInNpemUiOjIwNDgsImRpZ2VzdCI6InNoYTI1NjpmYjZiZWVjYjc1YjM5ZjRiYjgxM2RiZjE3N2U1MDFlZGQ1ZGRiM2U2OWJiNDVjZWRlYjc4YzY3NmVlMWI3YTU5In0seyJtZWRpYVR5cGUiOiJhcHBsaWNhdGlvbi92bmQuZG9ja2VyLmltYWdlLnJvb3Rmcy5kaWZmLnRhci5nemlwIiwic2l6ZSI6MjA0OCwiZGlnZXN0Ijoic2hhMjU2OjMxOWI1ODhjZTY0MjUzYTg3YjUzM2M4ZWQwMWNmMDAyNWUwZWFjOThlN2I1MTZlMTI1MzI5NTdlMTI0NGZkZWMifV19",
-   "config": "eyJhcmNoaXRlY3R1cmUiOiJhbWQ2NCIsImNvbmZpZyI6eyJFbnYiOlsiUEFUSD0vdXNyL2xvY2FsL3NiaW46L3Vzci9sb2NhbC9iaW46L3Vzci9zYmluOi91c3IvYmluOi9zYmluOi9iaW4iXSwiV29ya2luZ0RpciI6Ii8iLCJPbkJ1aWxkIjpudWxsfSwiY3JlYXRlZCI6IjIwMjEtMTAtMDRUMTE6NDA6MDAuNjM4Mzk0NVoiLCJoaXN0b3J5IjpbeyJjcmVhdGVkIjoiMjAyMS0xMC0wNFQxMTo0MDowMC41OTA3MzE2WiIsImNyZWF0ZWRfYnkiOiJBREQgZmlsZS0xLnR4dCAvc29tZWZpbGUtMS50eHQgIyBidWlsZGtpdCIsImNvbW1lbnQiOiJidWlsZGtpdC5kb2NrZXJmaWxlLnYwIn0seyJjcmVhdGVkIjoiMjAyMS0xMC0wNFQxMTo0MDowMC42MzgzOTQ1WiIsImNyZWF0ZWRfYnkiOiJBREQgZmlsZS0yLnR4dCAvc29tZWZpbGUtMi50eHQgIyBidWlsZGtpdCIsImNvbW1lbnQiOiJidWlsZGtpdC5kb2NrZXJmaWxlLnYwIn1dLCJvcyI6ImxpbnV4Iiwicm9vdGZzIjp7InR5cGUiOiJsYXllcnMiLCJkaWZmX2lkcyI6WyJzaGEyNTY6ZmI2YmVlY2I3NWIzOWY0YmI4MTNkYmYxNzdlNTAxZWRkNWRkYjNlNjliYjQ1Y2VkZWI3OGM2NzZlZTFiN2E1OSIsInNoYTI1NjozMTliNTg4Y2U2NDI1M2E4N2I1MzNjOGVkMDFjZjAwMjVlMGVhYzk4ZTdiNTE2ZTEyNTMyOTU3ZTEyNDRmZGVjIl19fQ==",
-   "repoDigests": []
-  },
-  "Path": ""
- },
 "dataLicense": "CC0-1.0",
-"documentNamespace": "https:/anchore.com/syft/image/user-image-input-7c996682-9cdf-45cd-b70b-e771d740c9ed",
+"documentNamespace": "https:/anchore.com/syft/image/user-image-input-3ad8571c-513f-4fce-944e-5125353c3186",
 "packages": [
  {
   "SPDXID": "SPDXRef-Package-python-package-1-1.0.1",
@@ -6,7 +6,7 @@ import (
     "strings"
     "time"

-    "github.com/anchore/syft/syft/distro"
+    "github.com/anchore/syft/syft/sbom"

     "github.com/anchore/syft/internal"
     "github.com/anchore/syft/internal/formats/common/spdxhelpers"
@@ -19,9 +19,9 @@ import (
 )

 // toFormatModel creates and populates a new JSON document struct that follows the SPDX 2.2 spec from the given cataloging results.
-func toFormatModel(catalog *pkg.Catalog, srcMetadata *source.Metadata, _ *distro.Distro, _ source.Scope) model.Document {
-    name := documentName(srcMetadata)
-    packages, files, relationships := extractFromCatalog(catalog)
+func toFormatModel(s sbom.SBOM) model.Document {
+    name := documentName(s.Source)
+    packages, files, relationships := extractFromCatalog(s.Artifacts.PackageCatalog)

     return model.Document{
         Element: model.Element{
@@ -39,38 +39,33 @@ func toFormatModel(catalog *pkg.Catalog, srcMetadata *source.Metadata, _ *distro
             LicenseListVersion: spdxlicense.Version,
         },
         DataLicense: "CC0-1.0",
-        DocumentNamespace: documentNamespace(name, srcMetadata),
+        DocumentNamespace: documentNamespace(name, s.Source),
         Packages: packages,
         Files: files,
         Relationships: relationships,
-        // TODO: add scope
-        SyftSourceData: srcMetadata,
     }
 }

-func documentName(srcMetadata *source.Metadata) string {
-    if srcMetadata != nil {
+func documentName(srcMetadata source.Metadata) string {
     switch srcMetadata.Scheme {
     case source.ImageScheme:
         return cleanSPDXName(srcMetadata.ImageMetadata.UserInput)
     case source.DirectoryScheme:
         return cleanSPDXName(srcMetadata.Path)
     }
-    }
     // TODO: is this alright?
     return uuid.Must(uuid.NewRandom()).String()
 }

-func documentNamespace(name string, srcMetadata *source.Metadata) string {
+func documentNamespace(name string, srcMetadata source.Metadata) string {
     input := "unknown-source-type"
-    if srcMetadata != nil {
     switch srcMetadata.Scheme {
     case source.ImageScheme:
         input = "image"
     case source.DirectoryScheme:
         input = "dir"
     }
-    }

     uniqueID := uuid.Must(uuid.NewRandom())
     identifier := path.Join(input, uniqueID.String())
@@ -3,15 +3,11 @@ package spdx22tagvalue
 import (
     "io"

+    "github.com/anchore/syft/syft/sbom"
     "github.com/spdx/tools-golang/tvsaver"

-    "github.com/anchore/syft/syft/distro"
-
-    "github.com/anchore/syft/syft/pkg"
-    "github.com/anchore/syft/syft/source"
 )

-func encoder(output io.Writer, catalog *pkg.Catalog, srcMetadata *source.Metadata, d *distro.Distro, scope source.Scope) error {
-    model := toFormatModel(catalog, srcMetadata, d, scope)
+func encoder(output io.Writer, s sbom.SBOM) error {
+    model := toFormatModel(s)
     return tvsaver.Save2_2(&model, output)
 }
@@ -5,18 +5,15 @@ import (
     "regexp"
     "testing"

-    "github.com/anchore/syft/syft/source"

     "github.com/anchore/syft/internal/formats/common/testutils"
-    "github.com/anchore/syft/syft/format"
 )

 var updateSpdxTagValue = flag.Bool("update-spdx-tv", false, "update the *.golden files for spdx-tv presenters")

 func TestSPDXTagValueDirectoryPresenter(t *testing.T) {
-    catalog, metadata, d := testutils.DirectoryInput(t)
     testutils.AssertPresenterAgainstGoldenSnapshot(t,
-        format.NewPresenter(encoder, catalog, &metadata, d, source.UnknownScope),
+        Format().Presenter(testutils.DirectoryInput(t)),
         *updateSpdxTagValue,
         spdxTagValueRedactor,
     )
@@ -24,9 +21,8 @@ func TestSPDXTagValueDirectoryPresenter(t *testing.T) {

 func TestSPDXTagValueImagePresenter(t *testing.T) {
     testImage := "image-simple"
-    catalog, metadata, d := testutils.ImageInput(t, testImage, testutils.FromSnapshot())
     testutils.AssertPresenterAgainstGoldenImageSnapshot(t,
-        format.NewPresenter(encoder, catalog, &metadata, d, source.SquashedScope),
+        Format().Presenter(testutils.ImageInput(t, testImage, testutils.FromSnapshot())),
         testImage,
         *updateSpdxTagValue,
         spdxTagValueRedactor,
@@ -4,19 +4,19 @@ import (
     "fmt"
     "time"

+    "github.com/anchore/syft/syft/sbom"
+
     "github.com/anchore/syft/internal"
     "github.com/anchore/syft/internal/formats/common/spdxhelpers"
     "github.com/anchore/syft/internal/spdxlicense"
     "github.com/anchore/syft/internal/version"
-    "github.com/anchore/syft/syft/distro"
     "github.com/anchore/syft/syft/pkg"
-    "github.com/anchore/syft/syft/source"
     "github.com/spdx/tools-golang/spdx"
 )

 // toFormatModel creates and populates a new JSON document struct that follows the SPDX 2.2 spec from the given cataloging results.
 // nolint:funlen
-func toFormatModel(catalog *pkg.Catalog, srcMetadata *source.Metadata, _ *distro.Distro, _ source.Scope) spdx.Document2_2 {
+func toFormatModel(s sbom.SBOM) spdx.Document2_2 {
     return spdx.Document2_2{
         CreationInfo: &spdx.CreationInfo2_2{
             // 2.1: SPDX Version; should be in the format "SPDX-2.2"
@@ -33,7 +33,7 @@ func toFormatModel(catalog *pkg.Catalog, srcMetadata *source.Metadata, _ *distro

             // 2.4: Document Name
             // Cardinality: mandatory, one
-            DocumentName: srcMetadata.ImageMetadata.UserInput,
+            DocumentName: s.Source.ImageMetadata.UserInput,

             // 2.5: Document Namespace
             // Cardinality: mandatory, one
@@ -52,7 +52,7 @@ func toFormatModel(catalog *pkg.Catalog, srcMetadata *source.Metadata, _ *distro
             // In many cases, the URI will point to a web accessible document, but this should not be assumed
             // to be the case.

-            DocumentNamespace: fmt.Sprintf("https://anchore.com/syft/image/%s", srcMetadata.ImageMetadata.UserInput),
+            DocumentNamespace: fmt.Sprintf("https://anchore.com/syft/image/%s", s.Source.ImageMetadata.UserInput),

             // 2.6: External Document References
             // Cardinality: optional, one or many
@@ -81,7 +81,7 @@ func toFormatModel(catalog *pkg.Catalog, srcMetadata *source.Metadata, _ *distro
             // Cardinality: optional, one
             DocumentComment: "",
         },
-        Packages: toFormatPackages(catalog),
+        Packages: toFormatPackages(s.Artifacts.PackageCatalog),
     }
 }

@@ -5,19 +5,18 @@ import (
     "fmt"
     "io"

+    "github.com/anchore/syft/syft/sbom"
+
     "github.com/anchore/syft/internal/formats/syftjson/model"
-    "github.com/anchore/syft/syft/distro"
-    "github.com/anchore/syft/syft/pkg"
-    "github.com/anchore/syft/syft/source"
 )

-func decoder(reader io.Reader) (*pkg.Catalog, *source.Metadata, *distro.Distro, source.Scope, error) {
+func decoder(reader io.Reader) (*sbom.SBOM, error) {
     dec := json.NewDecoder(reader)

     var doc model.Document
     err := dec.Decode(&doc)
     if err != nil {
-        return nil, nil, nil, source.UnknownScope, fmt.Errorf("unable to decode syft-json: %w", err)
+        return nil, fmt.Errorf("unable to decode syft-json: %w", err)
     }

     return toSyftModel(doc)
@@ -5,8 +5,6 @@ import (
     "strings"
     "testing"

-    "github.com/anchore/syft/syft/source"
-
     "github.com/anchore/syft/internal/formats/common/testutils"
     "github.com/go-test/deep"
     "github.com/stretchr/testify/assert"
@@ -14,20 +12,20 @@ import (

 func TestEncodeDecodeCycle(t *testing.T) {
     testImage := "image-simple"
-    originalCatalog, originalMetadata, _ := testutils.ImageInput(t, testImage)
+    originalSBOM := testutils.ImageInput(t, testImage)

     var buf bytes.Buffer
-    assert.NoError(t, encoder(&buf, originalCatalog, &originalMetadata, nil, source.SquashedScope))
+    assert.NoError(t, encoder(&buf, originalSBOM))

-    actualCatalog, actualMetadata, _, _, err := decoder(bytes.NewReader(buf.Bytes()))
+    actualSBOM, err := decoder(bytes.NewReader(buf.Bytes()))
     assert.NoError(t, err)

-    for _, d := range deep.Equal(originalMetadata, *actualMetadata) {
+    for _, d := range deep.Equal(originalSBOM.Source, actualSBOM.Source) {
         t.Errorf("metadata difference: %+v", d)
     }

-    actualPackages := actualCatalog.Sorted()
-    for idx, p := range originalCatalog.Sorted() {
+    actualPackages := actualSBOM.Artifacts.PackageCatalog.Sorted()
+    for idx, p := range originalSBOM.Artifacts.PackageCatalog.Sorted() {
         if !assert.Equal(t, p.Name, actualPackages[idx].Name) {
             t.Errorf("different package at idx=%d: %s vs %s", idx, p.Name, actualPackages[idx].Name)
             continue
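The syft-json format gets the same treatment on the read side: decoder now returns *sbom.SBOM instead of a four-value tuple, as exercised by the encode/decode cycle test above. A condensed sketch of that round trip, assuming only the unexported encoder/decoder signatures shown in these hunks; it is a fragment that would live inside the internal/formats/syftjson package and relies on the standard library bytes package being imported there.

```go
// roundTrip is a hypothetical helper mirroring TestEncodeDecodeCycle above.
// encoder(output io.Writer, s sbom.SBOM) error and
// decoder(reader io.Reader) (*sbom.SBOM, error) are the signatures introduced by this change.
func roundTrip(s sbom.SBOM) (*sbom.SBOM, error) {
    var buf bytes.Buffer
    if err := encoder(&buf, s); err != nil { // serialize the whole SBOM document
        return nil, err
    }
    return decoder(bytes.NewReader(buf.Bytes())) // parse it back into a single SBOM value
}
```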
@@ -4,15 +4,12 @@ import (
     "encoding/json"
     "io"

-    "github.com/anchore/syft/syft/distro"
-
-    "github.com/anchore/syft/syft/pkg"
-    "github.com/anchore/syft/syft/source"
+    "github.com/anchore/syft/syft/sbom"
 )

-func encoder(output io.Writer, catalog *pkg.Catalog, srcMetadata *source.Metadata, d *distro.Distro, scope source.Scope) error {
+func encoder(output io.Writer, s sbom.SBOM) error {
     // TODO: application config not available yet
-    doc := ToFormatModel(catalog, srcMetadata, d, scope, nil)
+    doc := ToFormatModel(s, nil)

     enc := json.NewEncoder(output)
     // prevent > and < from being escaped in the payload
@@ -4,27 +4,22 @@ import (
     "flag"
     "testing"

-    "github.com/anchore/syft/syft/source"
-
     "github.com/anchore/syft/internal/formats/common/testutils"
-    "github.com/anchore/syft/syft/format"
 )

 var updateJson = flag.Bool("update-json", false, "update the *.golden files for json presenters")

 func TestDirectoryPresenter(t *testing.T) {
-    catalog, metadata, distro := testutils.DirectoryInput(t)
     testutils.AssertPresenterAgainstGoldenSnapshot(t,
-        format.NewPresenter(encoder, catalog, &metadata, distro, source.SquashedScope),
+        Format().Presenter(testutils.DirectoryInput(t)),
         *updateJson,
     )
 }

 func TestImagePresenter(t *testing.T) {
     testImage := "image-simple"
-    catalog, metadata, distro := testutils.ImageInput(t, testImage, testutils.FromSnapshot())
     testutils.AssertPresenterAgainstGoldenImageSnapshot(t,
-        format.NewPresenter(encoder, catalog, &metadata, distro, source.SquashedScope),
+        Format().Presenter(testutils.ImageInput(t, testImage, testutils.FromSnapshot())),
         testImage,
         *updateJson,
     )
@ -20,11 +20,6 @@ type sourceUnpacker struct {
|
|||||||
Target json.RawMessage `json:"target"`
|
Target json.RawMessage `json:"target"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type ImageSource struct {
|
|
||||||
source.ImageMetadata
|
|
||||||
Scope source.Scope `json:"scope"`
|
|
||||||
}
|
|
||||||
|
|
||||||
// UnmarshalJSON populates a source object from JSON bytes.
|
// UnmarshalJSON populates a source object from JSON bytes.
|
||||||
func (s *Source) UnmarshalJSON(b []byte) error {
|
func (s *Source) UnmarshalJSON(b []byte) error {
|
||||||
var unpacker sourceUnpacker
|
var unpacker sourceUnpacker
|
||||||
@ -43,7 +38,7 @@ func (s *Source) UnmarshalJSON(b []byte) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
case "image":
|
case "image":
|
||||||
var payload ImageSource
|
var payload source.ImageMetadata
|
||||||
if err := json.Unmarshal(unpacker.Target, &payload); err != nil {
|
 	if err := json.Unmarshal(unpacker.Target, &payload); err != nil {
 		return err
 	}
@@ -88,8 +88,7 @@
       ],
       "manifest": "eyJzY2hlbWFWZXJzaW9uIjoyLCJtZWRpYVR5cGUiOiJhcHBsaWNhdGlvbi92bmQuZG9ja2VyLmRpc3RyaWJ1dGlvbi5tYW5pZmVzdC52Mitqc29uIiwiY29uZmlnIjp7Im1lZGlhVHlwZSI6ImFwcGxpY2F0aW9uL3ZuZC5kb2NrZXIuY29udGFpbmVyLmltYWdlLnYxK2pzb24iLCJzaXplIjo2NjcsImRpZ2VzdCI6InNoYTI1NjoyNDgwMTYwYjU1YmVjNDBjNDRkM2IxNDVjN2IyYzFjNDcxNjBkYjg1NzVjM2RjYWUwODZkNzZiOTM3MGFlN2NhIn0sImxheWVycyI6W3sibWVkaWFUeXBlIjoiYXBwbGljYXRpb24vdm5kLmRvY2tlci5pbWFnZS5yb290ZnMuZGlmZi50YXIuZ3ppcCIsInNpemUiOjIwNDgsImRpZ2VzdCI6InNoYTI1NjpmYjZiZWVjYjc1YjM5ZjRiYjgxM2RiZjE3N2U1MDFlZGQ1ZGRiM2U2OWJiNDVjZWRlYjc4YzY3NmVlMWI3YTU5In0seyJtZWRpYVR5cGUiOiJhcHBsaWNhdGlvbi92bmQuZG9ja2VyLmltYWdlLnJvb3Rmcy5kaWZmLnRhci5nemlwIiwic2l6ZSI6MjA0OCwiZGlnZXN0Ijoic2hhMjU2OjMxOWI1ODhjZTY0MjUzYTg3YjUzM2M4ZWQwMWNmMDAyNWUwZWFjOThlN2I1MTZlMTI1MzI5NTdlMTI0NGZkZWMifV19",
       "config": "eyJhcmNoaXRlY3R1cmUiOiJhbWQ2NCIsImNvbmZpZyI6eyJFbnYiOlsiUEFUSD0vdXNyL2xvY2FsL3NiaW46L3Vzci9sb2NhbC9iaW46L3Vzci9zYmluOi91c3IvYmluOi9zYmluOi9iaW4iXSwiV29ya2luZ0RpciI6Ii8iLCJPbkJ1aWxkIjpudWxsfSwiY3JlYXRlZCI6IjIwMjEtMTAtMDRUMTE6NDA6MDAuNjM4Mzk0NVoiLCJoaXN0b3J5IjpbeyJjcmVhdGVkIjoiMjAyMS0xMC0wNFQxMTo0MDowMC41OTA3MzE2WiIsImNyZWF0ZWRfYnkiOiJBREQgZmlsZS0xLnR4dCAvc29tZWZpbGUtMS50eHQgIyBidWlsZGtpdCIsImNvbW1lbnQiOiJidWlsZGtpdC5kb2NrZXJmaWxlLnYwIn0seyJjcmVhdGVkIjoiMjAyMS0xMC0wNFQxMTo0MDowMC42MzgzOTQ1WiIsImNyZWF0ZWRfYnkiOiJBREQgZmlsZS0yLnR4dCAvc29tZWZpbGUtMi50eHQgIyBidWlsZGtpdCIsImNvbW1lbnQiOiJidWlsZGtpdC5kb2NrZXJmaWxlLnYwIn1dLCJvcyI6ImxpbnV4Iiwicm9vdGZzIjp7InR5cGUiOiJsYXllcnMiLCJkaWZmX2lkcyI6WyJzaGEyNTY6ZmI2YmVlY2I3NWIzOWY0YmI4MTNkYmYxNzdlNTAxZWRkNWRkYjNlNjliYjQ1Y2VkZWI3OGM2NzZlZTFiN2E1OSIsInNoYTI1NjozMTliNTg4Y2U2NDI1M2E4N2I1MzNjOGVkMDFjZjAwMjVlMGVhYzk4ZTdiNTE2ZTEyNTMyOTU3ZTEyNDRmZGVjIl19fQ==",
-      "repoDigests": [],
-      "scope": "Squashed"
+      "repoDigests": []
     }
   },
   "distro": {
@@ -3,6 +3,8 @@ package syftjson
 import (
 	"fmt"
 
+	"github.com/anchore/syft/syft/sbom"
+
 	"github.com/anchore/syft/internal"
 	"github.com/anchore/syft/internal/formats/syftjson/model"
 	"github.com/anchore/syft/internal/log"
@@ -13,17 +15,17 @@
 )
 
 // TODO: this is export4ed for the use of the power-user command (temp)
-func ToFormatModel(catalog *pkg.Catalog, srcMetadata *source.Metadata, d *distro.Distro, scope source.Scope, applicationConfig interface{}) model.Document {
-	src, err := toSourceModel(srcMetadata, scope)
+func ToFormatModel(s sbom.SBOM, applicationConfig interface{}) model.Document {
+	src, err := toSourceModel(s.Source)
 	if err != nil {
 		log.Warnf("unable to create syft-json source object: %+v", err)
 	}
 
 	return model.Document{
-		Artifacts: toPackageModels(catalog),
-		ArtifactRelationships: toRelationshipModel(pkg.NewRelationships(catalog)),
+		Artifacts: toPackageModels(s.Artifacts.PackageCatalog),
+		ArtifactRelationships: toRelationshipModel(pkg.NewRelationships(s.Artifacts.PackageCatalog)),
 		Source: src,
-		Distro: toDistroModel(d),
+		Distro: toDistroModel(s.Artifacts.Distro),
 		Descriptor: model.Descriptor{
 			Name: internal.ApplicationName,
 			Version: version.FromBuild().Version,
@@ -99,15 +101,12 @@ func toRelationshipModel(relationships []pkg.Relationship) []model.Relationship
 }
 
 // toSourceModel creates a new source object to be represented into JSON.
-func toSourceModel(src *source.Metadata, scope source.Scope) (model.Source, error) {
+func toSourceModel(src source.Metadata) (model.Source, error) {
 	switch src.Scheme {
 	case source.ImageScheme:
 		return model.Source{
 			Type: "image",
-			Target: model.ImageSource{
-				ImageMetadata: src.ImageMetadata,
-				Scope: scope,
-			},
+			Target: src.ImageMetadata,
 		}, nil
 	case source.DirectoryScheme:
 		return model.Source{
@@ -5,35 +5,39 @@ import (
 	"github.com/anchore/syft/internal/log"
 	"github.com/anchore/syft/syft/distro"
 	"github.com/anchore/syft/syft/pkg"
+	"github.com/anchore/syft/syft/sbom"
 	"github.com/anchore/syft/syft/source"
 )
 
-func toSyftModel(doc model.Document) (*pkg.Catalog, *source.Metadata, *distro.Distro, source.Scope, error) {
+func toSyftModel(doc model.Document) (*sbom.SBOM, error) {
 	dist, err := distro.NewDistro(distro.Type(doc.Distro.Name), doc.Distro.Version, doc.Distro.IDLike)
 	if err != nil {
-		return nil, nil, nil, source.UnknownScope, err
+		return nil, err
 	}
 
-	srcMetadata, scope := toSyftSourceData(doc.Source)
-
-	return toSyftCatalog(doc.Artifacts), srcMetadata, &dist, scope, nil
+	return &sbom.SBOM{
+		Artifacts: sbom.Artifacts{
+			PackageCatalog: toSyftCatalog(doc.Artifacts),
+			Distro: &dist,
+		},
+		Source: *toSyftSourceData(doc.Source),
+	}, nil
 }
 
-func toSyftSourceData(s model.Source) (*source.Metadata, source.Scope) {
+func toSyftSourceData(s model.Source) *source.Metadata {
 	switch s.Type {
 	case "directory":
 		return &source.Metadata{
 			Scheme: source.DirectoryScheme,
 			Path: s.Target.(string),
-		}, source.UnknownScope
+		}
 	case "image":
-		parsedSource := s.Target.(model.ImageSource)
 		return &source.Metadata{
 			Scheme: source.ImageScheme,
-			ImageMetadata: parsedSource.ImageMetadata,
-		}, parsedSource.Scope
+			ImageMetadata: s.Target.(source.ImageMetadata),
+		}
 	}
-	return nil, source.UnknownScope
+	return nil
 }
 
 func toSyftCatalog(pkgs []model.Package) *pkg.Catalog {
@@ -6,18 +6,16 @@ import (
 	"sort"
 	"strings"
 
-	"github.com/olekukonko/tablewriter"
+	"github.com/anchore/syft/syft/sbom"
 
-	"github.com/anchore/syft/syft/distro"
-	"github.com/anchore/syft/syft/pkg"
-	"github.com/anchore/syft/syft/source"
+	"github.com/olekukonko/tablewriter"
 )
 
-func encoder(output io.Writer, catalog *pkg.Catalog, _ *source.Metadata, _ *distro.Distro, _ source.Scope) error {
+func encoder(output io.Writer, s sbom.SBOM) error {
 	var rows [][]string
 
 	columns := []string{"Name", "Version", "Type"}
-	for _, p := range catalog.Sorted() {
+	for _, p := range s.Artifacts.PackageCatalog.Sorted() {
 		row := []string{
 			p.Name,
 			p.Version,
@@ -5,17 +5,14 @@ import (
 	"testing"
 
 	"github.com/anchore/syft/internal/formats/common/testutils"
-	"github.com/anchore/syft/syft/format"
-	"github.com/anchore/syft/syft/source"
 	"github.com/go-test/deep"
 )
 
 var updateTableGoldenFiles = flag.Bool("update-table", false, "update the *.golden files for table format")
 
 func TestTablePresenter(t *testing.T) {
-	catalog, metadata, distro := testutils.DirectoryInput(t)
 	testutils.AssertPresenterAgainstGoldenSnapshot(t,
-		format.NewPresenter(encoder, catalog, &metadata, distro, source.SquashedScope),
+		Format().Presenter(testutils.DirectoryInput(t)),
 		*updateTableGoldenFiles,
 	)
 }
@@ -5,23 +5,23 @@ import (
 	"io"
 	"text/tabwriter"
 
-	"github.com/anchore/syft/syft/distro"
-	"github.com/anchore/syft/syft/pkg"
+	"github.com/anchore/syft/syft/sbom"
+
 	"github.com/anchore/syft/syft/source"
 )
 
-func encoder(output io.Writer, catalog *pkg.Catalog, srcMetadata *source.Metadata, _ *distro.Distro, _ source.Scope) error {
+func encoder(output io.Writer, s sbom.SBOM) error {
 	// init the tabular writer
 	w := new(tabwriter.Writer)
 	w.Init(output, 0, 8, 0, '\t', tabwriter.AlignRight)
 
-	switch srcMetadata.Scheme {
+	switch s.Source.Scheme {
 	case source.DirectoryScheme:
-		fmt.Fprintf(w, "[Path: %s]\n", srcMetadata.Path)
+		fmt.Fprintf(w, "[Path: %s]\n", s.Source.Path)
 	case source.ImageScheme:
 		fmt.Fprintln(w, "[Image]")
 
-		for idx, l := range srcMetadata.ImageMetadata.Layers {
+		for idx, l := range s.Source.ImageMetadata.Layers {
 			fmt.Fprintln(w, " Layer:\t", idx)
 			fmt.Fprintln(w, " Digest:\t", l.Digest)
 			fmt.Fprintln(w, " Size:\t", l.Size)
@@ -30,12 +30,12 @@ func encoder(output io.Writer, catalog *pkg.Catalog, srcMetadata *source.Metadat
 			w.Flush()
 		}
 	default:
-		return fmt.Errorf("unsupported source: %T", srcMetadata.Scheme)
+		return fmt.Errorf("unsupported source: %T", s.Source.Scheme)
 	}
 
 	// populate artifacts...
 	rows := 0
-	for _, p := range catalog.Sorted() {
+	for _, p := range s.Artifacts.PackageCatalog.Sorted() {
 		fmt.Fprintf(w, "[%s]\n", p.Name)
 		fmt.Fprintln(w, " Version:\t", p.Version)
 		fmt.Fprintln(w, " Type:\t", string(p.Type))
@@ -4,27 +4,22 @@ import (
 	"flag"
 	"testing"
 
-	"github.com/anchore/syft/syft/source"
-
 	"github.com/anchore/syft/internal/formats/common/testutils"
-	"github.com/anchore/syft/syft/format"
 )
 
 var updateTextPresenterGoldenFiles = flag.Bool("update-text", false, "update the *.golden files for text presenters")
 
 func TestTextDirectoryPresenter(t *testing.T) {
-	catalog, metadata, d := testutils.DirectoryInput(t)
 	testutils.AssertPresenterAgainstGoldenSnapshot(t,
-		format.NewPresenter(encoder, catalog, &metadata, d, source.UnknownScope),
+		Format().Presenter(testutils.DirectoryInput(t)),
 		*updateTextPresenterGoldenFiles,
 	)
 }
 
 func TestTextImagePresenter(t *testing.T) {
 	testImage := "image-simple"
-	catalog, metadata, d := testutils.ImageInput(t, testImage, testutils.FromSnapshot())
 	testutils.AssertPresenterAgainstGoldenImageSnapshot(t,
-		format.NewPresenter(encoder, catalog, &metadata, d, source.SquashedScope),
+		Format().Presenter(testutils.ImageInput(t, testImage, testutils.FromSnapshot())),
 		testImage,
 		*updateTextPresenterGoldenFiles,
 	)
@@ -3,6 +3,7 @@ package poweruser
 import (
 	"github.com/anchore/syft/internal/formats/syftjson"
 	"github.com/anchore/syft/internal/formats/syftjson/model"
+	"github.com/anchore/syft/syft/sbom"
 )
 
 type JSONDocument struct {
@@ -18,17 +19,17 @@ type JSONDocument struct {
 }
 
 // NewJSONDocument creates and populates a new JSON document struct from the given cataloging results.
-func NewJSONDocument(config JSONDocumentConfig) (JSONDocument, error) {
-	fileMetadata, err := NewJSONFileMetadata(config.FileMetadata, config.FileDigests)
+func NewJSONDocument(s sbom.SBOM, appConfig interface{}) (JSONDocument, error) {
+	fileMetadata, err := NewJSONFileMetadata(s.Artifacts.FileMetadata, s.Artifacts.FileDigests)
 	if err != nil {
 		return JSONDocument{}, err
 	}
 
 	return JSONDocument{
-		FileClassifications: NewJSONFileClassifications(config.FileClassifications),
-		FileContents: NewJSONFileContents(config.FileContents),
+		FileClassifications: NewJSONFileClassifications(s.Artifacts.FileClassifications),
+		FileContents: NewJSONFileContents(s.Artifacts.FileContents),
 		FileMetadata: fileMetadata,
-		Secrets: NewJSONSecrets(config.Secrets),
-		Document: syftjson.ToFormatModel(config.PackageCatalog, &config.SourceMetadata, config.Distro, config.ApplicationConfig.Package.Cataloger.ScopeOpt, config.ApplicationConfig),
+		Secrets: NewJSONSecrets(s.Artifacts.Secrets),
+		Document: syftjson.ToFormatModel(s, appConfig),
 	}, nil
 }
@@ -3,23 +3,27 @@ package poweruser
 import (
 	"encoding/json"
 	"io"
 
+	"github.com/anchore/syft/syft/sbom"
 )
 
 // JSONPresenter is a JSON presentation object for the syft results
 type JSONPresenter struct {
-	config JSONDocumentConfig
+	sbom sbom.SBOM
+	config interface{}
 }
 
 // NewJSONPresenter creates a new JSON presenter object for the given cataloging results.
-func NewJSONPresenter(config JSONDocumentConfig) *JSONPresenter {
+func NewJSONPresenter(s sbom.SBOM, appConfig interface{}) *JSONPresenter {
 	return &JSONPresenter{
-		config: config,
+		sbom: s,
+		config: appConfig,
 	}
 }
 
 // Present the PackageCatalog results to the given writer.
 func (p *JSONPresenter) Present(output io.Writer) error {
-	doc, err := NewJSONDocument(p.config)
+	doc, err := NewJSONDocument(p.sbom, p.config)
 	if err != nil {
 		return err
 	}
@@ -5,6 +5,8 @@ import (
 	"flag"
 	"testing"
 
+	"github.com/anchore/syft/syft/sbom"
+
 	"github.com/sergi/go-diff/diffmatchpatch"
 
 	"github.com/anchore/syft/syft/file"
@@ -77,12 +79,14 @@ func TestJSONPresenter(t *testing.T) {
 		},
 	})
 
-	cfg := JSONDocumentConfig{
-		ApplicationConfig: config.Application{
+	appConfig := config.Application{
 		FileMetadata: config.FileMetadata{
 			Digests: []string{"sha256"},
 		},
-		},
+	}
 
+	cfg := sbom.SBOM{
+		Artifacts: sbom.Artifacts{
 			PackageCatalog: catalog,
 			FileMetadata: map[source.Location]source.FileMetadata{
 				source.NewLocation("/a/place"): {
@@ -133,7 +137,8 @@ func TestJSONPresenter(t *testing.T) {
 			RawVersion: "7",
 			IDLike: "rhel",
 		},
-		SourceMetadata: source.Metadata{
+		},
+		Source: source.Metadata{
 			Scheme: source.ImageScheme,
 			ImageMetadata: source.ImageMetadata{
 				UserInput: "user-image-input",
@@ -163,7 +168,7 @@ func TestJSONPresenter(t *testing.T) {
 		},
 	}
 
-	if err := NewJSONPresenter(cfg).Present(&buffer); err != nil {
+	if err := NewJSONPresenter(cfg, appConfig).Present(&buffer); err != nil {
 		t.Fatal(err)
 	}
 	actual := buffer.Bytes()
@@ -157,8 +157,7 @@
       ],
       "manifest": "ZXlKelkyaGxiV0ZXWlhKemFXOXVJam95TENKdFpXUnBZVlI1Y0dVaU9pSmguLi4=",
      "config": "ZXlKaGNtTm9hWFJsWTNSMWNtVWlPaUpoYldRMk5DSXNJbU52Ym1acC4uLg==",
-      "repoDigests": [],
-      "scope": ""
+      "repoDigests": []
     }
   },
   "distro": {
@@ -5,23 +5,21 @@ import (
 	"fmt"
 	"io"
 
+	"github.com/anchore/syft/syft/sbom"
+
 	"github.com/anchore/syft/internal/formats"
-	"github.com/anchore/syft/syft/distro"
 	"github.com/anchore/syft/syft/format"
-	"github.com/anchore/syft/syft/pkg"
-	"github.com/anchore/syft/syft/source"
 )
 
 // Encode takes all SBOM elements and a format option and encodes an SBOM document.
-// TODO: encapsulate input data into common sbom document object
-func Encode(catalog *pkg.Catalog, metadata *source.Metadata, dist *distro.Distro, scope source.Scope, option format.Option) ([]byte, error) {
+func Encode(s sbom.SBOM, option format.Option) ([]byte, error) {
 	f := formats.ByOption(option)
 	if f == nil {
 		return nil, fmt.Errorf("unsupported format: %+v", option)
 	}
 	buff := bytes.Buffer{}
 
-	if err := f.Encode(&buff, catalog, dist, metadata, scope); err != nil {
+	if err := f.Encode(&buff, s); err != nil {
 		return nil, fmt.Errorf("unable to encode sbom: %w", err)
 	}
 
@@ -29,20 +27,19 @@ func Encode(catalog *pkg.Catalog, metadata *source.Metadata, dist *distro.Distro
 }
 
 // Decode takes a reader for an SBOM and generates all internal SBOM elements.
-// TODO: encapsulate return data into common sbom document object
-func Decode(reader io.Reader) (*pkg.Catalog, *source.Metadata, *distro.Distro, source.Scope, format.Option, error) {
+func Decode(reader io.Reader) (*sbom.SBOM, format.Option, error) {
 	by, err := io.ReadAll(reader)
 	if err != nil {
-		return nil, nil, nil, source.UnknownScope, format.UnknownFormatOption, fmt.Errorf("unable to read sbom: %w", err)
+		return nil, format.UnknownFormatOption, fmt.Errorf("unable to read sbom: %w", err)
 	}
 
 	f, err := formats.Identify(by)
 	if err != nil {
-		return nil, nil, nil, source.UnknownScope, format.UnknownFormatOption, fmt.Errorf("unable to detect format: %w", err)
+		return nil, format.UnknownFormatOption, fmt.Errorf("unable to detect format: %w", err)
 	}
 	if f == nil {
-		return nil, nil, nil, source.UnknownScope, format.UnknownFormatOption, fmt.Errorf("unable to identify format")
+		return nil, format.UnknownFormatOption, fmt.Errorf("unable to identify format")
 	}
-	c, m, d, s, err := f.Decode(bytes.NewReader(by))
-	return c, m, d, s, f.Option, err
+	s, err := f.Decode(bytes.NewReader(by))
+	return s, f.Option, err
 }
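[Editor's note] The two functions above now round-trip a single document value instead of four loose pieces. A minimal sketch of calling code under the new signatures — the empty catalog, the directory metadata, and the use of pkg.NewCatalog and format.JSONOption are illustrative assumptions, not part of this commit:

package main

import (
	"bytes"
	"fmt"

	"github.com/anchore/syft/syft"
	"github.com/anchore/syft/syft/format"
	"github.com/anchore/syft/syft/pkg"
	"github.com/anchore/syft/syft/sbom"
	"github.com/anchore/syft/syft/source"
)

func main() {
	// Illustrative input: an empty catalog plus a directory source description.
	s := sbom.SBOM{
		Artifacts: sbom.Artifacts{
			PackageCatalog: pkg.NewCatalog(),
		},
		Source: source.Metadata{
			Scheme: source.DirectoryScheme,
			Path:   "/some/dir", // hypothetical path
		},
	}

	// Encode the whole document in one call (format.JSONOption assumed to be a valid option).
	by, err := syft.Encode(s, format.JSONOption)
	if err != nil {
		panic(err)
	}

	// Decode back into a single *sbom.SBOM plus the detected format option.
	decoded, option, err := syft.Decode(bytes.NewReader(by))
	if err != nil {
		panic(err)
	}
	fmt.Println(option, decoded.Source.Path)
}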
@@ -4,6 +4,8 @@ import (
 	"bytes"
 	"testing"
 
+	"github.com/anchore/syft/syft/sbom"
+
 	"github.com/go-test/deep"
 
 	"github.com/anchore/syft/syft/format"
@@ -35,14 +37,22 @@ func TestEncodeDecodeEncodeCycleComparison(t *testing.T) {
 			}
 			originalCatalog, d, err := CatalogPackages(&src, source.SquashedScope)
 
-			by1, err := Encode(originalCatalog, &src.Metadata, d, source.SquashedScope, test.format)
+			originalSBOM := sbom.SBOM{
+				Artifacts: sbom.Artifacts{
+					PackageCatalog: originalCatalog,
+					Distro: d,
+				},
+				Source: src.Metadata,
+			}
+
+			by1, err := Encode(originalSBOM, test.format)
 			assert.NoError(t, err)
 
-			newCatalog, newMetadata, newDistro, newScope, newFormat, err := Decode(bytes.NewReader(by1))
+			newSBOM, newFormat, err := Decode(bytes.NewReader(by1))
 			assert.NoError(t, err)
 			assert.Equal(t, test.format, newFormat)
 
-			by2, err := Encode(newCatalog, newMetadata, newDistro, newScope, test.format)
+			by2, err := Encode(*newSBOM, test.format)
 			assert.NoError(t, err)
 			for _, diff := range deep.Equal(by1, by2) {
 				t.Errorf(diff)
@@ -3,11 +3,8 @@ package format
 import (
 	"io"
 
-	"github.com/anchore/syft/syft/distro"
-
-	"github.com/anchore/syft/syft/pkg"
-	"github.com/anchore/syft/syft/source"
+	"github.com/anchore/syft/syft/sbom"
 )
 
 // Decoder is a function that can convert an SBOM document of a specific format from a reader into Syft native objects.
-type Decoder func(reader io.Reader) (*pkg.Catalog, *source.Metadata, *distro.Distro, source.Scope, error)
+type Decoder func(reader io.Reader) (*sbom.SBOM, error)
@@ -3,10 +3,8 @@ package format
 import (
 	"io"
 
-	"github.com/anchore/syft/syft/distro"
-	"github.com/anchore/syft/syft/pkg"
-	"github.com/anchore/syft/syft/source"
+	"github.com/anchore/syft/syft/sbom"
 )
 
 // Encoder is a function that can transform Syft native objects into an SBOM document of a specific format written to the given writer.
-type Encoder func(io.Writer, *pkg.Catalog, *source.Metadata, *distro.Distro, source.Scope) error
+type Encoder func(io.Writer, sbom.SBOM) error
@@ -4,10 +4,7 @@ import (
 	"errors"
 	"io"
 
-	"github.com/anchore/syft/syft/distro"
-
-	"github.com/anchore/syft/syft/pkg"
-	"github.com/anchore/syft/syft/source"
+	"github.com/anchore/syft/syft/sbom"
 )
 
 var (
@@ -32,16 +29,16 @@ func NewFormat(option Option, encoder Encoder, decoder Decoder, validator Valida
 	}
 }
 
-func (f Format) Encode(output io.Writer, catalog *pkg.Catalog, d *distro.Distro, metadata *source.Metadata, scope source.Scope) error {
+func (f Format) Encode(output io.Writer, s sbom.SBOM) error {
 	if f.encoder == nil {
 		return ErrEncodingNotSupported
 	}
-	return f.encoder(output, catalog, metadata, d, scope)
+	return f.encoder(output, s)
 }
 
-func (f Format) Decode(reader io.Reader) (*pkg.Catalog, *source.Metadata, *distro.Distro, source.Scope, error) {
+func (f Format) Decode(reader io.Reader) (*sbom.SBOM, error) {
 	if f.decoder == nil {
-		return nil, nil, nil, source.UnknownScope, ErrDecodingNotSupported
+		return nil, ErrDecodingNotSupported
 	}
 	return f.decoder(reader)
 }
@@ -54,9 +51,9 @@ func (f Format) Validate(reader io.Reader) error {
 	return f.validator(reader)
 }
 
-func (f Format) Presenter(catalog *pkg.Catalog, metadata *source.Metadata, d *distro.Distro, scope source.Scope) *Presenter {
+func (f Format) Presenter(s sbom.SBOM) *Presenter {
 	if f.encoder == nil {
 		return nil
 	}
-	return NewPresenter(f.encoder, catalog, metadata, d, scope)
+	return NewPresenter(f.encoder, s)
 }
@@ -3,30 +3,21 @@ package format
 import (
 	"io"
 
-	"github.com/anchore/syft/syft/distro"
-
-	"github.com/anchore/syft/syft/pkg"
-	"github.com/anchore/syft/syft/source"
+	"github.com/anchore/syft/syft/sbom"
 )
 
 type Presenter struct {
-	catalog *pkg.Catalog
-	srcMetadata *source.Metadata
-	distro *distro.Distro
-	scope source.Scope
+	sbom sbom.SBOM
 	encoder Encoder
 }
 
-func NewPresenter(encoder Encoder, catalog *pkg.Catalog, srcMetadata *source.Metadata, d *distro.Distro, scope source.Scope) *Presenter {
+func NewPresenter(encoder Encoder, s sbom.SBOM) *Presenter {
 	return &Presenter{
-		catalog: catalog,
-		srcMetadata: srcMetadata,
-		distro: d,
+		sbom: s,
 		encoder: encoder,
-		scope: scope,
 	}
 }
 
 func (pres *Presenter) Present(output io.Writer) error {
-	return pres.encoder(output, pres.catalog, pres.srcMetadata, pres.distro, pres.scope)
+	return pres.encoder(output, pres.sbom)
 }
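[Editor's note] With the presenter reduced to an encoder plus an SBOM, any function matching the new Encoder shape can drive output. A hedged sketch — the SBOM value and the line-per-package output format are invented for illustration, only the Encoder/NewPresenter/Present shapes come from the diffs above:

package main

import (
	"fmt"
	"io"
	"os"

	"github.com/anchore/syft/syft/format"
	"github.com/anchore/syft/syft/pkg"
	"github.com/anchore/syft/syft/sbom"
	"github.com/anchore/syft/syft/source"
)

func main() {
	// Illustrative SBOM; in real use this would come from cataloging or from Decode.
	s := sbom.SBOM{
		Artifacts: sbom.Artifacts{PackageCatalog: pkg.NewCatalog()},
		Source:    source.Metadata{Scheme: source.DirectoryScheme, Path: "."},
	}

	// Any func with the new Encoder shape (io.Writer, sbom.SBOM) error will do.
	enc := func(w io.Writer, s sbom.SBOM) error {
		for _, p := range s.Artifacts.PackageCatalog.Sorted() {
			if _, err := fmt.Fprintf(w, "%s %s (%s)\n", p.Name, p.Version, string(p.Type)); err != nil {
				return err
			}
		}
		_, err := fmt.Fprintf(w, "source: %s\n", s.Source.Path)
		return err
	}

	// The presenter now just pairs that encoder with the single SBOM value.
	if err := format.NewPresenter(enc, s).Present(os.Stdout); err != nil {
		panic(err)
	}
}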
@@ -1,15 +1,18 @@
-package poweruser
+package sbom
 
 import (
-	"github.com/anchore/syft/internal/config"
 	"github.com/anchore/syft/syft/distro"
 	"github.com/anchore/syft/syft/file"
 	"github.com/anchore/syft/syft/pkg"
 	"github.com/anchore/syft/syft/source"
 )
 
-type JSONDocumentConfig struct {
-	ApplicationConfig config.Application
+type SBOM struct {
+	Artifacts Artifacts
+	Source source.Metadata
+}
+
+type Artifacts struct {
 	PackageCatalog *pkg.Catalog
 	FileMetadata map[source.Location]source.FileMetadata
 	FileDigests map[source.Location][]file.Digest
@@ -17,5 +20,4 @@ type JSONDocumentConfig struct {
 	FileContents map[source.Location]string
 	Secrets map[source.Location][]file.SearchResult
 	Distro *distro.Distro
-	SourceMetadata source.Metadata
 }
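[Editor's note] The new sbom package is the single aggregate that both the packages and power-user paths now populate. A sketch of a more fully populated value using the fields declared above — the location, contents, and digest values are placeholders, and only the field names come from this commit:

package main

import (
	"github.com/anchore/syft/syft/file"
	"github.com/anchore/syft/syft/pkg"
	"github.com/anchore/syft/syft/sbom"
	"github.com/anchore/syft/syft/source"
)

func main() {
	loc := source.NewLocation("/a/place") // placeholder path

	s := sbom.SBOM{
		Artifacts: sbom.Artifacts{
			PackageCatalog: pkg.NewCatalog(),
			FileMetadata:   map[source.Location]source.FileMetadata{loc: {}},
			FileDigests:    map[source.Location][]file.Digest{loc: nil},
			FileContents:   map[source.Location]string{loc: "placeholder contents"},
			Secrets:        map[source.Location][]file.SearchResult{loc: nil},
		},
		Source: source.Metadata{
			Scheme: source.DirectoryScheme,
			Path:   "/a/place",
		},
	}
	_ = s
}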
@@ -3,8 +3,6 @@ package cli
 import (
 	"strings"
 	"testing"
-
-	"github.com/anchore/syft/syft/source"
 )
 
 func TestPackagesCmdFlags(t *testing.T) {
@@ -30,7 +28,6 @@ func TestPackagesCmdFlags(t *testing.T) {
 			args: []string{"packages", "-o", "json", request},
 			assertions: []traitAssertion{
 				assertJsonReport,
-				assertScope(source.SquashedScope),
 				assertSuccessfulReturnCode,
 			},
 		},
@@ -65,7 +62,7 @@ func TestPackagesCmdFlags(t *testing.T) {
 			name: "squashed-scope-flag",
 			args: []string{"packages", "-o", "json", "-s", "squashed", request},
 			assertions: []traitAssertion{
-				assertScope(source.SquashedScope),
+				assertPackageCount(17),
 				assertSuccessfulReturnCode,
 			},
 		},
@@ -73,18 +70,18 @@ func TestPackagesCmdFlags(t *testing.T) {
 			name: "all-layers-scope-flag",
 			args: []string{"packages", "-o", "json", "-s", "all-layers", request},
 			assertions: []traitAssertion{
-				assertScope(source.AllLayersScope),
+				assertPackageCount(19),
 				assertSuccessfulReturnCode,
 			},
 		},
 		{
-			name: "packages-scope-env-binding",
+			name: "all-layers-scope-flag-by-env",
+			args: []string{"packages", "-o", "json", request},
 			env: map[string]string{
 				"SYFT_PACKAGE_CATALOGER_SCOPE": "all-layers",
 			},
-			args: []string{"packages", "-o", "json", request},
 			assertions: []traitAssertion{
-				assertScope(source.AllLayersScope),
+				assertPackageCount(19),
 				assertSuccessfulReturnCode,
 			},
 		},
@@ -80,6 +80,24 @@ func assertStdoutLengthGreaterThan(length uint) traitAssertion {
 		tb.Helper()
 		if uint(len(stdout)) < length {
 			tb.Errorf("not enough output (expected at least %d, got %d)", length, len(stdout))
+		}
+	}
+}
+
+func assertPackageCount(length uint) traitAssertion {
+	return func(tb testing.TB, stdout, _ string, _ int) {
+		tb.Helper()
+		type partial struct {
+			Artifacts []interface{} `json:"artifacts"`
+		}
+		var data partial
+
+		if err := json.Unmarshal([]byte(stdout), &data); err != nil {
+			tb.Errorf("expected to find a JSON report, but was unmarshalable: %+v", err)
+		}
+
+		if uint(len(data.Artifacts)) != length {
+			tb.Errorf("expected package count of %d, but found %d", length, len(data.Artifacts))
 		}
 	}
 }
@@ -41,6 +41,15 @@ var imageOnlyTestCases = []testCase{
 			"someotherpkg": "3.19.0",
 		},
 	},
+	{
+		// When the image is build lib overwrites pkgs/lib causing there to only be two packages
+		name: "find apkdb packages",
+		pkgType: pkg.ApkPkg,
+		pkgInfo: map[string]string{
+			"musl-utils": "1.1.24-r2",
+			"libc-utils": "0.7.2-r0",
+		},
+	},
 }
 
 var dirOnlyTestCases = []testCase{
@@ -149,6 +158,15 @@ var dirOnlyTestCases = []testCase{
 			"version_check": "0.1.5",
 		},
 	},
+	{
+		name: "find apkdb packages",
+		pkgType: pkg.ApkPkg,
+		duplicates: 2, // when the directory is cataloged we have duplicates between lib/ and pkgs/lib
+		pkgInfo: map[string]string{
+			"musl-utils": "1.1.24-r2",
+			"libc-utils": "0.7.2-r0",
+		},
+	},
 }
 
 var commonTestCases = []testCase{
@@ -186,13 +204,4 @@ var commonTestCases = []testCase{
 			"example-jenkins-plugin": "1.0-SNAPSHOT",
 		},
 	},
-	{
-		name: "find apkdb packages",
-		pkgType: pkg.ApkPkg,
-		pkgInfo: map[string]string{
-			"musl-utils": "1.1.24-r2",
-			"libc-utils": "0.7.2-r0",
-		},
-	},
 }
@@ -7,7 +7,7 @@ import (
 
 	"github.com/anchore/syft/internal/formats/syftjson"
 	syftjsonModel "github.com/anchore/syft/internal/formats/syftjson/model"
-	"github.com/anchore/syft/syft/source"
+	"github.com/anchore/syft/syft/sbom"
 )
 
 func TestPackageOwnershipRelationships(t *testing.T) {
@@ -25,7 +25,13 @@ func TestPackageOwnershipRelationships(t *testing.T) {
 		t.Run(test.fixture, func(t *testing.T) {
 			catalog, d, src := catalogFixtureImage(t, test.fixture)
 
-			p := syftjson.Format().Presenter(catalog, &src.Metadata, d, source.SquashedScope)
+			p := syftjson.Format().Presenter(sbom.SBOM{
+				Artifacts: sbom.Artifacts{
+					PackageCatalog: catalog,
+					Distro: d,
+				},
+				Source: src.Metadata,
+			})
 			if p == nil {
 				t.Fatal("unable to get presenter")
 			}
test/integration/test-fixtures/.gitignore (vendored)
@@ -3,5 +3,5 @@
 # twice in the repo seems redundant (even via symlink). Given that the fixture is a few kilobytes in size, the build process is already
 # captured, and integration tests should only be testing if jars can be discovered (not necessarily depth in java detection
 # functionality), committing it seems like an acceptable exception.
-!image-pkg-coverage/java/*.jar
-!image-pkg-coverage/java/*.hpi
+!image-pkg-coverage/pkgs/java/*.jar
+!image-pkg-coverage/pkgs/java/*.hpi
@@ -1,2 +1,4 @@
 FROM scratch
-COPY . .
+COPY pkgs/ .
+# we duplicate to show a package count difference between all-layers and squashed scopes
+COPY lib lib
@@ -0,0 +1,49 @@
+C:Q1p78yvTLG094tHE1+dToJGbmYzQE=
+P:libc-utils
+V:0.7.2-r0
+A:x86_64
+S:1175
+I:4096
+T:Meta package to pull in correct libc
+U:http://alpinelinux.org
+L:BSD
+o:libc-dev
+m:Natanael Copa <ncopa@alpinelinux.org>
+t:1575749004
+c:97b1c2842faa3bfa30f5811ffbf16d5ff9f1a479
+D:musl-utils
+
+C:Q1bTtF5526tETKfL+lnigzIDvm+2o=
+P:musl-utils
+V:1.1.24-r2
+A:x86_64
+S:37944
+I:151552
+T:the musl c library (libc) implementation
+U:https://musl.libc.org/
+L:MIT BSD GPL2+
+o:musl
+m:Timo Teräs <timo.teras@iki.fi>
+t:1584790550
+c:4024cc3b29ad4c65544ad068b8f59172b5494306
+D:scanelf so:libc.musl-x86_64.so.1
+p:cmd:getconf cmd:getent cmd:iconv cmd:ldconfig cmd:ldd
+r:libiconv
+F:sbin
+R:ldconfig
+a:0:0:755
+Z:Q1Kja2+POZKxEkUOZqwSjC6kmaED4=
+F:usr
+F:usr/bin
+R:iconv
+a:0:0:755
+Z:Q1CVmFbdY+Hv6/jAHl1gec2Kbx1EY=
+R:ldd
+a:0:0:755
+Z:Q1yFAhGggmL7ERgbIA7KQxyTzf3ks=
+R:getconf
+a:0:0:755
+Z:Q1dAdYK8M/INibRQF5B3Rw7cmNDDA=
+R:getent
+a:0:0:755
+Z:Q1eR2Dz/WylabgbWMTkd2+hGmEya4=