Mirror of https://github.com/anchore/syft.git (synced 2026-02-12 02:26:42 +01:00)

Commit bd9007fc0e (parent e3b34813d7)

Migrate SPDX-JSON relationships to SBOM model (#634)

* remove power-user document shape
* add power-user specific fields to syft-json format
* port remaining spdx-json relationships to sbom model
* add coordinate set
* add SBOM file path helper
* use internal mimetype helper in go binary cataloger
* add new package-of relationship
* update json schema to v2
* replace power-user presenter with syft-json format
* fix tests and linting
* remove "package-of" relationship (in favor of "contains")
* add tests for spdx22json format encoding enhancements
* update TODO and log entries
* introduce sbom.Descriptor

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>
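For orientation, a minimal sketch of how the pieces introduced by this commit fit together, using names taken from the diff below (an illustration, not the exact upstream code): the SBOM now carries a self-describing Descriptor block, and the power-user presenter is replaced by the syft-json format presenter.

	// Sketch based on packagesExecWorker / powerUserExecWorker in this diff.
	s := sbom.SBOM{
		Source: src.Metadata,
		Descriptor: sbom.Descriptor{
			Name:          internal.ApplicationName,
			Version:       version.FromBuild().Version,
			Configuration: appConfig, // surfaces as "descriptor.configuration" in the JSON output
		},
	}

	bus.Publish(partybus.Event{
		Type:  event.PresenterReady,
		Value: syftjson.Format().Presenter(s), // previously: poweruser.NewJSONPresenter(s, *appConfig)
	})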
@@ -13,6 +1,7 @@ import (
 	"github.com/anchore/syft/internal/formats"
 	"github.com/anchore/syft/internal/log"
 	"github.com/anchore/syft/internal/ui"
+	"github.com/anchore/syft/internal/version"
 	"github.com/anchore/syft/syft/artifact"
 	"github.com/anchore/syft/syft/event"
 	"github.com/anchore/syft/syft/format"
@@ -263,6 +264,11 @@ func packagesExecWorker(userInput string) <-chan error {

 		s := sbom.SBOM{
 			Source: src.Metadata,
+			Descriptor: sbom.Descriptor{
+				Name:          internal.ApplicationName,
+				Version:       version.FromBuild().Version,
+				Configuration: appConfig,
+			},
 		}

 		var relationships []<-chan artifact.Relationship
@@ -4,19 +4,18 @@ import (
 	"fmt"
 	"os"

-	"github.com/anchore/syft/syft/artifact"
-
-	"github.com/gookit/color"
-
-	"github.com/anchore/syft/syft/sbom"
-
 	"github.com/anchore/stereoscope"
 	"github.com/anchore/syft/internal"
 	"github.com/anchore/syft/internal/bus"
+	"github.com/anchore/syft/internal/formats/syftjson"
 	"github.com/anchore/syft/internal/log"
-	"github.com/anchore/syft/internal/presenter/poweruser"
 	"github.com/anchore/syft/internal/ui"
+	"github.com/anchore/syft/internal/version"
+	"github.com/anchore/syft/syft/artifact"
 	"github.com/anchore/syft/syft/event"
+	"github.com/anchore/syft/syft/sbom"
 	"github.com/anchore/syft/syft/source"
+	"github.com/gookit/color"
 	"github.com/pkg/profile"
 	"github.com/spf13/cobra"
 	"github.com/wagoodman/go-partybus"
@@ -125,6 +124,11 @@ func powerUserExecWorker(userInput string) <-chan error {

 		s := sbom.SBOM{
 			Source: src.Metadata,
+			Descriptor: sbom.Descriptor{
+				Name:          internal.ApplicationName,
+				Version:       version.FromBuild().Version,
+				Configuration: appConfig,
+			},
 		}

 		var relationships []<-chan artifact.Relationship
@@ -139,7 +143,7 @@ func powerUserExecWorker(userInput string) <-chan error {

 		bus.Publish(partybus.Event{
 			Type:  event.PresenterReady,
-			Value: poweruser.NewJSONPresenter(s, *appConfig),
+			Value: syftjson.Format().Presenter(s),
 		})
 	}()

@@ -163,6 +163,10 @@ func (cfg *Application) parseLogLevelOption() error {
 		}
 	}

+	if cfg.Log.Level == "" {
+		cfg.Log.Level = cfg.Log.LevelOpt.String()
+	}
+
 	return nil
 }

@@ -6,5 +6,5 @@ const (

 	// JSONSchemaVersion is the current schema version output by the JSON presenter
 	// This is roughly following the "SchemaVer" guidelines for versioning the JSON schema. Please see schema/json/README.md for details on how to increment.
-	JSONSchemaVersion = "1.1.0"
+	JSONSchemaVersion = "2.0.0"
 )
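A note on why this bump matters downstream: the syft-json encoder interpolates this constant into the advertised schema URL (see the ToFormatModel changes later in this diff). A sketch of the resulting Schema block:

	// Sketch; mirrors the Schema block populated in syftjson's ToFormatModel below.
	schema := model.Schema{
		Version: internal.JSONSchemaVersion, // "2.0.0" after this change
		URL: fmt.Sprintf(
			"https://raw.githubusercontent.com/anchore/syft/main/schema/json/schema-%s.json",
			internal.JSONSchemaVersion), // .../schema-2.0.0.json
	}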
@@ -1,51 +0,0 @@
-package spdxhelpers
-
-import (
-	"crypto/sha256"
-	"fmt"
-	"path/filepath"
-
-	"github.com/anchore/syft/internal/formats/spdx22json/model"
-	"github.com/anchore/syft/syft/pkg"
-)
-
-func Files(packageSpdxID string, p pkg.Package) (files []model.File, fileIDs []string, relationships []model.Relationship) {
-	files = make([]model.File, 0)
-	fileIDs = make([]string, 0)
-	relationships = make([]model.Relationship, 0)
-
-	if !hasMetadata(p) {
-		return files, fileIDs, relationships
-	}
-
-	pkgFileOwner, ok := p.Metadata.(pkg.FileOwner)
-	if !ok {
-		return files, fileIDs, relationships
-	}
-
-	for _, ownedFilePath := range pkgFileOwner.OwnedFiles() {
-		baseFileName := filepath.Base(ownedFilePath)
-		pathHash := sha256.Sum256([]byte(ownedFilePath))
-		fileSpdxID := model.ElementID(fmt.Sprintf("File-%s-%x", p.Name, pathHash)).String()
-
-		fileIDs = append(fileIDs, fileSpdxID)
-
-		files = append(files, model.File{
-			FileName: ownedFilePath,
-			Item: model.Item{
-				Element: model.Element{
-					SPDXID: fileSpdxID,
-					Name:   baseFileName,
-				},
-			},
-		})
-
-		relationships = append(relationships, model.Relationship{
-			SpdxElementID:      packageSpdxID,
-			RelationshipType:   model.ContainsRelationship,
-			RelatedSpdxElement: fileSpdxID,
-		})
-	}
-
-	return files, fileIDs, relationships
-}
@@ -124,6 +124,15 @@ func ImageInput(t testing.TB, testImage string, options ...ImageOption) sbom.SBOM {
 			Distro: &dist,
 		},
 		Source: src.Metadata,
+		Descriptor: sbom.Descriptor{
+			Name:    "syft",
+			Version: "v0.42.0-bogus",
+			// the application configuration should be persisted here, however, we do not want to import
+			// the application configuration in this package (it's reserved only for ingestion by the cmd package)
+			Configuration: map[string]string{
+				"config-key": "config-value",
+			},
+		},
 	}
 }

@@ -187,6 +196,15 @@ func DirectoryInput(t testing.TB) sbom.SBOM {
 			Distro: &dist,
 		},
 		Source: src.Metadata,
+		Descriptor: sbom.Descriptor{
+			Name:    "syft",
+			Version: "v0.42.0-bogus",
+			// the application configuration should be persisted here, however, we do not want to import
+			// the application configuration in this package (it's reserved only for ingestion by the cmd package)
+			Configuration: map[string]string{
+				"config-key": "config-value",
+			},
+		},
 	}
 }

@@ -3,17 +3,17 @@ package model

 type FileType string

 const (
-	DocumentationFileType FileType = "DOCUMENTATION"
-	ImageFileType         FileType = "IMAGE"
-	VideoFileType         FileType = "VIDEO"
-	ArchiveFileType       FileType = "ARCHIVE"
-	SpdxFileType          FileType = "SPDX"
-	ApplicationFileType   FileType = "APPLICATION"
-	SourceFileType        FileType = "SOURCE"
-	BinaryFileType        FileType = "BINARY"
-	TextFileType          FileType = "TEXT"
-	AudioFileType         FileType = "AUDIO"
-	OtherFileType         FileType = "OTHER"
+	DocumentationFileType FileType = "DOCUMENTATION" // if the file serves as documentation
+	ImageFileType         FileType = "IMAGE"         // if the file is associated with a picture image file (MIME type of image/*, e.g., .jpg, .gif)
+	VideoFileType         FileType = "VIDEO"         // if the file is associated with a video file type (MIME type of video/*)
+	ArchiveFileType       FileType = "ARCHIVE"       // if the file represents an archive (.tar, .jar, etc.)
+	SpdxFileType          FileType = "SPDX"          // if the file is an SPDX document
+	ApplicationFileType   FileType = "APPLICATION"   // if the file is associated with a specific application type (MIME type of application/*)
+	SourceFileType        FileType = "SOURCE"        // if the file is human readable source code (.c, .html, etc.)
+	BinaryFileType        FileType = "BINARY"        // if the file is a compiled object, target image or binary executable (.o, .a, etc.)
+	TextFileType          FileType = "TEXT"          // if the file is human readable text file (MIME type of text/*)
+	AudioFileType         FileType = "AUDIO"         // if the file is associated with an audio file (MIME type of audio/* , e.g. .mp3)
+	OtherFileType         FileType = "OTHER"         // if the file doesn't fit into the above categories (generated artifacts, data files, etc.)
 )

 type File struct {

@@ -36,6 +36,6 @@ type File struct {
 	// Indicates the project in which the SpdxElement originated. Tools must preserve doap:homepage and doap:name
 	// properties and the URI (if one is known) of doap:Project resources that are values of this property. All other
 	// properties of doap:Projects are not directly supported by SPDX and may be dropped when translating to or
-	// from some SPDX formats(deprecated).
+	// from some SPDX formats (deprecated).
 	ArtifactOf []string `json:"artifactOf,omitempty"`
 }
@@ -3,18 +3,18 @@
   "name": "/some/path",
   "spdxVersion": "SPDX-2.2",
   "creationInfo": {
-    "created": "2021-10-29T16:26:08.995826Z",
+    "created": "2021-11-17T19:35:54.834877Z",
     "creators": [
       "Organization: Anchore, Inc",
       "Tool: syft-[not provided]"
     ],
-    "licenseListVersion": "3.14"
+    "licenseListVersion": "3.15"
   },
   "dataLicense": "CC0-1.0",
-  "documentNamespace": "https:/anchore.com/syft/dir/some/path-5362d380-914a-458f-b059-d8d27899574c",
+  "documentNamespace": "https:/anchore.com/syft/dir/some/path-65e2226e-a61e-4ed1-81bb-56022e1ff1eb",
   "packages": [
     {
-      "SPDXID": "SPDXRef-Package-python-package-1-1.0.1",
+      "SPDXID": "SPDXRef-2a115ac97d018a0e",
       "name": "package-1",
       "licenseConcluded": "MIT",
       "downloadLocation": "NOASSERTION",

@@ -31,15 +31,12 @@
         }
       ],
       "filesAnalyzed": false,
-      "hasFiles": [
-        "SPDXRef-File-package-1-efae7fecc76ca25da40f79d7ef5b8933510434914835832c7976f3e866aa756a"
-      ],
       "licenseDeclared": "MIT",
       "sourceInfo": "acquired package info from installed python package manifest file: /some/path/pkg1",
       "versionInfo": "1.0.1"
     },
     {
-      "SPDXID": "SPDXRef-Package-deb-package-2-2.0.1",
+      "SPDXID": "SPDXRef-5e920b2bece2c3ae",
       "name": "package-2",
       "licenseConcluded": "NONE",
       "downloadLocation": "NOASSERTION",

@@ -60,20 +57,5 @@
       "sourceInfo": "acquired package info from DPKG DB: /some/path/pkg1",
       "versionInfo": "2.0.1"
     }
-  ],
-  "files": [
-    {
-      "SPDXID": "SPDXRef-File-package-1-efae7fecc76ca25da40f79d7ef5b8933510434914835832c7976f3e866aa756a",
-      "name": "foo",
-      "licenseConcluded": "",
-      "fileName": "/some/path/pkg1/dependencies/foo"
-    }
-  ],
-  "relationships": [
-    {
-      "spdxElementId": "SPDXRef-Package-python-package-1-1.0.1",
-      "relationshipType": "CONTAINS",
-      "relatedSpdxElement": "SPDXRef-File-package-1-efae7fecc76ca25da40f79d7ef5b8933510434914835832c7976f3e866aa756a"
-    }
   ]
 }
@@ -3,18 +3,18 @@
   "name": "user-image-input",
   "spdxVersion": "SPDX-2.2",
   "creationInfo": {
-    "created": "2021-10-29T16:26:09.001799Z",
+    "created": "2021-11-17T19:35:57.761372Z",
     "creators": [
       "Organization: Anchore, Inc",
      "Tool: syft-[not provided]"
     ],
-    "licenseListVersion": "3.14"
+    "licenseListVersion": "3.15"
   },
   "dataLicense": "CC0-1.0",
-  "documentNamespace": "https:/anchore.com/syft/image/user-image-input-3ad8571c-513f-4fce-944e-5125353c3186",
+  "documentNamespace": "https:/anchore.com/syft/image/user-image-input-5383918f-ec96-4aa9-b756-ad16e1ada31e",
   "packages": [
     {
-      "SPDXID": "SPDXRef-Package-python-package-1-1.0.1",
+      "SPDXID": "SPDXRef-888661d4f0362f02",
       "name": "package-1",
       "licenseConcluded": "MIT",
       "downloadLocation": "NOASSERTION",

@@ -36,7 +36,7 @@
       "versionInfo": "1.0.1"
     },
     {
-      "SPDXID": "SPDXRef-Package-deb-package-2-2.0.1",
+      "SPDXID": "SPDXRef-4068ff5e8926b305",
       "name": "package-2",
       "licenseConcluded": "NONE",
       "downloadLocation": "NOASSERTION",
@@ -3,17 +3,21 @@ package spdx22json
 import (
 	"fmt"
 	"path"
+	"path/filepath"
+	"sort"
 	"strings"
 	"time"

-	"github.com/anchore/syft/syft/sbom"
-
 	"github.com/anchore/syft/internal"
 	"github.com/anchore/syft/internal/formats/common/spdxhelpers"
 	"github.com/anchore/syft/internal/formats/spdx22json/model"
+	"github.com/anchore/syft/internal/log"
 	"github.com/anchore/syft/internal/spdxlicense"
 	"github.com/anchore/syft/internal/version"
+	"github.com/anchore/syft/syft/artifact"
+	"github.com/anchore/syft/syft/file"
 	"github.com/anchore/syft/syft/pkg"
+	"github.com/anchore/syft/syft/sbom"
 	"github.com/anchore/syft/syft/source"
 	"github.com/google/uuid"
 )

@@ -21,7 +25,6 @@ import (
 // toFormatModel creates and populates a new JSON document struct that follows the SPDX 2.2 spec from the given cataloging results.
 func toFormatModel(s sbom.SBOM) model.Document {
 	name := documentName(s.Source)
-	packages, files, relationships := extractFromCatalog(s.Artifacts.PackageCatalog)

 	return model.Document{
 		Element: model.Element{

@@ -40,9 +43,9 @@ func toFormatModel(s sbom.SBOM) model.Document {
 		},
 		DataLicense:       "CC0-1.0",
 		DocumentNamespace: documentNamespace(name, s.Source),
-		Packages:          packages,
-		Files:             files,
-		Relationships:     relationships,
+		Packages:          toPackages(s.Artifacts.PackageCatalog, s.Relationships),
+		Files:             toFiles(s),
+		Relationships:     toRelationships(s.Relationships),
 	}
 }

@@ -58,6 +61,17 @@ func documentName(srcMetadata source.Metadata) string {
 	return uuid.Must(uuid.NewRandom()).String()
 }

+func cleanSPDXName(name string) string {
+	// remove # according to specification
+	name = strings.ReplaceAll(name, "#", "-")
+
+	// remove : for url construction
+	name = strings.ReplaceAll(name, ":", "-")
+
+	// clean relative pathing
+	return path.Clean(name)
+}
+
 func documentNamespace(name string, srcMetadata source.Metadata) string {
 	input := "unknown-source-type"
 	switch srcMetadata.Scheme {

@@ -76,19 +90,12 @@ func documentNamespace(name string, srcMetadata source.Metadata) string {
 	return path.Join(anchoreNamespace, identifier)
 }

-func extractFromCatalog(catalog *pkg.Catalog) ([]model.Package, []model.File, []model.Relationship) {
+func toPackages(catalog *pkg.Catalog, relationships []artifact.Relationship) []model.Package {
 	packages := make([]model.Package, 0)
-	relationships := make([]model.Relationship, 0)
-	files := make([]model.File, 0)
-
 	for _, p := range catalog.Sorted() {
 		license := spdxhelpers.License(p)
-		packageSpdxID := model.ElementID(fmt.Sprintf("Package-%+v-%s-%s", p.Type, p.Name, p.Version)).String()
+		packageSpdxID := model.ElementID(p.ID()).String()

-		packageFiles, fileIDs, packageFileRelationships := spdxhelpers.Files(packageSpdxID, p)
-		files = append(files, packageFiles...)
-
-		relationships = append(relationships, packageFileRelationships...)
-
 		// note: the license concluded and declared should be the same since we are collecting license information
 		// from the project data itself (the installed package files).

@@ -97,14 +104,16 @@ func toPackages(catalog *pkg.Catalog, relationships []artifact.Relationship) []model.Package {
 			DownloadLocation: spdxhelpers.DownloadLocation(p),
 			ExternalRefs:     spdxhelpers.ExternalRefs(p),
 			FilesAnalyzed:    false,
-			HasFiles:         fileIDs,
+			HasFiles:         fileIDsForPackage(packageSpdxID, relationships),
 			Homepage:         spdxhelpers.Homepage(p),
-			LicenseDeclared:  license, // The Declared License is what the authors of a project believe govern the package
-			Originator:       spdxhelpers.Originator(p),
-			SourceInfo:       spdxhelpers.SourceInfo(p),
-			VersionInfo:      p.Version,
+			// The Declared License is what the authors of a project believe govern the package
+			LicenseDeclared: license,
+			Originator:      spdxhelpers.Originator(p),
+			SourceInfo:      spdxhelpers.SourceInfo(p),
+			VersionInfo:     p.Version,
 			Item: model.Item{
-				LicenseConcluded: license, // The Concluded License field is the license the SPDX file creator believes governs the package
+				// The Concluded License field is the license the SPDX file creator believes governs the package
+				LicenseConcluded: license,
 				Element: model.Element{
 					SPDXID: packageSpdxID,
 					Name:   p.Name,

@@ -113,16 +122,145 @@
 		})
 	}

-	return packages, files, relationships
+	return packages
 }

-func cleanSPDXName(name string) string {
-	// remove # according to specification
-	name = strings.ReplaceAll(name, "#", "-")
-
-	// remove : for url construction
-	name = strings.ReplaceAll(name, ":", "-")
-
-	// clean relative pathing
-	return path.Clean(name)
-}
+func fileIDsForPackage(packageSpdxID string, relationships []artifact.Relationship) (fileIDs []string) {
+	for _, relationship := range relationships {
+		if relationship.Type != artifact.ContainsRelationship {
+			continue
+		}
+
+		if _, ok := relationship.From.(pkg.Package); !ok {
+			continue
+		}
+
+		if _, ok := relationship.To.(source.Coordinates); !ok {
+			continue
+		}
+
+		if string(relationship.From.ID()) == packageSpdxID {
+			fileIDs = append(fileIDs, string(relationship.To.ID()))
+		}
+	}
+	return fileIDs
+}
+
+func toFiles(s sbom.SBOM) []model.File {
+	results := make([]model.File, 0)
+	artifacts := s.Artifacts
+
+	for _, coordinates := range sbom.AllCoordinates(s) {
+		var metadata *source.FileMetadata
+		if metadataForLocation, exists := artifacts.FileMetadata[coordinates]; exists {
+			metadata = &metadataForLocation
+		}
+
+		var digests []file.Digest
+		if digestsForLocation, exists := artifacts.FileDigests[coordinates]; exists {
+			digests = digestsForLocation
+		}
+
+		// TODO: add file classifications (?) and content as a snippet
+
+		var comment string
+		if coordinates.FileSystemID != "" {
+			comment = fmt.Sprintf("layerID: %s", coordinates.FileSystemID)
+		}
+
+		results = append(results, model.File{
+			Item: model.Item{
+				Element: model.Element{
+					SPDXID:  string(coordinates.ID()),
+					Name:    filepath.Base(coordinates.RealPath),
+					Comment: comment,
+				},
+				// required, no attempt made to determine license information
+				LicenseConcluded: "NOASSERTION",
+			},
+			Checksums: toFileChecksums(digests),
+			FileName:  coordinates.RealPath,
+			FileTypes: toFileTypes(metadata),
+		})
+	}
+
+	// sort by real path then virtual path to ensure the result is stable across multiple runs
+	sort.SliceStable(results, func(i, j int) bool {
+		return results[i].FileName < results[j].FileName
+	})
+	return results
+}
+
+func toFileChecksums(digests []file.Digest) (checksums []model.Checksum) {
+	for _, digest := range digests {
+		checksums = append(checksums, model.Checksum{
+			Algorithm:     digest.Algorithm,
+			ChecksumValue: digest.Value,
+		})
+	}
+	return checksums
+}
+
+func toFileTypes(metadata *source.FileMetadata) (ty []string) {
+	if metadata == nil {
+		return nil
+	}
+
+	mimeTypePrefix := strings.Split(metadata.MIMEType, "/")[0]
+	switch mimeTypePrefix {
+	case "image":
+		ty = append(ty, string(model.ImageFileType))
+	case "video":
+		ty = append(ty, string(model.VideoFileType))
+	case "application":
+		ty = append(ty, string(model.ApplicationFileType))
+	case "text":
+		ty = append(ty, string(model.TextFileType))
+	case "audio":
+		ty = append(ty, string(model.AudioFileType))
+	}
+
+	if internal.IsExecutable(metadata.MIMEType) {
+		ty = append(ty, string(model.BinaryFileType))
+	}
+
+	if internal.IsArchive(metadata.MIMEType) {
+		ty = append(ty, string(model.ArchiveFileType))
+	}
+
+	// TODO: add support for source, spdx, and documentation file types
+	if len(ty) == 0 {
+		ty = append(ty, string(model.OtherFileType))
+	}
+
+	return ty
+}
+
+func toRelationships(relationships []artifact.Relationship) (result []model.Relationship) {
+	for _, r := range relationships {
+		exists, relationshipType, comment := lookupRelationship(r.Type)
+
+		if !exists {
+			log.Warnf("unable to convert relationship from SPDX 2.2 JSON, dropping: %+v", r)
+			continue
+		}
+
+		result = append(result, model.Relationship{
+			SpdxElementID:      string(r.From.ID()),
+			RelationshipType:   relationshipType,
+			RelatedSpdxElement: string(r.To.ID()),
+			Comment:            comment,
+		})
+	}
+	return result
+}
+
+func lookupRelationship(ty artifact.RelationshipType) (bool, model.RelationshipType, string) {
+	switch ty {
+	case artifact.ContainsRelationship:
+		return true, model.ContainsRelationship, ""
+	case artifact.OwnershipByFileOverlapRelationship:
+		return true, model.OtherRelationship, fmt.Sprintf("%s: indicates that the parent package claims ownership of a child package since the parent metadata indicates overlap with a location that a cataloger found the child package by", ty)
+	}
+	return false, "", ""
 }
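As an illustration of the relationship-driven flow introduced above (the package and path values here are invented, not real syft output), a single package-contains-file relationship recorded in the SBOM model now drives both the package's hasFiles list and the top-level SPDX relationships:

	// Hypothetical input; IDs are whatever artifact.Identifiable.ID() derives for these values.
	p := pkg.Package{Name: "package-1", Version: "1.0.1"}
	c := source.Coordinates{RealPath: "/some/path/pkg1/dependencies/foo"}

	rel := artifact.Relationship{
		From: p,
		To:   c,
		Type: artifact.ContainsRelationship,
	}

	// fileIDsForPackage picks this up for the package's "hasFiles" entry...
	ids := fileIDsForPackage(string(p.ID()), []artifact.Relationship{rel})

	// ...and toRelationships emits the corresponding SPDX "CONTAINS" relationship.
	spdxRels := toRelationships([]artifact.Relationship{rel})
	_, _ = ids, spdxRels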
internal/formats/spdx22json/to_format_model_test.go (new file, 256 lines)
@@ -0,0 +1,256 @@
package spdx22json

import (
	"testing"

	"github.com/anchore/syft/internal/formats/spdx22json/model"
	"github.com/anchore/syft/syft/artifact"
	"github.com/anchore/syft/syft/file"
	"github.com/anchore/syft/syft/pkg"
	"github.com/anchore/syft/syft/source"
	"github.com/stretchr/testify/assert"
)

func Test_toFileTypes(t *testing.T) {
	tests := []struct {
		name     string
		metadata source.FileMetadata
		expected []string
	}{
		{
			name:     "application",
			metadata: source.FileMetadata{MIMEType: "application/vnd.unknown"},
			expected: []string{string(model.ApplicationFileType)},
		},
		{
			name:     "archive",
			metadata: source.FileMetadata{MIMEType: "application/zip"},
			expected: []string{string(model.ApplicationFileType), string(model.ArchiveFileType)},
		},
		{
			name:     "audio",
			metadata: source.FileMetadata{MIMEType: "audio/ogg"},
			expected: []string{string(model.AudioFileType)},
		},
		{
			name:     "video",
			metadata: source.FileMetadata{MIMEType: "video/3gpp"},
			expected: []string{string(model.VideoFileType)},
		},
		{
			name:     "text",
			metadata: source.FileMetadata{MIMEType: "text/html"},
			expected: []string{string(model.TextFileType)},
		},
		{
			name:     "image",
			metadata: source.FileMetadata{MIMEType: "image/png"},
			expected: []string{string(model.ImageFileType)},
		},
		{
			name:     "binary",
			metadata: source.FileMetadata{MIMEType: "application/x-sharedlib"},
			expected: []string{string(model.ApplicationFileType), string(model.BinaryFileType)},
		},
	}
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			assert.ElementsMatch(t, test.expected, toFileTypes(&test.metadata))
		})
	}
}

func Test_lookupRelationship(t *testing.T) {
	tests := []struct {
		input   artifact.RelationshipType
		exists  bool
		ty      model.RelationshipType
		comment string
	}{
		{
			input:  artifact.ContainsRelationship,
			exists: true,
			ty:     model.ContainsRelationship,
		},
		{
			input:   artifact.OwnershipByFileOverlapRelationship,
			exists:  true,
			ty:      model.OtherRelationship,
			comment: "ownership-by-file-overlap: indicates that the parent package claims ownership of a child package since the parent metadata indicates overlap with a location that a cataloger found the child package by",
		},
		{
			input:  "made-up",
			exists: false,
		},
	}
	for _, test := range tests {
		t.Run(string(test.input), func(t *testing.T) {
			exists, ty, comment := lookupRelationship(test.input)
			assert.Equal(t, exists, test.exists)
			assert.Equal(t, ty, test.ty)
			assert.Equal(t, comment, test.comment)
		})
	}
}

func Test_toFileChecksums(t *testing.T) {
	tests := []struct {
		name     string
		digests  []file.Digest
		expected []model.Checksum
	}{
		{
			name: "empty",
		},
		{
			name: "has digests",
			digests: []file.Digest{
				{Algorithm: "sha256", Value: "deadbeefcafe"},
				{Algorithm: "md5", Value: "meh"},
			},
			expected: []model.Checksum{
				{Algorithm: "sha256", ChecksumValue: "deadbeefcafe"},
				{Algorithm: "md5", ChecksumValue: "meh"},
			},
		},
	}
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			assert.ElementsMatch(t, test.expected, toFileChecksums(test.digests))
		})
	}
}

func Test_fileIDsForPackage(t *testing.T) {
	p := pkg.Package{
		Name: "bogus",
	}

	c := source.Coordinates{
		RealPath:     "/path",
		FileSystemID: "nowhere",
	}

	tests := []struct {
		name          string
		id            string
		relationships []artifact.Relationship
		expected      []string
	}{
		{
			name: "find file IDs for packages with package-file relationships",
			id:   string(p.ID()),
			relationships: []artifact.Relationship{
				{From: p, To: c, Type: artifact.ContainsRelationship},
			},
			expected: []string{string(c.ID())},
		},
		{
			name: "ignore package-to-package",
			id:   string(p.ID()),
			relationships: []artifact.Relationship{
				{From: p, To: p, Type: artifact.ContainsRelationship},
			},
			expected: []string{},
		},
		{
			name: "ignore file-to-file",
			id:   string(p.ID()),
			relationships: []artifact.Relationship{
				{From: c, To: c, Type: artifact.ContainsRelationship},
			},
			expected: []string{},
		},
		{
			name: "ignore file-to-package",
			id:   string(p.ID()),
			relationships: []artifact.Relationship{
				{From: c, To: p, Type: artifact.ContainsRelationship},
			},
			expected: []string{},
		},
		{
			name: "filter by relationship type",
			id:   string(p.ID()),
			relationships: []artifact.Relationship{
				{From: p, To: c, Type: artifact.OwnershipByFileOverlapRelationship},
			},
			expected: []string{},
		},
	}
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			assert.ElementsMatch(t, test.expected, fileIDsForPackage(test.id, test.relationships))
		})
	}
}
@@ -8,8 +8,7 @@ import (
 )

 func encoder(output io.Writer, s sbom.SBOM) error {
-	// TODO: application config not available yet
-	doc := ToFormatModel(s, nil)
+	doc := ToFormatModel(s)

 	enc := json.NewEncoder(output)
 	// prevent > and < from being escaped in the payload
@@ -4,6 +4,15 @@ import (
 	"flag"
 	"testing"

+	"github.com/anchore/syft/syft/file"
+
+	"github.com/anchore/syft/syft/artifact"
+
+	"github.com/anchore/syft/syft/distro"
+	"github.com/anchore/syft/syft/pkg"
+	"github.com/anchore/syft/syft/sbom"
+	"github.com/anchore/syft/syft/source"
+
 	"github.com/anchore/syft/internal/formats/common/testutils"
 )

@@ -24,3 +33,167 @@ func TestImagePresenter(t *testing.T) {
 		*updateJson,
 	)
 }

New test added below the existing ones:

func TestEncodeFullJSONDocument(t *testing.T) {
	catalog := pkg.NewCatalog()

	p1 := pkg.Package{
		Name:    "package-1",
		Version: "1.0.1",
		Locations: []source.Location{
			{Coordinates: source.Coordinates{RealPath: "/a/place/a"}},
		},
		Type:         pkg.PythonPkg,
		FoundBy:      "the-cataloger-1",
		Language:     pkg.Python,
		MetadataType: pkg.PythonPackageMetadataType,
		Licenses:     []string{"MIT"},
		Metadata: pkg.PythonPackageMetadata{
			Name:    "package-1",
			Version: "1.0.1",
			Files:   []pkg.PythonFileRecord{},
		},
		PURL: "a-purl-1",
		CPEs: []pkg.CPE{
			pkg.MustCPE("cpe:2.3:*:some:package:1:*:*:*:*:*:*:*"),
		},
	}

	p2 := pkg.Package{
		Name:    "package-2",
		Version: "2.0.1",
		Locations: []source.Location{
			{Coordinates: source.Coordinates{RealPath: "/b/place/b"}},
		},
		Type:         pkg.DebPkg,
		FoundBy:      "the-cataloger-2",
		MetadataType: pkg.DpkgMetadataType,
		Metadata: pkg.DpkgMetadata{
			Package: "package-2",
			Version: "2.0.1",
			Files:   []pkg.DpkgFileRecord{},
		},
		PURL: "a-purl-2",
		CPEs: []pkg.CPE{
			pkg.MustCPE("cpe:2.3:*:some:package:2:*:*:*:*:*:*:*"),
		},
	}

	catalog.Add(p1)
	catalog.Add(p2)

	s := sbom.SBOM{
		Artifacts: sbom.Artifacts{
			PackageCatalog: catalog,
			FileMetadata: map[source.Coordinates]source.FileMetadata{
				source.NewLocation("/a/place").Coordinates: {
					Mode: 0775, Type: "directory", UserID: 0, GroupID: 0,
				},
				source.NewLocation("/a/place/a").Coordinates: {
					Mode: 0775, Type: "regularFile", UserID: 0, GroupID: 0,
				},
				source.NewLocation("/b").Coordinates: {
					Mode: 0775, Type: "symbolicLink", LinkDestination: "/c", UserID: 0, GroupID: 0,
				},
				source.NewLocation("/b/place/b").Coordinates: {
					Mode: 0644, Type: "regularFile", UserID: 1, GroupID: 2,
				},
			},
			FileDigests: map[source.Coordinates][]file.Digest{
				source.NewLocation("/a/place/a").Coordinates: {
					{Algorithm: "sha256", Value: "366a3f5653e34673b875891b021647440d0127c2ef041e3b1a22da2a7d4f3703"},
				},
				source.NewLocation("/b/place/b").Coordinates: {
					{Algorithm: "sha256", Value: "1b3722da2a7d90d033b87581a2a3f12021647445653e34666ef041e3b4f3707c"},
				},
			},
			FileContents: map[source.Coordinates]string{
				source.NewLocation("/a/place/a").Coordinates: "the-contents",
			},
			Distro: &distro.Distro{
				Type:       distro.RedHat,
				RawVersion: "7",
				IDLike:     "rhel",
			},
		},
		Relationships: []artifact.Relationship{
			{
				From: p1,
				To:   p2,
				Type: artifact.OwnershipByFileOverlapRelationship,
				Data: map[string]string{
					"file": "path",
				},
			},
		},
		Source: source.Metadata{
			Scheme: source.ImageScheme,
			ImageMetadata: source.ImageMetadata{
				UserInput:      "user-image-input",
				ID:             "sha256:c2b46b4eb06296933b7cf0722683964e9ecbd93265b9ef6ae9642e3952afbba0",
				ManifestDigest: "sha256:2731251dc34951c0e50fcc643b4c5f74922dad1a5d98f302b504cf46cd5d9368",
				MediaType:      "application/vnd.docker.distribution.manifest.v2+json",
				Tags: []string{
					"stereoscope-fixture-image-simple:85066c51088bdd274f7a89e99e00490f666c49e72ffc955707cd6e18f0e22c5b",
				},
				Size: 38,
				Layers: []source.LayerMetadata{
					{
						MediaType: "application/vnd.docker.image.rootfs.diff.tar.gzip",
						Digest:    "sha256:3de16c5b8659a2e8d888b8ded8427be7a5686a3c8c4e4dd30de20f362827285b",
						Size:      22,
					},
					{
						MediaType: "application/vnd.docker.image.rootfs.diff.tar.gzip",
						Digest:    "sha256:366a3f5653e34673b875891b021647440d0127c2ef041e3b1a22da2a7d4f3703",
						Size:      16,
					},
				},
				RawManifest: []byte("eyJzY2hlbWFWZXJzaW9uIjoyLCJtZWRpYVR5cGUiOiJh..."),
				RawConfig:   []byte("eyJhcmNoaXRlY3R1cmUiOiJhbWQ2NCIsImNvbmZp..."),
				RepoDigests: []string{},
			},
		},
		Descriptor: sbom.Descriptor{
			Name:    "syft",
			Version: "v0.42.0-bogus",
			// the application configuration should be persisted here, however, we do not want to import
			// the application configuration in this package (it's reserved only for ingestion by the cmd package)
			Configuration: map[string]string{
				"config-key": "config-value",
			},
		},
	}

	testutils.AssertPresenterAgainstGoldenSnapshot(t,
		Format().Presenter(s),
		*updateJson,
	)
}
@@ -4,10 +4,12 @@ package model
 type Document struct {
 	Artifacts             []Package      `json:"artifacts"` // Artifacts is the list of packages discovered and placed into the catalog
 	ArtifactRelationships []Relationship `json:"artifactRelationships"`
+	Files                 []File         `json:"files,omitempty"`   // note: must have omitempty
+	Secrets               []Secrets      `json:"secrets,omitempty"` // note: must have omitempty
 	Source                Source         `json:"source"`     // Source represents the original object that was cataloged
 	Distro                Distro         `json:"distro"`     // Distro represents the Linux distribution that was detected from the source
 	Descriptor            Descriptor     `json:"descriptor"` // Descriptor is a block containing self-describing information about syft
 	Schema                Schema         `json:"schema"`     // Schema is a block reserved for defining the version for the shape of this JSON document and where to find the schema document to validate the shape
 }

 // Descriptor describes what created the document as well as surrounding metadata
internal/formats/syftjson/model/file.go (new file, 25 lines)
@@ -0,0 +1,25 @@
package model

import (
	"github.com/anchore/syft/syft/file"

	"github.com/anchore/syft/syft/source"
)

type File struct {
	ID              string                `json:"id"`
	Location        source.Coordinates    `json:"location"`
	Metadata        *FileMetadataEntry    `json:"metadata,omitempty"`
	Contents        string                `json:"contents,omitempty"`
	Digests         []file.Digest         `json:"digests,omitempty"`
	Classifications []file.Classification `json:"classifications,omitempty"`
}

type FileMetadataEntry struct {
	Mode            int             `json:"mode"`
	Type            source.FileType `json:"type"`
	LinkDestination string          `json:"linkDestination,omitempty"`
	UserID          int             `json:"userID"`
	GroupID         int             `json:"groupID"`
	MIMEType        string          `json:"mimeType"`
}
@@ -9,7 +9,7 @@ import (
 	"github.com/anchore/syft/syft/source"
 )

-// Package represents a pkg.Package object specialized for JSON marshaling and unmarshaling.
+// Package represents a pkg.Package object specialized for JSON marshaling and unmarshalling.
 type Package struct {
 	PackageBasicData
 	PackageCustomData
@@ -4,5 +4,5 @@ type Relationship struct {
 	Parent   string      `json:"parent"`
 	Child    string      `json:"child"`
 	Type     string      `json:"type"`
-	Metadata interface{} `json:"metadata"`
+	Metadata interface{} `json:"metadata,omitempty"`
 }
internal/formats/syftjson/model/secrets.go (new file, 11 lines)
@@ -0,0 +1,11 @@
package model

import (
	"github.com/anchore/syft/syft/file"
	"github.com/anchore/syft/syft/source"
)

type Secrets struct {
	Location source.Coordinates  `json:"location"`
	Secrets  []file.SearchResult `json:"secrets"`
}
@@ -77,10 +77,13 @@
   },
   "descriptor": {
     "name": "syft",
-    "version": "[not provided]"
+    "version": "v0.42.0-bogus",
+    "configuration": {
+      "config-key": "config-value"
+    }
   },
   "schema": {
-    "version": "1.1.0",
-    "url": "https://raw.githubusercontent.com/anchore/syft/main/schema/json/schema-1.1.0.json"
+    "version": "2.0.0",
+    "url": "https://raw.githubusercontent.com/anchore/syft/main/schema/json/schema-2.0.0.json"
   }
 }
@@ -1,75 +1,4 @@
 {
-  "fileContents": [
-    {
-      "location": { "path": "/a/place/a" },
-      "contents": "the-contents"
-    }
-  ],
-  "fileMetadata": [
-    {
-      "location": { "path": "/a/place" },
-      "metadata": { "mode": 775, "type": "directory", "userID": 0, "groupID": 0, "mimeType": "" }
-    },
-    {
-      "location": { "path": "/a/place/a" },
-      "metadata": {
-        "mode": 775, "type": "regularFile", "userID": 0, "groupID": 0,
-        "digests": [ { "algorithm": "sha256", "value": "366a3f5653e34673b875891b021647440d0127c2ef041e3b1a22da2a7d4f3703" } ],
-        "mimeType": ""
-      }
-    },
-    {
-      "location": { "path": "/b" },
-      "metadata": { "mode": 775, "type": "symbolicLink", "linkDestination": "/c", "userID": 0, "groupID": 0, "mimeType": "" }
-    },
-    {
-      "location": { "path": "/b/place/b" },
-      "metadata": {
-        "mode": 644, "type": "regularFile", "userID": 1, "groupID": 2,
-        "digests": [ { "algorithm": "sha256", "value": "1b3722da2a7d90d033b87581a2a3f12021647445653e34666ef041e3b4f3707c" } ],
-        "mimeType": ""
-      }
-    }
-  ],
   "artifacts": [
     {
       "id": "962403cfb7be50d7",

@@ -131,7 +60,84 @@
       }
     }
   ],
-  "artifactRelationships": [],
+  "artifactRelationships": [
+    {
+      "parent": "962403cfb7be50d7",
+      "child": "b11f44847bba0ed1",
+      "type": "ownership-by-file-overlap",
+      "metadata": { "file": "path" }
+    }
+  ],
+  "files": [
+    {
+      "id": "913b4592e2c2ebdf",
+      "location": { "path": "/a/place" },
+      "metadata": { "mode": 775, "type": "directory", "userID": 0, "groupID": 0, "mimeType": "" }
+    },
+    {
+      "id": "e7c88bd18e11b0b",
+      "location": { "path": "/a/place/a" },
+      "metadata": { "mode": 775, "type": "regularFile", "userID": 0, "groupID": 0, "mimeType": "" },
+      "contents": "the-contents",
+      "digests": [ { "algorithm": "sha256", "value": "366a3f5653e34673b875891b021647440d0127c2ef041e3b1a22da2a7d4f3703" } ]
+    },
+    {
+      "id": "5c3dc6885f48b5a1",
+      "location": { "path": "/b" },
+      "metadata": { "mode": 775, "type": "symbolicLink", "linkDestination": "/c", "userID": 0, "groupID": 0, "mimeType": "" }
+    },
+    {
+      "id": "799d2f12da0bcec4",
+      "location": { "path": "/b/place/b" },
+      "metadata": { "mode": 644, "type": "regularFile", "userID": 1, "groupID": 2, "mimeType": "" },
+      "digests": [ { "algorithm": "sha256", "value": "1b3722da2a7d90d033b87581a2a3f12021647445653e34666ef041e3b4f3707c" } ]
+    }
+  ],
   "source": {
     "type": "image",
     "target": {

@@ -167,77 +173,13 @@
   },
   "descriptor": {
     "name": "syft",
-    "version": "[not provided]",
+    "version": "v0.42.0-bogus",
     "configuration": {
-      "configPath": "",
-      "output": "",
-      "file": "",
-      "quiet": false,
-      "check-for-app-update": false,
-      "anchore": { "host": "", "path": "", "dockerfile": "", "overwrite-existing-image": false, "import-timeout": 0 },
-      "dev": { "profile-cpu": false, "profile-mem": false },
-      "log": { "structured": false, "level": "", "file-location": "" },
-      "package": { "cataloger": { "enabled": false, "scope": "" } },
-      "file-metadata": { "cataloger": { "enabled": false, "scope": "" }, "digests": [ "sha256" ] },
-      "file-classification": { "cataloger": { "enabled": false, "scope": "" } },
-      "file-contents": { "cataloger": { "enabled": false, "scope": "" }, "skip-files-above-size": 0, "globs": null },
-      "secrets": { "cataloger": { "enabled": false, "scope": "" }, "additional-patterns": null, "exclude-pattern-names": null, "reveal-values": false, "skip-files-above-size": 0 },
-      "registry": { "insecure-skip-tls-verify": false, "insecure-use-http": false, "auth": null }
+      "config-key": "config-value"
     }
   },
   "schema": {
-    "version": "1.1.0",
-    "url": "https://raw.githubusercontent.com/anchore/syft/main/schema/json/schema-1.1.0.json"
+    "version": "2.0.0",
+    "url": "https://raw.githubusercontent.com/anchore/syft/main/schema/json/schema-2.0.0.json"
   }
 }
@@ -98,10 +98,13 @@
   },
   "descriptor": {
     "name": "syft",
-    "version": "[not provided]"
+    "version": "v0.42.0-bogus",
+    "configuration": {
+      "config-key": "config-value"
+    }
   },
   "schema": {
-    "version": "1.1.0",
-    "url": "https://raw.githubusercontent.com/anchore/syft/main/schema/json/schema-1.1.0.json"
+    "version": "2.0.0",
+    "url": "https://raw.githubusercontent.com/anchore/syft/main/schema/json/schema-2.0.0.json"
   }
 }
@ -2,6 +2,10 @@ package syftjson
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"sort"
|
||||||
|
"strconv"
|
||||||
|
|
||||||
|
"github.com/anchore/syft/syft/file"
|
||||||
|
|
||||||
"github.com/anchore/syft/syft/artifact"
|
"github.com/anchore/syft/syft/artifact"
|
||||||
|
|
||||||
@ -10,14 +14,13 @@ import (
|
|||||||
"github.com/anchore/syft/internal"
|
"github.com/anchore/syft/internal"
|
||||||
"github.com/anchore/syft/internal/formats/syftjson/model"
|
"github.com/anchore/syft/internal/formats/syftjson/model"
|
||||||
"github.com/anchore/syft/internal/log"
|
"github.com/anchore/syft/internal/log"
|
||||||
"github.com/anchore/syft/internal/version"
|
|
||||||
"github.com/anchore/syft/syft/distro"
|
"github.com/anchore/syft/syft/distro"
|
||||||
"github.com/anchore/syft/syft/pkg"
|
"github.com/anchore/syft/syft/pkg"
|
||||||
"github.com/anchore/syft/syft/source"
|
"github.com/anchore/syft/syft/source"
|
||||||
)
|
)
|
||||||
|
|
||||||
// TODO: this is export4ed for the use of the power-user command (temp)
|
// TODO: this is exported for the use of the power-user command (temp)
|
||||||
func ToFormatModel(s sbom.SBOM, applicationConfig interface{}) model.Document {
|
func ToFormatModel(s sbom.SBOM) model.Document {
|
||||||
src, err := toSourceModel(s.Source)
|
src, err := toSourceModel(s.Source)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Warnf("unable to create syft-json source object: %+v", err)
|
log.Warnf("unable to create syft-json source object: %+v", err)
|
||||||
@@ -26,13 +29,11 @@ func ToFormatModel(s sbom.SBOM, applicationConfig interface{}) model.Document {
 	return model.Document{
 		Artifacts:             toPackageModels(s.Artifacts.PackageCatalog),
 		ArtifactRelationships: toRelationshipModel(s.Relationships),
+		Files:                 toFile(s),
+		Secrets:               toSecrets(s.Artifacts.Secrets),
 		Source:                src,
 		Distro:                toDistroModel(s.Artifacts.Distro),
-		Descriptor: model.Descriptor{
-			Name:          internal.ApplicationName,
-			Version:       version.FromBuild().Version,
-			Configuration: applicationConfig,
-		},
+		Descriptor: toDescriptor(s.Descriptor),
 		Schema: model.Schema{
 			Version: internal.JSONSchemaVersion,
 			URL:     fmt.Sprintf("https://raw.githubusercontent.com/anchore/syft/main/schema/json/schema-%s.json", internal.JSONSchemaVersion),
@@ -40,6 +41,93 @@ func ToFormatModel(s sbom.SBOM, applicationConfig interface{}) model.Document {
 	}
 }
 
+func toDescriptor(d sbom.Descriptor) model.Descriptor {
+	return model.Descriptor{
+		Name:          d.Name,
+		Version:       d.Version,
+		Configuration: d.Configuration,
+	}
+}
+
+func toSecrets(data map[source.Coordinates][]file.SearchResult) []model.Secrets {
+	results := make([]model.Secrets, 0)
+	for coordinates, secrets := range data {
+		results = append(results, model.Secrets{
+			Location: coordinates,
+			Secrets:  secrets,
+		})
+	}
+
+	// sort by real path then virtual path to ensure the result is stable across multiple runs
+	sort.SliceStable(results, func(i, j int) bool {
+		return results[i].Location.RealPath < results[j].Location.RealPath
+	})
+	return results
+}
+
+func toFile(s sbom.SBOM) []model.File {
+	results := make([]model.File, 0)
+	artifacts := s.Artifacts
+
+	for _, coordinates := range sbom.AllCoordinates(s) {
+		var metadata *source.FileMetadata
+		if metadataForLocation, exists := artifacts.FileMetadata[coordinates]; exists {
+			metadata = &metadataForLocation
+		}
+
+		var digests []file.Digest
+		if digestsForLocation, exists := artifacts.FileDigests[coordinates]; exists {
+			digests = digestsForLocation
+		}
+
+		var classifications []file.Classification
+		if classificationsForLocation, exists := artifacts.FileClassifications[coordinates]; exists {
+			classifications = classificationsForLocation
+		}
+
+		var contents string
+		if contentsForLocation, exists := artifacts.FileContents[coordinates]; exists {
+			contents = contentsForLocation
+		}
+
+		results = append(results, model.File{
+			ID:              string(coordinates.ID()),
+			Location:        coordinates,
+			Metadata:        toFileMetadataEntry(coordinates, metadata),
+			Digests:         digests,
+			Classifications: classifications,
+			Contents:        contents,
+		})
+	}
+
+	// sort by real path then virtual path to ensure the result is stable across multiple runs
+	sort.SliceStable(results, func(i, j int) bool {
+		return results[i].Location.RealPath < results[j].Location.RealPath
+	})
+	return results
+}
+
+func toFileMetadataEntry(coordinates source.Coordinates, metadata *source.FileMetadata) *model.FileMetadataEntry {
+	if metadata == nil {
+		return nil
+	}
+
+	mode, err := strconv.Atoi(fmt.Sprintf("%o", metadata.Mode))
+	if err != nil {
+		log.Warnf("invalid mode found in file catalog @ location=%+v mode=%q: %+v", coordinates, metadata.Mode, err)
+		mode = 0
+	}
+
+	return &model.FileMetadataEntry{
+		Mode:            mode,
+		Type:            metadata.Type,
+		LinkDestination: metadata.LinkDestination,
+		UserID:          metadata.UserID,
+		GroupID:         metadata.GroupID,
+		MIMEType:        metadata.MIMEType,
+	}
+}
+
 func toPackageModels(catalog *pkg.Catalog) []model.Package {
 	artifacts := make([]model.Package, 0)
 	if catalog == nil {
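The mode conversion in toFileMetadataEntry is easy to misread: it renders the file mode in octal and then re-parses those digits as a decimal int, so a permission of 0o755 is stored as the integer 755 in the JSON output. A minimal standalone sketch of the same trick (the package name and sample value are illustrative, not part of this change):

package main

import (
	"fmt"
	"os"
	"strconv"
)

func main() {
	mode := os.FileMode(0o755)

	// print the mode in octal ("755"), then parse those digits back as a plain int
	asInt, err := strconv.Atoi(fmt.Sprintf("%o", mode))
	if err != nil {
		asInt = 0 // mirrors the fallback used above when the mode cannot be converted
	}

	fmt.Println(asInt) // prints 755
}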
@@ -20,10 +20,19 @@ func toSyftModel(doc model.Document) (*sbom.SBOM, error) {
 			PackageCatalog: toSyftCatalog(doc.Artifacts),
 			Distro:         &dist,
 		},
 		Source: *toSyftSourceData(doc.Source),
+		Descriptor: toSyftDescriptor(doc.Descriptor),
 	}, nil
 }
 
+func toSyftDescriptor(d model.Descriptor) sbom.Descriptor {
+	return sbom.Descriptor{
+		Name:          d.Name,
+		Version:       d.Version,
+		Configuration: d.Configuration,
+	}
+}
+
 func toSyftSourceData(s model.Source) *source.Metadata {
 	switch s.Type {
 	case "directory":
new file: internal/mimetype_helper.go (72 lines)
@@ -0,0 +1,72 @@
package internal

import "github.com/scylladb/go-set/strset"

var (
	ArchiveMIMETypeSet = strset.New(
		// derived from https://en.wikipedia.org/wiki/List_of_archive_formats
		[]string{
			// archive only
			"application/x-archive",
			"application/x-cpio",
			"application/x-shar",
			"application/x-iso9660-image",
			"application/x-sbx",
			"application/x-tar",
			// compression only
			"application/x-bzip2",
			"application/gzip",
			"application/x-lzip",
			"application/x-lzma",
			"application/x-lzop",
			"application/x-snappy-framed",
			"application/x-xz",
			"application/x-compress",
			"application/zstd",
			// archiving and compression
			"application/x-7z-compressed",
			"application/x-ace-compressed",
			"application/x-astrotite-afa",
			"application/x-alz-compressed",
			"application/vnd.android.package-archive",
			"application/x-freearc",
			"application/x-arj",
			"application/x-b1",
			"application/vnd.ms-cab-compressed",
			"application/x-cfs-compressed",
			"application/x-dar",
			"application/x-dgc-compressed",
			"application/x-apple-diskimage",
			"application/x-gca-compressed",
			"application/java-archive",
			"application/x-lzh",
			"application/x-lzx",
			"application/x-rar-compressed",
			"application/x-stuffit",
			"application/x-stuffitx",
			"application/x-gtar",
			"application/x-ms-wim",
			"application/x-xar",
			"application/zip",
			"application/x-zoo",
		}...,
	)

	ExecutableMIMETypeSet = strset.New(
		[]string{
			"application/x-executable",
			"application/x-mach-binary",
			"application/x-elf",
			"application/x-sharedlib",
			"application/vnd.microsoft.portable-executable",
		}...,
	)
)

func IsArchive(mimeType string) bool {
	return ArchiveMIMETypeSet.Has(mimeType)
}

func IsExecutable(mimeType string) bool {
	return ExecutableMIMETypeSet.Has(mimeType)
}
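A minimal sketch of how these helpers read from a caller's point of view, written as a Go example test inside the same package (the MIME types below are arbitrary samples, and Go's internal-package rules mean this only compiles from within the syft module):

package internal

import "fmt"

// ExampleIsArchive is an illustrative sketch only.
func ExampleIsArchive() {
	fmt.Println(IsArchive("application/zip"))
	fmt.Println(IsArchive("text/plain"))
	fmt.Println(IsExecutable("application/x-elf"))
	// Output:
	// true
	// false
	// true
}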
new file: internal/mimetype_helper_test.go (57 lines)
@@ -0,0 +1,57 @@
package internal

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func Test_IsArchive(t *testing.T) {

	tests := []struct {
		name     string
		mimeType string
		expected bool
	}{
		{
			name:     "not an archive",
			mimeType: "application/vnd.unknown",
			expected: false,
		},
		{
			name:     "archive",
			mimeType: "application/x-rar-compressed",
			expected: true,
		},
	}
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			assert.Equal(t, test.expected, IsArchive(test.mimeType))
		})
	}
}

func Test_IsExecutable(t *testing.T) {

	tests := []struct {
		name     string
		mimeType string
		expected bool
	}{
		{
			name:     "not an executable",
			mimeType: "application/vnd.unknown",
			expected: false,
		},
		{
			name:     "executable",
			mimeType: "application/x-mach-binary",
			expected: true,
		},
	}
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			assert.Equal(t, test.expected, IsExecutable(test.mimeType))
		})
	}
}
removed file (package poweruser)
@@ -1,35 +0,0 @@
package poweruser

import (
	"github.com/anchore/syft/internal/formats/syftjson"
	"github.com/anchore/syft/internal/formats/syftjson/model"
	"github.com/anchore/syft/syft/sbom"
)

type JSONDocument struct {
	// note: poweruser.JSONDocument is meant to always be a superset of packages.JSONDocument, any additional fields
	// here should be optional by supplying "omitempty" on these fields hint to the jsonschema generator to not
	// require these fields. As an accepted rule in this repo all collections should still be initialized in the
	// context of being used in a JSON document.
	FileClassifications []JSONFileClassifications `json:"fileClassifications,omitempty"` // note: must have omitempty
	FileContents        []JSONFileContents        `json:"fileContents,omitempty"`        // note: must have omitempty
	FileMetadata        []JSONFileMetadata        `json:"fileMetadata,omitempty"`        // note: must have omitempty
	Secrets             []JSONSecrets             `json:"secrets,omitempty"`             // note: must have omitempty
	model.Document
}

// NewJSONDocument creates and populates a new JSON document struct from the given cataloging results.
func NewJSONDocument(s sbom.SBOM, appConfig interface{}) (JSONDocument, error) {
	fileMetadata, err := NewJSONFileMetadata(s.Artifacts.FileMetadata, s.Artifacts.FileDigests)
	if err != nil {
		return JSONDocument{}, err
	}

	return JSONDocument{
		FileClassifications: NewJSONFileClassifications(s.Artifacts.FileClassifications),
		FileContents:        NewJSONFileContents(s.Artifacts.FileContents),
		FileMetadata:        fileMetadata,
		Secrets:             NewJSONSecrets(s.Artifacts.Secrets),
		Document:            syftjson.ToFormatModel(s, appConfig),
	}, nil
}
removed file (package poweruser)
@@ -1,31 +0,0 @@
package poweruser

import (
	"sort"

	"github.com/anchore/syft/syft/file"
	"github.com/anchore/syft/syft/source"
)

type JSONFileClassifications struct {
	Location       source.Coordinates  `json:"location"`
	Classification file.Classification `json:"classification"`
}

func NewJSONFileClassifications(data map[source.Coordinates][]file.Classification) []JSONFileClassifications {
	results := make([]JSONFileClassifications, 0)
	for coordinates, classifications := range data {
		for _, classification := range classifications {
			results = append(results, JSONFileClassifications{
				Location:       coordinates,
				Classification: classification,
			})
		}
	}

	// sort by real path then virtual path to ensure the result is stable across multiple runs
	sort.SliceStable(results, func(i, j int) bool {
		return results[i].Location.RealPath < results[j].Location.RealPath
	})
	return results
}
removed file (package poweruser)
@@ -1,28 +0,0 @@
package poweruser

import (
	"sort"

	"github.com/anchore/syft/syft/source"
)

type JSONFileContents struct {
	Location source.Coordinates `json:"location"`
	Contents string             `json:"contents"`
}

func NewJSONFileContents(data map[source.Coordinates]string) []JSONFileContents {
	results := make([]JSONFileContents, 0)
	for coordinates, contents := range data {
		results = append(results, JSONFileContents{
			Location: coordinates,
			Contents: contents,
		})
	}

	// sort by real path then virtual path to ensure the result is stable across multiple runs
	sort.SliceStable(results, func(i, j int) bool {
		return results[i].Location.RealPath < results[j].Location.RealPath
	})
	return results
}
removed file (package poweruser)
@@ -1,60 +0,0 @@
package poweruser

import (
	"fmt"
	"sort"
	"strconv"

	"github.com/anchore/syft/syft/file"

	"github.com/anchore/syft/syft/source"
)

type JSONFileMetadata struct {
	Location source.Coordinates    `json:"location"`
	Metadata JSONFileMetadataEntry `json:"metadata"`
}

type JSONFileMetadataEntry struct {
	Mode            int             `json:"mode"`
	Type            source.FileType `json:"type"`
	LinkDestination string          `json:"linkDestination,omitempty"`
	UserID          int             `json:"userID"`
	GroupID         int             `json:"groupID"`
	Digests         []file.Digest   `json:"digests,omitempty"`
	MIMEType        string          `json:"mimeType"`
}

func NewJSONFileMetadata(data map[source.Coordinates]source.FileMetadata, digests map[source.Coordinates][]file.Digest) ([]JSONFileMetadata, error) {
	results := make([]JSONFileMetadata, 0)
	for coordinates, metadata := range data {
		mode, err := strconv.Atoi(fmt.Sprintf("%o", metadata.Mode))
		if err != nil {
			return nil, fmt.Errorf("invalid mode found in file catalog @ location=%+v mode=%q: %w", coordinates, metadata.Mode, err)
		}

		var digestResults []file.Digest
		if digestsForLocation, exists := digests[coordinates]; exists {
			digestResults = digestsForLocation
		}

		results = append(results, JSONFileMetadata{
			Location: coordinates,
			Metadata: JSONFileMetadataEntry{
				Mode:            mode,
				Type:            metadata.Type,
				LinkDestination: metadata.LinkDestination,
				UserID:          metadata.UserID,
				GroupID:         metadata.GroupID,
				Digests:         digestResults,
				MIMEType:        metadata.MIMEType,
			},
		})
	}

	// sort by real path then virtual path to ensure the result is stable across multiple runs
	sort.SliceStable(results, func(i, j int) bool {
		return results[i].Location.RealPath < results[j].Location.RealPath
	})
	return results, nil
}
removed file (package poweruser)
@@ -1,36 +0,0 @@
package poweruser

import (
	"encoding/json"
	"io"

	"github.com/anchore/syft/syft/sbom"
)

// JSONPresenter is a JSON presentation object for the syft results
type JSONPresenter struct {
	sbom   sbom.SBOM
	config interface{}
}

// NewJSONPresenter creates a new JSON presenter object for the given cataloging results.
func NewJSONPresenter(s sbom.SBOM, appConfig interface{}) *JSONPresenter {
	return &JSONPresenter{
		sbom:   s,
		config: appConfig,
	}
}

// Present the PackageCatalog results to the given writer.
func (p *JSONPresenter) Present(output io.Writer) error {
	doc, err := NewJSONDocument(p.sbom, p.config)
	if err != nil {
		return err
	}

	enc := json.NewEncoder(output)
	// prevent > and < from being escaped in the payload
	enc.SetEscapeHTML(false)
	enc.SetIndent("", " ")
	return enc.Encode(&doc)
}
removed file (package poweruser)
@@ -1,189 +0,0 @@
package poweruser

import (
	"bytes"
	"flag"
	"testing"

	"github.com/anchore/syft/syft/sbom"

	"github.com/sergi/go-diff/diffmatchpatch"

	"github.com/anchore/syft/syft/file"

	"github.com/anchore/go-testutils"
	"github.com/anchore/syft/internal/config"
	"github.com/anchore/syft/syft/distro"
	"github.com/anchore/syft/syft/pkg"
	"github.com/anchore/syft/syft/source"
)

var updateJSONGoldenFiles = flag.Bool("update-json", false, "update the *.golden files for json presenters")

func must(c pkg.CPE, e error) pkg.CPE {
	if e != nil {
		panic(e)
	}
	return c
}

func TestJSONPresenter(t *testing.T) {
	var buffer bytes.Buffer

	catalog := pkg.NewCatalog()

	catalog.Add(pkg.Package{
		Name:    "package-1",
		Version: "1.0.1",
		Locations: []source.Location{
			{
				Coordinates: source.Coordinates{
					RealPath: "/a/place/a",
				},
			},
		},
		Type:         pkg.PythonPkg,
		FoundBy:      "the-cataloger-1",
		Language:     pkg.Python,
		MetadataType: pkg.PythonPackageMetadataType,
		Licenses:     []string{"MIT"},
		Metadata: pkg.PythonPackageMetadata{
			Name:    "package-1",
			Version: "1.0.1",
			Files:   []pkg.PythonFileRecord{},
		},
		PURL: "a-purl-1",
		CPEs: []pkg.CPE{
			must(pkg.NewCPE("cpe:2.3:*:some:package:1:*:*:*:*:*:*:*")),
		},
	})
	catalog.Add(pkg.Package{
		Name:    "package-2",
		Version: "2.0.1",
		Locations: []source.Location{
			{
				Coordinates: source.Coordinates{
					RealPath: "/b/place/b",
				},
			},
		},
		Type:         pkg.DebPkg,
		FoundBy:      "the-cataloger-2",
		MetadataType: pkg.DpkgMetadataType,
		Metadata: pkg.DpkgMetadata{
			Package: "package-2",
			Version: "2.0.1",
			Files:   []pkg.DpkgFileRecord{},
		},
		PURL: "a-purl-2",
		CPEs: []pkg.CPE{
			must(pkg.NewCPE("cpe:2.3:*:some:package:2:*:*:*:*:*:*:*")),
		},
	})

	appConfig := config.Application{
		FileMetadata: config.FileMetadata{
			Digests: []string{"sha256"},
		},
	}

	cfg := sbom.SBOM{
		Artifacts: sbom.Artifacts{
			PackageCatalog: catalog,
			FileMetadata: map[source.Coordinates]source.FileMetadata{
				source.NewLocation("/a/place").Coordinates: {
					Mode:    0775,
					Type:    "directory",
					UserID:  0,
					GroupID: 0,
				},
				source.NewLocation("/a/place/a").Coordinates: {
					Mode:    0775,
					Type:    "regularFile",
					UserID:  0,
					GroupID: 0,
				},
				source.NewLocation("/b").Coordinates: {
					Mode:            0775,
					Type:            "symbolicLink",
					LinkDestination: "/c",
					UserID:          0,
					GroupID:         0,
				},
				source.NewLocation("/b/place/b").Coordinates: {
					Mode:    0644,
					Type:    "regularFile",
					UserID:  1,
					GroupID: 2,
				},
			},
			FileDigests: map[source.Coordinates][]file.Digest{
				source.NewLocation("/a/place/a").Coordinates: {
					{
						Algorithm: "sha256",
						Value:     "366a3f5653e34673b875891b021647440d0127c2ef041e3b1a22da2a7d4f3703",
					},
				},
				source.NewLocation("/b/place/b").Coordinates: {
					{
						Algorithm: "sha256",
						Value:     "1b3722da2a7d90d033b87581a2a3f12021647445653e34666ef041e3b4f3707c",
					},
				},
			},
			FileContents: map[source.Coordinates]string{
				source.NewLocation("/a/place/a").Coordinates: "the-contents",
			},
			Distro: &distro.Distro{
				Type:       distro.RedHat,
				RawVersion: "7",
				IDLike:     "rhel",
			},
		},
		Source: source.Metadata{
			Scheme: source.ImageScheme,
			ImageMetadata: source.ImageMetadata{
				UserInput:      "user-image-input",
				ID:             "sha256:c2b46b4eb06296933b7cf0722683964e9ecbd93265b9ef6ae9642e3952afbba0",
				ManifestDigest: "sha256:2731251dc34951c0e50fcc643b4c5f74922dad1a5d98f302b504cf46cd5d9368",
				MediaType:      "application/vnd.docker.distribution.manifest.v2+json",
				Tags: []string{
					"stereoscope-fixture-image-simple:85066c51088bdd274f7a89e99e00490f666c49e72ffc955707cd6e18f0e22c5b",
				},
				Size: 38,
				Layers: []source.LayerMetadata{
					{
						MediaType: "application/vnd.docker.image.rootfs.diff.tar.gzip",
						Digest:    "sha256:3de16c5b8659a2e8d888b8ded8427be7a5686a3c8c4e4dd30de20f362827285b",
						Size:      22,
					},
					{
						MediaType: "application/vnd.docker.image.rootfs.diff.tar.gzip",
						Digest:    "sha256:366a3f5653e34673b875891b021647440d0127c2ef041e3b1a22da2a7d4f3703",
						Size:      16,
					},
				},
				RawManifest: []byte("eyJzY2hlbWFWZXJzaW9uIjoyLCJtZWRpYVR5cGUiOiJh..."),
				RawConfig:   []byte("eyJhcmNoaXRlY3R1cmUiOiJhbWQ2NCIsImNvbmZp..."),
				RepoDigests: []string{},
			},
		},
	}

	if err := NewJSONPresenter(cfg, appConfig).Present(&buffer); err != nil {
		t.Fatal(err)
	}
	actual := buffer.Bytes()

	if *updateJSONGoldenFiles {
		testutils.UpdateGoldenFileContents(t, actual)
	}

	expected := testutils.GetGoldenFileContents(t)

	if !bytes.Equal(expected, actual) {
		dmp := diffmatchpatch.New()
		diffs := dmp.DiffMain(string(expected), string(actual), true)
		t.Errorf("mismatched output:\n%s", dmp.DiffPrettyText(diffs))
	}
}
removed file (package poweruser)
@@ -1,29 +0,0 @@
package poweruser

import (
	"sort"

	"github.com/anchore/syft/syft/file"
	"github.com/anchore/syft/syft/source"
)

type JSONSecrets struct {
	Location source.Coordinates  `json:"location"`
	Secrets  []file.SearchResult `json:"secrets"`
}

func NewJSONSecrets(data map[source.Coordinates][]file.SearchResult) []JSONSecrets {
	results := make([]JSONSecrets, 0)
	for coordinates, secrets := range data {
		results = append(results, JSONSecrets{
			Location: coordinates,
			Secrets:  secrets,
		})
	}

	// sort by real path then virtual path to ensure the result is stable across multiple runs
	sort.SliceStable(results, func(i, j int) bool {
		return results[i].Location.RealPath < results[j].Location.RealPath
	})
	return results
}
@@ -12,7 +12,7 @@ import (
 
 	"github.com/alecthomas/jsonschema"
 	"github.com/anchore/syft/internal"
-	"github.com/anchore/syft/internal/presenter/poweruser"
+	syftjsonModel "github.com/anchore/syft/internal/formats/syftjson/model"
 	"github.com/anchore/syft/syft/pkg"
 )
 
@@ -48,7 +48,7 @@ func build() *jsonschema.Schema {
 			return strings.TrimPrefix(r.Name(), "JSON")
 		},
 	}
-	documentSchema := reflector.ReflectFromType(reflect.TypeOf(&poweruser.JSONDocument{}))
+	documentSchema := reflector.ReflectFromType(reflect.TypeOf(&syftjsonModel.Document{}))
 	metadataSchema := reflector.ReflectFromType(reflect.TypeOf(&artifactMetadataContainer{}))
 
 	// TODO: inject source definitions
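The generator above reflects the Go document model into a JSON schema and then writes it out as schema-2.0.0.json. As orientation only, a minimal standalone sketch of the same idea with alecthomas/jsonschema (the Document struct here is a made-up sample, not syft's model):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/alecthomas/jsonschema"
)

// Document is an illustrative stand-in for the reflected model type.
type Document struct {
	Name    string `json:"name"`
	Version string `json:"version"`
}

func main() {
	schema := jsonschema.Reflect(&Document{})
	out, _ := json.MarshalIndent(schema, "", "  ")
	fmt.Println(string(out)) // prints a draft JSON schema describing Document
}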
new file: schema/json/schema-2.0.0.json (1002 lines; diff not shown because it is too large)
@@ -1,17 +1,21 @@
 package artifact
 
 const (
-	// OwnershipByFileOverlapRelationship indicates that the parent package claims ownership of a child package since
-	// the parent metadata indicates overlap with a location that a cataloger found the child package by. This is
-	// by definition a package-to-package relationship and is created only after all package cataloging has been completed.
+	// OwnershipByFileOverlapRelationship (supports package-to-package linkages) indicates that the parent package
+	// claims ownership of a child package since the parent metadata indicates overlap with a location that a
+	// cataloger found the child package by. This relationship must be created only after all package cataloging
+	// has been completed.
 	OwnershipByFileOverlapRelationship RelationshipType = "ownership-by-file-overlap"
+
+	// ContainsRelationship (supports any-to-any linkages) is a proxy for the SPDX 2.2 CONTAINS relationship.
+	ContainsRelationship RelationshipType = "contains"
 )
 
 type RelationshipType string
 
 type Relationship struct {
-	From Identifiable     `json:"from"`
-	To   Identifiable     `json:"to"`
-	Type RelationshipType `json:"type"`
-	Data interface{}      `json:"data,omitempty"`
+	From Identifiable
+	To   Identifiable
+	Type RelationshipType
+	Data interface{}
 }
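A rough sketch of how the new relationship type is meant to be used: since both packages and file coordinates satisfy artifact.Identifiable, a cataloger can record that a package contains a file it owns. The helper and path below are invented for illustration and are not part of this change:

package example // illustrative only

import (
	"github.com/anchore/syft/syft/artifact"
	"github.com/anchore/syft/syft/pkg"
	"github.com/anchore/syft/syft/source"
)

// containsFile records that a package contains a file at the given path.
func containsFile(p pkg.Package, path string) artifact.Relationship {
	return artifact.Relationship{
		From: p,                                  // the owning package
		To:   source.Coordinates{RealPath: path}, // the owned file
		Type: artifact.ContainsRelationship,
	}
}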
@@ -1,6 +1,8 @@
 package cataloger
 
 import (
+	"fmt"
+
 	"github.com/anchore/syft/internal/bus"
 	"github.com/anchore/syft/internal/log"
 	"github.com/anchore/syft/syft/artifact"
@@ -67,6 +69,14 @@ func Catalog(resolver source.FileResolver, theDistro *distro.Distro, catalogers
 		// generate PURL
 		p.PURL = generatePackageURL(p, theDistro)
 
+		// create file-to-package relationships for files owned by the package
+		owningRelationships, err := packageFileOwnershipRelationships(p, resolver)
+		if err != nil {
+			log.Warnf("unable to create any package-file relationships for package name=%q: %w", p.Name, err)
+		} else {
+			allRelationships = append(allRelationships, owningRelationships...)
+		}
+
 		// add to catalog
 		catalog.Add(p)
 	}
@@ -85,3 +95,35 @@ func Catalog(resolver source.FileResolver, theDistro *distro.Distro, catalogers
 
 	return catalog, allRelationships, nil
 }
+
+func packageFileOwnershipRelationships(p pkg.Package, resolver source.FilePathResolver) ([]artifact.Relationship, error) {
+	fileOwner, ok := p.Metadata.(pkg.FileOwner)
+	if !ok {
+		return nil, nil
+	}
+
+	var relationships []artifact.Relationship
+
+	for _, path := range fileOwner.OwnedFiles() {
+		locations, err := resolver.FilesByPath(path)
+		if err != nil {
+			return nil, fmt.Errorf("unable to find path for path=%q: %w", path, err)
+		}
+
+		if len(locations) == 0 {
+			// TODO: this is a known-unknown that could later be persisted in the SBOM (or as a validation failure)
+			log.Warnf("unable to find location which a package claims ownership of: %s", path)
+			continue
+		}
+
+		for _, l := range locations {
+			relationships = append(relationships, artifact.Relationship{
+				From: p,
+				To:   l.Coordinates,
+				Type: artifact.ContainsRelationship,
+			})
+		}
+	}
+
+	return relationships, nil
+}
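The ownership walk above hinges on the pkg.FileOwner type assertion: only package metadata types that expose the files they claim to own produce contains relationships. Assuming, as the loop suggests, that the interface is satisfied by an OwnedFiles() []string method, a metadata type would participate roughly like this (the type below is invented for illustration, not part of syft):

package example // illustrative only

// fakeMetadata is a made-up metadata type that would satisfy pkg.FileOwner
// by reporting the paths the package claims to own.
type fakeMetadata struct {
	files []string
}

func (m fakeMetadata) OwnedFiles() []string {
	return m.files
}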
@@ -16,15 +6,6 @@ import (
 
 const catalogerName = "go-module-binary-cataloger"
 
-// current mime types to search by to discover go binaries
-var mimeTypes = []string{
-	"application/x-executable",
-	"application/x-mach-binary",
-	"application/x-elf",
-	"application/x-sharedlib",
-	"application/vnd.microsoft.portable-executable",
-}
-
 type Cataloger struct{}
 
 // NewGoModuleBinaryCataloger returns a new Golang cataloger object.
@@ -41,7 +32,7 @@ func (c *Cataloger) Name() string {
 func (c *Cataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []artifact.Relationship, error) {
 	var pkgs []pkg.Package
 
-	fileMatches, err := resolver.FilesByMIMEType(mimeTypes...)
+	fileMatches, err := resolver.FilesByMIMEType(internal.ExecutableMIMETypeSet.List()...)
 	if err != nil {
 		return pkgs, nil, fmt.Errorf("failed to find bin by mime types: %w", err)
 	}
@@ -12,6 +12,7 @@ type SBOM struct {
 	Artifacts     Artifacts
 	Relationships []artifact.Relationship
 	Source        source.Metadata
+	Descriptor    Descriptor
 }
 
 type Artifacts struct {
@@ -23,3 +24,43 @@ type Artifacts struct {
 	Secrets map[source.Coordinates][]file.SearchResult
 	Distro  *distro.Distro
 }
+
+type Descriptor struct {
+	Name          string
+	Version       string
+	Configuration interface{}
+}
+
+func AllCoordinates(sbom SBOM) []source.Coordinates {
+	set := source.NewCoordinateSet()
+	for coordinates := range sbom.Artifacts.FileMetadata {
+		set.Add(coordinates)
+	}
+	for coordinates := range sbom.Artifacts.FileContents {
+		set.Add(coordinates)
+	}
+	for coordinates := range sbom.Artifacts.FileClassifications {
+		set.Add(coordinates)
+	}
+	for coordinates := range sbom.Artifacts.FileDigests {
+		set.Add(coordinates)
+	}
+	for _, relationship := range sbom.Relationships {
+		for _, coordinates := range extractCoordinates(relationship) {
+			set.Add(coordinates)
+		}
+	}
+	return set.ToSlice()
+}
+
+func extractCoordinates(relationship artifact.Relationship) (results []source.Coordinates) {
+	if coordinates, exists := relationship.From.(source.Coordinates); exists {
+		results = append(results, coordinates)
+	}

+	if coordinates, exists := relationship.To.(source.Coordinates); exists {
+		results = append(results, coordinates)
+	}
+
+	return results
+}
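A small sketch of how these additions fit together from a consumer's point of view: the descriptor now travels with the SBOM itself instead of being injected at encode time, and AllCoordinates gathers every file coordinate referenced anywhere in the SBOM, de-duplicated and sorted. Names and the helper function are illustrative only:

package example // illustrative only

import (
	"fmt"

	"github.com/anchore/syft/syft/sbom"
)

func describe(s sbom.SBOM) {
	// who produced this SBOM, carried on the SBOM value itself
	fmt.Printf("produced by %s %s\n", s.Descriptor.Name, s.Descriptor.Version)

	// every file coordinate referenced by metadata, digests, contents,
	// classifications, or relationships, de-duplicated and sorted
	for _, c := range sbom.AllCoordinates(s) {
		fmt.Println(c.RealPath)
	}
}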
@@ -2,6 +2,7 @@ package source
 
 import (
 	"fmt"
+	"sort"
 
 	"github.com/anchore/syft/internal/log"
 	"github.com/anchore/syft/syft/artifact"
@@ -13,6 +14,18 @@ type Coordinates struct {
 	FileSystemID string `json:"layerID,omitempty"` // An ID representing the filesystem. For container images, this is a layer digest. For directories or a root filesystem, this is blank.
 }
 
+// CoordinateSet represents a set of string types.
+type CoordinateSet map[Coordinates]struct{}
+
+// NewCoordinateSet creates a CoordinateSet populated with values from the given slice.
+func NewCoordinateSet(start ...Coordinates) CoordinateSet {
+	ret := make(CoordinateSet)
+	for _, s := range start {
+		ret.Add(s)
+	}
+	return ret
+}
+
 func (c Coordinates) ID() artifact.ID {
 	f, err := artifact.IDFromHash(c)
 	if err != nil {
@@ -32,3 +45,37 @@ func (c Coordinates) String() string {
 	}
 	return fmt.Sprintf("Location<%s>", str)
 }
+
+// Add a string to the set.
+func (s CoordinateSet) Add(i Coordinates) {
+	s[i] = struct{}{}
+}
+
+// Remove a string from the set.
+func (s CoordinateSet) Remove(i Coordinates) {
+	delete(s, i)
+}
+
+// Contains indicates if the given string is contained within the set.
+func (s CoordinateSet) Contains(i Coordinates) bool {
+	_, ok := s[i]
+	return ok
+}
+
+// ToSlice returns a sorted slice of Locations that are contained within the set.
+func (s CoordinateSet) ToSlice() []Coordinates {
+	ret := make([]Coordinates, len(s))
+	idx := 0
+	for v := range s {
+		ret[idx] = v
+		idx++
+	}
+
+	sort.SliceStable(ret, func(i, j int) bool {
+		if ret[i].RealPath == ret[j].RealPath {
+			return ret[i].FileSystemID < ret[j].FileSystemID
+		}
+		return ret[i].RealPath < ret[j].RealPath
+	})
+	return ret
+}
new file: syft/source/coordinates_test.go (51 lines)
@@ -0,0 +1,51 @@
package source

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestCoordinateSet(t *testing.T) {

	binA := Coordinates{
		RealPath:     "/bin",
		FileSystemID: "a",
	}

	binB := Coordinates{
		RealPath:     "/bin",
		FileSystemID: "b",
	}

	tests := []struct {
		name     string
		input    []Coordinates
		expected []Coordinates
	}{
		{
			name: "de-dup same location",
			input: []Coordinates{
				binA, binA, binA,
			},
			expected: []Coordinates{
				binA,
			},
		},
		{
			name: "dont de-dup different filesystem",
			input: []Coordinates{
				binB, binA,
			},
			expected: []Coordinates{
				binA, binB,
			},
		},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			assert.Equal(t, test.expected, NewCoordinateSet(test.input...).ToSlice())
		})
	}
}
@@ -32,6 +32,15 @@ func catalogFixtureImage(t *testing.T, fixtureImageName string) (sbom.SBOM, *sou
 		},
 		Relationships: relationships,
 		Source:        theSource.Metadata,
+		Descriptor: sbom.Descriptor{
+			Name:    "syft",
+			Version: "v0.42.0-bogus",
+			// the application configuration should be persisted here, however, we do not want to import
+			// the application configuration in this package (it's reserved only for ingestion by the cmd package)
+			Configuration: map[string]string{
+				"config-key": "config-value",
+			},
+		},
 	}, theSource
 }