internalize format helpers (#2543)

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>
Alex Goodman 2024-01-26 12:16:26 -05:00 committed by GitHub
parent b6cbf82389
commit f893933336
60 changed files with 265 additions and 267 deletions
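This commit relocates the shared CycloneDX and SPDX format helpers into internal packages (syft/format/internal/cyclonedxutil/helpers, syft/format/internal/spdxutil/helpers, and internal/mimetype) and exports the identifiers those encoders and decoders now share. Below is a minimal sketch of how a caller inside the syft module uses the relocated CycloneDX helpers after this change; the surrounding package and function are hypothetical, while the import path and the helpers.EncodeComponent / helpers.DeriveBomRef signatures are taken from the diff that follows:

package example // hypothetical caller; these internal packages are only importable from within the syft module

import (
	"github.com/CycloneDX/cyclonedx-go"

	"github.com/anchore/syft/syft/format/internal/cyclonedxutil/helpers"
	"github.com/anchore/syft/syft/pkg"
)

// toComponents mirrors the loop in ToFormatModel below: the previously unexported
// encodeComponent and deriveBomRef are now helpers.EncodeComponent and helpers.DeriveBomRef.
func toComponents(pkgs []pkg.Package) []cyclonedx.Component {
	components := make([]cyclonedx.Component, 0, len(pkgs))
	for _, p := range pkgs {
		// EncodeComponent returns a cyclonedx.Component whose BOMRef is derived via helpers.DeriveBomRef(p)
		components = append(components, helpers.EncodeComponent(p))
	}
	return components
}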

View File

@ -1,4 +1,4 @@
package internal package mimetype
import "github.com/scylladb/go-set/strset" import "github.com/scylladb/go-set/strset"

View File

@ -1,4 +1,4 @@
package internal package mimetype
import ( import (
"testing" "testing"

View File

@@ -11,6 +11,7 @@ import (
 "github.com/anchore/syft/internal/log"
 "github.com/anchore/syft/syft/artifact"
 "github.com/anchore/syft/syft/cpe"
+"github.com/anchore/syft/syft/format/internal/cyclonedxutil/helpers"
 "github.com/anchore/syft/syft/linux"
 "github.com/anchore/syft/syft/pkg"
 "github.com/anchore/syft/syft/sbom"
@@ -29,7 +30,7 @@ func ToFormatModel(s sbom.SBOM) *cyclonedx.BOM {
 packages := s.Artifacts.Packages.Sorted()
 components := make([]cyclonedx.Component, len(packages))
 for i, p := range packages {
-components[i] = encodeComponent(p)
+components[i] = helpers.EncodeComponent(p)
 }
 components = append(components, toOSComponent(s.Artifacts.LinuxDistribution)...)
 cdxBOM.Components = &components
@@ -76,7 +77,7 @@ func toOSComponent(distro *linux.Release) []cyclonedx.Component {
 if len(*eRefs) == 0 {
 eRefs = nil
 }
-props := encodeProperties(distro, "syft:distro")
+props := helpers.EncodeProperties(distro, "syft:distro")
 var properties *[]cyclonedx.Property
 if len(props) > 0 {
 properties = &props
@@ -165,7 +166,7 @@ func toDependencies(relationships []artifact.Relationship) []cyclonedx.Dependenc
 continue
 }
-toRef := deriveBomRef(toPkg)
+toRef := helpers.DeriveBomRef(toPkg)
 dep := dependencies[toRef]
 if dep == nil {
 dep = &cyclonedx.Dependency{
@@ -175,7 +176,7 @@ func toDependencies(relationships []artifact.Relationship) []cyclonedx.Dependenc
 dependencies[toRef] = dep
 }
-fromRef := deriveBomRef(fromPkg)
+fromRef := helpers.DeriveBomRef(fromPkg)
 if !slices.Contains(*dep.Dependencies, fromRef) {
 *dep.Dependencies = append(*dep.Dependencies, fromRef)
 }
@@ -197,7 +198,7 @@ func toDependencies(relationships []artifact.Relationship) []cyclonedx.Dependenc
 func toBomProperties(srcMetadata source.Description) *[]cyclonedx.Property {
 metadata, ok := srcMetadata.Metadata.(source.StereoscopeImageSourceMetadata)
 if ok {
-props := encodeProperties(metadata.Labels, "syft:image:labels")
+props := helpers.EncodeProperties(metadata.Labels, "syft:image:labels")
 return &props
 }
 return nil

View File

@@ -10,6 +10,7 @@ import (
 "github.com/stretchr/testify/require"
 "github.com/anchore/syft/syft/artifact"
+"github.com/anchore/syft/syft/format/internal/cyclonedxutil/helpers"
 "github.com/anchore/syft/syft/pkg"
 "github.com/anchore/syft/syft/sbom"
 "github.com/anchore/syft/syft/source"
@@ -95,16 +96,16 @@ func Test_relationships(t *testing.T) {
 },
 expected: &[]cyclonedx.Dependency{
 {
-Ref: deriveBomRef(p1),
+Ref: helpers.DeriveBomRef(p1),
 Dependencies: &[]string{
-deriveBomRef(p2),
+helpers.DeriveBomRef(p2),
-deriveBomRef(p3),
+helpers.DeriveBomRef(p3),
 },
 },
 {
-Ref: deriveBomRef(p2),
+Ref: helpers.DeriveBomRef(p2),
 Dependencies: &[]string{
-deriveBomRef(p4),
+helpers.DeriveBomRef(p4),
 },
 },
 },

View File

@@ -14,12 +14,12 @@ import (
 "github.com/spdx/tools-golang/spdx"
 "github.com/anchore/packageurl-go"
-"github.com/anchore/syft/internal"
 "github.com/anchore/syft/internal/log"
+"github.com/anchore/syft/internal/mimetype"
 "github.com/anchore/syft/internal/spdxlicense"
 "github.com/anchore/syft/syft/artifact"
 "github.com/anchore/syft/syft/file"
-"github.com/anchore/syft/syft/format/common/util"
+"github.com/anchore/syft/syft/format/internal/spdxutil/helpers"
 "github.com/anchore/syft/syft/pkg"
 "github.com/anchore/syft/syft/sbom"
 "github.com/anchore/syft/syft/source"
@@ -43,7 +43,7 @@ const (
 //
 //nolint:funlen
 func ToFormatModel(s sbom.SBOM) *spdx.Document {
-name, namespace := DocumentNameAndNamespace(s.Source, s.Descriptor)
+name, namespace := helpers.DocumentNameAndNamespace(s.Source, s.Descriptor)
 packages := toPackages(s.Artifacts.Packages, s)
@@ -68,7 +68,7 @@ func ToFormatModel(s sbom.SBOM) *spdx.Document {
 RefA: spdx.DocElementID{
 ElementRefID: "DOCUMENT",
 },
-Relationship: string(DescribesRelationship),
+Relationship: string(helpers.DescribesRelationship),
 RefB: spdx.DocElementID{
 ElementRefID: describesID,
 },
@@ -161,7 +161,7 @@ func toRootRelationships(rootPackage *spdx.Package, packages []*spdx.Package) (o
 RefA: spdx.DocElementID{
 ElementRefID: rootPackage.PackageSPDXIdentifier,
 },
-Relationship: string(ContainsRelationship),
+Relationship: string(helpers.ContainsRelationship),
 RefB: spdx.DocElementID{
 ElementRefID: p.PackageSPDXIdentifier,
 },
@@ -236,22 +236,22 @@ func toRootPackage(s source.Description) *spdx.Package {
 p := &spdx.Package{
 PackageName: name,
-PackageSPDXIdentifier: spdx.ElementID(SanitizeElementID(fmt.Sprintf("DocumentRoot-%s-%s", prefix, name))),
+PackageSPDXIdentifier: spdx.ElementID(helpers.SanitizeElementID(fmt.Sprintf("DocumentRoot-%s-%s", prefix, name))),
 PackageVersion: version,
 PackageChecksums: checksums,
 PackageExternalReferences: nil,
 PrimaryPackagePurpose: purpose,
 PackageSupplier: &spdx.Supplier{
-Supplier: NOASSERTION,
+Supplier: helpers.NOASSERTION,
 },
-PackageDownloadLocation: NOASSERTION,
+PackageDownloadLocation: helpers.NOASSERTION,
 }
 if purl != nil {
 p.PackageExternalReferences = []*spdx.PackageExternalReference{
 {
-Category: string(PackageManagerReferenceCategory),
+Category: string(helpers.PackageManagerReferenceCategory),
-RefType: string(PurlExternalRefType),
+RefType: string(helpers.PurlExternalRefType),
 Locator: purl.String(),
 },
 }
@@ -294,7 +294,7 @@ func toSPDXID(identifiable artifact.Identifiable) spdx.ElementID {
 id = string(identifiable.ID())
 }
 // NOTE: the spdx library prepend SPDXRef-, so we don't do it here
-return spdx.ElementID(SanitizeElementID(id))
+return spdx.ElementID(helpers.SanitizeElementID(id))
 }
 // packages populates all Package Information from the package Collection (see https://spdx.github.io/spdx-spec/3-package-information/)
@@ -309,7 +309,7 @@ func toPackages(catalog *pkg.Collection, sbom sbom.SBOM) (results []*spdx.Packag
 // in the Comments on License field (section 7.16). With respect to NOASSERTION, a written explanation in
 // the Comments on License field (section 7.16) is preferred.
 // extract these correctly to the spdx license format
-concluded, declared := License(p)
+concluded, declared := helpers.License(p)
 // two ways to get filesAnalyzed == true:
 // 1. syft has generated a sha1 digest for the package itself - usually in the java cataloger
@@ -370,7 +370,7 @@ func toPackages(catalog *pkg.Collection, sbom sbom.SBOM) (results []*spdx.Packag
 // (i) the SPDX file creator has attempted to but cannot reach a reasonable objective determination;
 // (ii) the SPDX file creator has made no attempt to determine this field; or
 // (iii) the SPDX file creator has intentionally provided no information (no meaning should be implied by doing so).
-PackageDownloadLocation: DownloadLocation(p),
+PackageDownloadLocation: helpers.DownloadLocation(p),
 // 7.8: FilesAnalyzed
 // Cardinality: optional, one; default value is "true" if omitted
@@ -403,11 +403,11 @@ func toPackages(catalog *pkg.Collection, sbom sbom.SBOM) (results []*spdx.Packag
 // 7.11: Package Home Page
 // Cardinality: optional, one
-PackageHomePage: Homepage(p),
+PackageHomePage: helpers.Homepage(p),
 // 7.12: Source Information
 // Cardinality: optional, one
-PackageSourceInfo: SourceInfo(p),
+PackageSourceInfo: helpers.SourceInfo(p),
 // 7.13: Concluded License: SPDX License Expression, "NONE" or "NOASSERTION"
 // Cardinality: mandatory, one
@@ -449,7 +449,7 @@ func toPackages(catalog *pkg.Collection, sbom sbom.SBOM) (results []*spdx.Packag
 // 7.19: Package Detailed Description
 // Cardinality: optional, one
-PackageDescription: Description(p),
+PackageDescription: helpers.Description(p),
 // 7.20: Package Comment
 // Cardinality: optional, one
@@ -491,7 +491,7 @@ func toPackageChecksums(p pkg.Package) ([]spdx.Checksum, bool) {
 }
 case pkg.GolangBinaryBuildinfoEntry:
 // because the H1 digest is found in the Golang metadata we cannot claim that the files were analyzed
-algo, hexStr, err := util.HDigestToSHA(meta.H1Digest)
+algo, hexStr, err := helpers.HDigestToSHA(meta.H1Digest)
 if err != nil {
 log.Debugf("invalid h1digest: %s: %v", meta.H1Digest, err)
 break
@@ -506,7 +506,7 @@ func toPackageChecksums(p pkg.Package) ([]spdx.Checksum, bool) {
 }
 func toPackageOriginator(p pkg.Package) *spdx.Originator {
-kind, originator := Originator(p)
+kind, originator := helpers.Originator(p)
 if kind == "" || originator == "" {
 return nil
 }
@@ -519,10 +519,10 @@ func toPackageOriginator(p pkg.Package) *spdx.Originator {
 func toPackageSupplier(p pkg.Package) *spdx.Supplier {
 // this uses the Originator function for now until
 // a better distinction can be made for supplier
-kind, supplier := Originator(p)
+kind, supplier := helpers.Originator(p)
 if kind == "" || supplier == "" {
 return &spdx.Supplier{
-Supplier: NOASSERTION,
+Supplier: helpers.NOASSERTION,
 }
 }
 return &spdx.Supplier{
@@ -532,7 +532,7 @@ func toPackageSupplier(p pkg.Package) *spdx.Supplier {
 }
 func formatSPDXExternalRefs(p pkg.Package) (refs []*spdx.PackageExternalReference) {
-for _, ref := range ExternalRefs(p) {
+for _, ref := range helpers.ExternalRefs(p) {
 refs = append(refs, &spdx.PackageExternalReference{
 Category: string(ref.ReferenceCategory),
 RefType: string(ref.ReferenceType),
@@ -572,16 +572,16 @@ func toRelationships(relationships []artifact.Relationship) (result []*spdx.Rela
 return result
 }
-func lookupRelationship(ty artifact.RelationshipType) (bool, RelationshipType, string) {
+func lookupRelationship(ty artifact.RelationshipType) (bool, helpers.RelationshipType, string) {
 switch ty {
 case artifact.ContainsRelationship:
-return true, ContainsRelationship, ""
+return true, helpers.ContainsRelationship, ""
 case artifact.DependencyOfRelationship:
-return true, DependencyOfRelationship, ""
+return true, helpers.DependencyOfRelationship, ""
 case artifact.OwnershipByFileOverlapRelationship:
-return true, OtherRelationship, fmt.Sprintf("%s: indicates that the parent package claims ownership of a child package since the parent metadata indicates overlap with a location that a cataloger found the child package by", ty)
+return true, helpers.OtherRelationship, fmt.Sprintf("%s: indicates that the parent package claims ownership of a child package since the parent metadata indicates overlap with a location that a cataloger found the child package by", ty)
 case artifact.EvidentByRelationship:
-return true, OtherRelationship, fmt.Sprintf("%s: indicates the package's existence is evident by the given file", ty)
+return true, helpers.OtherRelationship, fmt.Sprintf("%s: indicates the package's existence is evident by the given file", ty)
 }
 return false, "", ""
 }
@@ -673,28 +673,28 @@ func toFileTypes(metadata *file.Metadata) (ty []string) {
 mimeTypePrefix := strings.Split(metadata.MIMEType, "/")[0]
 switch mimeTypePrefix {
 case "image":
-ty = append(ty, string(ImageFileType))
+ty = append(ty, string(helpers.ImageFileType))
 case "video":
-ty = append(ty, string(VideoFileType))
+ty = append(ty, string(helpers.VideoFileType))
 case "application":
-ty = append(ty, string(ApplicationFileType))
+ty = append(ty, string(helpers.ApplicationFileType))
 case "text":
-ty = append(ty, string(TextFileType))
+ty = append(ty, string(helpers.TextFileType))
 case "audio":
-ty = append(ty, string(AudioFileType))
+ty = append(ty, string(helpers.AudioFileType))
 }
-if internal.IsExecutable(metadata.MIMEType) {
+if mimetype.IsExecutable(metadata.MIMEType) {
-ty = append(ty, string(BinaryFileType))
+ty = append(ty, string(helpers.BinaryFileType))
 }
-if internal.IsArchive(metadata.MIMEType) {
+if mimetype.IsArchive(metadata.MIMEType) {
-ty = append(ty, string(ArchiveFileType))
+ty = append(ty, string(helpers.ArchiveFileType))
 }
 // TODO: add support for source, spdx, and documentation file types
 if len(ty) == 0 {
-ty = append(ty, string(OtherFileType))
+ty = append(ty, string(helpers.OtherFileType))
 }
 return ty
@@ -703,18 +703,18 @@ func toFileTypes(metadata *file.Metadata) (ty []string) {
 // other licenses are for licenses from the pkg.Package that do not have an SPDXExpression
 // field. The spdxexpression field is only filled given a validated Value field.
 func toOtherLicenses(catalog *pkg.Collection) []*spdx.OtherLicense {
-licenses := map[string]spdxLicense{}
+licenses := map[string]helpers.SPDXLicense{}
 for p := range catalog.Enumerate() {
-declaredLicenses, concludedLicenses := parseLicenses(p.Licenses.ToSlice())
+declaredLicenses, concludedLicenses := helpers.ParseLicenses(p.Licenses.ToSlice())
 for _, l := range declaredLicenses {
-if l.value != "" {
+if l.Value != "" {
-licenses[l.id] = l
+licenses[l.ID] = l
 }
 }
 for _, l := range concludedLicenses {
-if l.value != "" {
+if l.Value != "" {
-licenses[l.id] = l
+licenses[l.ID] = l
 }
 }
 }
@@ -730,8 +730,8 @@ func toOtherLicenses(catalog *pkg.Collection) []*spdx.OtherLicense {
 for _, id := range ids {
 license := licenses[id]
 result = append(result, &spdx.OtherLicense{
-LicenseIdentifier: license.id,
+LicenseIdentifier: license.ID,
-ExtractedText: license.value,
+ExtractedText: license.Value,
 })
 }
 return result

View File

@@ -14,6 +14,7 @@ import (
 "github.com/anchore/syft/syft/artifact"
 "github.com/anchore/syft/syft/file"
+"github.com/anchore/syft/syft/format/internal/spdxutil/helpers"
 "github.com/anchore/syft/syft/internal/sourcemetadata"
 "github.com/anchore/syft/syft/pkg"
 "github.com/anchore/syft/syft/sbom"
@@ -368,7 +369,7 @@ func Test_toFileTypes(t *testing.T) {
 MIMEType: "application/vnd.unknown",
 },
 expected: []string{
-string(ApplicationFileType),
+string(helpers.ApplicationFileType),
 },
 },
 {
@@ -377,8 +378,8 @@
 MIMEType: "application/zip",
 },
 expected: []string{
-string(ApplicationFileType),
+string(helpers.ApplicationFileType),
-string(ArchiveFileType),
+string(helpers.ArchiveFileType),
 },
 },
 {
@@ -387,7 +388,7 @@
 MIMEType: "audio/ogg",
 },
 expected: []string{
-string(AudioFileType),
+string(helpers.AudioFileType),
 },
 },
 {
@@ -396,7 +397,7 @@
 MIMEType: "video/3gpp",
 },
 expected: []string{
-string(VideoFileType),
+string(helpers.VideoFileType),
 },
 },
 {
@@ -405,7 +406,7 @@
 MIMEType: "text/html",
 },
 expected: []string{
-string(TextFileType),
+string(helpers.TextFileType),
 },
 },
 {
@@ -414,7 +415,7 @@
 MIMEType: "image/png",
 },
 expected: []string{
-string(ImageFileType),
+string(helpers.ImageFileType),
 },
 },
 {
@@ -423,8 +424,8 @@
 MIMEType: "application/x-sharedlib",
 },
 expected: []string{
-string(ApplicationFileType),
+string(helpers.ApplicationFileType),
-string(BinaryFileType),
+string(helpers.BinaryFileType),
 },
 },
 }
@@ -440,24 +441,24 @@ func Test_lookupRelationship(t *testing.T) {
 tests := []struct {
 input artifact.RelationshipType
 exists bool
-ty RelationshipType
+ty helpers.RelationshipType
 comment string
 }{
 {
 input: artifact.ContainsRelationship,
 exists: true,
-ty: ContainsRelationship,
+ty: helpers.ContainsRelationship,
 },
 {
 input: artifact.OwnershipByFileOverlapRelationship,
 exists: true,
-ty: OtherRelationship,
+ty: helpers.OtherRelationship,
 comment: "ownership-by-file-overlap: indicates that the parent package claims ownership of a child package since the parent metadata indicates overlap with a location that a cataloger found the child package by",
 },
 {
 input: artifact.EvidentByRelationship,
 exists: true,
-ty: OtherRelationship,
+ty: helpers.OtherRelationship,
 comment: "evident-by: indicates the package's existence is evident by the given file",
 },
 {
@ -777,3 +778,74 @@ func Test_toSPDXID(t *testing.T) {
}) })
} }
} }
func Test_otherLicenses(t *testing.T) {
pkg1 := pkg.Package{
Name: "first-pkg",
Version: "1.1",
Licenses: pkg.NewLicenseSet(
pkg.NewLicense("MIT"),
),
}
pkg2 := pkg.Package{
Name: "second-pkg",
Version: "2.2",
Licenses: pkg.NewLicenseSet(
pkg.NewLicense("non spdx license"),
),
}
bigText := `
Apache License
Version 2.0, January 2004`
pkg3 := pkg.Package{
Name: "third-pkg",
Version: "3.3",
Licenses: pkg.NewLicenseSet(
pkg.NewLicense(bigText),
),
}
tests := []struct {
name string
packages []pkg.Package
expected []*spdx.OtherLicense
}{
{
name: "no other licenses when all valid spdx expressions",
packages: []pkg.Package{pkg1},
expected: nil,
},
{
name: "other licenses includes original text",
packages: []pkg.Package{pkg2},
expected: []*spdx.OtherLicense{
{
LicenseIdentifier: "LicenseRef-non-spdx-license",
ExtractedText: "non spdx license",
},
},
},
{
name: "big licenses get hashed",
packages: []pkg.Package{pkg3},
expected: []*spdx.OtherLicense{
{
LicenseIdentifier: "LicenseRef-e9a1e42833d3e456f147052f4d312101bd171a0798893169fe596ca6b55c049e",
ExtractedText: bigText,
},
},
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
s := sbom.SBOM{
Artifacts: sbom.Artifacts{
Packages: pkg.NewCollection(test.packages...),
},
}
got := ToFormatModel(s)
require.Equal(t, test.expected, got.OtherLicenses)
})
}
}

View File

@@ -18,7 +18,7 @@ import (
 "github.com/anchore/syft/syft/artifact"
 "github.com/anchore/syft/syft/cpe"
 "github.com/anchore/syft/syft/file"
-"github.com/anchore/syft/syft/format/common/util"
+"github.com/anchore/syft/syft/format/internal/spdxutil/helpers"
 "github.com/anchore/syft/syft/license"
 "github.com/anchore/syft/syft/linux"
 "github.com/anchore/syft/syft/pkg"
@@ -227,15 +227,15 @@ func extractSourceFromNamespace(ns string) source.Description {
 parts := strings.Split(u.Path, "/")
 for _, p := range parts {
 switch p {
-case inputFile:
+case helpers.InputFile:
 return source.Description{
 Metadata: source.FileSourceMetadata{},
 }
-case inputImage:
+case helpers.InputImage:
 return source.Description{
 Metadata: source.StereoscopeImageSourceMetadata{},
 }
-case inputDirectory:
+case helpers.InputDirectory:
 return source.Description{
 Metadata: source.DirectorySourceMetadata{},
 }
@@ -322,20 +322,20 @@ func fromChecksumAlgorithm(algorithm common.ChecksumAlgorithm) string {
 func toFileMetadata(f *spdx.File) (meta file.Metadata) {
 // FIXME Syft is currently lossy due to the SPDX 2.2.1 spec not supporting arbitrary mimetypes
 for _, typ := range f.FileTypes {
-switch FileType(typ) {
+switch helpers.FileType(typ) {
-case ImageFileType:
+case helpers.ImageFileType:
 meta.MIMEType = "image/"
-case VideoFileType:
+case helpers.VideoFileType:
 meta.MIMEType = "video/"
-case ApplicationFileType:
+case helpers.ApplicationFileType:
 meta.MIMEType = "application/"
-case TextFileType:
+case helpers.TextFileType:
 meta.MIMEType = "text/"
-case AudioFileType:
+case helpers.AudioFileType:
 meta.MIMEType = "audio/"
-case BinaryFileType:
+case helpers.BinaryFileType:
-case ArchiveFileType:
+case helpers.ArchiveFileType:
-case OtherFileType:
+case helpers.OtherFileType:
 }
 }
 return meta
@@ -368,11 +368,11 @@ func collectDocRelationships(spdxIDMap map[string]any, doc *spdx.Document) (out
 var to artifact.Identifiable
 var typ artifact.RelationshipType
 if toLocationOk {
-switch RelationshipType(r.Relationship) {
+switch helpers.RelationshipType(r.Relationship) {
-case ContainsRelationship:
+case helpers.ContainsRelationship:
 typ = artifact.ContainsRelationship
 to = toLocation
-case OtherRelationship:
+case helpers.OtherRelationship:
 // Encoding uses a specifically formatted comment...
 if strings.Index(r.RelationshipComment, string(artifact.EvidentByRelationship)) == 0 {
 typ = artifact.EvidentByRelationship
@@ -380,11 +380,11 @@ func collectDocRelationships(spdxIDMap map[string]any, doc *spdx.Document) (out
 }
 }
 } else {
-switch RelationshipType(r.Relationship) {
+switch helpers.RelationshipType(r.Relationship) {
-case ContainsRelationship:
+case helpers.ContainsRelationship:
 typ = artifact.ContainsRelationship
 to = toPackage
-case OtherRelationship:
+case helpers.OtherRelationship:
 // Encoding uses a specifically formatted comment...
 if strings.Index(r.RelationshipComment, string(artifact.OwnershipByFileOverlapRelationship)) == 0 {
 typ = artifact.OwnershipByFileOverlapRelationship
@@ -518,14 +518,14 @@ func parseSPDXLicenses(p *spdx.Package) []pkg.License {
 licenses := make([]pkg.License, 0)
 // concluded
-if p.PackageLicenseConcluded != NOASSERTION && p.PackageLicenseConcluded != NONE && p.PackageLicenseConcluded != "" {
+if p.PackageLicenseConcluded != helpers.NOASSERTION && p.PackageLicenseConcluded != helpers.NONE && p.PackageLicenseConcluded != "" {
 l := pkg.NewLicense(cleanSPDXID(p.PackageLicenseConcluded))
 l.Type = license.Concluded
 licenses = append(licenses, l)
 }
 // declared
-if p.PackageLicenseDeclared != NOASSERTION && p.PackageLicenseDeclared != NONE && p.PackageLicenseDeclared != "" {
+if p.PackageLicenseDeclared != helpers.NOASSERTION && p.PackageLicenseDeclared != helpers.NONE && p.PackageLicenseDeclared != "" {
 l := pkg.NewLicense(cleanSPDXID(p.PackageLicenseDeclared))
 l.Type = license.Declared
 licenses = append(licenses, l)
@@ -603,7 +603,7 @@ func extractMetadata(p *spdx.Package, info pkgInfo) any {
 case pkg.GoModulePkg:
 var h1Digest string
 for _, value := range p.PackageChecksums {
-digest, err := util.HDigestFromSHA(fromChecksumAlgorithm(value.Algorithm), value.Value)
+digest, err := helpers.HDigestFromSHA(fromChecksumAlgorithm(value.Algorithm), value.Value)
 if err != nil {
 log.Debugf("invalid h1digest: %v %v", value, err)
 continue
@@ -620,7 +620,7 @@ func extractMetadata(p *spdx.Package, info pkgInfo) any {
 func findPURLValue(p *spdx.Package) string {
 for _, r := range p.PackageExternalReferences {
-if r.RefType == string(PurlExternalRefType) {
+if r.RefType == string(helpers.PurlExternalRefType) {
 return r.Locator
 }
 }
@@ -629,7 +629,7 @@ func findPURLValue(p *spdx.Package) string {
 func extractCPEs(p *spdx.Package) (cpes []cpe.CPE) {
 for _, r := range p.PackageExternalReferences {
-if r.RefType == string(Cpe23ExternalRefType) {
+if r.RefType == string(helpers.Cpe23ExternalRefType) {
 c, err := cpe.New(r.Locator)
 if err != nil {
 log.Warnf("unable to extract SPDX CPE=%q: %+v", r.Locator, err)

View File

@@ -8,8 +8,8 @@ import (
 "github.com/CycloneDX/cyclonedx-go"
 "github.com/anchore/syft/internal/log"
-"github.com/anchore/syft/syft/format/common/cyclonedxhelpers"
 "github.com/anchore/syft/syft/format/internal/cyclonedxutil"
+"github.com/anchore/syft/syft/format/internal/cyclonedxutil/helpers"
 "github.com/anchore/syft/syft/format/internal/stream"
 "github.com/anchore/syft/syft/sbom"
 )
@@ -45,7 +45,7 @@ func (d decoder) Decode(r io.Reader) (*sbom.SBOM, sbom.FormatID, string, error)
 return nil, id, version, fmt.Errorf("unable to decode cyclonedx json document: %w", err)
 }
-s, err := cyclonedxhelpers.ToSyftModel(doc)
+s, err := helpers.ToSyftModel(doc)
 if err != nil {
 return nil, id, version, err
 }

View File

@@ -9,8 +9,8 @@ import (
 "github.com/CycloneDX/cyclonedx-go"
 "github.com/anchore/syft/internal/log"
-"github.com/anchore/syft/syft/format/common/cyclonedxhelpers"
 "github.com/anchore/syft/syft/format/internal/cyclonedxutil"
+"github.com/anchore/syft/syft/format/internal/cyclonedxutil/helpers"
 "github.com/anchore/syft/syft/format/internal/stream"
 "github.com/anchore/syft/syft/sbom"
 )
@@ -46,7 +46,7 @@ func (d decoder) Decode(r io.Reader) (*sbom.SBOM, sbom.FormatID, string, error)
 return nil, id, version, fmt.Errorf("unable to decode cyclonedx xml document: %w", err)
 }
-s, err := cyclonedxhelpers.ToSyftModel(doc)
+s, err := helpers.ToSyftModel(doc)
 if err != nil {
 return nil, id, version, err
 }

View File

@ -1,4 +1,4 @@
package cyclonedxhelpers package helpers
import ( import (
"fmt" "fmt"

View File

@ -1,4 +1,4 @@
package cyclonedxhelpers package helpers
import ( import (
"testing" "testing"

View File

@@ -1,4 +1,4 @@
-package cyclonedxhelpers
+package helpers
 import (
 "reflect"
@@ -7,13 +7,12 @@ import (
 "github.com/anchore/packageurl-go"
 "github.com/anchore/syft/syft/file"
-"github.com/anchore/syft/syft/format/common"
 "github.com/anchore/syft/syft/internal/packagemetadata"
 "github.com/anchore/syft/syft/pkg"
 )
-func encodeComponent(p pkg.Package) cyclonedx.Component {
+func EncodeComponent(p pkg.Package) cyclonedx.Component {
-props := encodeProperties(p, "syft:package")
+props := EncodeProperties(p, "syft:package")
 if p.Metadata != nil {
 // encode the metadataType as a property, something that doesn't exist on the core model
@@ -26,10 +25,10 @@ func encodeComponent(p pkg.Package) cyclonedx.Component {
 props = append(props, encodeCPEs(p)...)
 locations := p.Locations.ToSlice()
 if len(locations) > 0 {
-props = append(props, encodeProperties(locations, "syft:location")...)
+props = append(props, EncodeProperties(locations, "syft:location")...)
 }
 if hasMetadata(p) {
-props = append(props, encodeProperties(p.Metadata, "syft:metadata")...)
+props = append(props, EncodeProperties(p.Metadata, "syft:metadata")...)
 }
 var properties *[]cyclonedx.Property
@@ -55,11 +54,11 @@ func encodeComponent(p pkg.Package) cyclonedx.Component {
 Description: encodeDescription(p),
 ExternalReferences: encodeExternalReferences(p),
 Properties: properties,
-BOMRef: deriveBomRef(p),
+BOMRef: DeriveBomRef(p),
 }
 }
-func deriveBomRef(p pkg.Package) string {
+func DeriveBomRef(p pkg.Package) string {
 // try and parse the PURL if possible and append syft id to it, to make
 // the purl unique in the BOM.
 // TODO: In the future we may want to dedupe by PURL and combine components with
@@ -93,7 +92,7 @@ func decodeComponent(c *cyclonedx.Component) *pkg.Package {
 PURL: c.PackageURL,
 }
-common.DecodeInto(p, values, "syft:package", CycloneDXFields)
+DecodeInto(p, values, "syft:package", CycloneDXFields)
 metadataType := values["syft:package:metadataType"]
@@ -111,7 +110,7 @@ func decodeComponent(c *cyclonedx.Component) *pkg.Package {
 }
 func decodeLocations(vals map[string]string) file.LocationSet {
-v := common.Decode(reflect.TypeOf([]file.Location{}), vals, "syft:location", CycloneDXFields)
+v := Decode(reflect.TypeOf([]file.Location{}), vals, "syft:location", CycloneDXFields)
 out, ok := v.([]file.Location)
 if !ok {
 out = nil
@@ -126,7 +125,7 @@ func decodePackageMetadata(vals map[string]string, c *cyclonedx.Component, typeN
 return nil
 }
 metaPtrTyp := reflect.PtrTo(metadataType)
-metaPtr := common.Decode(metaPtrTyp, vals, "syft:metadata", CycloneDXFields)
+metaPtr := Decode(metaPtrTyp, vals, "syft:metadata", CycloneDXFields)
 // Map all explicit metadata properties
 decodeAuthor(c.Author, metaPtr)
@@ -136,7 +135,7 @@ func decodePackageMetadata(vals map[string]string, c *cyclonedx.Component, typeN
 decodeExternalReferences(c, metaPtr)
 // return the actual interface{} -> struct ... not interface{} -> *struct
-return common.PtrToStruct(metaPtr)
+return PtrToStruct(metaPtr)
 }
 return nil

View File

@@ -1,4 +1,4 @@
-package cyclonedxhelpers
+package helpers
 import (
 "fmt"
@@ -152,7 +152,7 @@ func Test_encodeComponentProperties(t *testing.T) {
 }
 for _, test := range tests {
 t.Run(test.name, func(t *testing.T) {
-c := encodeComponent(test.input)
+c := EncodeComponent(test.input)
 if test.expected == nil {
 if c.Properties != nil {
 t.Fatalf("expected no properties, got: %+v", *c.Properties)
@@ -212,7 +212,7 @@ func Test_encodeCompomentType(t *testing.T) {
 for _, tt := range tests {
 t.Run(tt.name, func(t *testing.T) {
 tt.pkg.ID()
-p := encodeComponent(tt.pkg)
+p := EncodeComponent(tt.pkg)
 assert.Equal(t, tt.want, p)
 })
 }
@@ -264,7 +264,7 @@ func Test_deriveBomRef(t *testing.T) {
 for _, tt := range tests {
 t.Run(tt.name, func(t *testing.T) {
 tt.pkg.ID()
-assert.Equal(t, tt.want, deriveBomRef(tt.pkg))
+assert.Equal(t, tt.want, DeriveBomRef(tt.pkg))
 })
 }
 }

View File

@ -1,4 +1,4 @@
package cyclonedxhelpers package helpers
import ( import (
"github.com/CycloneDX/cyclonedx-go" "github.com/CycloneDX/cyclonedx-go"

View File

@ -1,4 +1,4 @@
package cyclonedxhelpers package helpers
import ( import (
"testing" "testing"

View File

@@ -1,4 +1,4 @@
-package cyclonedxhelpers
+package helpers
 import (
 "fmt"
@@ -7,7 +7,6 @@ import (
 "github.com/anchore/packageurl-go"
 "github.com/anchore/syft/syft/artifact"
-"github.com/anchore/syft/syft/format/common"
 "github.com/anchore/syft/syft/linux"
 "github.com/anchore/syft/syft/pkg"
 "github.com/anchore/syft/syft/sbom"
@@ -151,7 +150,7 @@ func linuxReleaseFromOSComponent(component *cyclonedx.Component) *linux.Release
 for _, p := range *component.Properties {
 values[p.Name] = p.Value
 }
-common.DecodeInto(&rel, values, "syft:distro", CycloneDXFields)
+DecodeInto(&rel, values, "syft:distro", CycloneDXFields)
 }
 return rel
@@ -181,7 +180,7 @@ func collectRelationships(bom *cyclonedx.BOM, s *sbom.SBOM, idMap map[string]int
 if !toExists {
 continue
 }
-to, ok := common.PtrToStruct(toPtr).(artifact.Identifiable)
+to, ok := PtrToStruct(toPtr).(artifact.Identifiable)
 if !ok {
 continue
 }
@@ -191,7 +190,7 @@ func collectRelationships(bom *cyclonedx.BOM, s *sbom.SBOM, idMap map[string]int
 if !fromExists {
 continue
 }
-from, ok := common.PtrToStruct(fromPtr).(artifact.Identifiable)
+from, ok := PtrToStruct(fromPtr).(artifact.Identifiable)
 if !ok {
 continue
 }

View File

@ -1,4 +1,4 @@
package cyclonedxhelpers package helpers
import ( import (
"fmt" "fmt"

View File

@ -1,4 +1,4 @@
package cyclonedxhelpers package helpers
import "github.com/anchore/syft/syft/pkg" import "github.com/anchore/syft/syft/pkg"

View File

@ -1,4 +1,4 @@
package cyclonedxhelpers package helpers
import ( import (
"testing" "testing"

View File

@ -1,4 +1,4 @@
package cyclonedxhelpers package helpers
import ( import (
"fmt" "fmt"

View File

@ -1,4 +1,4 @@
package cyclonedxhelpers package helpers
import ( import (
"testing" "testing"

View File

@ -1,4 +1,4 @@
package cyclonedxhelpers package helpers
import "github.com/anchore/syft/syft/pkg" import "github.com/anchore/syft/syft/pkg"

View File

@ -1,4 +1,4 @@
package cyclonedxhelpers package helpers
import ( import (
"testing" "testing"

View File

@ -1,4 +1,4 @@
package cyclonedxhelpers package helpers
import ( import (
"fmt" "fmt"

View File

@ -1,4 +1,4 @@
package cyclonedxhelpers package helpers
import ( import (
"testing" "testing"

View File

@@ -1,19 +1,17 @@
-package cyclonedxhelpers
+package helpers
 import (
 "strings"
 "github.com/CycloneDX/cyclonedx-go"
-"github.com/anchore/syft/syft/format/common"
 )
 var (
-CycloneDXFields = common.RequiredTag("cyclonedx")
+CycloneDXFields = RequiredTag("cyclonedx")
 )
-func encodeProperties(obj interface{}, prefix string) (out []cyclonedx.Property) {
+func EncodeProperties(obj interface{}, prefix string) (out []cyclonedx.Property) {
-for _, p := range common.Sorted(common.Encode(obj, prefix, CycloneDXFields)) {
+for _, p := range Sorted(Encode(obj, prefix, CycloneDXFields)) {
 out = append(out, cyclonedx.Property{
 Name: p.Name,
 Value: p.Value,

View File

@ -1,4 +1,4 @@
package common package helpers
import ( import (
"fmt" "fmt"

View File

@ -1,4 +1,4 @@
package common package helpers
import ( import (
"reflect" "reflect"

View File

@ -1,4 +1,4 @@
package cyclonedxhelpers package helpers
import ( import (
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"

View File

@ -1,4 +1,4 @@
package cyclonedxhelpers package helpers
import ( import (
"testing" "testing"

View File

@ -1,4 +1,4 @@
package spdxhelpers package helpers
import "github.com/anchore/syft/syft/pkg" import "github.com/anchore/syft/syft/pkg"

View File

@ -1,4 +1,4 @@
package spdxhelpers package helpers
import ( import (
"testing" "testing"

View File

@ -1,4 +1,4 @@
package spdxhelpers package helpers
import ( import (
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"

View File

@ -1,4 +1,4 @@
package spdxhelpers package helpers
import ( import (
"fmt" "fmt"

View File

@@ -1,4 +1,4 @@
-package spdxhelpers
+package helpers
 import (
 "fmt"
@@ -13,9 +13,9 @@ import (
 )
 const (
-inputImage = "image"
+InputImage = "image"
-inputDirectory = "dir"
+InputDirectory = "dir"
-inputFile = "file"
+InputFile = "file"
 )
 func DocumentNameAndNamespace(src source.Description, desc sbom.Descriptor) (string, string) {
@@ -28,11 +28,11 @@ func DocumentNamespace(name string, src source.Description, desc sbom.Descriptor
 input := "unknown-source-type"
 switch src.Metadata.(type) {
 case source.StereoscopeImageSourceMetadata:
-input = inputImage
+input = InputImage
 case source.DirectorySourceMetadata:
-input = inputDirectory
+input = InputDirectory
 case source.FileSourceMetadata:
-input = inputFile
+input = InputFile
 }
 uniqueID := uuid.Must(uuid.NewRandom())

View File

@ -1,4 +1,4 @@
package spdxhelpers package helpers
import ( import (
"fmt" "fmt"

View File

@ -1,4 +1,4 @@
package spdxhelpers package helpers
import "github.com/anchore/syft/syft/pkg" import "github.com/anchore/syft/syft/pkg"

View File

@ -1,4 +1,4 @@
package spdxhelpers package helpers
import ( import (
"testing" "testing"

View File

@ -1,4 +1,4 @@
package spdxhelpers package helpers
type ReferenceCategory string type ReferenceCategory string

View File

@ -1,4 +1,4 @@
package spdxhelpers package helpers
import ( import (
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"

View File

@ -1,4 +1,4 @@
package spdxhelpers package helpers
import ( import (
"testing" "testing"

View File

@ -1,4 +1,4 @@
package spdxhelpers package helpers
type FileType string type FileType string

View File

@ -1,4 +1,4 @@
package util package helpers
import ( import (
"encoding/base64" "encoding/base64"

View File

@ -1,4 +1,4 @@
package util package helpers
import ( import (
"fmt" "fmt"

View File

@ -1,4 +1,4 @@
package spdxhelpers package helpers
import "github.com/anchore/syft/syft/pkg" import "github.com/anchore/syft/syft/pkg"

View File

@ -1,4 +1,4 @@
package spdxhelpers package helpers
import ( import (
"testing" "testing"

View File

@@ -1,4 +1,4 @@
-package spdxhelpers
+package helpers
 import (
 "crypto/sha256"
@@ -27,12 +27,12 @@ func License(p pkg.Package) (concluded, declared string) {
 // take all licenses and assume an AND expression;
 // for information about license expressions see:
 // https://spdx.github.io/spdx-spec/v2.3/SPDX-license-expressions/
-pc, pd := parseLicenses(p.Licenses.ToSlice())
+pc, pd := ParseLicenses(p.Licenses.ToSlice())
 return joinLicenses(pc), joinLicenses(pd)
 }
-func joinLicenses(licenses []spdxLicense) string {
+func joinLicenses(licenses []SPDXLicense) string {
 if len(licenses) == 0 {
 return NOASSERTION
 }
@@ -40,7 +40,7 @@ func joinLicenses(licenses []spdxLicense) string {
 var newLicenses []string
 for _, l := range licenses {
-v := l.id
+v := l.ID
 // check if license does not start or end with parens
 if !strings.HasPrefix(v, "(") && !strings.HasSuffix(v, ")") {
 // if license contains AND, OR, or WITH, then wrap in parens
@@ -57,31 +57,31 @@ func joinLicenses(licenses []spdxLicense) string {
 return strings.Join(newLicenses, " AND ")
 }
-type spdxLicense struct {
+type SPDXLicense struct {
-id string
+ID string
-value string
+Value string
 }
-func parseLicenses(raw []pkg.License) (concluded, declared []spdxLicense) {
+func ParseLicenses(raw []pkg.License) (concluded, declared []SPDXLicense) {
 for _, l := range raw {
 if l.Value == "" {
 continue
 }
-candidate := spdxLicense{}
+candidate := SPDXLicense{}
 if l.SPDXExpression != "" {
-candidate.id = l.SPDXExpression
+candidate.ID = l.SPDXExpression
 } else {
 // we did not find a valid SPDX license ID so treat as separate license
 if len(l.Value) <= 64 {
 // if the license text is less than the size of the hash,
 // just use it directly so the id is more readable
-candidate.id = spdxlicense.LicenseRefPrefix + SanitizeElementID(l.Value)
+candidate.ID = spdxlicense.LicenseRefPrefix + SanitizeElementID(l.Value)
 } else {
 hash := sha256.Sum256([]byte(l.Value))
-candidate.id = fmt.Sprintf("%s%x", spdxlicense.LicenseRefPrefix, hash)
+candidate.ID = fmt.Sprintf("%s%x", spdxlicense.LicenseRefPrefix, hash)
 }
-candidate.value = l.Value
+candidate.Value = l.Value
 }
 switch l.Type {

View File

@@ -1,16 +1,13 @@
-package spdxhelpers
+package helpers
 import (
 "strings"
 "testing"
-"github.com/spdx/tools-golang/spdx"
 "github.com/stretchr/testify/assert"
-"github.com/stretchr/testify/require"
 "github.com/anchore/syft/internal/spdxlicense"
 "github.com/anchore/syft/syft/pkg"
-"github.com/anchore/syft/syft/sbom"
 )
 func Test_License(t *testing.T) {
@ -108,77 +105,6 @@ func Test_License(t *testing.T) {
} }
} }
func Test_otherLicenses(t *testing.T) {
pkg1 := pkg.Package{
Name: "first-pkg",
Version: "1.1",
Licenses: pkg.NewLicenseSet(
pkg.NewLicense("MIT"),
),
}
pkg2 := pkg.Package{
Name: "second-pkg",
Version: "2.2",
Licenses: pkg.NewLicenseSet(
pkg.NewLicense("non spdx license"),
),
}
bigText := `
Apache License
Version 2.0, January 2004`
pkg3 := pkg.Package{
Name: "third-pkg",
Version: "3.3",
Licenses: pkg.NewLicenseSet(
pkg.NewLicense(bigText),
),
}
tests := []struct {
name string
packages []pkg.Package
expected []*spdx.OtherLicense
}{
{
name: "no other licenses when all valid spdx expressions",
packages: []pkg.Package{pkg1},
expected: nil,
},
{
name: "other licenses includes original text",
packages: []pkg.Package{pkg2},
expected: []*spdx.OtherLicense{
{
LicenseIdentifier: "LicenseRef-non-spdx-license",
ExtractedText: "non spdx license",
},
},
},
{
name: "big licenses get hashed",
packages: []pkg.Package{pkg3},
expected: []*spdx.OtherLicense{
{
LicenseIdentifier: "LicenseRef-e9a1e42833d3e456f147052f4d312101bd171a0798893169fe596ca6b55c049e",
ExtractedText: bigText,
},
},
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
s := sbom.SBOM{
Artifacts: sbom.Artifacts{
Packages: pkg.NewCollection(test.packages...),
},
}
got := ToFormatModel(s)
require.Equal(t, test.expected, got.OtherLicenses)
})
}
}
 func Test_joinLicenses(t *testing.T) {
 tests := []struct {
 name string
@@ -203,11 +129,11 @@ func Test_joinLicenses(t *testing.T) {
 }
 }
-func toSpdxLicenses(ids []string) (licenses []spdxLicense) {
+func toSpdxLicenses(ids []string) (licenses []SPDXLicense) {
 for _, l := range ids {
-license := spdxLicense{id: l}
+license := SPDXLicense{ID: l}
 if strings.HasPrefix(l, spdxlicense.LicenseRefPrefix) {
-license.value = l
+license.Value = l
 }
 licenses = append(licenses, license)
 }

View File

@ -1,4 +1,4 @@
package spdxhelpers package helpers
import ( import (
"strings" "strings"

View File

@ -1,4 +1,4 @@
package spdxhelpers package helpers
import ( import (
"testing" "testing"

View File

@ -1,4 +1,4 @@
package spdxhelpers package helpers
import ( import (
"testing" "testing"

View File

@ -1,4 +1,4 @@
package spdxhelpers package helpers
import ( import (
"fmt" "fmt"

View File

@ -1,4 +1,4 @@
package spdxhelpers package helpers
// source: https://spdx.github.io/spdx-spec/7-relationships-between-SPDX-elements/ // source: https://spdx.github.io/spdx-spec/7-relationships-between-SPDX-elements/
type RelationshipType string type RelationshipType string

View File

@ -1,4 +1,4 @@
package spdxhelpers package helpers
import ( import (
"strings" "strings"

View File

@ -1,4 +1,4 @@
package spdxhelpers package helpers
import ( import (
"testing" "testing"

View File

@ -1,4 +1,4 @@
package spdxhelpers package helpers
import ( import (
"regexp" "regexp"

View File

@ -1,4 +1,4 @@
package spdxhelpers package helpers
import ( import (
"testing" "testing"

View File

@@ -10,6 +10,7 @@ import (
 "strings"
 "github.com/anchore/syft/internal"
+"github.com/anchore/syft/internal/mimetype"
 "github.com/anchore/syft/syft/artifact"
 "github.com/anchore/syft/syft/cpe"
 "github.com/anchore/syft/syft/file"
@@ -42,7 +43,7 @@ func NewGoModuleBinaryCataloger(opts CatalogerConfig) pkg.Cataloger {
 }
 return &progressingCataloger{
 cataloger: generic.NewCataloger(binaryCatalogerName).
-WithParserByMimeTypes(c.parseGoBinary, internal.ExecutableMIMETypeSet.List()...),
+WithParserByMimeTypes(c.parseGoBinary, mimetype.ExecutableMIMETypeSet.List()...),
 }
 }

View File

@@ -16,6 +16,7 @@ import (
 "github.com/anchore/syft/internal"
 "github.com/anchore/syft/internal/log"
+"github.com/anchore/syft/internal/mimetype"
 "github.com/anchore/syft/syft/artifact"
 "github.com/anchore/syft/syft/cpe"
 "github.com/anchore/syft/syft/file"
@@ -573,7 +574,7 @@ func fetchPkgs(reader unionreader.UnionReader, filename string) []pkg.Package {
 // Catalog attempts to find any native image executables reachable from a resolver.
 func (c *nativeImageCataloger) Catalog(_ context.Context, resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) {
 var pkgs []pkg.Package
-fileMatches, err := resolver.FilesByMIMEType(internal.ExecutableMIMETypeSet.List()...)
+fileMatches, err := resolver.FilesByMIMEType(mimetype.ExecutableMIMETypeSet.List()...)
 if err != nil {
 return pkgs, nil, fmt.Errorf("failed to find binaries by mime types: %w", err)
 }

View File

@@ -4,7 +4,7 @@ Package rust provides a concrete Cataloger implementation relating to packages w
 package rust
 import (
-"github.com/anchore/syft/internal"
+"github.com/anchore/syft/internal/mimetype"
 "github.com/anchore/syft/syft/pkg"
 "github.com/anchore/syft/syft/pkg/cataloger/generic"
 )
@@ -19,5 +19,5 @@ func NewCargoLockCataloger() pkg.Cataloger {
 // in binaries produced with https://github.com/Shnatsel/rust-audit
 func NewAuditBinaryCataloger() pkg.Cataloger {
 return generic.NewCataloger("cargo-auditable-binary-cataloger").
-WithParserByMimeTypes(parseAuditBinary, internal.ExecutableMIMETypeSet.List()...)
+WithParserByMimeTypes(parseAuditBinary, mimetype.ExecutableMIMETypeSet.List()...)
 }