From ab74caa87f67d4bf58238c566c1fdca0e8efcfa2 Mon Sep 17 00:00:00 2001 From: Teppei Fukuda Date: Mon, 18 Mar 2024 12:52:11 +0400 Subject: [PATCH 01/57] refactor(sbom): use intermediate representation for SPDX (#6310) Signed-off-by: knqyf263 Co-authored-by: DmitriyLewen --- integration/testdata/conda-spdx.json.golden | 39 +- ...fluentd-multiple-lockfiles.cdx.json.golden | 24 +- pkg/fanal/analyzer/sbom/sbom_test.go | 24 +- pkg/fanal/applier/docker.go | 9 +- pkg/fanal/types/const.go | 8 + pkg/k8s/scanner/scanner.go | 6 +- pkg/k8s/scanner/scanner_test.go | 2 +- pkg/report/spdx/spdx.go | 2 +- pkg/sbom/core/bom.go | 55 +- pkg/sbom/cyclonedx/marshal.go | 21 +- pkg/sbom/cyclonedx/marshal_test.go | 4 +- pkg/sbom/cyclonedx/unmarshal.go | 12 +- pkg/sbom/io/decode.go | 62 +- pkg/sbom/io/encode.go | 106 ++-- pkg/sbom/io/encode_test.go | 5 +- pkg/sbom/sbom.go | 13 +- pkg/sbom/spdx/marshal.go | 568 +++++++++--------- pkg/sbom/spdx/marshal_test.go | 510 +++++++++------- ...lid-source-info.json => invalid-purl.json} | 4 +- pkg/sbom/spdx/unmarshal.go | 388 +++++------- pkg/sbom/spdx/unmarshal_test.go | 54 +- 21 files changed, 1041 insertions(+), 875 deletions(-) rename pkg/sbom/spdx/testdata/sad/{invalid-source-info.json => invalid-purl.json} (92%) diff --git a/integration/testdata/conda-spdx.json.golden b/integration/testdata/conda-spdx.json.golden index be1146b285c4..db81eb8abd13 100644 --- a/integration/testdata/conda-spdx.json.golden +++ b/integration/testdata/conda-spdx.json.golden @@ -3,7 +3,7 @@ "dataLicense": "CC0-1.0", "SPDXID": "SPDXRef-DOCUMENT", "name": "testdata/fixtures/repo/conda", - "documentNamespace": "http://aquasecurity.github.io/trivy/filesystem/testdata/fixtures/repo/conda-3ff14136-e09f-4df9-80ea-000000000001", + "documentNamespace": "http://aquasecurity.github.io/trivy/filesystem/testdata/fixtures/repo/conda-3ff14136-e09f-4df9-80ea-000000000004", "creationInfo": { "creators": [ "Organization: aquasecurity", @@ -12,17 +12,9 @@ "created": "2021-08-25T12:20:30Z" 
}, "packages": [ - { - "name": "conda-pkg", - "SPDXID": "SPDXRef-Application-ee5ef1aa4ac89125", - "downloadLocation": "NONE", - "filesAnalyzed": false, - "sourceInfo": "Conda", - "primaryPackagePurpose": "APPLICATION" - }, { "name": "openssl", - "SPDXID": "SPDXRef-Package-20b95c21bfbf9fc4", + "SPDXID": "SPDXRef-Package-b8061a5279413d55", "versionInfo": "1.1.1q", "supplier": "NOASSERTION", "downloadLocation": "NONE", @@ -39,11 +31,14 @@ "referenceLocator": "pkg:conda/openssl@1.1.1q" } ], + "attributionTexts": [ + "PkgType: conda-pkg" + ], "primaryPackagePurpose": "LIBRARY" }, { "name": "pip", - "SPDXID": "SPDXRef-Package-11a429ec3bd01d80", + "SPDXID": "SPDXRef-Package-84198b3828050c11", "versionInfo": "22.2.2", "supplier": "NOASSERTION", "downloadLocation": "NONE", @@ -60,6 +55,9 @@ "referenceLocator": "pkg:conda/pip@22.2.2" } ], + "attributionTexts": [ + "PkgType: conda-pkg" + ], "primaryPackagePurpose": "LIBRARY" }, { @@ -105,27 +103,22 @@ }, { "spdxElementId": "SPDXRef-Filesystem-2e2426fd0f2580ef", - "relatedSpdxElement": "SPDXRef-Application-ee5ef1aa4ac89125", + "relatedSpdxElement": "SPDXRef-Package-84198b3828050c11", "relationshipType": "CONTAINS" }, { - "spdxElementId": "SPDXRef-Application-ee5ef1aa4ac89125", - "relatedSpdxElement": "SPDXRef-Package-20b95c21bfbf9fc4", - "relationshipType": "CONTAINS" - }, - { - "spdxElementId": "SPDXRef-Package-20b95c21bfbf9fc4", - "relatedSpdxElement": "SPDXRef-File-600e5e0110a84891", + "spdxElementId": "SPDXRef-Filesystem-2e2426fd0f2580ef", + "relatedSpdxElement": "SPDXRef-Package-b8061a5279413d55", "relationshipType": "CONTAINS" }, { - "spdxElementId": "SPDXRef-Application-ee5ef1aa4ac89125", - "relatedSpdxElement": "SPDXRef-Package-11a429ec3bd01d80", + "spdxElementId": "SPDXRef-Package-84198b3828050c11", + "relatedSpdxElement": "SPDXRef-File-7eb62e2a3edddc0a", "relationshipType": "CONTAINS" }, { - "spdxElementId": "SPDXRef-Package-11a429ec3bd01d80", - "relatedSpdxElement": "SPDXRef-File-7eb62e2a3edddc0a", + 
"spdxElementId": "SPDXRef-Package-b8061a5279413d55", + "relatedSpdxElement": "SPDXRef-File-600e5e0110a84891", "relationshipType": "CONTAINS" } ] diff --git a/integration/testdata/fluentd-multiple-lockfiles.cdx.json.golden b/integration/testdata/fluentd-multiple-lockfiles.cdx.json.golden index 40fdceb532c5..934bda200639 100644 --- a/integration/testdata/fluentd-multiple-lockfiles.cdx.json.golden +++ b/integration/testdata/fluentd-multiple-lockfiles.cdx.json.golden @@ -286,7 +286,7 @@ "bom-ref": "pkg:deb/debian/bsdutils@2.33.1-0.1?arch=amd64&distro=debian-10.2&epoch=1", "type": "library", "name": "bsdutils", - "version": "2.33.1-0.1", + "version": "1:2.33.1-0.1", "licenses": [ { "license": { @@ -628,7 +628,7 @@ "bom-ref": "pkg:deb/debian/diffutils@3.7-3?arch=amd64&distro=debian-10.2&epoch=1", "type": "library", "name": "diffutils", - "version": "3.7-3", + "version": "1:3.7-3", "licenses": [ { "license": { @@ -1338,7 +1338,7 @@ "bom-ref": "pkg:deb/debian/libattr1@2.4.48-4?arch=amd64&distro=debian-10.2&epoch=1", "type": "library", "name": "libattr1", - "version": "2.4.48-4", + "version": "1:2.4.48-4", "licenses": [ { "license": { @@ -1396,7 +1396,7 @@ "bom-ref": "pkg:deb/debian/libaudit-common@2.8.4-3?arch=all&distro=debian-10.2&epoch=1", "type": "library", "name": "libaudit-common", - "version": "2.8.4-3", + "version": "1:2.8.4-3", "licenses": [ { "license": { @@ -1454,7 +1454,7 @@ "bom-ref": "pkg:deb/debian/libaudit1@2.8.4-3?arch=amd64&distro=debian-10.2&epoch=1", "type": "library", "name": "libaudit1", - "version": "2.8.4-3", + "version": "1:2.8.4-3", "licenses": [ { "license": { @@ -2091,7 +2091,7 @@ "bom-ref": "pkg:deb/debian/libgcc1@8.3.0-6?arch=amd64&distro=debian-10.2&epoch=1", "type": "library", "name": "libgcc1", - "version": "8.3.0-6", + "version": "1:8.3.0-6", "purl": "pkg:deb/debian/libgcc1@8.3.0-6?arch=amd64&distro=debian-10.2&epoch=1", "properties": [ { @@ -2285,7 +2285,7 @@ "bom-ref": 
"pkg:deb/debian/libgmp10@6.1.2%2Bdfsg-4?arch=amd64&distro=debian-10.2&epoch=2", "type": "library", "name": "libgmp10", - "version": "6.1.2+dfsg-4", + "version": "2:6.1.2+dfsg-4", "licenses": [ { "license": { @@ -3286,7 +3286,7 @@ "bom-ref": "pkg:deb/debian/libpcre3@8.39-12?arch=amd64&distro=debian-10.2&epoch=2", "type": "library", "name": "libpcre3", - "version": "8.39-12", + "version": "2:8.39-12", "purl": "pkg:deb/debian/libpcre3@8.39-12?arch=amd64&distro=debian-10.2&epoch=2", "properties": [ { @@ -4450,7 +4450,7 @@ "bom-ref": "pkg:deb/debian/login@4.5-1.1?arch=amd64&distro=debian-10.2&epoch=1", "type": "library", "name": "login", - "version": "4.5-1.1", + "version": "1:4.5-1.1", "licenses": [ { "license": { @@ -4742,7 +4742,7 @@ "bom-ref": "pkg:deb/debian/passwd@4.5-1.1?arch=amd64&distro=debian-10.2&epoch=1", "type": "library", "name": "passwd", - "version": "4.5-1.1", + "version": "1:4.5-1.1", "licenses": [ { "license": { @@ -5338,7 +5338,7 @@ "bom-ref": "pkg:deb/debian/ruby@2.5.1?arch=amd64&distro=debian-10.2&epoch=1", "type": "library", "name": "ruby", - "version": "2.5.1", + "version": "1:2.5.1", "licenses": [ { "license": { @@ -5690,7 +5690,7 @@ "bom-ref": "pkg:deb/debian/zlib1g@1.2.11.dfsg-1?arch=amd64&distro=debian-10.2&epoch=1", "type": "library", "name": "zlib1g", - "version": "1.2.11.dfsg-1", + "version": "1:1.2.11.dfsg-1", "licenses": [ { "license": { diff --git a/pkg/fanal/analyzer/sbom/sbom_test.go b/pkg/fanal/analyzer/sbom/sbom_test.go index c6f5b4b33701..3bcb619d402b 100644 --- a/pkg/fanal/analyzer/sbom/sbom_test.go +++ b/pkg/fanal/analyzer/sbom/sbom_test.go @@ -31,6 +31,7 @@ func Test_sbomAnalyzer_Analyze(t *testing.T) { Type: types.Jar, Libraries: types.Packages{ { + ID: "co.elastic.apm:apm-agent:1.36.0", Name: "co.elastic.apm:apm-agent", Version: "1.36.0", FilePath: "opt/bitnami/elasticsearch", @@ -44,6 +45,7 @@ func Test_sbomAnalyzer_Analyze(t *testing.T) { }, }, { + ID: "co.elastic.apm:apm-agent-cached-lookup-key:1.36.0", Name: 
"co.elastic.apm:apm-agent-cached-lookup-key", Version: "1.36.0", FilePath: "opt/bitnami/elasticsearch", @@ -57,6 +59,7 @@ func Test_sbomAnalyzer_Analyze(t *testing.T) { }, }, { + ID: "co.elastic.apm:apm-agent-common:1.36.0", Name: "co.elastic.apm:apm-agent-common", Version: "1.36.0", FilePath: "opt/bitnami/elasticsearch", @@ -70,6 +73,7 @@ func Test_sbomAnalyzer_Analyze(t *testing.T) { }, }, { + ID: "co.elastic.apm:apm-agent-core:1.36.0", Name: "co.elastic.apm:apm-agent-core", Version: "1.36.0", FilePath: "opt/bitnami/elasticsearch", @@ -89,7 +93,8 @@ func Test_sbomAnalyzer_Analyze(t *testing.T) { FilePath: "opt/bitnami/elasticsearch", Libraries: types.Packages{ { - Name: "elasticsearch", + ID: "Elasticsearch@8.9.1", + Name: "Elasticsearch", Version: "8.9.1", Arch: "arm64", Licenses: []string{"Elastic-2.0"}, @@ -169,7 +174,8 @@ func Test_sbomAnalyzer_Analyze(t *testing.T) { FilePath: "opt/bitnami/postgresql", Libraries: types.Packages{ { - Name: "gdal", + ID: "GDAL@3.7.1", + Name: "GDAL", Version: "3.7.1", Licenses: []string{"MIT"}, Identifier: types.PkgIdentifier{ @@ -181,7 +187,8 @@ func Test_sbomAnalyzer_Analyze(t *testing.T) { }, }, { - Name: "geos", + ID: "GEOS@3.8.3", + Name: "GEOS", Version: "3.8.3", Licenses: []string{"LGPL-2.1-only"}, Identifier: types.PkgIdentifier{ @@ -193,7 +200,8 @@ func Test_sbomAnalyzer_Analyze(t *testing.T) { }, }, { - Name: "postgresql", + ID: "PostgreSQL@15.3.0", + Name: "PostgreSQL", Version: "15.3.0", Licenses: []string{"PostgreSQL"}, Identifier: types.PkgIdentifier{ @@ -203,9 +211,15 @@ func Test_sbomAnalyzer_Analyze(t *testing.T) { Version: "15.3.0", }, }, + DependsOn: []string{ + "GEOS@3.8.3", + "Proj@6.3.2", + "GDAL@3.7.1", + }, }, { - Name: "proj", + ID: "Proj@6.3.2", + Name: "Proj", Version: "6.3.2", Licenses: []string{"MIT"}, Identifier: types.PkgIdentifier{ diff --git a/pkg/fanal/applier/docker.go b/pkg/fanal/applier/docker.go index 730737e8a370..abcc1ce51958 100644 --- a/pkg/fanal/applier/docker.go +++ 
b/pkg/fanal/applier/docker.go @@ -263,12 +263,9 @@ func newPURL(pkgType ftypes.TargetType, metadata types.Metadata, pkg ftypes.Pack func aggregate(detail *ftypes.ArtifactDetail) { var apps []ftypes.Application - aggregatedApps := map[ftypes.LangType]*ftypes.Application{ - ftypes.PythonPkg: {Type: ftypes.PythonPkg}, - ftypes.CondaPkg: {Type: ftypes.CondaPkg}, - ftypes.GemSpec: {Type: ftypes.GemSpec}, - ftypes.NodePkg: {Type: ftypes.NodePkg}, - ftypes.Jar: {Type: ftypes.Jar}, + aggregatedApps := make(map[ftypes.LangType]*ftypes.Application) + for _, t := range ftypes.AggregatingTypes { + aggregatedApps[t] = &ftypes.Application{Type: t} } for _, app := range detail.Applications { diff --git a/pkg/fanal/types/const.go b/pkg/fanal/types/const.go index 115850f43978..b46b36a8d425 100644 --- a/pkg/fanal/types/const.go +++ b/pkg/fanal/types/const.go @@ -81,6 +81,14 @@ const ( OCP LangType = "ocp" // Red Hat OpenShift Container Platform ) +var AggregatingTypes = []LangType{ + PythonPkg, + CondaPkg, + GemSpec, + NodePkg, + Jar, +} + // Config files const ( JSON ConfigType = "json" diff --git a/pkg/k8s/scanner/scanner.go b/pkg/k8s/scanner/scanner.go index 16ac301c9fa4..55fe4c1e9386 100644 --- a/pkg/k8s/scanner/scanner.go +++ b/pkg/k8s/scanner/scanner.go @@ -375,7 +375,9 @@ func (s *Scanner) clusterInfoToReportResources(allArtifact []*artifacts.Artifact return nil, fmt.Errorf("failed to find node name") } - kbom := core.NewBOM() + kbom := core.NewBOM(core.Options{ + GenerateBOMRef: true, + }) for _, artifact := range allArtifact { switch artifact.Kind { case controlPlaneComponents: @@ -413,7 +415,7 @@ func (s *Scanner) clusterInfoToReportResources(allArtifact []*artifacts.Artifact } imageComponent := &core.Component{ - Type: core.TypeContainer, + Type: core.TypeContainerImage, Name: name, Version: cDigest, PkgID: core.PkgID{ diff --git a/pkg/k8s/scanner/scanner_test.go b/pkg/k8s/scanner/scanner_test.go index 8c9850c12b76..9269f78cf11b 100644 --- 
a/pkg/k8s/scanner/scanner_test.go +++ b/pkg/k8s/scanner/scanner_test.go @@ -155,7 +155,7 @@ func TestScanner_Scan(t *testing.T) { }, }, { - Type: core.TypeContainer, + Type: core.TypeContainerImage, Name: "k8s.gcr.io/kube-apiserver", Version: "sha256:18e61c783b41758dd391ab901366ec3546b26fae00eef7e223d1f94da808e02f", PkgID: core.PkgID{ diff --git a/pkg/report/spdx/spdx.go b/pkg/report/spdx/spdx.go index 7984db0e1517..a8550ddca0f3 100644 --- a/pkg/report/spdx/spdx.go +++ b/pkg/report/spdx/spdx.go @@ -30,7 +30,7 @@ func NewWriter(output io.Writer, version string, spdxFormat types.Format) Writer } func (w Writer) Write(ctx context.Context, report types.Report) error { - spdxDoc, err := w.marshaler.Marshal(ctx, report) + spdxDoc, err := w.marshaler.MarshalReport(ctx, report) if err != nil { return xerrors.Errorf("failed to marshal spdx: %w", err) } diff --git a/pkg/sbom/core/bom.go b/pkg/sbom/core/bom.go index 5f55f673306b..54755a81e6c8 100644 --- a/pkg/sbom/core/bom.go +++ b/pkg/sbom/core/bom.go @@ -11,11 +11,14 @@ import ( ) const ( - TypeApplication ComponentType = "application" - TypeContainer ComponentType = "container" - TypeLibrary ComponentType = "library" - TypeOS ComponentType = "os" - TypePlatform ComponentType = "platform" + TypeFilesystem ComponentType = "filesystem" + TypeRepository ComponentType = "repository" + TypeContainerImage ComponentType = "container_image" + TypeVM ComponentType = "vm" + TypeApplication ComponentType = "application" + TypeLibrary ComponentType = "library" + TypeOS ComponentType = "os" + TypePlatform ComponentType = "platform" // Metadata properties PropertySchemaVersion = "SchemaVersion" @@ -59,7 +62,7 @@ type BOM struct { components map[uuid.UUID]*Component relationships map[uuid.UUID][]Relationship - // Vulnerabilities is a list of vulnerabilities that affect the component + // Vulnerabilities is a list of vulnerabilities that affect the component. 
// CycloneDX: vulnerabilities // SPDX: N/A vulnerabilities map[uuid.UUID][]Vulnerability @@ -67,6 +70,9 @@ type BOM struct { // purls is a map of package URLs to UUIDs // This is used to ensure that each package URL is only represented once in the BOM. purls map[string][]uuid.UUID + + // opts is a set of options for the BOM. + opts Options } type Component struct { @@ -98,6 +104,21 @@ type Component struct { // SPDX: package.versionInfo Version string + // SrcName is the name of the source component + // CycloneDX: N/A + // SPDX: package.sourceInfo + SrcName string + + // SrcVersion is the version of the source component + // CycloneDX: N/A + // SPDX: package.sourceInfo + SrcVersion string + + // SrcFile is the file path where the component is found. + // CycloneDX: N/A + // SPDX: package.sourceInfo + SrcFile string + // Licenses is a list of licenses that apply to the component // CycloneDX: component.licenses // SPDX: package.licenseConcluded, package.licenseDeclared @@ -139,9 +160,10 @@ type File struct { Path string // Hash is a hash that uniquely identify the component. + // A file can have several digests with different algorithms, like SHA1, SHA256, etc. 
// CycloneDX: component.hashes - // SPDX: package.files[].checksum - Hash digest.Digest + // SPDX: package.files[].checksums + Digests []digest.Digest } type Property struct { @@ -182,12 +204,17 @@ type Vulnerability struct { DataSource *dtypes.DataSource } -func NewBOM() *BOM { +type Options struct { + GenerateBOMRef bool +} + +func NewBOM(opts Options) *BOM { return &BOM{ components: make(map[uuid.UUID]*Component), relationships: make(map[uuid.UUID][]Relationship), vulnerabilities: make(map[uuid.UUID][]Vulnerability), purls: make(map[string][]uuid.UUID), + opts: opts, } } @@ -245,14 +272,18 @@ func (b *BOM) Root() *Component { if !ok { return nil } - root.PkgID.BOMRef = b.bomRef(root) + if b.opts.GenerateBOMRef { + root.PkgID.BOMRef = b.bomRef(root) + } return root } func (b *BOM) Components() map[uuid.UUID]*Component { // Fill in BOMRefs for components - for id, c := range b.components { - b.components[id].PkgID.BOMRef = b.bomRef(c) + if b.opts.GenerateBOMRef { + for id, c := range b.components { + b.components[id].PkgID.BOMRef = b.bomRef(c) + } } return b.components } diff --git a/pkg/sbom/cyclonedx/marshal.go b/pkg/sbom/cyclonedx/marshal.go index be9fc23372b7..684b1b7d235d 100644 --- a/pkg/sbom/cyclonedx/marshal.go +++ b/pkg/sbom/cyclonedx/marshal.go @@ -48,7 +48,8 @@ func NewMarshaler(version string) Marshaler { // MarshalReport converts the Trivy report to the CycloneDX format func (m *Marshaler) MarshalReport(ctx context.Context, report types.Report) (*cdx.BOM, error) { // Convert into an intermediate representation - bom, err := sbomio.NewEncoder().Encode(report) + opts := core.Options{GenerateBOMRef: true} + bom, err := sbomio.NewEncoder(opts).Encode(report) if err != nil { return nil, xerrors.Errorf("failed to marshal report: %w", err) } @@ -218,9 +219,9 @@ func (m *Marshaler) marshalVulnerabilities() *[]cdx.Vulnerability { // componentType converts the Trivy component type to the CycloneDX component type func (*Marshaler) componentType(t 
core.ComponentType) (cdx.ComponentType, error) { switch t { - case core.TypeContainer: + case core.TypeContainerImage, core.TypeVM: return cdx.ComponentTypeContainer, nil - case core.TypeApplication: + case core.TypeApplication, core.TypeFilesystem, core.TypeRepository: return cdx.ComponentTypeApplication, nil case core.TypeLibrary: return cdx.ComponentTypeLibrary, nil @@ -249,17 +250,17 @@ func (*Marshaler) Supplier(supplier string) *cdx.OrganizationalEntity { } func (*Marshaler) Hashes(files []core.File) *[]cdx.Hash { - hashes := lo.FilterMap(files, func(f core.File, index int) (digest.Digest, bool) { - return f.Hash, f.Hash != "" + digests := lo.FlatMap(files, func(file core.File, _ int) []digest.Digest { + return file.Digests }) - if len(hashes) == 0 { + if len(digests) == 0 { return nil } var cdxHashes []cdx.Hash - for _, h := range hashes { + for _, d := range digests { var alg cdx.HashAlgorithm - switch h.Algorithm() { + switch d.Algorithm() { case digest.SHA1: alg = cdx.HashAlgoSHA1 case digest.SHA256: @@ -267,13 +268,13 @@ func (*Marshaler) Hashes(files []core.File) *[]cdx.Hash { case digest.MD5: alg = cdx.HashAlgoMD5 default: - log.Logger.Debugf("Unable to convert %q algorithm to CycloneDX format", h.Algorithm()) + log.Logger.Debugf("Unable to convert %q algorithm to CycloneDX format", d.Algorithm()) continue } cdxHashes = append(cdxHashes, cdx.Hash{ Algorithm: alg, - Value: h.Encoded(), + Value: d.Encoded(), }) } return &cdxHashes diff --git a/pkg/sbom/cyclonedx/marshal_test.go b/pkg/sbom/cyclonedx/marshal_test.go index 7999ea2eae70..de723236a66a 100644 --- a/pkg/sbom/cyclonedx/marshal_test.go +++ b/pkg/sbom/cyclonedx/marshal_test.go @@ -24,7 +24,7 @@ import ( ) func TestMarshaler_MarshalReport(t *testing.T) { - testSBOM := core.NewBOM() + testSBOM := core.NewBOM(core.Options{GenerateBOMRef: true}) testSBOM.AddComponent(&core.Component{ Root: true, Type: core.TypeApplication, @@ -1022,7 +1022,7 @@ func TestMarshaler_MarshalReport(t *testing.T) { BOMRef: 
"pkg:rpm/centos/acl@2.2.53-1.el8?arch=aarch64&distro=centos-8.3.2011&epoch=1", Type: cdx.ComponentTypeLibrary, Name: "acl", - Version: "2.2.53-1.el8", + Version: "1:2.2.53-1.el8", Licenses: &cdx.Licenses{ cdx.LicenseChoice{ License: &cdx.License{ diff --git a/pkg/sbom/cyclonedx/unmarshal.go b/pkg/sbom/cyclonedx/unmarshal.go index b234b85fd637..8821fe8b111a 100644 --- a/pkg/sbom/cyclonedx/unmarshal.go +++ b/pkg/sbom/cyclonedx/unmarshal.go @@ -37,7 +37,7 @@ func DecodeJSON(r io.Reader) (*cdx.BOM, error) { func (b *BOM) UnmarshalJSON(data []byte) error { log.Logger.Debug("Unmarshalling CycloneDX JSON...") if b.BOM == nil { - b.BOM = core.NewBOM() + b.BOM = core.NewBOM(core.Options{GenerateBOMRef: true}) } cdxBOM, err := DecodeJSON(bytes.NewReader(data)) @@ -143,9 +143,11 @@ func (b *BOM) parseComponent(c cdx.Component) (*core.Component, error) { Group: c.Group, Version: c.Version, Licenses: b.unmarshalLicenses(c.Licenses), - Files: lo.Map(b.unmarshalHashes(c.Hashes), func(d digest.Digest, _ int) core.File { - return core.File{Hash: d} // CycloneDX doesn't have a file path for the hash - }), + Files: []core.File{ + { + Digests: b.unmarshalHashes(c.Hashes), + }, + }, PkgID: core.PkgID{ PURL: &purl, BOMRef: c.BOMRef, @@ -161,7 +163,7 @@ func (b *BOM) unmarshalType(t cdx.ComponentType) (core.ComponentType, error) { var ctype core.ComponentType switch t { case cdx.ComponentTypeContainer: - ctype = core.TypeContainer + ctype = core.TypeContainerImage case cdx.ComponentTypeApplication: ctype = core.TypeApplication case cdx.ComponentTypeLibrary: diff --git a/pkg/sbom/io/decode.go b/pkg/sbom/io/decode.go index f0385ddedc26..af61f41b5a8c 100644 --- a/pkg/sbom/io/decode.go +++ b/pkg/sbom/io/decode.go @@ -2,14 +2,18 @@ package io import ( "errors" + "slices" "sort" "strconv" + debver "github.com/knqyf263/go-deb-version" + rpmver "github.com/knqyf263/go-rpm-version" "github.com/package-url/packageurl-go" "go.uber.org/zap" "golang.org/x/exp/maps" "golang.org/x/xerrors" + 
"github.com/aquasecurity/trivy/pkg/dependency" ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/log" "github.com/aquasecurity/trivy/pkg/purl" @@ -125,7 +129,7 @@ func (m *Decoder) decodeComponents(sbom *types.SBOM) error { // Third-party SBOMs may contain packages in types other than "Library" if c.Type == core.TypeLibrary || c.PkgID.PURL != nil { pkg, err := m.decodeLibrary(c) - if errors.Is(err, ErrUnsupportedType) { + if errors.Is(err, ErrUnsupportedType) || errors.Is(err, ErrPURLEmpty) { continue } else if err != nil { return xerrors.Errorf("failed to decode library: %w", err) @@ -156,15 +160,19 @@ func (m *Decoder) buildDependencyGraph() { } func (m *Decoder) decodeApplication(c *core.Component) *ftypes.Application { - app := &ftypes.Application{ - FilePath: c.Name, - } + var app ftypes.Application for _, prop := range c.Properties { if prop.Name == core.PropertyType { app.Type = ftypes.LangType(prop.Value) } } - return app + + // Aggregation Types use the name of the language (e.g. `Java`, `Python`, etc.) as the component name. + // Other language files use the file path as their name. 
+ if !slices.Contains(ftypes.AggregatingTypes, app.Type) { + app.FilePath = c.Name + } + return &app } func (m *Decoder) decodeLibrary(c *core.Component) (*ftypes.Package, error) { @@ -182,6 +190,7 @@ func (m *Decoder) decodeLibrary(c *core.Component) (*ftypes.Package, error) { return nil, ErrUnsupportedType } pkg.Name = m.pkgName(pkg, c) + pkg.ID = dependency.ID(p.LangType(), pkg.Name, p.Version) // Re-generate ID with the updated name var err error for _, prop := range c.Properties { @@ -211,12 +220,19 @@ func (m *Decoder) decodeLibrary(c *core.Component) (*ftypes.Package, error) { pkg.Identifier.BOMRef = c.PkgID.BOMRef pkg.Licenses = c.Licenses - if len(c.Files) > 0 { - pkg.Digest = c.Files[0].Hash + + for _, f := range c.Files { + if f.Path != "" && pkg.FilePath == "" { + pkg.FilePath = f.Path + } + // An empty path represents a package digest + if f.Path == "" && len(f.Digests) > 0 { + pkg.Digest = f.Digests[0] + } } if p.Class() == types.ClassOSPkg { - m.fillSrcPkg(pkg) + m.fillSrcPkg(c, pkg) } return pkg, nil @@ -241,7 +257,12 @@ func (m *Decoder) pkgName(pkg *ftypes.Package, c *core.Component) string { return c.Name } -func (m *Decoder) fillSrcPkg(pkg *ftypes.Package) { +func (m *Decoder) fillSrcPkg(c *core.Component, pkg *ftypes.Package) { + if c.SrcName != "" && pkg.SrcName == "" { + pkg.SrcName = c.SrcName + } + m.parseSrcVersion(pkg, c.SrcVersion) + // Fill source package information for components in third-party SBOMs . if pkg.SrcName == "" { pkg.SrcName = pkg.Name @@ -257,6 +278,29 @@ func (m *Decoder) fillSrcPkg(pkg *ftypes.Package) { } } +// parseSrcVersion parses the version of the source package. 
+func (m *Decoder) parseSrcVersion(pkg *ftypes.Package, ver string) { + if ver == "" { + return + } + switch pkg.Identifier.PURL.Type { + case packageurl.TypeRPM: + v := rpmver.NewVersion(ver) + pkg.SrcEpoch = v.Epoch() + pkg.SrcVersion = v.Version() + pkg.SrcRelease = v.Release() + case packageurl.TypeDebian: + v, err := debver.NewVersion(ver) + if err != nil { + log.Logger.Debugw("Failed to parse Debian version", zap.Error(err)) + return + } + pkg.SrcEpoch = v.Epoch() + pkg.SrcVersion = v.Version() + pkg.SrcRelease = v.Revision() + } +} + // addOSPkgs traverses relationships and adds OS packages func (m *Decoder) addOSPkgs(sbom *types.SBOM) { var pkgs []ftypes.Package diff --git a/pkg/sbom/io/encode.go b/pkg/sbom/io/encode.go index 73c0d4fef3dc..5bb181992975 100644 --- a/pkg/sbom/io/encode.go +++ b/pkg/sbom/io/encode.go @@ -2,48 +2,52 @@ package io import ( "fmt" + "slices" "strconv" "github.com/package-url/packageurl-go" "github.com/samber/lo" "golang.org/x/xerrors" + "github.com/aquasecurity/trivy/pkg/digest" ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/purl" "github.com/aquasecurity/trivy/pkg/sbom/core" + "github.com/aquasecurity/trivy/pkg/scanner/utils" "github.com/aquasecurity/trivy/pkg/types" ) type Encoder struct { - bom *core.BOM + bom *core.BOM + opts core.Options } -func NewEncoder() *Encoder { - return &Encoder{} +func NewEncoder(opts core.Options) *Encoder { + return &Encoder{opts: opts} } -func (m *Encoder) Encode(report types.Report) (*core.BOM, error) { +func (e *Encoder) Encode(report types.Report) (*core.BOM, error) { // Metadata component - root, err := m.rootComponent(report) + root, err := e.rootComponent(report) if err != nil { return nil, xerrors.Errorf("failed to create root component: %w", err) } - m.bom = core.NewBOM() - m.bom.AddComponent(root) + e.bom = core.NewBOM(e.opts) + e.bom.AddComponent(root) for _, result := range report.Results { - m.encodeResult(root, report.Metadata, result) + 
e.encodeResult(root, report.Metadata, result) } // Components that do not have their own dependencies MUST be declared as empty elements within the graph. - if _, ok := m.bom.Relationships()[root.ID()]; !ok { - m.bom.AddRelationship(root, nil, "") + if _, ok := e.bom.Relationships()[root.ID()]; !ok { + e.bom.AddRelationship(root, nil, "") } - return m.bom, nil + return e.bom, nil } -func (m *Encoder) rootComponent(r types.Report) (*core.Component, error) { +func (e *Encoder) rootComponent(r types.Report) (*core.Component, error) { root := &core.Component{ Root: true, Name: r.ArtifactName, @@ -58,7 +62,7 @@ func (m *Encoder) rootComponent(r types.Report) (*core.Component, error) { switch r.ArtifactType { case ftypes.ArtifactContainerImage: - root.Type = core.TypeContainer + root.Type = core.TypeContainerImage props = append(props, core.Property{ Name: core.PropertyImageID, Value: r.Metadata.ImageID, @@ -73,9 +77,11 @@ func (m *Encoder) rootComponent(r types.Report) (*core.Component, error) { } case ftypes.ArtifactVM: - root.Type = core.TypeContainer - case ftypes.ArtifactFilesystem, ftypes.ArtifactRepository: - root.Type = core.TypeApplication + root.Type = core.TypeVM + case ftypes.ArtifactFilesystem: + root.Type = core.TypeFilesystem + case ftypes.ArtifactRepository: + root.Type = core.TypeRepository case ftypes.ArtifactCycloneDX: return r.BOM.Root(), nil } @@ -113,9 +119,8 @@ func (m *Encoder) rootComponent(r types.Report) (*core.Component, error) { return root, nil } -func (m *Encoder) encodeResult(root *core.Component, metadata types.Metadata, result types.Result) { - if result.Type == ftypes.NodePkg || result.Type == ftypes.PythonPkg || - result.Type == ftypes.GemSpec || result.Type == ftypes.Jar || result.Type == ftypes.CondaPkg { +func (e *Encoder) encodeResult(root *core.Component, metadata types.Metadata, result types.Result) { + if slices.Contains(ftypes.AggregatingTypes, result.Type) { // If a package is language-specific package that isn't associated 
with a lock file, // it will be a dependency of a component under "metadata". // e.g. @@ -126,7 +131,7 @@ func (m *Encoder) encodeResult(root *core.Component, metadata types.Metadata, re // ref. https://cyclonedx.org/use-cases/#inventory // Dependency graph from #1 to #2 - m.encodePackages(root, result) + e.encodePackages(root, result) } else if result.Class == types.ClassOSPkg || result.Class == types.ClassLangPkg { // If a package is OS package, it will be a dependency of "Operating System" component. // e.g. @@ -146,21 +151,21 @@ func (m *Encoder) encodeResult(root *core.Component, metadata types.Metadata, re // -> etc. // #2 - appComponent := m.resultComponent(root, result, metadata.OS) + appComponent := e.resultComponent(root, result, metadata.OS) // #3 - m.encodePackages(appComponent, result) + e.encodePackages(appComponent, result) } } -func (m *Encoder) encodePackages(parent *core.Component, result types.Result) { +func (e *Encoder) encodePackages(parent *core.Component, result types.Result) { // Get dependency parents first parents := ftypes.Packages(result.Packages).ParentDeps() // Group vulnerabilities by package ID vulns := make(map[string][]core.Vulnerability) for _, vuln := range result.Vulnerabilities { - v := m.vulnerability(vuln) + v := e.vulnerability(vuln) vulns[v.PkgID] = append(vulns[v.PkgID], v) } @@ -171,15 +176,15 @@ func (m *Encoder) encodePackages(parent *core.Component, result types.Result) { result.Packages[i].ID = pkgID // Convert packages to components - c := m.component(result.Type, pkg) - components[pkgID] = c + c := e.component(result, pkg) + components[pkgID+pkg.FilePath] = c // Add a component - m.bom.AddComponent(c) + e.bom.AddComponent(c) // Add vulnerabilities if vv := vulns[pkgID]; vv != nil { - m.bom.AddVulnerabilities(c, vv) + e.bom.AddVulnerabilities(c, vv) } } @@ -190,26 +195,26 @@ func (m *Encoder) encodePackages(parent *core.Component, result types.Result) { continue } - directPkg := components[pkg.ID] - 
m.bom.AddRelationship(parent, directPkg, core.RelationshipContains) + directPkg := components[pkg.ID+pkg.FilePath] + e.bom.AddRelationship(parent, directPkg, core.RelationshipContains) for _, dep := range pkg.DependsOn { indirectPkg, ok := components[dep] if !ok { continue } - m.bom.AddRelationship(directPkg, indirectPkg, core.RelationshipDependsOn) + e.bom.AddRelationship(directPkg, indirectPkg, core.RelationshipDependsOn) } // Components that do not have their own dependencies MUST be declared as empty elements within the graph. // TODO: Should check if the component has actually no dependencies or the dependency graph is not supported. if len(pkg.DependsOn) == 0 { - m.bom.AddRelationship(directPkg, nil, "") + e.bom.AddRelationship(directPkg, nil, "") } } } -func (m *Encoder) resultComponent(root *core.Component, r types.Result, osFound *ftypes.OS) *core.Component { +func (e *Encoder) resultComponent(root *core.Component, r types.Result, osFound *ftypes.OS) *core.Component { component := &core.Component{ Name: r.Target, Properties: []core.Property{ @@ -235,18 +240,24 @@ func (m *Encoder) resultComponent(root *core.Component, r types.Result, osFound component.Type = core.TypeApplication } - m.bom.AddRelationship(root, component, core.RelationshipContains) + e.bom.AddRelationship(root, component, core.RelationshipContains) return component } -func (*Encoder) component(pkgType ftypes.TargetType, pkg ftypes.Package) *core.Component { +func (*Encoder) component(result types.Result, pkg ftypes.Package) *core.Component { name := pkg.Name - version := pkg.Version + version := utils.FormatVersion(pkg) var group string // there are cases when we can't build purl // e.g. 
local Go packages if pu := pkg.Identifier.PURL; pu != nil { version = pu.Version + for _, q := range pu.Qualifiers { + if q.Key == "epoch" && q.Value != "0" { + version = fmt.Sprintf("%s:%s", q.Value, version) + } + } + // Use `group` field for GroupID and `name` for ArtifactID for java files // https://github.com/aquasecurity/trivy/issues/4675 // Use `group` field for npm scopes @@ -264,7 +275,7 @@ func (*Encoder) component(pkgType ftypes.TargetType, pkg ftypes.Package) *core.C }, { Name: core.PropertyPkgType, - Value: string(pkgType), + Value: string(result.Type), }, { Name: core.PropertyFilePath, @@ -303,16 +314,25 @@ func (*Encoder) component(pkgType ftypes.TargetType, pkg ftypes.Package) *core.C var files []core.File if pkg.FilePath != "" || pkg.Digest != "" { files = append(files, core.File{ - Path: pkg.FilePath, - Hash: pkg.Digest, + Path: pkg.FilePath, + Digests: lo.Ternary(pkg.Digest != "", []digest.Digest{pkg.Digest}, nil), }) } + // TODO(refactor): simplify the list of conditions + var srcFile string + if result.Class == types.ClassLangPkg && !slices.Contains(ftypes.AggregatingTypes, result.Type) { + srcFile = result.Target + } + return &core.Component{ - Type: core.TypeLibrary, - Name: name, - Group: group, - Version: version, + Type: core.TypeLibrary, + Name: name, + Group: group, + Version: version, + SrcName: pkg.SrcName, + SrcVersion: utils.FormatSrcVersion(pkg), + SrcFile: srcFile, PkgID: core.PkgID{ PURL: pkg.Identifier.PURL, }, diff --git a/pkg/sbom/io/encode_test.go b/pkg/sbom/io/encode_test.go index 5c2af5b54d9f..a57bddd9983d 100644 --- a/pkg/sbom/io/encode_test.go +++ b/pkg/sbom/io/encode_test.go @@ -113,7 +113,7 @@ func TestEncoder_Encode(t *testing.T) { }, wantComponents: map[uuid.UUID]*core.Component{ uuid.MustParse("3ff14136-e09f-4df9-80ea-000000000001"): { - Type: core.TypeContainer, + Type: core.TypeContainerImage, Name: "debian:12", Root: true, PkgID: core.PkgID{ @@ -320,7 +320,8 @@ func TestEncoder_Encode(t *testing.T) { t.Run(tt.name, 
func(t *testing.T) { uuid.SetFakeUUID(t, "3ff14136-e09f-4df9-80ea-%012d") - got, err := sbomio.NewEncoder().Encode(tt.report) + opts := core.Options{GenerateBOMRef: true} + got, err := sbomio.NewEncoder(opts).Encode(tt.report) if tt.wantErr != "" { require.ErrorContains(t, err, tt.wantErr) return diff --git a/pkg/sbom/sbom.go b/pkg/sbom/sbom.go index 2d8d74b267a0..5b1055ed7174 100644 --- a/pkg/sbom/sbom.go +++ b/pkg/sbom/sbom.go @@ -183,8 +183,7 @@ func decodeAttestCycloneDXJSONFormat(r io.ReadSeeker) (Format, bool) { func Decode(f io.Reader, format Format) (types.SBOM, error) { var ( v interface{} - bom = core.NewBOM() - sbom types.SBOM + bom = core.NewBOM(core.Options{}) decoder interface{ Decode(any) error } ) @@ -212,10 +211,10 @@ func Decode(f io.Reader, format Format) (types.SBOM, error) { } decoder = json.NewDecoder(f) case FormatSPDXJSON: - v = &spdx.SPDX{SBOM: &sbom} + v = &spdx.SPDX{BOM: bom} decoder = json.NewDecoder(f) case FormatSPDXTV: - v = &spdx.SPDX{SBOM: &sbom} + v = &spdx.SPDX{BOM: bom} decoder = spdx.NewTVDecoder(f) default: return types.SBOM{}, xerrors.Errorf("%s scanning is not yet supported", format) @@ -227,11 +226,7 @@ func Decode(f io.Reader, format Format) (types.SBOM, error) { return types.SBOM{}, xerrors.Errorf("failed to decode: %w", err) } - // TODO: use BOM in SPDX - if format == FormatSPDXJSON || format == FormatSPDXTV { - return sbom, nil - } - + var sbom types.SBOM if err := sbomio.NewDecoder(bom).Decode(&sbom); err != nil { return types.SBOM{}, xerrors.Errorf("failed to decode: %w", err) } diff --git a/pkg/sbom/spdx/marshal.go b/pkg/sbom/spdx/marshal.go index ceb9a1ae24ce..6c1490fe1aec 100644 --- a/pkg/sbom/spdx/marshal.go +++ b/pkg/sbom/spdx/marshal.go @@ -4,26 +4,25 @@ import ( "context" "fmt" "sort" - "strconv" "strings" "time" "github.com/mitchellh/hashstructure/v2" + "github.com/package-url/packageurl-go" "github.com/samber/lo" "github.com/spdx/tools-golang/spdx" "github.com/spdx/tools-golang/spdx/v2/common" spdxutils 
"github.com/spdx/tools-golang/utils" - "golang.org/x/exp/maps" + "golang.org/x/exp/slices" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/clock" "github.com/aquasecurity/trivy/pkg/digest" - ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/licensing" "github.com/aquasecurity/trivy/pkg/licensing/expression" "github.com/aquasecurity/trivy/pkg/log" - "github.com/aquasecurity/trivy/pkg/purl" - "github.com/aquasecurity/trivy/pkg/scanner/utils" + "github.com/aquasecurity/trivy/pkg/sbom/core" + sbomio "github.com/aquasecurity/trivy/pkg/sbom/io" "github.com/aquasecurity/trivy/pkg/types" "github.com/aquasecurity/trivy/pkg/uuid" ) @@ -40,19 +39,6 @@ const ( CategoryPackageManager = "PACKAGE-MANAGER" RefTypePurl = "purl" - PropertySchemaVersion = "SchemaVersion" - - // Image properties - PropertySize = "Size" - PropertyImageID = "ImageID" - PropertyRepoDigest = "RepoDigest" - PropertyDiffID = "DiffID" - PropertyRepoTag = "RepoTag" - - // Package properties - PropertyPkgID = "PkgID" - PropertyLayerDiffID = "LayerDiffID" - PropertyLayerDigest = "LayerDigest" // Package Purpose fields PackagePurposeOS = "OPERATING-SYSTEM" PackagePurposeContainer = "CONTAINER" @@ -75,8 +61,20 @@ const ( var ( SourcePackagePrefix = "built package from" + SourceFilePrefix = "package found in" ) +// duplicateProperties contains a list of properties contained in other fields. +var duplicateProperties = []string{ + // `SourceInfo` contains SrcName and SrcVersion (it contains PropertySrcRelease and PropertySrcEpoch) + core.PropertySrcName, + core.PropertySrcRelease, + core.PropertySrcEpoch, + core.PropertySrcVersion, + // `File` contains filePath. 
+ core.PropertyFilePath, +} + type Marshaler struct { format spdx.Document hasher Hash @@ -107,75 +105,95 @@ func NewMarshaler(version string, opts ...marshalOption) *Marshaler { return m } -func (m *Marshaler) Marshal(ctx context.Context, r types.Report) (*spdx.Document, error) { - var relationShips []*spdx.Relationship - packages := make(map[spdx.ElementID]*spdx.Package) - pkgDownloadLocation := getPackageDownloadLocation(r.ArtifactType, r.ArtifactName) +func (m *Marshaler) MarshalReport(ctx context.Context, report types.Report) (*spdx.Document, error) { + // Convert into an intermediate representation + bom, err := sbomio.NewEncoder(core.Options{}).Encode(report) + if err != nil { + return nil, xerrors.Errorf("failed to marshal report: %w", err) + } + + return m.Marshal(ctx, bom) +} + +func (m *Marshaler) Marshal(ctx context.Context, bom *core.BOM) (*spdx.Document, error) { + var ( + relationShips []*spdx.Relationship + packages []*spdx.Package + ) + + root := bom.Root() + pkgDownloadLocation := m.packageDownloadLocation(root) + + // Component ID => SPDX ID + packageIDs := make(map[uuid.UUID]spdx.ElementID) // Root package contains OS, OS packages, language-specific packages and so on. 
- rootPkg, err := m.rootPackage(r, pkgDownloadLocation) + rootPkg, err := m.rootSPDXPackage(root, pkgDownloadLocation) if err != nil { return nil, xerrors.Errorf("failed to generate a root package: %w", err) } - packages[rootPkg.PackageSPDXIdentifier] = rootPkg + packages = append(packages, rootPkg) relationShips = append(relationShips, - relationShip(DocumentSPDXIdentifier, rootPkg.PackageSPDXIdentifier, RelationShipDescribe), + m.spdxRelationShip(DocumentSPDXIdentifier, rootPkg.PackageSPDXIdentifier, RelationShipDescribe), ) + packageIDs[root.ID()] = rootPkg.PackageSPDXIdentifier - var spdxFiles []*spdx.File - - for _, result := range r.Results { - if len(result.Packages) == 0 { + var files []*spdx.File + for _, c := range bom.Components() { + if c.Root { continue } - parentPackage, err := m.resultToSpdxPackage(result, r.Metadata.OS, pkgDownloadLocation) + spdxPackage, err := m.spdxPackage(c, pkgDownloadLocation) if err != nil { - return nil, xerrors.Errorf("failed to parse result: %w", err) + return nil, xerrors.Errorf("spdx package error: %w", err) } - packages[parentPackage.PackageSPDXIdentifier] = &parentPackage - relationShips = append(relationShips, - relationShip(rootPkg.PackageSPDXIdentifier, parentPackage.PackageSPDXIdentifier, RelationShipContains), - ) - - for _, pkg := range result.Packages { - spdxPackage, err := m.pkgToSpdxPackage(result.Type, pkgDownloadLocation, result.Class, r.Metadata, pkg) - if err != nil { - return nil, xerrors.Errorf("failed to parse package: %w", err) - } - packages[spdxPackage.PackageSPDXIdentifier] = &spdxPackage + packages = append(packages, &spdxPackage) + packageIDs[c.ID()] = spdxPackage.PackageSPDXIdentifier + + spdxFiles, err := m.spdxFiles(c) + if err != nil { + return nil, xerrors.Errorf("spdx files error: %w", err) + } else if len(spdxFiles) == 0 { + continue + } + + files = append(files, spdxFiles...) 
+ for _, file := range spdxFiles { relationShips = append(relationShips, - relationShip(parentPackage.PackageSPDXIdentifier, spdxPackage.PackageSPDXIdentifier, RelationShipContains), + m.spdxRelationShip(spdxPackage.PackageSPDXIdentifier, file.FileSPDXIdentifier, RelationShipContains), ) - files, err := m.pkgFiles(pkg) - if err != nil { - return nil, xerrors.Errorf("package file error: %w", err) - } else if files == nil { - continue - } + } + verificationCode, err := spdxutils.GetVerificationCode(spdxFiles, "") + if err != nil { + return nil, xerrors.Errorf("package verification error: %w", err) + } + spdxPackage.FilesAnalyzed = true + spdxPackage.PackageVerificationCode = &verificationCode + } - spdxFiles = append(spdxFiles, files...) - for _, file := range files { - relationShips = append(relationShips, - relationShip(spdxPackage.PackageSPDXIdentifier, file.FileSPDXIdentifier, RelationShipContains), - ) + for id, rels := range bom.Relationships() { + for _, rel := range rels { + refA, ok := packageIDs[id] + if !ok { + continue } - - verificationCode, err := spdxutils.GetVerificationCode(files, "") - if err != nil { - return nil, xerrors.Errorf("package verification error: %w", err) + refB, ok := packageIDs[rel.Dependency] + if !ok { + continue } - - spdxPackage.FilesAnalyzed = true - spdxPackage.PackageVerificationCode = &verificationCode + relationShips = append(relationShips, m.spdxRelationShip(refA, refB, m.spdxRelationshipType(rel.Type))) } } + sortPackages(packages) + sortRelationships(relationShips) + sortFiles(files) return &spdx.Document{ SPDXVersion: spdx.Version, DataLicense: spdx.DataLicense, SPDXIdentifier: DocumentSPDXIdentifier, - DocumentName: r.ArtifactName, - DocumentNamespace: getDocumentNamespace(r, m), + DocumentName: root.Name, + DocumentNamespace: getDocumentNamespace(root), CreationInfo: &spdx.CreationInfo{ Creators: []common.Creator{ { @@ -189,214 +207,215 @@ func (m *Marshaler) Marshal(ctx context.Context, r types.Report) (*spdx.Document 
}, Created: clock.Now(ctx).UTC().Format(time.RFC3339), }, - Packages: toPackages(packages), + Packages: packages, Relationships: relationShips, - Files: spdxFiles, + Files: files, }, nil } -func toPackages(packages map[spdx.ElementID]*spdx.Package) []*spdx.Package { - ret := maps.Values(packages) - sort.Slice(ret, func(i, j int) bool { - if ret[i].PackageName != ret[j].PackageName { - return ret[i].PackageName < ret[j].PackageName - } - return ret[i].PackageSPDXIdentifier < ret[j].PackageSPDXIdentifier - }) - return ret -} - -func (m *Marshaler) resultToSpdxPackage(result types.Result, os *ftypes.OS, pkgDownloadLocation string) (spdx.Package, error) { - switch result.Class { - case types.ClassOSPkg: - osPkg, err := m.osPackage(os, pkgDownloadLocation) - if err != nil { - return spdx.Package{}, xerrors.Errorf("failed to parse operating system package: %w", err) - } - return osPkg, nil - case types.ClassLangPkg: - langPkg, err := m.langPackage(result.Target, pkgDownloadLocation, result.Type) - if err != nil { - return spdx.Package{}, xerrors.Errorf("failed to parse application package: %w", err) - } - return langPkg, nil - default: - // unsupported packages - return spdx.Package{}, nil - } -} - -func (m *Marshaler) parseFile(filePath string, d digest.Digest) (spdx.File, error) { - pkgID, err := calcPkgID(m.hasher, filePath) - if err != nil { - return spdx.File{}, xerrors.Errorf("failed to get %s package ID: %w", filePath, err) - } - file := spdx.File{ - FileSPDXIdentifier: spdx.ElementID(fmt.Sprintf("File-%s", pkgID)), - FileName: filePath, - Checksums: digestToSpdxFileChecksum(d), +func (m *Marshaler) packageDownloadLocation(root *core.Component) string { + location := noneField + // this field is used for git/mercurial/subversion/bazaar: + // https://spdx.github.io/spdx-spec/v2.2.2/package-information/#77-package-download-location-field + if root.Type == core.TypeRepository { + // Trivy currently only supports git repositories. 
Format examples: + // git+https://git.myproject.org/MyProject.git + // git+http://git.myproject.org/MyProject + location = fmt.Sprintf("git+%s", root.Name) } - return file, nil + return location } -func (m *Marshaler) rootPackage(r types.Report, pkgDownloadLocation string) (*spdx.Package, error) { +func (m *Marshaler) rootSPDXPackage(root *core.Component, pkgDownloadLocation string) (*spdx.Package, error) { var externalReferences []*spdx.PackageExternalReference - attributionTexts := []string{attributionText(PropertySchemaVersion, strconv.Itoa(r.SchemaVersion))} - // When the target is a container image, add PURL to the external references of the root package. - if p, err := purl.New(purl.TypeOCI, r.Metadata, ftypes.Package{}); err != nil { - return nil, xerrors.Errorf("failed to new package url for oci: %w", err) - } else if p != nil { - externalReferences = append(externalReferences, purlExternalReference(p.String())) - } - - if r.Metadata.ImageID != "" { - attributionTexts = appendAttributionText(attributionTexts, PropertyImageID, r.Metadata.ImageID) - } - if r.Metadata.Size != 0 { - attributionTexts = appendAttributionText(attributionTexts, PropertySize, strconv.FormatInt(r.Metadata.Size, 10)) + if root.PkgID.PURL != nil { + externalReferences = append(externalReferences, m.purlExternalReference(root.PkgID.PURL.String())) } - for _, d := range r.Metadata.RepoDigests { - attributionTexts = appendAttributionText(attributionTexts, PropertyRepoDigest, d) - } - for _, d := range r.Metadata.DiffIDs { - attributionTexts = appendAttributionText(attributionTexts, PropertyDiffID, d) - } - for _, t := range r.Metadata.RepoTags { - attributionTexts = appendAttributionText(attributionTexts, PropertyRepoTag, t) - } - - pkgID, err := calcPkgID(m.hasher, fmt.Sprintf("%s-%s", r.ArtifactName, r.ArtifactType)) + pkgID, err := calcPkgID(m.hasher, fmt.Sprintf("%s-%s", root.Name, root.Type)) if err != nil { return nil, xerrors.Errorf("failed to get %s package ID: %w", pkgID, err) } 
pkgPurpose := PackagePurposeSource - if r.ArtifactType == ftypes.ArtifactContainerImage { + if root.Type == core.TypeContainerImage { pkgPurpose = PackagePurposeContainer } return &spdx.Package{ - PackageName: r.ArtifactName, - PackageSPDXIdentifier: elementID(camelCase(string(r.ArtifactType)), pkgID), + PackageName: root.Name, + PackageSPDXIdentifier: elementID(camelCase(string(root.Type)), pkgID), PackageDownloadLocation: pkgDownloadLocation, - PackageAttributionTexts: attributionTexts, + PackageAttributionTexts: m.spdxAttributionTexts(root), PackageExternalReferences: externalReferences, PrimaryPackagePurpose: pkgPurpose, }, nil } -func (m *Marshaler) osPackage(osFound *ftypes.OS, pkgDownloadLocation string) (spdx.Package, error) { - if osFound == nil { - return spdx.Package{}, nil - } - - pkgID, err := calcPkgID(m.hasher, osFound) - if err != nil { - return spdx.Package{}, xerrors.Errorf("failed to get os metadata package ID: %w", err) +func (m *Marshaler) appendAttributionText(attributionTexts []string, key, value string) []string { + if value == "" { + return attributionTexts } - - return spdx.Package{ - PackageName: string(osFound.Family), - PackageVersion: osFound.Name, - PackageSPDXIdentifier: elementID(ElementOperatingSystem, pkgID), - PackageDownloadLocation: pkgDownloadLocation, - PrimaryPackagePurpose: PackagePurposeOS, - }, nil + return append(attributionTexts, fmt.Sprintf("%s: %s", key, value)) } -func (m *Marshaler) langPackage(target, pkgDownloadLocation string, appType ftypes.LangType) (spdx.Package, error) { - pkgID, err := calcPkgID(m.hasher, fmt.Sprintf("%s-%s", target, appType)) - if err != nil { - return spdx.Package{}, xerrors.Errorf("failed to get %s package ID: %w", target, err) +func (m *Marshaler) purlExternalReference(packageURL string) *spdx.PackageExternalReference { + return &spdx.PackageExternalReference{ + Category: CategoryPackageManager, + RefType: RefTypePurl, + Locator: packageURL, } - - return spdx.Package{ - PackageName: 
string(appType), - PackageSourceInfo: target, // TODO: Files seems better - PackageSPDXIdentifier: elementID(ElementApplication, pkgID), - PackageDownloadLocation: pkgDownloadLocation, - PrimaryPackagePurpose: PackagePurposeApplication, - }, nil } -func (m *Marshaler) pkgToSpdxPackage(t ftypes.TargetType, pkgDownloadLocation string, class types.ResultClass, metadata types.Metadata, pkg ftypes.Package) (spdx.Package, error) { - license := GetLicense(pkg) - - pkgID, err := calcPkgID(m.hasher, pkg) +func (m *Marshaler) spdxPackage(c *core.Component, pkgDownloadLocation string) (spdx.Package, error) { + pkgID, err := calcPkgID(m.hasher, c) if err != nil { - return spdx.Package{}, xerrors.Errorf("failed to get %s package ID: %w", pkg.Name, err) + return spdx.Package{}, xerrors.Errorf("failed to get os metadata package ID: %w", err) } - var pkgSrcInfo string - if class == types.ClassOSPkg && pkg.SrcName != "" { - pkgSrcInfo = fmt.Sprintf("%s: %s %s", SourcePackagePrefix, pkg.SrcName, utils.FormatSrcVersion(pkg)) + var elementType, purpose, license, sourceInfo string + var supplier *spdx.Supplier + switch c.Type { + case core.TypeOS: + elementType = ElementOperatingSystem + purpose = PackagePurposeOS + case core.TypeApplication: + elementType = ElementApplication + purpose = PackagePurposeApplication + case core.TypeLibrary: + elementType = ElementPackage + purpose = PackagePurposeLibrary + license = m.spdxLicense(c) + + if c.SrcName != "" { + sourceInfo = fmt.Sprintf("%s: %s %s", SourcePackagePrefix, c.SrcName, c.SrcVersion) + } else if c.SrcFile != "" { + sourceInfo = fmt.Sprintf("%s: %s", SourceFilePrefix, c.SrcFile) + } + + supplier = &spdx.Supplier{Supplier: PackageSupplierNoAssertion} + if c.Supplier != "" { + supplier = &spdx.Supplier{ + SupplierType: PackageSupplierOrganization, // Always use "Organization" at the moment as it is difficult to distinguish between "Person" or "Organization". 
+ Supplier: c.Supplier, + } + } } var pkgExtRefs []*spdx.PackageExternalReference - if pkg.Identifier.PURL != nil { - pkgExtRefs = []*spdx.PackageExternalReference{purlExternalReference(pkg.Identifier.PURL.String())} + if c.PkgID.PURL != nil { + pkgExtRefs = []*spdx.PackageExternalReference{m.purlExternalReference(c.PkgID.PURL.String())} } - var attrTexts []string - attrTexts = appendAttributionText(attrTexts, PropertyPkgID, pkg.ID) - attrTexts = appendAttributionText(attrTexts, PropertyLayerDigest, pkg.Layer.Digest) - attrTexts = appendAttributionText(attrTexts, PropertyLayerDiffID, pkg.Layer.DiffID) - - supplier := &spdx.Supplier{Supplier: PackageSupplierNoAssertion} - if pkg.Maintainer != "" { - supplier = &spdx.Supplier{ - SupplierType: PackageSupplierOrganization, // Always use "Organization" at the moment as it is difficult to distinguish between "Person" or "Organization". - Supplier: pkg.Maintainer, + var digests []digest.Digest + for _, f := range c.Files { + // The file digests are stored separately. + if f.Path != "" { + continue } - } - - var checksum []spdx.Checksum - if pkg.Digest != "" && class == types.ClassOSPkg { - checksum = digestToSpdxFileChecksum(pkg.Digest) + digests = append(digests, f.Digests...) 
} return spdx.Package{ - PackageName: pkg.Name, - PackageVersion: utils.FormatVersion(pkg), - PackageSPDXIdentifier: elementID(ElementPackage, pkgID), - PackageDownloadLocation: pkgDownloadLocation, - PackageSourceInfo: pkgSrcInfo, + PackageSPDXIdentifier: elementID(elementType, pkgID), + PackageName: spdxPkgName(c), + PackageVersion: c.Version, + PrimaryPackagePurpose: purpose, + PackageDownloadLocation: pkgDownloadLocation, + PackageExternalReferences: pkgExtRefs, + PackageAttributionTexts: m.spdxAttributionTexts(c), + PackageSourceInfo: sourceInfo, + PackageSupplier: supplier, + PackageChecksums: m.spdxChecksums(digests), // The Declared License is what the authors of a project believe govern the package PackageLicenseConcluded: license, // The Concluded License field is the license the SPDX file creator believes governs the package PackageLicenseDeclared: license, - - PackageExternalReferences: pkgExtRefs, - PackageAttributionTexts: attrTexts, - PrimaryPackagePurpose: PackagePurposeLibrary, - PackageSupplier: supplier, - PackageChecksums: checksum, }, nil } -func (m *Marshaler) pkgFiles(pkg ftypes.Package) ([]*spdx.File, error) { - if pkg.FilePath == "" { - return nil, nil +func spdxPkgName(component *core.Component) string { + if p := component.PkgID.PURL; p != nil && component.Group != "" { + if p.Type == packageurl.TypeMaven || p.Type == packageurl.TypeGradle { + return component.Group + ":" + component.Name + } + return component.Group + "/" + component.Name } + return component.Name +} - file, err := m.parseFile(pkg.FilePath, pkg.Digest) - if err != nil { - return nil, xerrors.Errorf("failed to parse file: %w", err) +func (m *Marshaler) spdxAttributionTexts(c *core.Component) []string { + var texts []string + for _, p := range c.Properties { + // Add properties that are not in other fields. 
+ if !slices.Contains(duplicateProperties, p.Name) { + texts = m.appendAttributionText(texts, p.Name, p.Value) + } } - return []*spdx.File{ - &file, - }, nil + return texts } -func elementID(elementType, pkgID string) spdx.ElementID { - return spdx.ElementID(fmt.Sprintf("%s-%s", elementType, pkgID)) +func (m *Marshaler) spdxLicense(c *core.Component) string { + if len(c.Licenses) == 0 { + return noneField + } + return NormalizeLicense(c.Licenses) +} + +func (m *Marshaler) spdxChecksums(digests []digest.Digest) []common.Checksum { + var checksums []common.Checksum + for _, d := range digests { + var alg spdx.ChecksumAlgorithm + switch d.Algorithm() { + case digest.SHA1: + alg = spdx.SHA1 + case digest.SHA256: + alg = spdx.SHA256 + case digest.MD5: + alg = spdx.MD5 + default: + return nil + } + checksums = append(checksums, spdx.Checksum{ + Algorithm: alg, + Value: d.Encoded(), + }) + } + + return checksums +} + +func (m *Marshaler) spdxFiles(c *core.Component) ([]*spdx.File, error) { + var files []*spdx.File + for _, file := range c.Files { + if file.Path == "" || len(file.Digests) == 0 { + continue + } + spdxFile, err := m.spdxFile(file.Path, file.Digests) + if err != nil { + return nil, xerrors.Errorf("failed to parse file: %w", err) + } + files = append(files, spdxFile) + } + return files, nil +} + +func (m *Marshaler) spdxFile(filePath string, digests []digest.Digest) (*spdx.File, error) { + pkgID, err := calcPkgID(m.hasher, filePath) + if err != nil { + return nil, xerrors.Errorf("failed to get %s package ID: %w", filePath, err) + } + return &spdx.File{ + FileSPDXIdentifier: spdx.ElementID(fmt.Sprintf("File-%s", pkgID)), + FileName: filePath, + Checksums: m.spdxChecksums(digests), + }, nil } -func relationShip(refA, refB spdx.ElementID, operator string) *spdx.Relationship { +func (m *Marshaler) spdxRelationShip(refA, refB spdx.ElementID, operator string) *spdx.Relationship { ref := spdx.Relationship{ RefA: common.MakeDocElementID("", string(refA)), RefB: 
common.MakeDocElementID("", string(refB)), @@ -405,51 +424,65 @@ func relationShip(refA, refB spdx.ElementID, operator string) *spdx.Relationship return &ref } -func appendAttributionText(attributionTexts []string, key, value string) []string { - if value == "" { - return attributionTexts +func (m *Marshaler) spdxRelationshipType(relType core.RelationshipType) string { + switch relType { + case core.RelationshipDependsOn: + return RelationShipDependsOn + case core.RelationshipContains: + return RelationShipContains + case core.RelationshipDescribes: + return RelationShipDescribe + default: + return RelationShipDependsOn } - return append(attributionTexts, attributionText(key, value)) } -func attributionText(key, value string) string { - return fmt.Sprintf("%s: %s", key, value) +func sortPackages(pkgs []*spdx.Package) { + sort.Slice(pkgs, func(i, j int) bool { + switch { + case pkgs[i].PrimaryPackagePurpose != pkgs[j].PrimaryPackagePurpose: + return pkgs[i].PrimaryPackagePurpose < pkgs[j].PrimaryPackagePurpose + case pkgs[i].PackageName != pkgs[j].PackageName: + return pkgs[i].PackageName < pkgs[j].PackageName + default: + return pkgs[i].PackageSPDXIdentifier < pkgs[j].PackageSPDXIdentifier + } + }) } -func purlExternalReference(packageURL string) *spdx.PackageExternalReference { - return &spdx.PackageExternalReference{ - Category: CategoryPackageManager, - RefType: RefTypePurl, - Locator: packageURL, - } +func sortRelationships(rels []*spdx.Relationship) { + sort.Slice(rels, func(i, j int) bool { + switch { + case rels[i].RefA.ElementRefID != rels[j].RefA.ElementRefID: + return rels[i].RefA.ElementRefID < rels[j].RefA.ElementRefID + case rels[i].RefB.ElementRefID != rels[j].RefB.ElementRefID: + return rels[i].RefB.ElementRefID < rels[j].RefB.ElementRefID + default: + return rels[i].Relationship < rels[j].Relationship + } + }) } -func GetLicense(p ftypes.Package) string { - if len(p.Licenses) == 0 { - return noneField - } - - license := 
strings.Join(lo.Map(p.Licenses, func(license string, index int) string { - // e.g. GPL-3.0-with-autoconf-exception - license = strings.ReplaceAll(license, "-with-", " WITH ") - license = strings.ReplaceAll(license, "-WITH-", " WITH ") +func sortFiles(files []*spdx.File) { + sort.Slice(files, func(i, j int) bool { + switch { + case files[i].FileName != files[j].FileName: + return files[i].FileName < files[j].FileName + default: + return files[i].FileSPDXIdentifier < files[j].FileSPDXIdentifier + } + }) +} - return fmt.Sprintf("(%s)", license) - }), " AND ") - s, err := expression.Normalize(license, licensing.Normalize, expression.NormalizeForSPDX) - if err != nil { - // Not fail on the invalid license - log.Logger.Warnf("Unable to marshal SPDX licenses %q", license) - return "" - } - return s +func elementID(elementType, pkgID string) spdx.ElementID { + return spdx.ElementID(fmt.Sprintf("%s-%s", elementType, pkgID)) } -func getDocumentNamespace(r types.Report, m *Marshaler) string { +func getDocumentNamespace(root *core.Component) string { return fmt.Sprintf("%s/%s/%s-%s", DocumentNamespace, - string(r.ArtifactType), - strings.ReplaceAll(strings.ReplaceAll(r.ArtifactName, "https://", ""), "http://", ""), // remove http(s):// prefix when scanning repos + string(root.Type), + strings.ReplaceAll(strings.ReplaceAll(root.Name, "https://", ""), "http://", ""), // remove http(s):// prefix when scanning repos uuid.New().String(), ) } @@ -487,40 +520,19 @@ func camelCase(inputUnderScoreStr string) (camelCase string) { return } -func getPackageDownloadLocation(t ftypes.ArtifactType, artifactName string) string { - location := noneField - // this field is used for git/mercurial/subversion/bazaar: - // https://spdx.github.io/spdx-spec/v2.2.2/package-information/#77-package-download-location-field - if t == ftypes.ArtifactRepository { - // Trivy currently only supports git repositories. 
Format examples: - // git+https://git.myproject.org/MyProject.git - // git+http://git.myproject.org/MyProject - location = fmt.Sprintf("git+%s", artifactName) - } - return location -} - -func digestToSpdxFileChecksum(d digest.Digest) []common.Checksum { - if d == "" { - return nil - } - - var alg spdx.ChecksumAlgorithm - switch d.Algorithm() { - case digest.SHA1: - alg = spdx.SHA1 - case digest.SHA256: - alg = spdx.SHA256 - case digest.MD5: - alg = spdx.MD5 - default: - return nil - } +func NormalizeLicense(licenses []string) string { + license := strings.Join(lo.Map(licenses, func(license string, index int) string { + // e.g. GPL-3.0-with-autoconf-exception + license = strings.ReplaceAll(license, "-with-", " WITH ") + license = strings.ReplaceAll(license, "-WITH-", " WITH ") - return []spdx.Checksum{ - { - Algorithm: alg, - Value: d.Encoded(), - }, + return fmt.Sprintf("(%s)", license) + }), " AND ") + s, err := expression.Normalize(license, licensing.Normalize, expression.NormalizeForSPDX) + if err != nil { + // Not fail on the invalid license + log.Logger.Warnf("Unable to marshal SPDX licenses %q", license) + return "" } + return s } diff --git a/pkg/sbom/spdx/marshal_test.go b/pkg/sbom/spdx/marshal_test.go index a66a1d5ee46c..c7757de8ca81 100644 --- a/pkg/sbom/spdx/marshal_test.go +++ b/pkg/sbom/spdx/marshal_test.go @@ -2,6 +2,7 @@ package spdx_test import ( "context" + "github.com/aquasecurity/trivy/pkg/sbom/core" "github.com/package-url/packageurl-go" "hash/fnv" "testing" @@ -144,7 +145,7 @@ func TestMarshaler_Marshal(t *testing.T) { DataLicense: spdx.DataLicense, SPDXIdentifier: "DOCUMENT", DocumentName: "rails:latest", - DocumentNamespace: "http://aquasecurity.github.io/trivy/container_image/rails:latest-3ff14136-e09f-4df9-80ea-000000000001", + DocumentNamespace: "http://aquasecurity.github.io/trivy/container_image/rails:latest-3ff14136-e09f-4df9-80ea-000000000009", CreationInfo: &spdx.CreationInfo{ Creators: []common.Creator{ { @@ -160,12 +161,56 @@ func 
TestMarshaler_Marshal(t *testing.T) { }, Packages: []*spdx.Package{ { - PackageSPDXIdentifier: spdx.ElementID("Package-eb0263038c3b445b"), + PackageSPDXIdentifier: spdx.ElementID("Application-9f48cdd13858abaf"), + PackageDownloadLocation: "NONE", + PackageName: "app/Gemfile.lock", + PrimaryPackagePurpose: tspdx.PackagePurposeApplication, + PackageAttributionTexts: []string{ + "Class: lang-pkgs", + "Type: bundler", + }, + }, + { + PackageSPDXIdentifier: spdx.ElementID("Application-692290f4b2235359"), + PackageDownloadLocation: "NONE", + PackageName: "app/subproject/Gemfile.lock", + PrimaryPackagePurpose: tspdx.PackagePurposeApplication, + PackageAttributionTexts: []string{ + "Class: lang-pkgs", + "Type: bundler", + }, + }, + { + PackageSPDXIdentifier: spdx.ElementID("ContainerImage-9396d894cd0cb6cb"), + PackageDownloadLocation: "NONE", + PackageName: "rails:latest", + PackageExternalReferences: []*spdx.PackageExternalReference{ + { + Category: tspdx.CategoryPackageManager, + RefType: tspdx.RefTypePurl, + Locator: "pkg:oci/rails@sha256%3Aa27fd8080b517143cbbbab9dfb7c8571c40d67d534bbdee55bd6c473f432b177?arch=arm64&repository_url=index.docker.io%2Flibrary%2Frails", + }, + }, + PackageAttributionTexts: []string{ + "DiffID: sha256:d871dadfb37b53ef1ca45be04fc527562b91989991a8f545345ae3be0b93f92a", + "ImageID: sha256:5d0da3dc976460b72c77d94c8a1ad043720b0416bfc16c52c45d4847e53fadb6", + "RepoDigest: rails@sha256:a27fd8080b517143cbbbab9dfb7c8571c40d67d534bbdee55bd6c473f432b177", + "RepoTag: rails:latest", + "SchemaVersion: 2", + "Size: 1024", + }, + PrimaryPackagePurpose: tspdx.PackagePurposeContainer, + }, + { + PackageSPDXIdentifier: spdx.ElementID("Package-b8d4663e6d412e7"), PackageDownloadLocation: "NONE", PackageName: "actioncontroller", PackageVersion: "7.0.1", PackageLicenseConcluded: "NONE", PackageLicenseDeclared: "NONE", + PackageAttributionTexts: []string{ + "PkgType: bundler", + }, PackageExternalReferences: []*spdx.PackageExternalReference{ { Category: 
tspdx.CategoryPackageManager, @@ -175,14 +220,39 @@ func TestMarshaler_Marshal(t *testing.T) { }, PrimaryPackagePurpose: tspdx.PackagePurposeLibrary, PackageSupplier: &spdx.Supplier{Supplier: tspdx.PackageSupplierNoAssertion}, + PackageSourceInfo: "package found in: app/subproject/Gemfile.lock", + }, + { + PackageSPDXIdentifier: spdx.ElementID("Package-3b51e821f6796568"), + PackageDownloadLocation: "NONE", + PackageName: "actionpack", + PackageVersion: "7.0.1", + PackageLicenseConcluded: "NONE", + PackageLicenseDeclared: "NONE", + PackageAttributionTexts: []string{ + "PkgType: bundler", + }, + PackageExternalReferences: []*spdx.PackageExternalReference{ + { + Category: tspdx.CategoryPackageManager, + RefType: tspdx.RefTypePurl, + Locator: "pkg:gem/actionpack@7.0.1", + }, + }, + PrimaryPackagePurpose: tspdx.PackagePurposeLibrary, + PackageSupplier: &spdx.Supplier{Supplier: tspdx.PackageSupplierNoAssertion}, + PackageSourceInfo: "package found in: app/subproject/Gemfile.lock", }, { - PackageSPDXIdentifier: spdx.ElementID("Package-826226d056ff30c0"), + PackageSPDXIdentifier: spdx.ElementID("Package-fb5630bc7d55a21c"), PackageDownloadLocation: "NONE", PackageName: "actionpack", PackageVersion: "7.0.1", PackageLicenseConcluded: "NONE", PackageLicenseDeclared: "NONE", + PackageAttributionTexts: []string{ + "PkgType: bundler", + }, PackageExternalReferences: []*spdx.PackageExternalReference{ { Category: tspdx.CategoryPackageManager, @@ -192,14 +262,18 @@ func TestMarshaler_Marshal(t *testing.T) { }, PrimaryPackagePurpose: tspdx.PackagePurposeLibrary, PackageSupplier: &spdx.Supplier{Supplier: tspdx.PackageSupplierNoAssertion}, + PackageSourceInfo: "package found in: app/Gemfile.lock", }, { - PackageSPDXIdentifier: spdx.ElementID("Package-fd0dc3cf913d5bc3"), + PackageSPDXIdentifier: spdx.ElementID("Package-5d43902b18ed2e2c"), PackageDownloadLocation: "NONE", PackageName: "binutils", PackageVersion: "2.30-93.el8", PackageLicenseConcluded: "GPL-3.0-or-later", 
PackageLicenseDeclared: "GPL-3.0-or-later", + PackageAttributionTexts: []string{ + "PkgType: centos", + }, PackageSupplier: &spdx.Supplier{ SupplierType: tspdx.PackageSupplierOrganization, Supplier: "CentOS", @@ -221,87 +295,56 @@ func TestMarshaler_Marshal(t *testing.T) { }, }, { - PackageSPDXIdentifier: spdx.ElementID("Application-73c871d73f3c8248"), - PackageDownloadLocation: "NONE", - PackageName: "bundler", - PackageSourceInfo: "app/subproject/Gemfile.lock", - PrimaryPackagePurpose: tspdx.PackagePurposeApplication, - }, - { - PackageSPDXIdentifier: spdx.ElementID("Application-c3fac92c1ac0a9fa"), - PackageDownloadLocation: "NONE", - PackageName: "bundler", - PackageSourceInfo: "app/Gemfile.lock", - PrimaryPackagePurpose: tspdx.PackagePurposeApplication, - }, - { - PackageSPDXIdentifier: spdx.ElementID("OperatingSystem-197f9a00ebcb51f0"), + PackageSPDXIdentifier: spdx.ElementID("OperatingSystem-20f7fa3049cc748c"), PackageDownloadLocation: "NONE", PackageName: "centos", PackageVersion: "8.3.2011", PrimaryPackagePurpose: tspdx.PackagePurposeOS, - }, - { - PackageSPDXIdentifier: spdx.ElementID("ContainerImage-9396d894cd0cb6cb"), - PackageDownloadLocation: "NONE", - PackageName: "rails:latest", - PackageExternalReferences: []*spdx.PackageExternalReference{ - { - Category: tspdx.CategoryPackageManager, - RefType: tspdx.RefTypePurl, - Locator: "pkg:oci/rails@sha256%3Aa27fd8080b517143cbbbab9dfb7c8571c40d67d534bbdee55bd6c473f432b177?arch=arm64&repository_url=index.docker.io%2Flibrary%2Frails", - }, - }, PackageAttributionTexts: []string{ - "SchemaVersion: 2", - "ImageID: sha256:5d0da3dc976460b72c77d94c8a1ad043720b0416bfc16c52c45d4847e53fadb6", - "Size: 1024", - "RepoDigest: rails@sha256:a27fd8080b517143cbbbab9dfb7c8571c40d67d534bbdee55bd6c473f432b177", - "DiffID: sha256:d871dadfb37b53ef1ca45be04fc527562b91989991a8f545345ae3be0b93f92a", - "RepoTag: rails:latest", + "Class: os-pkgs", + "Type: centos", }, - PrimaryPackagePurpose: tspdx.PackagePurposeContainer, }, }, 
Relationships: []*spdx.Relationship{ { - RefA: spdx.DocElementID{ElementRefID: "DOCUMENT"}, - RefB: spdx.DocElementID{ElementRefID: "ContainerImage-9396d894cd0cb6cb"}, - Relationship: "DESCRIBES", + RefA: spdx.DocElementID{ElementRefID: "Application-692290f4b2235359"}, + RefB: spdx.DocElementID{ElementRefID: "Package-3b51e821f6796568"}, + Relationship: "CONTAINS", }, { - RefA: spdx.DocElementID{ElementRefID: "ContainerImage-9396d894cd0cb6cb"}, - RefB: spdx.DocElementID{ElementRefID: "OperatingSystem-197f9a00ebcb51f0"}, + RefA: spdx.DocElementID{ElementRefID: "Application-692290f4b2235359"}, + RefB: spdx.DocElementID{ElementRefID: "Package-b8d4663e6d412e7"}, Relationship: "CONTAINS", }, { - RefA: spdx.DocElementID{ElementRefID: "OperatingSystem-197f9a00ebcb51f0"}, - RefB: spdx.DocElementID{ElementRefID: "Package-fd0dc3cf913d5bc3"}, + RefA: spdx.DocElementID{ElementRefID: "Application-9f48cdd13858abaf"}, + RefB: spdx.DocElementID{ElementRefID: "Package-fb5630bc7d55a21c"}, Relationship: "CONTAINS", }, { RefA: spdx.DocElementID{ElementRefID: "ContainerImage-9396d894cd0cb6cb"}, - RefB: spdx.DocElementID{ElementRefID: "Application-73c871d73f3c8248"}, + RefB: spdx.DocElementID{ElementRefID: "Application-692290f4b2235359"}, Relationship: "CONTAINS", }, { - RefA: spdx.DocElementID{ElementRefID: "Application-73c871d73f3c8248"}, - RefB: spdx.DocElementID{ElementRefID: "Package-826226d056ff30c0"}, + RefA: spdx.DocElementID{ElementRefID: "ContainerImage-9396d894cd0cb6cb"}, + RefB: spdx.DocElementID{ElementRefID: "Application-9f48cdd13858abaf"}, Relationship: "CONTAINS", }, { - RefA: spdx.DocElementID{ElementRefID: "Application-73c871d73f3c8248"}, - RefB: spdx.DocElementID{ElementRefID: "Package-eb0263038c3b445b"}, + RefA: spdx.DocElementID{ElementRefID: "ContainerImage-9396d894cd0cb6cb"}, + RefB: spdx.DocElementID{ElementRefID: "OperatingSystem-20f7fa3049cc748c"}, Relationship: "CONTAINS", }, { - RefA: spdx.DocElementID{ElementRefID: "ContainerImage-9396d894cd0cb6cb"}, - RefB: 
spdx.DocElementID{ElementRefID: "Application-c3fac92c1ac0a9fa"}, - Relationship: "CONTAINS", + RefA: spdx.DocElementID{ElementRefID: "DOCUMENT"}, + RefB: spdx.DocElementID{ElementRefID: "ContainerImage-9396d894cd0cb6cb"}, + Relationship: "DESCRIBES", }, { - RefA: spdx.DocElementID{ElementRefID: "Application-c3fac92c1ac0a9fa"}, - RefB: spdx.DocElementID{ElementRefID: "Package-826226d056ff30c0"}, + RefA: spdx.DocElementID{ElementRefID: "OperatingSystem-20f7fa3049cc748c"}, + RefB: spdx.DocElementID{ElementRefID: "Package-5d43902b18ed2e2c"}, Relationship: "CONTAINS", }, }, @@ -420,7 +463,7 @@ func TestMarshaler_Marshal(t *testing.T) { DataLicense: spdx.DataLicense, SPDXIdentifier: "DOCUMENT", DocumentName: "centos:latest", - DocumentNamespace: "http://aquasecurity.github.io/trivy/container_image/centos:latest-3ff14136-e09f-4df9-80ea-000000000001", + DocumentNamespace: "http://aquasecurity.github.io/trivy/container_image/centos:latest-3ff14136-e09f-4df9-80ea-000000000006", CreationInfo: &spdx.CreationInfo{ Creators: []common.Creator{ { @@ -436,12 +479,27 @@ func TestMarshaler_Marshal(t *testing.T) { }, Packages: []*spdx.Package{ { - PackageSPDXIdentifier: spdx.ElementID("Package-d8dccb186bafaf37"), + PackageName: "centos:latest", + PackageSPDXIdentifier: "ContainerImage-413bfede37ad01fc", + PackageDownloadLocation: "NONE", + PackageAttributionTexts: []string{ + "ImageID: sha256:5d0da3dc976460b72c77d94c8a1ad043720b0416bfc16c52c45d4847e53fadb6", + "RepoTag: centos:latest", + "SchemaVersion: 2", + "Size: 1024", + }, + PrimaryPackagePurpose: tspdx.PackagePurposeContainer, + }, + { + PackageSPDXIdentifier: spdx.ElementID("Package-40c4059fe08523bf"), PackageDownloadLocation: "NONE", PackageName: "acl", PackageVersion: "1:2.2.53-1.el8", PackageLicenseConcluded: "GPL-2.0-or-later", PackageLicenseDeclared: "GPL-2.0-or-later", + PackageAttributionTexts: []string{ + "PkgType: centos", + }, PackageExternalReferences: []*spdx.PackageExternalReference{ { Category: 
tspdx.CategoryPackageManager, @@ -460,7 +518,7 @@ func TestMarshaler_Marshal(t *testing.T) { }, }, { - PackageSPDXIdentifier: spdx.ElementID("Package-13fe667a0805e6b7"), + PackageSPDXIdentifier: spdx.ElementID("Package-69f68dd639314edd"), PackageDownloadLocation: "NONE", PackageName: "actionpack", PackageVersion: "7.0.1", @@ -475,6 +533,7 @@ func TestMarshaler_Marshal(t *testing.T) { }, PackageAttributionTexts: []string{ "LayerDiffID: sha256:ccb64cf0b7ba2e50741d0b64cae324eb5de3b1e2f580bbf177e721b67df38488", + "PkgType: gemspec", }, PrimaryPackagePurpose: tspdx.PackagePurposeLibrary, PackageSupplier: &spdx.Supplier{Supplier: tspdx.PackageSupplierNoAssertion}, @@ -484,7 +543,7 @@ func TestMarshaler_Marshal(t *testing.T) { }, }, { - PackageSPDXIdentifier: spdx.ElementID("Package-d5443dbcbba0dbd4"), + PackageSPDXIdentifier: spdx.ElementID("Package-da2cda24d2ecbfe6"), PackageDownloadLocation: "NONE", PackageName: "actionpack", PackageVersion: "7.0.1", @@ -499,6 +558,7 @@ func TestMarshaler_Marshal(t *testing.T) { }, PackageAttributionTexts: []string{ "LayerDiffID: sha256:ccb64cf0b7ba2e50741d0b64cae324eb5de3b1e2f580bbf177e721b67df38488", + "PkgType: gemspec", }, PrimaryPackagePurpose: tspdx.PackagePurposeLibrary, PackageSupplier: &spdx.Supplier{Supplier: tspdx.PackageSupplierNoAssertion}, @@ -508,93 +568,73 @@ func TestMarshaler_Marshal(t *testing.T) { }, }, { - PackageSPDXIdentifier: spdx.ElementID("OperatingSystem-197f9a00ebcb51f0"), + PackageSPDXIdentifier: spdx.ElementID("OperatingSystem-20f7fa3049cc748c"), PackageDownloadLocation: "NONE", PackageName: "centos", PackageVersion: "8.3.2011", PrimaryPackagePurpose: tspdx.PackagePurposeOS, - }, - { - PackageName: "centos:latest", - PackageSPDXIdentifier: "ContainerImage-413bfede37ad01fc", - PackageDownloadLocation: "NONE", PackageAttributionTexts: []string{ - "SchemaVersion: 2", - "ImageID: sha256:5d0da3dc976460b72c77d94c8a1ad043720b0416bfc16c52c45d4847e53fadb6", - "Size: 1024", - "RepoTag: centos:latest", + "Class: 
os-pkgs", + "Type: centos", }, - PrimaryPackagePurpose: tspdx.PackagePurposeContainer, - }, - { - PackageSPDXIdentifier: spdx.ElementID("Application-441a648f2aeeee72"), - PackageDownloadLocation: "NONE", - PackageName: "gemspec", - PackageSourceInfo: "Ruby", - PrimaryPackagePurpose: tspdx.PackagePurposeApplication, }, }, Files: []*spdx.File{ { - FileSPDXIdentifier: "File-6a540784b0dc6d55", - FileName: "tools/project-john/specifications/actionpack.gemspec", + FileSPDXIdentifier: "File-fa42187221d0d0a8", + FileName: "tools/project-doe/specifications/actionpack.gemspec", Checksums: []spdx.Checksum{ { Algorithm: spdx.SHA1, - Value: "d2f9f9aed5161f6e4116a3f9573f41cd832f137c", + Value: "413f98442c83808042b5d1d2611a346b999bdca5", }, }, }, { - FileSPDXIdentifier: "File-fa42187221d0d0a8", - FileName: "tools/project-doe/specifications/actionpack.gemspec", + FileSPDXIdentifier: "File-6a540784b0dc6d55", + FileName: "tools/project-john/specifications/actionpack.gemspec", Checksums: []spdx.Checksum{ { Algorithm: spdx.SHA1, - Value: "413f98442c83808042b5d1d2611a346b999bdca5", + Value: "d2f9f9aed5161f6e4116a3f9573f41cd832f137c", }, }, }, }, Relationships: []*spdx.Relationship{ - { - RefA: spdx.DocElementID{ElementRefID: "DOCUMENT"}, - RefB: spdx.DocElementID{ElementRefID: "ContainerImage-413bfede37ad01fc"}, - Relationship: "DESCRIBES", - }, { RefA: spdx.DocElementID{ElementRefID: "ContainerImage-413bfede37ad01fc"}, - RefB: spdx.DocElementID{ElementRefID: "OperatingSystem-197f9a00ebcb51f0"}, + RefB: spdx.DocElementID{ElementRefID: "OperatingSystem-20f7fa3049cc748c"}, Relationship: "CONTAINS", }, { - RefA: spdx.DocElementID{ElementRefID: "OperatingSystem-197f9a00ebcb51f0"}, - RefB: spdx.DocElementID{ElementRefID: "Package-d8dccb186bafaf37"}, + RefA: spdx.DocElementID{ElementRefID: "ContainerImage-413bfede37ad01fc"}, + RefB: spdx.DocElementID{ElementRefID: "Package-69f68dd639314edd"}, Relationship: "CONTAINS", }, { RefA: spdx.DocElementID{ElementRefID: 
"ContainerImage-413bfede37ad01fc"}, - RefB: spdx.DocElementID{ElementRefID: "Application-441a648f2aeeee72"}, + RefB: spdx.DocElementID{ElementRefID: "Package-da2cda24d2ecbfe6"}, Relationship: "CONTAINS", }, { - RefA: spdx.DocElementID{ElementRefID: "Application-441a648f2aeeee72"}, - RefB: spdx.DocElementID{ElementRefID: "Package-d5443dbcbba0dbd4"}, - Relationship: "CONTAINS", + RefA: spdx.DocElementID{ElementRefID: "DOCUMENT"}, + RefB: spdx.DocElementID{ElementRefID: "ContainerImage-413bfede37ad01fc"}, + Relationship: "DESCRIBES", }, { - RefA: spdx.DocElementID{ElementRefID: "Package-d5443dbcbba0dbd4"}, - RefB: spdx.DocElementID{ElementRefID: "File-6a540784b0dc6d55"}, + RefA: spdx.DocElementID{ElementRefID: "OperatingSystem-20f7fa3049cc748c"}, + RefB: spdx.DocElementID{ElementRefID: "Package-40c4059fe08523bf"}, Relationship: "CONTAINS", }, { - RefA: spdx.DocElementID{ElementRefID: "Application-441a648f2aeeee72"}, - RefB: spdx.DocElementID{ElementRefID: "Package-13fe667a0805e6b7"}, + RefA: spdx.DocElementID{ElementRefID: "Package-69f68dd639314edd"}, + RefB: spdx.DocElementID{ElementRefID: "File-fa42187221d0d0a8"}, Relationship: "CONTAINS", }, { - RefA: spdx.DocElementID{ElementRefID: "Package-13fe667a0805e6b7"}, - RefB: spdx.DocElementID{ElementRefID: "File-fa42187221d0d0a8"}, + RefA: spdx.DocElementID{ElementRefID: "Package-da2cda24d2ecbfe6"}, + RefB: spdx.DocElementID{ElementRefID: "File-6a540784b0dc6d55"}, Relationship: "CONTAINS", }, }, @@ -629,6 +669,26 @@ func TestMarshaler_Marshal(t *testing.T) { }, }, }, + { + Target: "pom.xml", + Class: types.ClassLangPkg, + Type: ftypes.Pom, + Packages: []ftypes.Package{ + { + ID: "com.example:example:1.0.0", + Name: "com.example:example", + Version: "1.0.0", + Identifier: ftypes.PkgIdentifier{ + PURL: &packageurl.PackageURL{ + Type: packageurl.TypeMaven, + Namespace: "com.example", + Name: "example", + Version: "1.0.0", + }, + }, + }, + }, + }, }, }, wantSBOM: &spdx.Document{ @@ -636,7 +696,7 @@ func 
TestMarshaler_Marshal(t *testing.T) { DataLicense: spdx.DataLicense, SPDXIdentifier: "DOCUMENT", DocumentName: "masahiro331/CVE-2021-41098", - DocumentNamespace: "http://aquasecurity.github.io/trivy/filesystem/masahiro331/CVE-2021-41098-3ff14136-e09f-4df9-80ea-000000000001", + DocumentNamespace: "http://aquasecurity.github.io/trivy/filesystem/masahiro331/CVE-2021-41098-3ff14136-e09f-4df9-80ea-000000000006", CreationInfo: &spdx.CreationInfo{ Creators: []common.Creator{ { @@ -652,7 +712,27 @@ func TestMarshaler_Marshal(t *testing.T) { }, Packages: []*spdx.Package{ { - PackageSPDXIdentifier: spdx.ElementID("Package-3da61e86d0530402"), + PackageSPDXIdentifier: spdx.ElementID("Application-ed046c4a6b4da30f"), + PackageDownloadLocation: "NONE", + PackageName: "Gemfile.lock", + PrimaryPackagePurpose: tspdx.PackagePurposeApplication, + PackageAttributionTexts: []string{ + "Class: lang-pkgs", + "Type: bundler", + }, + }, + { + PackageSPDXIdentifier: spdx.ElementID("Application-800d9e6e0f88ab3a"), + PackageDownloadLocation: "NONE", + PackageName: "pom.xml", + PrimaryPackagePurpose: tspdx.PackagePurposeApplication, + PackageAttributionTexts: []string{ + "Class: lang-pkgs", + "Type: pom", + }, + }, + { + PackageSPDXIdentifier: spdx.ElementID("Package-e78eaf94802a53dc"), PackageDownloadLocation: "NONE", PackageName: "actioncable", PackageVersion: "6.1.4.1", @@ -667,13 +747,32 @@ func TestMarshaler_Marshal(t *testing.T) { }, PrimaryPackagePurpose: tspdx.PackagePurposeLibrary, PackageSupplier: &spdx.Supplier{Supplier: tspdx.PackageSupplierNoAssertion}, + PackageSourceInfo: "package found in: Gemfile.lock", + PackageAttributionTexts: []string{ + "PkgType: bundler", + }, }, { - PackageSPDXIdentifier: spdx.ElementID("Application-9dd4a4ba7077cc5a"), + PackageSPDXIdentifier: spdx.ElementID("Package-69cd7625c68537c7"), PackageDownloadLocation: "NONE", - PackageName: "bundler", - PackageSourceInfo: "Gemfile.lock", - PrimaryPackagePurpose: tspdx.PackagePurposeApplication, + PackageName: 
"com.example:example", + PackageVersion: "1.0.0", + PackageLicenseConcluded: "NONE", + PackageLicenseDeclared: "NONE", + PackageExternalReferences: []*spdx.PackageExternalReference{ + { + Category: tspdx.CategoryPackageManager, + RefType: tspdx.RefTypePurl, + Locator: "pkg:maven/com.example/example@1.0.0", + }, + }, + PrimaryPackagePurpose: tspdx.PackagePurposeLibrary, + PackageSupplier: &spdx.Supplier{Supplier: tspdx.PackageSupplierNoAssertion}, + PackageSourceInfo: "package found in: pom.xml", + PackageAttributionTexts: []string{ + "PkgID: com.example:example:1.0.0", + "PkgType: pom", + }, }, { PackageSPDXIdentifier: spdx.ElementID("Filesystem-5af0f1f08c20909a"), @@ -686,6 +785,16 @@ func TestMarshaler_Marshal(t *testing.T) { }, }, Relationships: []*spdx.Relationship{ + { + RefA: spdx.DocElementID{ElementRefID: "Application-800d9e6e0f88ab3a"}, + RefB: spdx.DocElementID{ElementRefID: "Package-69cd7625c68537c7"}, + Relationship: "CONTAINS", + }, + { + RefA: spdx.DocElementID{ElementRefID: "Application-ed046c4a6b4da30f"}, + RefB: spdx.DocElementID{ElementRefID: "Package-e78eaf94802a53dc"}, + Relationship: "CONTAINS", + }, { RefA: spdx.DocElementID{ElementRefID: "DOCUMENT"}, RefB: spdx.DocElementID{ElementRefID: "Filesystem-5af0f1f08c20909a"}, @@ -693,12 +802,12 @@ func TestMarshaler_Marshal(t *testing.T) { }, { RefA: spdx.DocElementID{ElementRefID: "Filesystem-5af0f1f08c20909a"}, - RefB: spdx.DocElementID{ElementRefID: "Application-9dd4a4ba7077cc5a"}, + RefB: spdx.DocElementID{ElementRefID: "Application-800d9e6e0f88ab3a"}, Relationship: "CONTAINS", }, { - RefA: spdx.DocElementID{ElementRefID: "Application-9dd4a4ba7077cc5a"}, - RefB: spdx.DocElementID{ElementRefID: "Package-3da61e86d0530402"}, + RefA: spdx.DocElementID{ElementRefID: "Filesystem-5af0f1f08c20909a"}, + RefB: spdx.DocElementID{ElementRefID: "Application-ed046c4a6b4da30f"}, Relationship: "CONTAINS", }, }, @@ -730,6 +839,7 @@ func TestMarshaler_Marshal(t *testing.T) { Layer: ftypes.Layer{ DiffID: 
"sha256:661c3fd3cc16b34c070f3620ca6b03b6adac150f9a7e5d0e3c707a159990f88e", }, + Digest: "sha256:a5efa82f08774597165e8c1a102d45d0406913b74c184883ac91f409ae26009d", FilePath: "usr/local/lib/ruby/gems/3.1.0/gems/typeprof-0.21.1/vscode/package.json", }, }, @@ -741,7 +851,7 @@ func TestMarshaler_Marshal(t *testing.T) { DataLicense: spdx.DataLicense, SPDXIdentifier: "DOCUMENT", DocumentName: "http://test-aggregate", - DocumentNamespace: "http://aquasecurity.github.io/trivy/repository/test-aggregate-3ff14136-e09f-4df9-80ea-000000000001", + DocumentNamespace: "http://aquasecurity.github.io/trivy/repository/test-aggregate-3ff14136-e09f-4df9-80ea-000000000003", CreationInfo: &spdx.CreationInfo{ Creators: []common.Creator{ { @@ -757,23 +867,7 @@ func TestMarshaler_Marshal(t *testing.T) { }, Packages: []*spdx.Package{ { - PackageName: "http://test-aggregate", - PackageSPDXIdentifier: "Repository-1a78857c1a6a759e", - PackageDownloadLocation: "git+http://test-aggregate", - PackageAttributionTexts: []string{ - "SchemaVersion: 2", - }, - PrimaryPackagePurpose: tspdx.PackagePurposeSource, - }, - { - PackageSPDXIdentifier: "Application-24f8a80152e2c0fc", - PackageDownloadLocation: "git+http://test-aggregate", - PackageName: "node-pkg", - PackageSourceInfo: "Node.js", - PrimaryPackagePurpose: tspdx.PackagePurposeApplication, - }, - { - PackageSPDXIdentifier: spdx.ElementID("Package-daedb173cfd43058"), + PackageSPDXIdentifier: spdx.ElementID("Package-52b8e939bac2d133"), PackageDownloadLocation: "git+http://test-aggregate", PackageName: "ruby-typeprof", PackageVersion: "0.20.1", @@ -788,6 +882,7 @@ func TestMarshaler_Marshal(t *testing.T) { }, PackageAttributionTexts: []string{ "LayerDiffID: sha256:661c3fd3cc16b34c070f3620ca6b03b6adac150f9a7e5d0e3c707a159990f88e", + "PkgType: node-pkg", }, PrimaryPackagePurpose: tspdx.PackagePurposeLibrary, PackageSupplier: &spdx.Supplier{Supplier: tspdx.PackageSupplierNoAssertion}, @@ -796,11 +891,26 @@ func TestMarshaler_Marshal(t *testing.T) { 
Value: "da39a3ee5e6b4b0d3255bfef95601890afd80709", }, }, + { + PackageSPDXIdentifier: "Repository-1a78857c1a6a759e", + PackageName: "http://test-aggregate", + PackageDownloadLocation: "git+http://test-aggregate", + PackageAttributionTexts: []string{ + "SchemaVersion: 2", + }, + PrimaryPackagePurpose: tspdx.PackagePurposeSource, + }, }, Files: []*spdx.File{ { FileName: "usr/local/lib/ruby/gems/3.1.0/gems/typeprof-0.21.1/vscode/package.json", FileSPDXIdentifier: "File-a52825a3e5bc6dfe", + Checksums: []common.Checksum{ + { + Algorithm: common.SHA256, + Value: "a5efa82f08774597165e8c1a102d45d0406913b74c184883ac91f409ae26009d", + }, + }, }, }, Relationships: []*spdx.Relationship{ @@ -810,18 +920,13 @@ func TestMarshaler_Marshal(t *testing.T) { Relationship: "DESCRIBES", }, { - RefA: spdx.DocElementID{ElementRefID: "Repository-1a78857c1a6a759e"}, - RefB: spdx.DocElementID{ElementRefID: "Application-24f8a80152e2c0fc"}, - Relationship: "CONTAINS", - }, - { - RefA: spdx.DocElementID{ElementRefID: "Application-24f8a80152e2c0fc"}, - RefB: spdx.DocElementID{ElementRefID: "Package-daedb173cfd43058"}, + RefA: spdx.DocElementID{ElementRefID: "Package-52b8e939bac2d133"}, + RefB: spdx.DocElementID{ElementRefID: "File-a52825a3e5bc6dfe"}, Relationship: "CONTAINS", }, { - RefA: spdx.DocElementID{ElementRefID: "Package-daedb173cfd43058"}, - RefB: spdx.DocElementID{ElementRefID: "File-a52825a3e5bc6dfe"}, + RefA: spdx.DocElementID{ElementRefID: "Repository-1a78857c1a6a759e"}, + RefB: spdx.DocElementID{ElementRefID: "Package-52b8e939bac2d133"}, Relationship: "CONTAINS", }, }, @@ -840,7 +945,7 @@ func TestMarshaler_Marshal(t *testing.T) { DataLicense: spdx.DataLicense, SPDXIdentifier: "DOCUMENT", DocumentName: "empty/path", - DocumentNamespace: "http://aquasecurity.github.io/trivy/filesystem/empty/path-3ff14136-e09f-4df9-80ea-000000000001", + DocumentNamespace: "http://aquasecurity.github.io/trivy/filesystem/empty/path-3ff14136-e09f-4df9-80ea-000000000002", CreationInfo: 
&spdx.CreationInfo{ Creators: []common.Creator{ @@ -903,8 +1008,7 @@ func TestMarshaler_Marshal(t *testing.T) { DataLicense: spdx.DataLicense, SPDXIdentifier: "DOCUMENT", DocumentName: "secret", - DocumentNamespace: "http://aquasecurity.github.io/trivy/filesystem/secret-3ff14136-e09f-4df9-80ea-000000000001", - + DocumentNamespace: "http://aquasecurity.github.io/trivy/filesystem/secret-3ff14136-e09f-4df9-80ea-000000000002", CreationInfo: &spdx.CreationInfo{ Creators: []common.Creator{ { @@ -946,7 +1050,7 @@ func TestMarshaler_Marshal(t *testing.T) { ArtifactType: ftypes.ArtifactFilesystem, Results: types.Results{ { - Target: "artifact", + Target: "/usr/local/bin/test", Class: types.ClassLangPkg, Type: ftypes.GoBinary, Packages: []ftypes.Package{ @@ -975,7 +1079,7 @@ func TestMarshaler_Marshal(t *testing.T) { DataLicense: spdx.DataLicense, SPDXIdentifier: "DOCUMENT", DocumentName: "go-artifact", - DocumentNamespace: "http://aquasecurity.github.io/trivy/filesystem/go-artifact-3ff14136-e09f-4df9-80ea-000000000001", + DocumentNamespace: "http://aquasecurity.github.io/trivy/filesystem/go-artifact-3ff14136-e09f-4df9-80ea-000000000005", CreationInfo: &spdx.CreationInfo{ Creators: []common.Creator{ { @@ -991,7 +1095,17 @@ func TestMarshaler_Marshal(t *testing.T) { }, Packages: []*spdx.Package{ { - PackageSPDXIdentifier: spdx.ElementID("Package-9164ae38c5cdf815"), + PackageSPDXIdentifier: spdx.ElementID("Application-aab0f4e8cf174c67"), + PackageDownloadLocation: "NONE", + PackageName: "/usr/local/bin/test", + PrimaryPackagePurpose: tspdx.PackagePurposeApplication, + PackageAttributionTexts: []string{ + "Class: lang-pkgs", + "Type: gobinary", + }, + }, + { + PackageSPDXIdentifier: spdx.ElementID("Package-9a16e221e11f8a90"), PackageDownloadLocation: "NONE", PackageName: "./private_repos/cnrm.googlesource.com/cnrm/", PackageVersion: "(devel)", @@ -999,25 +1113,13 @@ func TestMarshaler_Marshal(t *testing.T) { PackageLicenseDeclared: "NONE", PrimaryPackagePurpose: 
tspdx.PackagePurposeLibrary, PackageSupplier: &spdx.Supplier{Supplier: tspdx.PackageSupplierNoAssertion}, - }, - { - PackageName: "go-artifact", - PackageSPDXIdentifier: "Filesystem-e340f27468b382be", - PackageDownloadLocation: "NONE", + PackageSourceInfo: "package found in: /usr/local/bin/test", PackageAttributionTexts: []string{ - "SchemaVersion: 2", + "PkgType: gobinary", }, - PrimaryPackagePurpose: tspdx.PackagePurposeSource, }, { - PackageSPDXIdentifier: spdx.ElementID("Application-6666b83a5d554671"), - PackageDownloadLocation: "NONE", - PackageName: "gobinary", - PackageSourceInfo: "artifact", - PrimaryPackagePurpose: tspdx.PackagePurposeApplication, - }, - { - PackageSPDXIdentifier: spdx.ElementID("Package-8451f2bc8e1f45aa"), + PackageSPDXIdentifier: spdx.ElementID("Package-b9b7ae633941e083"), PackageDownloadLocation: "NONE", PackageName: "golang.org/x/crypto", PackageVersion: "v0.0.1", @@ -1032,27 +1134,40 @@ func TestMarshaler_Marshal(t *testing.T) { }, PrimaryPackagePurpose: tspdx.PackagePurposeLibrary, PackageSupplier: &spdx.Supplier{Supplier: tspdx.PackageSupplierNoAssertion}, + PackageSourceInfo: "package found in: /usr/local/bin/test", + PackageAttributionTexts: []string{ + "PkgType: gobinary", + }, + }, + { + PackageName: "go-artifact", + PackageSPDXIdentifier: "Filesystem-e340f27468b382be", + PackageDownloadLocation: "NONE", + PackageAttributionTexts: []string{ + "SchemaVersion: 2", + }, + PrimaryPackagePurpose: tspdx.PackagePurposeSource, }, }, Relationships: []*spdx.Relationship{ { - RefA: spdx.DocElementID{ElementRefID: "DOCUMENT"}, - RefB: spdx.DocElementID{ElementRefID: "Filesystem-e340f27468b382be"}, - Relationship: "DESCRIBES", + RefA: spdx.DocElementID{ElementRefID: "Application-aab0f4e8cf174c67"}, + RefB: spdx.DocElementID{ElementRefID: "Package-9a16e221e11f8a90"}, + Relationship: "CONTAINS", }, { - RefA: spdx.DocElementID{ElementRefID: "Filesystem-e340f27468b382be"}, - RefB: spdx.DocElementID{ElementRefID: "Application-6666b83a5d554671"}, 
+ RefA: spdx.DocElementID{ElementRefID: "Application-aab0f4e8cf174c67"}, + RefB: spdx.DocElementID{ElementRefID: "Package-b9b7ae633941e083"}, Relationship: "CONTAINS", }, { - RefA: spdx.DocElementID{ElementRefID: "Application-6666b83a5d554671"}, - RefB: spdx.DocElementID{ElementRefID: "Package-9164ae38c5cdf815"}, - Relationship: "CONTAINS", + RefA: spdx.DocElementID{ElementRefID: "DOCUMENT"}, + RefB: spdx.DocElementID{ElementRefID: "Filesystem-e340f27468b382be"}, + Relationship: "DESCRIBES", }, { - RefA: spdx.DocElementID{ElementRefID: "Application-6666b83a5d554671"}, - RefB: spdx.DocElementID{ElementRefID: "Package-8451f2bc8e1f45aa"}, + RefA: spdx.DocElementID{ElementRefID: "Filesystem-e340f27468b382be"}, + RefB: spdx.DocElementID{ElementRefID: "Application-aab0f4e8cf174c67"}, Relationship: "CONTAINS", }, }, @@ -1064,17 +1179,18 @@ func TestMarshaler_Marshal(t *testing.T) { t.Run(tc.name, func(t *testing.T) { // Fake function calculating the hash value h := fnv.New64() - hasher := func(v interface{}, format hashstructure.Format, opts *hashstructure.HashOptions) (uint64, error) { + hasher := func(v any, format hashstructure.Format, opts *hashstructure.HashOptions) (uint64, error) { h.Reset() var str string - switch v.(type) { - case ftypes.Package: - str = v.(ftypes.Package).Name + v.(ftypes.Package).FilePath + switch vv := v.(type) { + case *core.Component: + str = vv.Name + vv.Version + vv.SrcFile + for _, f := range vv.Files { + str += f.Path + } case string: - str = v.(string) - case *ftypes.OS: - str = v.(*ftypes.OS).Name + str = vv default: require.Failf(t, "unknown type", "%T", v) } @@ -1090,7 +1206,7 @@ func TestMarshaler_Marshal(t *testing.T) { uuid.SetFakeUUID(t, "3ff14136-e09f-4df9-80ea-%012d") marshaler := tspdx.NewMarshaler("0.38.1", tspdx.WithHasher(hasher)) - spdxDoc, err := marshaler.Marshal(ctx, tc.inputReport) + spdxDoc, err := marshaler.MarshalReport(ctx, tc.inputReport) require.NoError(t, err) assert.Equal(t, tc.wantSBOM, spdxDoc) @@ -1101,62 
+1217,52 @@ func TestMarshaler_Marshal(t *testing.T) { func Test_GetLicense(t *testing.T) { tests := []struct { name string - input ftypes.Package + input []string want string }{ { name: "happy path", - input: ftypes.Package{ - Licenses: []string{ - "GPLv2+", - }, + input: []string{ + "GPLv2+", }, want: "GPL-2.0-or-later", }, { name: "happy path with multi license", - input: ftypes.Package{ - Licenses: []string{ - "GPLv2+", - "GPLv3+", - }, + input: []string{ + "GPLv2+", + "GPLv3+", }, want: "GPL-2.0-or-later AND GPL-3.0-or-later", }, { name: "happy path with OR operator", - input: ftypes.Package{ - Licenses: []string{ - "GPLv2+", - "LGPL 2.0 or GNU LESSER", - }, + input: []string{ + "GPLv2+", + "LGPL 2.0 or GNU LESSER", }, want: "GPL-2.0-or-later AND (LGPL-2.0-only OR LGPL-3.0-only)", }, { name: "happy path with AND operator", - input: ftypes.Package{ - Licenses: []string{ - "GPLv2+", - "LGPL 2.0 and GNU LESSER", - }, + input: []string{ + "GPLv2+", + "LGPL 2.0 and GNU LESSER", }, want: "GPL-2.0-or-later AND LGPL-2.0-only AND LGPL-3.0-only", }, { name: "happy path with WITH operator", - input: ftypes.Package{ - Licenses: []string{ - "AFL 2.0", - "AFL 3.0 with distribution exception", - }, + input: []string{ + "AFL 2.0", + "AFL 3.0 with distribution exception", }, want: "AFL-2.0 AND AFL-3.0 WITH distribution-exception", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - assert.Equalf(t, tt.want, tspdx.GetLicense(tt.input), "getLicense(%v)", tt.input) + assert.Equal(t, tt.want, tspdx.NormalizeLicense(tt.input)) }) } } diff --git a/pkg/sbom/spdx/testdata/sad/invalid-source-info.json b/pkg/sbom/spdx/testdata/sad/invalid-purl.json similarity index 92% rename from pkg/sbom/spdx/testdata/sad/invalid-source-info.json rename to pkg/sbom/spdx/testdata/sad/invalid-purl.json index 1c761c1f53fa..da87237d54b7 100644 --- a/pkg/sbom/spdx/testdata/sad/invalid-source-info.json +++ b/pkg/sbom/spdx/testdata/sad/invalid-purl.json @@ -27,13 +27,13 @@ "externalRefs": [ 
{ "referenceCategory": "PACKAGE-MANAGER", - "referenceLocator": "pkg:apk/alpine/musl@1.2.3-r0?distro=3.16.0", + "referenceLocator": "pkg:invalid", "referenceType": "purl" } ], "filesAnalyzed": false, "name": "musl", - "sourceInfo": "built package from: invalid", + "sourceInfo": "built package from: musl", "versionInfo": "1.2.3-r0" } ], diff --git a/pkg/sbom/spdx/unmarshal.go b/pkg/sbom/spdx/unmarshal.go index 718bdd608886..5b1d4138e7cb 100644 --- a/pkg/sbom/spdx/unmarshal.go +++ b/pkg/sbom/spdx/unmarshal.go @@ -2,13 +2,10 @@ package spdx import ( "bytes" - "errors" "fmt" "io" - "sort" "strings" - version "github.com/knqyf263/go-rpm-version" "github.com/package-url/packageurl-go" "github.com/samber/lo" "github.com/spdx/tools-golang/json" @@ -17,17 +14,14 @@ import ( "github.com/spdx/tools-golang/tagvalue" "golang.org/x/xerrors" - ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" - "github.com/aquasecurity/trivy/pkg/purl" - "github.com/aquasecurity/trivy/pkg/types" -) - -var ( - errUnknownPackageFormat = xerrors.New("unknown package format") + "github.com/aquasecurity/trivy/pkg/sbom/core" ) type SPDX struct { - *types.SBOM + *core.BOM + + trivySBOM bool + pkgFilePaths map[common.ElementID]string } func NewTVDecoder(r io.Reader) *TVDecoder { @@ -48,8 +42,7 @@ func (tv *TVDecoder) Decode(v interface{}) error { if !ok { return xerrors.Errorf("invalid struct type tag-value decoder needed SPDX struct") } - err = a.unmarshal(spdxDocument) - if err != nil { + if err = a.unmarshal(spdxDocument); err != nil { return xerrors.Errorf("failed to unmarshal spdx: %w", err) } @@ -57,292 +50,219 @@ func (tv *TVDecoder) Decode(v interface{}) error { } func (s *SPDX) UnmarshalJSON(b []byte) error { + if s.BOM == nil { + s.BOM = core.NewBOM(core.Options{}) + } + if s.pkgFilePaths == nil { + s.pkgFilePaths = make(map[common.ElementID]string) + } + spdxDocument, err := json.Read(bytes.NewReader(b)) if err != nil { return xerrors.Errorf("failed to load spdx json: %w", err) } - err = 
s.unmarshal(spdxDocument) - if err != nil { + + if err = s.unmarshal(spdxDocument); err != nil { return xerrors.Errorf("failed to unmarshal spdx: %w", err) } return nil } func (s *SPDX) unmarshal(spdxDocument *spdx.Document) error { - var osPkgs []ftypes.Package - apps := make(map[common.ElementID]*ftypes.Application) - packageSPDXIdentifierMap := createPackageSPDXIdentifierMap(spdxDocument.Packages) - packageFilePaths := getPackageFilePaths(spdxDocument) + s.trivySBOM = s.isTrivySBOM(spdxDocument) - // Hold packages that are not processed by relationships - orphanPkgs := createPackageSPDXIdentifierMap(spdxDocument.Packages) + // Parse files and find file paths for packages + s.parseFiles(spdxDocument) - relationships := lo.Filter(spdxDocument.Relationships, func(rel *spdx.Relationship, _ int) bool { - // Skip the DESCRIBES relationship. - return rel.Relationship != common.TypeRelationshipDescribe && rel.Relationship != "DESCRIBE" - }) + // Convert all SPDX packages into Trivy components + components, err := s.parsePackages(spdxDocument) + if err != nil { + return xerrors.Errorf("package parse error: %w", err) + } - // Package relationships would be as belows: - // - Root (container image, filesystem, etc.) - // - Operating System (debian 10) - // - OS package A - // - OS package B - // - Application 1 (package-lock.json) - // - Node.js package A - // - Node.js package B - // - Application 2 (Pipfile.lock) - // - Python package A - // - Python package B - for _, rel := range relationships { - pkgA := packageSPDXIdentifierMap[rel.RefA.ElementRefID] - pkgB := packageSPDXIdentifierMap[rel.RefB.ElementRefID] - - if pkgA == nil || pkgB == nil { - // Skip the missing pkg relationship. + // Parse relationships and build the dependency graph + for _, rel := range spdxDocument.Relationships { + // Skip the DESCRIBES relationship. 
+ if rel.Relationship == common.TypeRelationshipDescribe || rel.Relationship == "DESCRIBE" { continue } - switch { - // Relationship: root package => OS - case isOperatingSystem(pkgB.PackageSPDXIdentifier): - s.SBOM.Metadata.OS = parseOS(*pkgB) - delete(orphanPkgs, pkgB.PackageSPDXIdentifier) - // Relationship: OS => OS package - case isOperatingSystem(pkgA.PackageSPDXIdentifier): - pkg, _, err := parsePkg(*pkgB, packageFilePaths) - if errors.Is(err, errUnknownPackageFormat) { - continue - } else if err != nil { - return xerrors.Errorf("failed to parse os package: %w", err) - } - osPkgs = append(osPkgs, *pkg) - delete(orphanPkgs, pkgB.PackageSPDXIdentifier) - // Relationship: root package => application - case isApplication(pkgB.PackageSPDXIdentifier): - // pass - // Relationship: application => language-specific package - case isApplication(pkgA.PackageSPDXIdentifier): - app, ok := apps[pkgA.PackageSPDXIdentifier] - if !ok { - app = initApplication(*pkgA) - apps[pkgA.PackageSPDXIdentifier] = app - } - - lib, _, err := parsePkg(*pkgB, packageFilePaths) - if errors.Is(err, errUnknownPackageFormat) { - continue - } else if err != nil { - return xerrors.Errorf("failed to parse language-specific package: %w", err) - } - app.Libraries = append(app.Libraries, *lib) - - // They are no longer orphan packages - delete(orphanPkgs, pkgA.PackageSPDXIdentifier) - delete(orphanPkgs, pkgB.PackageSPDXIdentifier) - } - } - - // Fill OS packages - if len(osPkgs) > 0 { - s.Packages = []ftypes.PackageInfo{{Packages: osPkgs}} - } - - // Fill applications - for _, app := range apps { - s.SBOM.Applications = append(s.SBOM.Applications, *app) - } - - // Fallback for when there are no effective relationships. 
- if err := s.parsePackages(orphanPkgs); err != nil { - return err + compA := components[rel.RefA.ElementRefID] + compB := components[rel.RefB.ElementRefID] + s.BOM.AddRelationship(compA, compB, s.parseRelationshipType(rel.Relationship)) } return nil } -// parsePackages processes the packages and categorizes them into OS packages and application packages. -// Note that all language-specific packages are treated as a single application. -func (s *SPDX) parsePackages(pkgs map[common.ElementID]*spdx.Package) error { - var ( - osPkgs []ftypes.Package - apps = make(map[ftypes.LangType]ftypes.Application) - ) - - for _, p := range pkgs { - pkg, pkgURL, err := parsePkg(*p, nil) - if errors.Is(err, errUnknownPackageFormat) { +// parseFiles parses Relationships and finds filepaths for packages +func (s *SPDX) parseFiles(spdxDocument *spdx.Document) { + fileSPDXIdentifierMap := lo.SliceToMap(spdxDocument.Files, func(file *spdx.File) (common.ElementID, *spdx.File) { + return file.FileSPDXIdentifier, file + }) + + for _, rel := range spdxDocument.Relationships { + if rel.Relationship != common.TypeRelationshipContains && rel.Relationship != "CONTAIN" { + // Skip the DESCRIBES relationship. 
continue - } else if err != nil { - return xerrors.Errorf("failed to parse package: %w", err) } - switch pkgURL.Class() { - case types.ClassOSPkg: - osPkgs = append(osPkgs, *pkg) - case types.ClassLangPkg: - // Language-specific packages - pkgType := pkgURL.LangType() - app, ok := apps[pkgType] - if !ok { - app.Type = pkgType + + // hasFiles field is deprecated + // https://github.com/spdx/tools-golang/issues/171 + // hasFiles values converted in Relationships + // https://github.com/spdx/tools-golang/pull/201 + if isFile(rel.RefB.ElementRefID) { + file, ok := fileSPDXIdentifierMap[rel.RefB.ElementRefID] + if ok { + // Save filePaths for packages + // Insert filepath will be later + s.pkgFilePaths[rel.RefA.ElementRefID] = file.FileName } - app.Libraries = append(app.Libraries, *pkg) - apps[pkgType] = app + continue } } - if len(osPkgs) > 0 { - s.Packages = []ftypes.PackageInfo{{Packages: osPkgs}} - } - for _, app := range apps { - sort.Sort(app.Libraries) - s.SBOM.Applications = append(s.SBOM.Applications, app) - } - return nil } -func createPackageSPDXIdentifierMap(packages []*spdx.Package) map[common.ElementID]*spdx.Package { - return lo.SliceToMap(packages, func(pkg *spdx.Package) (common.ElementID, *spdx.Package) { - return pkg.PackageSPDXIdentifier, pkg - }) -} - -func createFileSPDXIdentifierMap(files []*spdx.File) map[string]*spdx.File { - ret := make(map[string]*spdx.File) - for _, file := range files { - ret[string(file.FileSPDXIdentifier)] = file +func (s *SPDX) parsePackages(spdxDocument *spdx.Document) (map[common.ElementID]*core.Component, error) { + // Find a root package + var rootID common.ElementID + for _, rel := range spdxDocument.Relationships { + if rel.RefA.ElementRefID == DocumentSPDXIdentifier && rel.Relationship == RelationShipDescribe { + rootID = rel.RefB.ElementRefID + break + } } - return ret -} - -func isOperatingSystem(elementID spdx.ElementID) bool { - return strings.HasPrefix(string(elementID), ElementOperatingSystem) -} -func 
isApplication(elementID spdx.ElementID) bool { - return strings.HasPrefix(string(elementID), ElementApplication) -} - -func isFile(elementID spdx.ElementID) bool { - return strings.HasPrefix(string(elementID), ElementFile) -} + // Convert packages into components + components := make(map[common.ElementID]*core.Component) + for _, pkg := range spdxDocument.Packages { + component, err := s.parsePackage(*pkg) + if err != nil { + return nil, xerrors.Errorf("failed to parse package: %w", err) + } + components[pkg.PackageSPDXIdentifier] = component -func initApplication(pkg spdx.Package) *ftypes.Application { - app := &ftypes.Application{Type: ftypes.LangType(pkg.PackageName)} - switch app.Type { - case ftypes.NodePkg, ftypes.PythonPkg, ftypes.GemSpec, ftypes.Jar, ftypes.CondaPkg: - app.FilePath = "" - default: - app.FilePath = pkg.PackageSourceInfo + if pkg.PackageSPDXIdentifier == rootID { + component.Root = true + } + s.BOM.AddComponent(component) } - - return app + return components, nil } -func parseOS(pkg spdx.Package) *ftypes.OS { - return &ftypes.OS{ - Family: ftypes.OSType(pkg.PackageName), - Name: pkg.PackageVersion, +func (s *SPDX) parsePackage(spdxPkg spdx.Package) (*core.Component, error) { + var err error + component := &core.Component{ + Type: s.parseType(spdxPkg), + Name: spdxPkg.PackageName, + Version: spdxPkg.PackageVersion, } -} -func parsePkg(spdxPkg spdx.Package, packageFilePaths map[string]string) (*ftypes.Package, *purl.PackageURL, error) { - pkgURL, err := parseExternalReferences(spdxPkg.PackageExternalReferences) - if err != nil { - return nil, nil, xerrors.Errorf("external references error: %w", err) + // PURL + if component.PkgID.PURL, err = s.parseExternalReferences(spdxPkg.PackageExternalReferences); err != nil { + return nil, xerrors.Errorf("external references error: %w", err) } - pkg := pkgURL.Package() + // License if spdxPkg.PackageLicenseDeclared != "NONE" { - pkg.Licenses = strings.Split(spdxPkg.PackageLicenseDeclared, ",") + 
component.Licenses = strings.Split(spdxPkg.PackageLicenseDeclared, ",") } + // Source package if strings.HasPrefix(spdxPkg.PackageSourceInfo, SourcePackagePrefix) { srcPkgName := strings.TrimPrefix(spdxPkg.PackageSourceInfo, fmt.Sprintf("%s: ", SourcePackagePrefix)) - pkg.SrcEpoch, pkg.SrcName, pkg.SrcVersion, pkg.SrcRelease, err = parseSourceInfo(pkgURL.Type, srcPkgName) - if err != nil { - return nil, nil, xerrors.Errorf("failed to parse source info: %w", err) - } + component.SrcName, component.SrcVersion, _ = strings.Cut(srcPkgName, " ") } - if path, ok := packageFilePaths[string(spdxPkg.PackageSPDXIdentifier)]; ok { - pkg.FilePath = path + // Files + // TODO: handle checksums as well + if path, ok := s.pkgFilePaths[spdxPkg.PackageSPDXIdentifier]; ok { + component.Files = []core.File{ + {Path: path}, + } } else if len(spdxPkg.Files) > 0 { - // Take the first file name - pkg.FilePath = spdxPkg.Files[0].FileName + component.Files = []core.File{ + {Path: spdxPkg.Files[0].FileName}, // Take the first file name + } } - pkg.ID = lookupAttributionTexts(spdxPkg.PackageAttributionTexts, PropertyPkgID) - pkg.Layer.Digest = lookupAttributionTexts(spdxPkg.PackageAttributionTexts, PropertyLayerDigest) - pkg.Layer.DiffID = lookupAttributionTexts(spdxPkg.PackageAttributionTexts, PropertyLayerDiffID) + // Attributions + for _, attr := range spdxPkg.PackageAttributionTexts { + k, v, ok := strings.Cut(attr, ": ") + if !ok { + continue + } + component.Properties = append(component.Properties, core.Property{ + Name: k, + Value: v, + }) + } + + // For backward-compatibility + // Older Trivy versions put the file path in "sourceInfo" and the package type in "name". 
+ if s.trivySBOM && component.Type == core.TypeApplication && spdxPkg.PackageSourceInfo != "" { + component.Name = spdxPkg.PackageSourceInfo + component.Properties = append(component.Properties, core.Property{ + Name: core.PropertyType, + Value: spdxPkg.PackageName, + }) + } - return pkg, pkgURL, nil + return component, nil } -func parseExternalReferences(refs []*spdx.PackageExternalReference) (*purl.PackageURL, error) { +func (s *SPDX) parseType(pkg spdx.Package) core.ComponentType { + id := string(pkg.PackageSPDXIdentifier) + switch { + case strings.HasPrefix(id, ElementOperatingSystem): + return core.TypeOS + case strings.HasPrefix(id, ElementApplication): + return core.TypeApplication + case strings.HasPrefix(id, ElementPackage): + return core.TypeLibrary + default: + return core.TypeLibrary // unknown is handled as a library + } +} + +func (s *SPDX) parseRelationshipType(rel string) core.RelationshipType { + switch rel { + case common.TypeRelationshipDescribe: + return core.RelationshipDescribes + case common.TypeRelationshipContains, "CONTAIN": + return core.RelationshipContains + case common.TypeRelationshipDependsOn: + return core.RelationshipDependsOn + default: + return core.RelationshipContains + } +} + +func (s *SPDX) parseExternalReferences(refs []*spdx.PackageExternalReference) (*packageurl.PackageURL, error) { for _, ref := range refs { // Extract the package information from PURL if ref.RefType != RefTypePurl || ref.Category != CategoryPackageManager { continue } - packageURL, err := purl.FromString(ref.Locator) + packageURL, err := packageurl.FromString(ref.Locator) if err != nil { return nil, xerrors.Errorf("failed to parse purl from string: %w", err) } - return packageURL, nil + return &packageURL, nil } - return nil, errUnknownPackageFormat + return nil, nil } -func lookupAttributionTexts(attributionTexts []string, key string) string { - for _, text := range attributionTexts { - if strings.HasPrefix(text, key) { - return strings.TrimPrefix(text, 
fmt.Sprintf("%s: ", key)) +func (s *SPDX) isTrivySBOM(spdxDocument *spdx.Document) bool { + for _, c := range spdxDocument.CreationInfo.Creators { + if c.CreatorType == "Tool" && strings.HasPrefix(c.Creator, "trivy") { + return true } } - return "" + return false } -func parseSourceInfo(pkgType, sourceInfo string) (epoch int, name, ver, rel string, err error) { - srcNameVersion := strings.TrimPrefix(sourceInfo, fmt.Sprintf("%s: ", SourcePackagePrefix)) - ss := strings.Split(srcNameVersion, " ") - if len(ss) != 2 { - return 0, "", "", "", xerrors.Errorf("invalid source info (%s)", sourceInfo) - } - name = ss[0] - if pkgType == packageurl.TypeRPM { - v := version.NewVersion(ss[1]) - epoch = v.Epoch() - ver = v.Version() - rel = v.Release() - } else { - ver = ss[1] - } - return epoch, name, ver, rel, nil -} - -// getPackageFilePaths parses Relationships and finds filepaths for packages -func getPackageFilePaths(spdxDocument *spdx.Document) map[string]string { - packageFilePaths := make(map[string]string) - fileSPDXIdentifierMap := createFileSPDXIdentifierMap(spdxDocument.Files) - for _, rel := range spdxDocument.Relationships { - if rel.Relationship != common.TypeRelationshipContains && rel.Relationship != "CONTAIN" { - // Skip the DESCRIBES relationship. 
- continue - } - - // hasFiles field is deprecated - // https://github.com/spdx/tools-golang/issues/171 - // hasFiles values converted in Relationships - // https://github.com/spdx/tools-golang/pull/201 - if isFile(rel.RefB.ElementRefID) { - file, ok := fileSPDXIdentifierMap[string(rel.RefB.ElementRefID)] - if ok { - // Save filePaths for packages - // Insert filepath will be later - packageFilePaths[string(rel.RefA.ElementRefID)] = file.FileName - } - continue - } - } - return packageFilePaths +func isFile(elementID spdx.ElementID) bool { + return strings.HasPrefix(string(elementID), ElementFile) } diff --git a/pkg/sbom/spdx/unmarshal_test.go b/pkg/sbom/spdx/unmarshal_test.go index cee50461508e..f65294020728 100644 --- a/pkg/sbom/spdx/unmarshal_test.go +++ b/pkg/sbom/spdx/unmarshal_test.go @@ -2,6 +2,7 @@ package spdx_test import ( "encoding/json" + sbomio "github.com/aquasecurity/trivy/pkg/sbom/io" "github.com/package-url/packageurl-go" "os" "sort" @@ -27,6 +28,15 @@ func TestUnmarshaler_Unmarshal(t *testing.T) { inputFile: "testdata/happy/bom.json", want: types.SBOM{ Metadata: types.Metadata{ + ImageID: "sha256:49193a2310dbad4c02382da87ac624a80a92387a4f7536235f9ba590e5bcd7b5", + DiffIDs: []string{ + "sha256:3c79e832b1b4891a1cb4a326ef8524e0bd14a2537150ac0e203a5677176c1ca1", + "sha256:dd565ff850e7003356e2b252758f9bdc1ff2803f61e995e24c7844f6297f8fc3", + }, + RepoTags: []string{ + "maven-test-project:latest", + "tmp-test:latest", + }, OS: &ftypes.OS{ Family: "alpine", Name: "3.16.0", @@ -36,6 +46,7 @@ func TestUnmarshaler_Unmarshal(t *testing.T) { { Packages: ftypes.Packages{ { + ID: "musl@1.2.3-r0", Name: "musl", Version: "1.2.3-r0", SrcName: "musl", @@ -68,6 +79,7 @@ func TestUnmarshaler_Unmarshal(t *testing.T) { FilePath: "app/composer/composer.lock", Libraries: ftypes.Packages{ { + ID: "pear/log@1.13.1", Name: "pear/log", Version: "1.13.1", Identifier: ftypes.PkgIdentifier{ @@ -83,7 +95,7 @@ func TestUnmarshaler_Unmarshal(t *testing.T) { }, }, { - + ID: 
"pear/pear_exception@v1.0.0", Name: "pear/pear_exception", Version: "v1.0.0", Identifier: ftypes.PkgIdentifier{ @@ -105,6 +117,7 @@ func TestUnmarshaler_Unmarshal(t *testing.T) { FilePath: "app/gobinary/gobinary", Libraries: ftypes.Packages{ { + ID: "github.com/package-url/packageurl-go@v0.1.1-0.20220203205134-d70459300c8a", Name: "github.com/package-url/packageurl-go", Version: "v0.1.1-0.20220203205134-d70459300c8a", Identifier: ftypes.PkgIdentifier{ @@ -125,6 +138,7 @@ func TestUnmarshaler_Unmarshal(t *testing.T) { Type: "jar", Libraries: ftypes.Packages{ { + ID: "org.codehaus.mojo:child-project:1.0", Name: "org.codehaus.mojo:child-project", Identifier: ftypes.PkgIdentifier{ PURL: &packageurl.PackageURL{ @@ -145,6 +159,7 @@ func TestUnmarshaler_Unmarshal(t *testing.T) { Type: "node-pkg", Libraries: ftypes.Packages{ { + ID: "bootstrap@5.0.2", Name: "bootstrap", Version: "5.0.2", Identifier: ftypes.PkgIdentifier{ @@ -170,7 +185,7 @@ func TestUnmarshaler_Unmarshal(t *testing.T) { want: types.SBOM{ Applications: []ftypes.Application{ { - Type: "node-pkg", + Type: ftypes.NodePkg, Libraries: ftypes.Packages{ { ID: "yargs-parser@21.1.1", @@ -228,6 +243,7 @@ func TestUnmarshaler_Unmarshal(t *testing.T) { FilePath: "app/composer/composer.lock", Libraries: ftypes.Packages{ { + ID: "pear/log@1.13.1", Name: "pear/log", Version: "1.13.1", Identifier: ftypes.PkgIdentifier{ @@ -240,7 +256,7 @@ func TestUnmarshaler_Unmarshal(t *testing.T) { }, }, { - + ID: "pear/pear_exception@v1.0.0", Name: "pear/pear_exception", Version: "v1.0.0", Identifier: ftypes.PkgIdentifier{ @@ -266,9 +282,10 @@ func TestUnmarshaler_Unmarshal(t *testing.T) { Type: ftypes.Jar, Libraries: ftypes.Packages{ { - FilePath: "modules/apm/elastic-apm-agent-1.36.0.jar", + ID: "co.elastic.apm:apm-agent:1.36.0", Name: "co.elastic.apm:apm-agent", Version: "1.36.0", + FilePath: "modules/apm/elastic-apm-agent-1.36.0.jar", Identifier: ftypes.PkgIdentifier{ PURL: &packageurl.PackageURL{ Type: packageurl.TypeMaven, @@ 
-279,9 +296,10 @@ func TestUnmarshaler_Unmarshal(t *testing.T) { }, }, { - FilePath: "modules/apm/elastic-apm-agent-1.36.0.jar", + ID: "co.elastic.apm:apm-agent-cached-lookup-key:1.36.0", Name: "co.elastic.apm:apm-agent-cached-lookup-key", Version: "1.36.0", + FilePath: "modules/apm/elastic-apm-agent-1.36.0.jar", Identifier: ftypes.PkgIdentifier{ PURL: &packageurl.PackageURL{ Type: packageurl.TypeMaven, @@ -315,8 +333,8 @@ func TestUnmarshaler_Unmarshal(t *testing.T) { }, { name: "sad path invalid purl", - inputFile: "testdata/sad/invalid-source-info.json", - wantErr: "failed to parse source info:", + inputFile: "testdata/sad/invalid-purl.json", + wantErr: "purl is missing type or name", }, } @@ -326,22 +344,24 @@ func TestUnmarshaler_Unmarshal(t *testing.T) { require.NoError(t, err) defer f.Close() - v := &spdx.SPDX{SBOM: &types.SBOM{}} - err = json.NewDecoder(f).Decode(v) + var v spdx.SPDX + err = json.NewDecoder(f).Decode(&v) if tt.wantErr != "" { - require.Error(t, err) - assert.Contains(t, err.Error(), tt.wantErr) + assert.ErrorContains(t, err, tt.wantErr) return } - // Not compare the SPDX field - v.BOM = nil + var got types.SBOM + err = sbomio.NewDecoder(v.BOM).Decode(&got) + require.NoError(t, err) + + // Not compare BOM + got.BOM = nil - sort.Slice(v.Applications, func(i, j int) bool { - return v.Applications[i].Type < v.Applications[j].Type + sort.Slice(got.Applications, func(i, j int) bool { + return got.Applications[i].Type < got.Applications[j].Type }) - require.NoError(t, err) - assert.Equal(t, tt.want, *v.SBOM) + assert.Equal(t, tt.want, got) }) } } From eb3ceb323d2646fceecc4e3c18f13eecb3081c0f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ivo=20=C5=A0m=C3=ADd?= Date: Mon, 18 Mar 2024 10:34:26 +0100 Subject: [PATCH 02/57] feat(sbom): Support license detection for SBOM scan (#6072) Co-authored-by: DmitriyLewen --- .../references/configuration/cli/trivy.md | 2 +- .../configuration/cli/trivy_sbom.md | 4 +- docs/docs/scanner/license.md | 8 +- 
docs/docs/target/sbom.md | 5 +- integration/sbom_test.go | 25 +++- .../fixtures/sbom/license-cyclonedx.json | 125 ++++++++++++++++++ .../testdata/license-cyclonedx.json.golden | 65 +++++++++ pkg/commands/app.go | 21 ++- pkg/licensing/normalize.go | 96 ++++++++++++-- 9 files changed, 329 insertions(+), 22 deletions(-) create mode 100644 integration/testdata/fixtures/sbom/license-cyclonedx.json create mode 100644 integration/testdata/license-cyclonedx.json.golden diff --git a/docs/docs/references/configuration/cli/trivy.md b/docs/docs/references/configuration/cli/trivy.md index f11635a25992..f3c543a210f9 100644 --- a/docs/docs/references/configuration/cli/trivy.md +++ b/docs/docs/references/configuration/cli/trivy.md @@ -53,7 +53,7 @@ trivy [global flags] command [flags] target * [trivy plugin](trivy_plugin.md) - Manage plugins * [trivy repository](trivy_repository.md) - Scan a repository * [trivy rootfs](trivy_rootfs.md) - Scan rootfs -* [trivy sbom](trivy_sbom.md) - Scan SBOM for vulnerabilities +* [trivy sbom](trivy_sbom.md) - Scan SBOM for vulnerabilities and licenses * [trivy server](trivy_server.md) - Server mode * [trivy version](trivy_version.md) - Print the version * [trivy vm](trivy_vm.md) - [EXPERIMENTAL] Scan a virtual machine image diff --git a/docs/docs/references/configuration/cli/trivy_sbom.md b/docs/docs/references/configuration/cli/trivy_sbom.md index f30144c34e9d..5d941e9744ba 100644 --- a/docs/docs/references/configuration/cli/trivy_sbom.md +++ b/docs/docs/references/configuration/cli/trivy_sbom.md @@ -1,6 +1,6 @@ ## trivy sbom -Scan SBOM for vulnerabilities +Scan SBOM for vulnerabilities and licenses ``` trivy sbom [flags] SBOM_PATH @@ -36,6 +36,7 @@ trivy sbom [flags] SBOM_PATH --ignore-policy string specify the Rego file path to evaluate each vulnerability --ignore-status strings comma-separated list of vulnerability status to ignore (unknown,not_affected,affected,fixed,under_investigation,will_not_fix,fix_deferred,end_of_life) --ignore-unfixed 
display only fixed vulnerabilities + --ignored-licenses strings specify a list of license to ignore --ignorefile string specify .trivyignore file (default ".trivyignore") --java-db-repository string OCI repository to retrieve trivy-java-db from (default "ghcr.io/aquasecurity/trivy-java-db:1") --list-all-pkgs enabling the option will output all packages regardless of vulnerability @@ -50,6 +51,7 @@ trivy sbom [flags] SBOM_PATH --rekor-url string [EXPERIMENTAL] address of rekor STL server (default "https://rekor.sigstore.dev") --reset remove all caches and database --sbom-sources strings [EXPERIMENTAL] try to retrieve SBOM from the specified sources (oci,rekor) + --scanners strings comma-separated list of what security issues to detect (vuln,license) (default [vuln]) --server string server address in client mode -s, --severity strings severities of security issues to be displayed (UNKNOWN,LOW,MEDIUM,HIGH,CRITICAL) (default [UNKNOWN,LOW,MEDIUM,HIGH,CRITICAL]) --show-suppressed [EXPERIMENTAL] show suppressed vulnerabilities diff --git a/docs/docs/scanner/license.md b/docs/docs/scanner/license.md index dad487965481..6033542e4bea 100644 --- a/docs/docs/scanner/license.md +++ b/docs/docs/scanner/license.md @@ -30,10 +30,10 @@ To configure the confidence level, you can use `--license-confidence-level`. Thi Currently, the standard license scanning doesn't support filesystem and repository scanning. -| License scanning | Image | Rootfs | Filesystem | Repository | -| :-------------------: | :---: | :----: | :--------: | :--------: | -| Standard | ✅ | ✅ | - | - | -| Full (--license-full) | ✅ | ✅ | ✅ | ✅ | +| License scanning | Image | Rootfs | Filesystem | Repository | SBOM | +|:---------------------:|:-----:|:------:|:----------:|:----------:|:----:| +| Standard | ✅ | ✅ | - | - | ✅ | +| Full (--license-full) | ✅ | ✅ | ✅ | ✅ | - | License checking classifies the identified licenses and map the classification to severity. 
diff --git a/docs/docs/target/sbom.md b/docs/docs/target/sbom.md index a287455fc68a..4ea50035df1c 100644 --- a/docs/docs/target/sbom.md +++ b/docs/docs/target/sbom.md @@ -1,6 +1,6 @@ # SBOM scanning -Trivy can take the following SBOM formats as an input and scan for vulnerabilities. +Trivy can take the following SBOM formats as an input and scan for vulnerabilities and licenses. - CycloneDX - SPDX @@ -17,6 +17,9 @@ $ trivy sbom /path/to/sbom_file ``` +By default, vulnerability scan in SBOM is executed. You can use `--scanners vuln,license` +command property to select also license scan, or `--scanners license` alone. + !!! note Passing SBOMs generated by tool other than Trivy may result in inaccurate detection because Trivy relies on custom properties in SBOM for accurate scanning. diff --git a/integration/sbom_test.go b/integration/sbom_test.go index dc18cb43bceb..65c99f9e9600 100644 --- a/integration/sbom_test.go +++ b/integration/sbom_test.go @@ -6,11 +6,11 @@ import ( "path/filepath" "testing" + ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" v1 "github.com/google/go-containerregistry/pkg/v1" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/types" ) @@ -19,6 +19,7 @@ func TestSBOM(t *testing.T) { input string format string artifactType string + scanners string } tests := []struct { name string @@ -150,6 +151,16 @@ func TestSBOM(t *testing.T) { }, }, }, + { + name: "license check cyclonedx json", + args: args{ + input: "testdata/fixtures/sbom/license-cyclonedx.json", + format: "json", + artifactType: "cyclonedx", + scanners: "license", + }, + golden: "testdata/license-cyclonedx.json.golden", + }, } // Set up testing DB @@ -157,6 +168,11 @@ func TestSBOM(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { + scanners := "vuln" + if tt.args.scanners != "" { + scanners = tt.args.scanners + } + osArgs := []string{ 
"--cache-dir", cacheDir, @@ -165,6 +181,8 @@ func TestSBOM(t *testing.T) { "--skip-db-update", "--format", tt.args.format, + "--scanners", + scanners, } // Set up the output file @@ -223,5 +241,10 @@ func compareSBOMReports(t *testing.T, wantFile, gotFile string, overrideWant typ } got := readReport(t, gotFile) + // when running on Windows FS + got.ArtifactName = filepath.ToSlash(filepath.Clean(got.ArtifactName)) + for i, result := range got.Results { + got.Results[i].Target = filepath.ToSlash(filepath.Clean(result.Target)) + } assert.Equal(t, want, got) } diff --git a/integration/testdata/fixtures/sbom/license-cyclonedx.json b/integration/testdata/fixtures/sbom/license-cyclonedx.json new file mode 100644 index 000000000000..e8353ca609cc --- /dev/null +++ b/integration/testdata/fixtures/sbom/license-cyclonedx.json @@ -0,0 +1,125 @@ +{ + "$schema": "http://cyclonedx.org/schema/bom-1.5.schema.json", + "bomFormat": "CycloneDX", + "specVersion": "1.5", + "serialNumber": "urn:uuid:c09512e3-47e7-4eff-8f76-5d7ae72b26a5", + "version": 1, + "metadata": { + "timestamp": "2024-03-10T14:57:31+00:00", + "tools": { + "components": [ + { + "type": "application", + "group": "aquasecurity", + "name": "trivy", + "version": "dev" + } + ] + }, + "component": { + "bom-ref": "acc9d4aa-4158-4969-a497-637e114fde0c", + "type": "application", + "name": "C:/Users/bedla.czech/IdeaProjects/sbom-demo", + "properties": [ + { + "name": "aquasecurity:trivy:SchemaVersion", + "value": "2" + } + ] + } + }, + "components": [ + { + "bom-ref": "eb56cd49-da98-4b08-bfc8-9880fb063cf1", + "type": "application", + "name": "pom.xml", + "properties": [ + { + "name": "aquasecurity:trivy:Class", + "value": "lang-pkgs" + }, + { + "name": "aquasecurity:trivy:Type", + "value": "pom" + } + ] + }, + { + "bom-ref": "pkg:maven/org.eclipse.sisu/org.eclipse.sisu.plexus@0.3.0.M1", + "type": "library", + "group": "org.eclipse.sisu", + "name": "org.eclipse.sisu.plexus", + "version": "0.3.0.M1", + "licenses": [ + { + 
"license": { + "name": "EPL-1.0" + } + } + ], + "purl": "pkg:maven/org.eclipse.sisu/org.eclipse.sisu.plexus@0.3.0.M1", + "properties": [ + { + "name": "aquasecurity:trivy:PkgID", + "value": "org.eclipse.sisu:org.eclipse.sisu.plexus:0.3.0.M1" + }, + { + "name": "aquasecurity:trivy:PkgType", + "value": "pom" + } + ] + }, + { + "bom-ref": "pkg:maven/org.ow2.asm/asm@9.5", + "type": "library", + "group": "org.ow2.asm", + "name": "asm", + "version": "9.5", + "licenses": [ + { + "license": { + "name": "BSD-3-Clause" + } + } + ], + "purl": "pkg:maven/org.ow2.asm/asm@9.5", + "properties": [ + { + "name": "aquasecurity:trivy:PkgID", + "value": "org.ow2.asm:asm:9.5" + }, + { + "name": "aquasecurity:trivy:PkgType", + "value": "pom" + } + ] + }, + { + "bom-ref": "pkg:maven/org.slf4j/slf4j-api@2.0.11", + "type": "library", + "group": "org.slf4j", + "name": "slf4j-api", + "version": "2.0.11", + "licenses": [ + { + "license": { + "name": "MIT License" + } + } + ], + "purl": "pkg:maven/org.slf4j/slf4j-api@2.0.11", + "properties": [ + { + "name": "aquasecurity:trivy:PkgID", + "value": "org.slf4j:slf4j-api:2.0.11" + }, + { + "name": "aquasecurity:trivy:PkgType", + "value": "pom" + } + ] + } + ], + "dependencies": [], + "vulnerabilities": [] +} diff --git a/integration/testdata/license-cyclonedx.json.golden b/integration/testdata/license-cyclonedx.json.golden new file mode 100644 index 000000000000..cf69da9756ed --- /dev/null +++ b/integration/testdata/license-cyclonedx.json.golden @@ -0,0 +1,65 @@ +{ + "SchemaVersion": 2, + "CreatedAt": "2021-08-25T12:20:30.000000005Z", + "ArtifactName": "testdata/fixtures/sbom/license-cyclonedx.json", + "ArtifactType": "cyclonedx", + "Metadata": { + "ImageConfig": { + "architecture": "", + "created": "0001-01-01T00:00:00Z", + "os": "", + "rootfs": { + "type": "", + "diff_ids": null + }, + "config": {} + } + }, + "Results": [ + { + "Target": "OS Packages", + "Class": "license" + }, + { + "Target": "pom.xml", + "Class": "license" + }, + { + "Target": 
"Java", + "Class": "license", + "Licenses": [ + { + "Severity": "MEDIUM", + "Category": "reciprocal", + "PkgName": "org.eclipse.sisu:org.eclipse.sisu.plexus", + "FilePath": "", + "Name": "EPL-1.0", + "Confidence": 1, + "Link": "" + }, + { + "Severity": "LOW", + "Category": "notice", + "PkgName": "org.ow2.asm:asm", + "FilePath": "", + "Name": "BSD-3-Clause", + "Confidence": 1, + "Link": "" + }, + { + "Severity": "UNKNOWN", + "Category": "unknown", + "PkgName": "org.slf4j:slf4j-api", + "FilePath": "", + "Name": "MIT License", + "Confidence": 1, + "Link": "" + } + ] + }, + { + "Target": "Loose File License(s)", + "Class": "license-file" + } + ] +} diff --git a/pkg/commands/app.go b/pkg/commands/app.go index 069c9f8b71cd..41d1d2ff645d 100644 --- a/pkg/commands/app.go +++ b/pkg/commands/app.go @@ -1125,11 +1125,24 @@ func NewSBOMCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command { reportFlagGroup.DependencyTree = nil // disable '--dependency-tree' reportFlagGroup.ReportFormat = nil // TODO: support --report summary + scanners := flag.ScannersFlag.Clone() + scanners.Values = xstrings.ToStringSlice(types.Scanners{ + types.VulnerabilityScanner, + types.LicenseScanner, + }) + scanners.Default = xstrings.ToStringSlice(types.Scanners{ + types.VulnerabilityScanner, + }) scanFlagGroup := flag.NewScanFlagGroup() - scanFlagGroup.Scanners = nil // disable '--scanners' as it always scans for vulnerabilities + scanFlagGroup.Scanners = scanners // allow only 'vuln' and 'license' options for '--scanners' scanFlagGroup.IncludeDevDeps = nil // disable '--include-dev-deps' scanFlagGroup.Parallel = nil // disable '--parallel' + licenseFlagGroup := flag.NewLicenseFlagGroup() + // License full-scan and confidence-level are for file content only + licenseFlagGroup.LicenseFull = nil + licenseFlagGroup.LicenseConfidenceLevel = nil + sbomFlags := &flag.Flags{ GlobalFlagGroup: globalFlags, CacheFlagGroup: flag.NewCacheFlagGroup(), @@ -1139,11 +1152,12 @@ func NewSBOMCommand(globalFlags 
*flag.GlobalFlagGroup) *cobra.Command { ScanFlagGroup: scanFlagGroup, SBOMFlagGroup: flag.NewSBOMFlagGroup(), VulnerabilityFlagGroup: flag.NewVulnerabilityFlagGroup(), + LicenseFlagGroup: licenseFlagGroup, } cmd := &cobra.Command{ Use: "sbom [flags] SBOM_PATH", - Short: "Scan SBOM for vulnerabilities", + Short: "Scan SBOM for vulnerabilities and licenses", GroupID: groupScanning, Example: ` # Scan CycloneDX and show the result in tables $ trivy sbom /path/to/report.cdx @@ -1166,9 +1180,6 @@ func NewSBOMCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command { return xerrors.Errorf("flag error: %w", err) } - // Scan vulnerabilities - options.Scanners = types.Scanners{types.VulnerabilityScanner} - return artifact.Run(cmd.Context(), options, artifact.TargetSBOM) }, SilenceErrors: true, diff --git a/pkg/licensing/normalize.go b/pkg/licensing/normalize.go index 942d388a3f52..0493747a9430 100644 --- a/pkg/licensing/normalize.go +++ b/pkg/licensing/normalize.go @@ -67,15 +67,93 @@ var mapping = map[string]string{ "MPL 2": MPL20, // BSD - "BSD": BSD3Clause, // 2? 3? - "BSD-2-CLAUSE": BSD2Clause, - "BSD-3-CLAUSE": BSD3Clause, - "BSD-4-CLAUSE": BSD4Clause, - - "APACHE": Apache20, // 1? 2? - "APACHE 2.0": Apache20, - "RUBY": Ruby, - "ZLIB": Zlib, + "BSD": BSD3Clause, // 2? 3? 
+ "BSD-2-CLAUSE": BSD2Clause, + "BSD-3-CLAUSE": BSD3Clause, + "BSD-4-CLAUSE": BSD4Clause, + "BSD 2 CLAUSE": BSD2Clause, + "BSD 2-CLAUSE": BSD2Clause, + "BSD 2-CLAUSE LICENSE": BSD2Clause, + "THE BSD 2-CLAUSE LICENSE": BSD2Clause, + "THE 2-CLAUSE BSD LICENSE": BSD2Clause, + "TWO-CLAUSE BSD-STYLE LICENSE": BSD2Clause, + "BSD 3 CLAUSE": BSD3Clause, + "BSD 3-CLAUSE": BSD3Clause, + "BSD 3-CLAUSE LICENSE": BSD3Clause, + "THE BSD 3-CLAUSE LICENSE": BSD3Clause, + "BSD 3-CLAUSE \"NEW\" OR \"REVISED\" LICENSE (BSD-3-CLAUSE)": BSD3Clause, + "ECLIPSE DISTRIBUTION LICENSE (NEW BSD LICENSE)": BSD3Clause, + "NEW BSD LICENSE": BSD3Clause, + "MODIFIED BSD LICENSE": BSD3Clause, + "REVISED BSD": BSD3Clause, + "REVISED BSD LICENSE": BSD3Clause, + "THE NEW BSD LICENSE": BSD3Clause, + "3-CLAUSE BSD LICENSE": BSD3Clause, + "BSD 3-CLAUSE NEW LICENSE": BSD3Clause, + "BSD LICENSE": BSD3Clause, + "EDL 1.0": BSD3Clause, + "ECLIPSE DISTRIBUTION LICENSE - V 1.0": BSD3Clause, + "ECLIPSE DISTRIBUTION LICENSE V. 1.0": BSD3Clause, + "ECLIPSE DISTRIBUTION LICENSE V1.0": BSD3Clause, + "THE BSD LICENSE": BSD4Clause, + + // APACHE + "APACHE LICENSE": Apache10, + "APACHE SOFTWARE LICENSES": Apache10, + "APACHE": Apache20, // 1? 2? 
+ "APACHE 2.0": Apache20, + "APACHE 2": Apache20, + "APACHE V2": Apache20, + "APACHE 2.0 LICENSE": Apache20, + "APACHE SOFTWARE LICENSE, VERSION 2.0": Apache20, + "THE APACHE SOFTWARE LICENSE, VERSION 2.0": Apache20, + "APACHE LICENSE (V2.0)": Apache20, + "APACHE LICENSE 2.0": Apache20, + "APACHE LICENSE V2.0": Apache20, + "APACHE LICENSE VERSION 2.0": Apache20, + "APACHE LICENSE, VERSION 2.0": Apache20, + "APACHE PUBLIC LICENSE 2.0": Apache20, + "APACHE SOFTWARE LICENSE - VERSION 2.0": Apache20, + "THE APACHE LICENSE, VERSION 2.0": Apache20, + "APACHE-2.0 LICENSE": Apache20, + "APACHE 2 STYLE LICENSE": Apache20, + "ASF 2.0": Apache20, + + // CC0-1.0 + "CC0 1.0 UNIVERSAL": CC010, + "PUBLIC DOMAIN, PER CREATIVE COMMONS CC0": CC010, + + // CDDL 1.0 + "CDDL 1.0": CDDL10, + "CDDL LICENSE": CDDL10, + "COMMON DEVELOPMENT AND DISTRIBUTION LICENSE (CDDL) VERSION 1.0": CDDL10, + "COMMON DEVELOPMENT AND DISTRIBUTION LICENSE (CDDL) V1.0": CDDL10, + + // CDDL 1.1 + "CDDL 1.1": CDDL11, + "COMMON DEVELOPMENT AND DISTRIBUTION LICENSE (CDDL) VERSION 1.1": CDDL11, + "COMMON DEVELOPMENT AND DISTRIBUTION LICENSE (CDDL) V1.1": CDDL11, + + // EPL 1.0 + "ECLIPSE PUBLIC LICENSE - VERSION 1.0": EPL10, + "ECLIPSE PUBLIC LICENSE (EPL) 1.0": EPL10, + "ECLIPSE PUBLIC LICENSE V1.0": EPL10, + "ECLIPSE PUBLIC LICENSE, VERSION 1.0": EPL10, + "ECLIPSE PUBLIC LICENSE - V 1.0": EPL10, + "ECLIPSE PUBLIC LICENSE - V1.0": EPL10, + "ECLIPSE PUBLIC LICENSE (EPL), VERSION 1.0": EPL10, + + // EPL 2.0 + "ECLIPSE PUBLIC LICENSE - VERSION 2.0": EPL20, + "EPL 2.0": EPL20, + "ECLIPSE PUBLIC LICENSE - V 2.0": EPL20, + "ECLIPSE PUBLIC LICENSE V2.0": EPL20, + "ECLIPSE PUBLIC LICENSE, VERSION 2.0": EPL20, + "THE ECLIPSE PUBLIC LICENSE VERSION 2.0": EPL20, + "ECLIPSE PUBLIC LICENSE V. 
2.0": EPL20, + + "RUBY": Ruby, + "ZLIB": Zlib, // Public Domain "PUBLIC DOMAIN": Unlicense, From dd9620ef388be0e6e4b14910c635987e143df92a Mon Sep 17 00:00:00 2001 From: Edoardo Vacchi Date: Mon, 18 Mar 2024 10:41:34 +0100 Subject: [PATCH 03/57] chore: updates wazero to v1.7.0 (#6301) Signed-off-by: Edoardo Vacchi --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index ce787a0c2565..1b887bb6e57f 100644 --- a/go.mod +++ b/go.mod @@ -99,7 +99,7 @@ require ( github.com/stretchr/testify v1.8.4 github.com/testcontainers/testcontainers-go v0.28.0 github.com/testcontainers/testcontainers-go/modules/localstack v0.26.0 - github.com/tetratelabs/wazero v1.6.0 + github.com/tetratelabs/wazero v1.7.0 github.com/twitchtv/twirp v8.1.2+incompatible github.com/xeipuuv/gojsonschema v1.2.0 github.com/xlab/treeprint v1.2.0 diff --git a/go.sum b/go.sum index 6319723f6b29..f1e1c352ea6c 100644 --- a/go.sum +++ b/go.sum @@ -1629,8 +1629,8 @@ github.com/testcontainers/testcontainers-go v0.23.0 h1:ERYTSikX01QczBLPZpqsETTBO github.com/testcontainers/testcontainers-go v0.23.0/go.mod h1:3gzuZfb7T9qfcH2pHpV4RLlWrPjeWNQah6XlYQ32c4I= github.com/testcontainers/testcontainers-go/modules/localstack v0.26.0 h1:lpL04dHA9mGFBQLFcV+aEEh1Tf4ohXdIGgoj3J0bacM= github.com/testcontainers/testcontainers-go/modules/localstack v0.26.0/go.mod h1:1xkZPpkBu6coI7CyVn3DXUBnsVrZ+fd/Cc8lx6zk2mk= -github.com/tetratelabs/wazero v1.6.0 h1:z0H1iikCdP8t+q341xqepY4EWvHEw8Es7tlqiVzlP3g= -github.com/tetratelabs/wazero v1.6.0/go.mod h1:0U0G41+ochRKoPKCJlh0jMg1CHkyfK8kDqiirMmKY8A= +github.com/tetratelabs/wazero v1.7.0 h1:jg5qPydno59wqjpGrHph81lbtHzTrWzwwtD4cD88+hQ= +github.com/tetratelabs/wazero v1.7.0/go.mod h1:ytl6Zuh20R/eROuyDaGPkp82O9C/DJfXAwJfQ3X6/7Y= github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod 
h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= github.com/twitchtv/twirp v8.1.2+incompatible h1:0O6TfzZW09ZP5r+ORA90XQEE3PTgA6C7MBbl2KxvVgE= From 317792433e4e961441f772c6bd22d63873a8c986 Mon Sep 17 00:00:00 2001 From: DmitriyLewen <91113035+DmitriyLewen@users.noreply.github.com> Date: Mon, 18 Mar 2024 15:42:54 +0600 Subject: [PATCH 04/57] fix: increase the default buffer size for scanning dpkg status files by 2 times (#6298) --- pkg/fanal/analyzer/pkg/dpkg/scanner.go | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pkg/fanal/analyzer/pkg/dpkg/scanner.go b/pkg/fanal/analyzer/pkg/dpkg/scanner.go index de5c39a6a174..2e38f06b0cf7 100644 --- a/pkg/fanal/analyzer/pkg/dpkg/scanner.go +++ b/pkg/fanal/analyzer/pkg/dpkg/scanner.go @@ -15,6 +15,11 @@ type dpkgScanner struct { // NewScanner returns a new scanner that splits on empty lines. func NewScanner(r io.Reader) *dpkgScanner { s := bufio.NewScanner(r) + // Package data may exceed default buffer size + // Increase the buffer default size by 2 times + buf := make([]byte, 0, 128*1024) + s.Buffer(buf, 128*1024) + s.Split(emptyLineSplit) return &dpkgScanner{Scanner: s} } From c4022d61b39a4f4139f01f6254f182ab81d2bc35 Mon Sep 17 00:00:00 2001 From: Teppei Fukuda Date: Tue, 19 Mar 2024 09:51:18 +0900 Subject: [PATCH 05/57] feat(vex): consider root component for relationships (#6313) Co-authored-by: DmitriyLewen --- pkg/result/filter.go | 9 +- pkg/vex/csaf.go | 3 +- pkg/vex/cyclonedx.go | 2 +- pkg/vex/openvex.go | 16 ++- pkg/vex/testdata/openvex-oci.json | 26 +++++ pkg/vex/vex.go | 3 +- pkg/vex/vex_test.go | 181 +++++++++++++++++++++--------- 7 files changed, 180 insertions(+), 60 deletions(-) create mode 100644 pkg/vex/testdata/openvex-oci.json diff --git a/pkg/result/filter.go b/pkg/result/filter.go index dad9c0767316..6edcef72046a 100644 --- a/pkg/result/filter.go +++ b/pkg/result/filter.go @@ -14,6 +14,8 @@ import ( "golang.org/x/xerrors" dbTypes "github.com/aquasecurity/trivy-db/pkg/types" + 
"github.com/aquasecurity/trivy/pkg/sbom/core" + sbomio "github.com/aquasecurity/trivy/pkg/sbom/io" "github.com/aquasecurity/trivy/pkg/types" "github.com/aquasecurity/trivy/pkg/vex" ) @@ -87,11 +89,16 @@ func filterByVEX(report types.Report, opt FilterOption) error { return nil } + bom, err := sbomio.NewEncoder(core.Options{}).Encode(report) + if err != nil { + return xerrors.Errorf("unable to encode the SBOM: %w", err) + } + for i, result := range report.Results { if len(result.Vulnerabilities) == 0 { continue } - vexDoc.Filter(&report.Results[i]) + vexDoc.Filter(&report.Results[i], bom) } return nil } diff --git a/pkg/vex/csaf.go b/pkg/vex/csaf.go index 33c5c8975b3e..d5d68f76adb9 100644 --- a/pkg/vex/csaf.go +++ b/pkg/vex/csaf.go @@ -8,6 +8,7 @@ import ( "github.com/aquasecurity/trivy/pkg/log" "github.com/aquasecurity/trivy/pkg/purl" + "github.com/aquasecurity/trivy/pkg/sbom/core" "github.com/aquasecurity/trivy/pkg/types" ) @@ -23,7 +24,7 @@ func newCSAF(advisory csaf.Advisory) VEX { } } -func (v *CSAF) Filter(result *types.Result) { +func (v *CSAF) Filter(result *types.Result, _ *core.BOM) { result.Vulnerabilities = lo.Filter(result.Vulnerabilities, func(vuln types.DetectedVulnerability, _ int) bool { found, ok := lo.Find(v.advisory.Vulnerabilities, func(item *csaf.Vulnerability) bool { return string(*item.CVE) == vuln.VulnerabilityID diff --git a/pkg/vex/cyclonedx.go b/pkg/vex/cyclonedx.go index a956703da3ae..685fefebf304 100644 --- a/pkg/vex/cyclonedx.go +++ b/pkg/vex/cyclonedx.go @@ -45,7 +45,7 @@ func newCycloneDX(sbom *core.BOM, vex *cdx.BOM) *CycloneDX { } } -func (v *CycloneDX) Filter(result *types.Result) { +func (v *CycloneDX) Filter(result *types.Result, _ *core.BOM) { result.Vulnerabilities = lo.Filter(result.Vulnerabilities, func(vuln types.DetectedVulnerability, _ int) bool { stmt, ok := lo.Find(v.statements, func(item Statement) bool { return item.VulnerabilityID == vuln.VulnerabilityID diff --git a/pkg/vex/openvex.go b/pkg/vex/openvex.go index 
24e2bb6cca9c..a6cae6de7ac8 100644 --- a/pkg/vex/openvex.go +++ b/pkg/vex/openvex.go @@ -4,6 +4,7 @@ import ( openvex "github.com/openvex/go-vex/pkg/vex" "github.com/samber/lo" + "github.com/aquasecurity/trivy/pkg/sbom/core" "github.com/aquasecurity/trivy/pkg/types" ) @@ -17,13 +18,13 @@ func newOpenVEX(vex openvex.VEX) VEX { } } -func (v *OpenVEX) Filter(result *types.Result) { +func (v *OpenVEX) Filter(result *types.Result, bom *core.BOM) { result.Vulnerabilities = lo.Filter(result.Vulnerabilities, func(vuln types.DetectedVulnerability, _ int) bool { if vuln.PkgIdentifier.PURL == nil { return true } - stmts := v.vex.Matches(vuln.VulnerabilityID, vuln.PkgIdentifier.PURL.String(), nil) + stmts := v.Matches(vuln, bom) if len(stmts) == 0 { return true } @@ -41,6 +42,17 @@ func (v *OpenVEX) Filter(result *types.Result) { }) } +func (v *OpenVEX) Matches(vuln types.DetectedVulnerability, bom *core.BOM) []openvex.Statement { + root := bom.Root() + if root != nil && root.PkgID.PURL != nil { + stmts := v.vex.Matches(vuln.VulnerabilityID, root.PkgID.PURL.String(), []string{vuln.PkgIdentifier.PURL.String()}) + if len(stmts) != 0 { + return stmts + } + } + return v.vex.Matches(vuln.VulnerabilityID, vuln.PkgIdentifier.PURL.String(), nil) +} + func findingStatus(status openvex.Status) types.FindingStatus { switch status { case openvex.StatusNotAffected: diff --git a/pkg/vex/testdata/openvex-oci.json b/pkg/vex/testdata/openvex-oci.json new file mode 100644 index 000000000000..667ca5e3d049 --- /dev/null +++ b/pkg/vex/testdata/openvex-oci.json @@ -0,0 +1,26 @@ +{ + "@context": "https://openvex.dev/ns/v0.2.0", + "author": "Aqua Security", + "role": "Project Release Bot", + "timestamp": "2023-01-16T19:07:16.853479631-06:00", + "version": 1, + "statements": [ + { + "vulnerability": { + "name": "CVE-2022-3715" + }, + "products": [ + { + "@id": "pkg:oci/debian", + "subcomponents": [ + { + "@id": "pkg:deb/debian/bash" + } + ] + } + ], + "status": "not_affected", + "justification": 
"vulnerable_code_not_in_execute_path" + } + ] +} diff --git a/pkg/vex/vex.go b/pkg/vex/vex.go index dc2c118b56bc..0e47bf03bf52 100644 --- a/pkg/vex/vex.go +++ b/pkg/vex/vex.go @@ -13,6 +13,7 @@ import ( ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/sbom" + "github.com/aquasecurity/trivy/pkg/sbom/core" "github.com/aquasecurity/trivy/pkg/sbom/cyclonedx" "github.com/aquasecurity/trivy/pkg/types" ) @@ -21,7 +22,7 @@ import ( // Note: This is in the experimental stage and does not yet support many specifications. // The implementation may change significantly. type VEX interface { - Filter(*types.Result) + Filter(*types.Result, *core.BOM) } func New(filePath string, report types.Report) (VEX, error) { diff --git a/pkg/vex/vex_test.go b/pkg/vex/vex_test.go index d591ccfdc6c6..77d2aff3c63e 100644 --- a/pkg/vex/vex_test.go +++ b/pkg/vex/vex_test.go @@ -16,6 +16,48 @@ import ( "github.com/aquasecurity/trivy/pkg/vex" ) +var ( + vuln1 = types.DetectedVulnerability{ + VulnerabilityID: "CVE-2021-44228", + PkgName: "spring-boot", + InstalledVersion: "2.6.0", + PkgIdentifier: ftypes.PkgIdentifier{ + PURL: &packageurl.PackageURL{ + Type: packageurl.TypeMaven, + Namespace: "org.springframework.boot", + Name: "spring-boot", + Version: "2.6.0", + }, + }, + } + vuln2 = types.DetectedVulnerability{ + VulnerabilityID: "CVE-2021-0001", + PkgName: "spring-boot", + InstalledVersion: "2.6.0", + PkgIdentifier: ftypes.PkgIdentifier{ + PURL: &packageurl.PackageURL{ + Type: packageurl.TypeMaven, + Namespace: "org.springframework.boot", + Name: "spring-boot", + Version: "2.6.0", + }, + }, + } + vuln3 = types.DetectedVulnerability{ + VulnerabilityID: "CVE-2022-3715", + PkgName: "bash", + InstalledVersion: "5.2.15", + PkgIdentifier: ftypes.PkgIdentifier{ + PURL: &packageurl.PackageURL{ + Type: packageurl.TypeDebian, + Namespace: "debian", + Name: "bash", + Version: "5.2.15", + }, + }, + } +) + func TestMain(m *testing.M) { log.InitLogger(false, true) 
os.Exit(m.Run()) @@ -28,6 +70,7 @@ func TestVEX_Filter(t *testing.T) { } type args struct { vulns []types.DetectedVulnerability + bom *core.BOM } tests := []struct { name string @@ -42,21 +85,8 @@ func TestVEX_Filter(t *testing.T) { filePath: "testdata/openvex.json", }, args: args{ - vulns: []types.DetectedVulnerability{ - { - VulnerabilityID: "CVE-2021-44228", - PkgName: "spring-boot", - InstalledVersion: "2.6.0", - PkgIdentifier: ftypes.PkgIdentifier{ - PURL: &packageurl.PackageURL{ - Type: packageurl.TypeMaven, - Namespace: "org.springframework.boot", - Name: "spring-boot", - Version: "2.6.0", - }, - }, - }, - }, + vulns: []types.DetectedVulnerability{vuln1}, + bom: newTestBOM(), }, want: []types.DetectedVulnerability{}, }, @@ -67,49 +97,38 @@ func TestVEX_Filter(t *testing.T) { }, args: args{ vulns: []types.DetectedVulnerability{ - { - VulnerabilityID: "CVE-2021-44228", - PkgName: "spring-boot", - InstalledVersion: "2.6.0", - PkgIdentifier: ftypes.PkgIdentifier{ - PURL: &packageurl.PackageURL{ - Type: packageurl.TypeMaven, - Namespace: "org.springframework.boot", - Name: "spring-boot", - Version: "2.6.0", - }, - }, - }, - { - VulnerabilityID: "CVE-2021-0001", - PkgName: "spring-boot", - InstalledVersion: "2.6.0", - PkgIdentifier: ftypes.PkgIdentifier{ - PURL: &packageurl.PackageURL{ - Type: packageurl.TypeMaven, - Namespace: "org.springframework.boot", - Name: "spring-boot", - Version: "2.6.0", - }, - }, - }, + vuln1, // filtered by VEX + vuln2, }, + bom: newTestBOM(), }, want: []types.DetectedVulnerability{ - { - VulnerabilityID: "CVE-2021-0001", - PkgName: "spring-boot", - InstalledVersion: "2.6.0", - PkgIdentifier: ftypes.PkgIdentifier{ - PURL: &packageurl.PackageURL{ - Type: packageurl.TypeMaven, - Namespace: "org.springframework.boot", - Name: "spring-boot", - Version: "2.6.0", - }, - }, + vuln2, + }, + }, + { + name: "OpenVEX, subcomponents, oci image", + fields: fields{ + filePath: "testdata/openvex-oci.json", + }, + args: args{ + vulns: 
[]types.DetectedVulnerability{ + vuln3, }, + bom: newTestBOM(), + }, + want: []types.DetectedVulnerability{}, + }, + { + name: "OpenVEX, subcomponents, wrong oci image", + fields: fields{ + filePath: "testdata/openvex-oci.json", + }, + args: args{ + vulns: []types.DetectedVulnerability{vuln3}, + bom: newTestBOM2(), }, + want: []types.DetectedVulnerability{vuln3}, }, { name: "CycloneDX SBOM with CycloneDX VEX", @@ -347,8 +366,62 @@ func TestVEX_Filter(t *testing.T) { got := &types.Result{ Vulnerabilities: tt.args.vulns, } - v.Filter(got) + v.Filter(got, tt.args.bom) assert.Equal(t, tt.want, got.Vulnerabilities) }) } } + +func newTestBOM() *core.BOM { + bom := core.NewBOM(core.Options{}) + bom.AddComponent(&core.Component{ + Root: true, + Type: core.TypeContainerImage, + Name: "debian:12", + PkgID: core.PkgID{ + PURL: &packageurl.PackageURL{ + Type: packageurl.TypeOCI, + Name: "debian", + Version: "sha256:4482958b4461ff7d9fabc24b3a9ab1e9a2c85ece07b2db1840c7cbc01d053e90", + Qualifiers: packageurl.Qualifiers{ + { + Key: "tag", + Value: "12", + }, + { + Key: "repository_url", + Value: "docker.io/library/debian", + }, + }, + }, + }, + }) + return bom +} + +func newTestBOM2() *core.BOM { + bom := core.NewBOM(core.Options{}) + bom.AddComponent(&core.Component{ + Root: true, + Type: core.TypeContainerImage, + Name: "ubuntu:24.04", + PkgID: core.PkgID{ + PURL: &packageurl.PackageURL{ + Type: packageurl.TypeOCI, + Name: "ubuntu", + Version: "sha256:4482958b4461ff7d9fabc24b3a9ab1e9a2c85ece07b2db1840c7cbc01d053e90", + Qualifiers: packageurl.Qualifiers{ + { + Key: "tag", + Value: "24.04", + }, + { + Key: "repository_url", + Value: "docker.io/library/ubuntu", + }, + }, + }, + }, + }) + return bom +} From f6c5d5800166f1686403e0799cc7a330eb6197a7 Mon Sep 17 00:00:00 2001 From: DmitriyLewen <91113035+DmitriyLewen@users.noreply.github.com> Date: Tue, 19 Mar 2024 06:59:31 +0600 Subject: [PATCH 06/57] feat(java): add support licenses and graph for gradle lock files (#6140) --- 
docs/docs/configuration/reporting.md | 2 + docs/docs/coverage/language/java.md | 34 +- .../parser/gradle/lockfile/parse.go | 4 + .../parser/gradle/lockfile/parse_test.go | 21 +- .../analyzer/language/java/gradle/lockfile.go | 88 ++- .../language/java/gradle/lockfile_test.go | 99 ++- .../analyzer/language/java/gradle/pom.go | 166 +++++ .../analyzer/language/java/gradle/pom_test.go | 101 +++ .../junit-4.13.pom | 587 ++++++++++++++++++ .../hamcrest-core-1.3.pom | 18 + .../java/gradle/testdata/happy.lockfile | 5 - .../empty/gradle.lockfile} | 0 .../testdata/lockfiles/happy/gradle.lockfile | 6 + .../testdata/poms/dep-version-as-property.pom | 21 + .../java/gradle/testdata/poms/happy.pom | 23 + .../poms/without-groupid-and-version.pom | 19 + .../poms/without-licenses-and-deps.pom | 10 + 17 files changed, 1152 insertions(+), 52 deletions(-) create mode 100644 pkg/fanal/analyzer/language/java/gradle/pom.go create mode 100644 pkg/fanal/analyzer/language/java/gradle/pom_test.go create mode 100644 pkg/fanal/analyzer/language/java/gradle/testdata/cache/caches/modules-2/files-2.1/junit/junit/4.13/5c17760663fae422643fc859fd352c68a1d91bfc/junit-4.13.pom create mode 100644 pkg/fanal/analyzer/language/java/gradle/testdata/cache/caches/modules-2/files-2.1/org.hamcrest/hamcrest-core/1.3/872e413497b906e7c9fa85ccc96046c5d1ef7ece/hamcrest-core-1.3.pom delete mode 100644 pkg/fanal/analyzer/language/java/gradle/testdata/happy.lockfile rename pkg/fanal/analyzer/language/java/gradle/testdata/{empty.lockfile => lockfiles/empty/gradle.lockfile} (100%) create mode 100644 pkg/fanal/analyzer/language/java/gradle/testdata/lockfiles/happy/gradle.lockfile create mode 100644 pkg/fanal/analyzer/language/java/gradle/testdata/poms/dep-version-as-property.pom create mode 100644 pkg/fanal/analyzer/language/java/gradle/testdata/poms/happy.pom create mode 100644 pkg/fanal/analyzer/language/java/gradle/testdata/poms/without-groupid-and-version.pom create mode 100644 
pkg/fanal/analyzer/language/java/gradle/testdata/poms/without-licenses-and-deps.pom diff --git a/docs/docs/configuration/reporting.md b/docs/docs/configuration/reporting.md index 93468222e99b..117db88de866 100644 --- a/docs/docs/configuration/reporting.md +++ b/docs/docs/configuration/reporting.md @@ -63,6 +63,7 @@ The following languages are currently supported: | Go | [go.mod][go-mod] | | PHP | [composer.lock][composer-lock] | | Java | [pom.xml][pom-xml] | +| | [*gradle.lockfile][gradle-lockfile] | | Dart | [pubspec.lock][pubspec-lock] | This tree is the reverse of the dependency graph. @@ -445,5 +446,6 @@ $ trivy convert --format table --severity CRITICAL result.json [go-mod]: ../coverage/language/golang.md#go-modules [composer-lock]: ../coverage/language/php.md#composer [pom-xml]: ../coverage/language/java.md#pomxml +[gradle-lockfile]: ../coverage/language/java.md#gradlelock [pubspec-lock]: ../coverage/language/dart.md#dart [cargo-binaries]: ../coverage/language/rust.md#binaries \ No newline at end of file diff --git a/docs/docs/coverage/language/java.md b/docs/docs/coverage/language/java.md index 59a9ba571506..e2e97b46c61f 100644 --- a/docs/docs/coverage/language/java.md +++ b/docs/docs/coverage/language/java.md @@ -3,11 +3,11 @@ Trivy supports three types of Java scanning: `JAR/WAR/PAR/EAR`, `pom.xml` and `* Each artifact supports the following scanners: -| Artifact | SBOM | Vulnerability | License | -| ---------------- | :---: | :-----------: | :-----: | -| JAR/WAR/PAR/EAR | ✓ | ✓ | - | -| pom.xml | ✓ | ✓ | ✓ | -| *gradle.lockfile | ✓ | ✓ | - | +| Artifact | SBOM | Vulnerability | License | +|------------------|:----:|:-------------:|:-------:| +| JAR/WAR/PAR/EAR | ✓ | ✓ | - | +| pom.xml | ✓ | ✓ | ✓ | +| *gradle.lockfile | ✓ | ✓ | ✓ | The following table provides an outline of the features Trivy offers. @@ -15,7 +15,7 @@ The following table provides an outline of the features Trivy offers. 
|------------------|:---------------------:|:----------------:|:------------------------------------:|:--------:| | JAR/WAR/PAR/EAR | Trivy Java DB | Include | - | - | | pom.xml | Maven repository [^1] | Exclude | ✓ | ✓[^7] | -| *gradle.lockfile | - | Exclude | - | ✓ | +| *gradle.lockfile | - | Exclude | ✓ | ✓ | These may be enabled or disabled depending on the target. See [here](./index.md) for the detail. @@ -64,11 +64,24 @@ If you need to show them, use the `--include-dev-deps` flag. ## Gradle.lock -`gradle.lock` files contain all necessary information about used dependencies. -Trivy simply parses the file, extract dependencies, and finds vulnerabilities for them. -It doesn't require the internet access. +`gradle.lock` files only contain information about used dependencies. + +!!!note + All necessary files are checked locally. Gradle file scanning doesn't require internet access. + +### Dependency-tree +!!! warning "EXPERIMENTAL" + This feature might change without preserving backwards compatibility. +Trivy finds child dependencies from `*.pom` files in the cache[^8] directory. + +But there is no reliable way to determine direct dependencies (even using other files). +Therefore, we mark all dependencies as indirect to use logic to guess direct dependencies and build a dependency tree. + +### Licenses +Trity also can detect licenses for dependencies. + +Make sure that you have cache[^8] directory to find licenses from `*.pom` dependency files. -[^1]: https://github.com/aquasecurity/trivy-java-db [^1]: Uses maven repository to get information about dependencies. Internet access required. [^2]: It means `*.jar`, `*.war`, `*.par` and `*.ear` file [^3]: `ArtifactID`, `GroupID` and `Version` @@ -76,6 +89,7 @@ It doesn't require the internet access. 
[^5]: When you use dependency path in `relativePath` field in pom.xml file [^6]: `/Users//.m2/repository` (for Linux and Mac) and `C:/Users//.m2/repository` (for Windows) by default [^7]: To avoid confusion, Trivy only finds locations for direct dependencies from the base pom.xml file. +[^8]: The supported directories are `$GRADLE_USER_HOME/caches` and `$HOME/.gradle/caches` (`%HOMEPATH%\.gradle\caches` for Windows). [dependency-graph]: ../../configuration/reporting.md#show-origins-of-vulnerable-dependencies [maven-invoker-plugin]: https://maven.apache.org/plugins/maven-invoker-plugin/usage.html \ No newline at end of file diff --git a/pkg/dependency/parser/gradle/lockfile/parse.go b/pkg/dependency/parser/gradle/lockfile/parse.go index 3a60f3f58872..6d466570d2ff 100644 --- a/pkg/dependency/parser/gradle/lockfile/parse.go +++ b/pkg/dependency/parser/gradle/lockfile/parse.go @@ -46,6 +46,10 @@ func (Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, er EndLine: lineNum, }, }, + // There is no reliable way to determine direct dependencies (even using other files). + // Therefore, we mark all dependencies as Indirect. + // This is necessary to try to guess direct dependencies and build a dependency tree. 
+ Indirect: true, }) } diff --git a/pkg/dependency/parser/gradle/lockfile/parse_test.go b/pkg/dependency/parser/gradle/lockfile/parse_test.go index e9f76883e4e5..49cc7fe1c3a3 100644 --- a/pkg/dependency/parser/gradle/lockfile/parse_test.go +++ b/pkg/dependency/parser/gradle/lockfile/parse_test.go @@ -21,9 +21,10 @@ func TestParser_Parse(t *testing.T) { inputFile: "testdata/happy.lockfile", want: []types.Library{ { - ID: "cglib:cglib-nodep:2.1.2", - Name: "cglib:cglib-nodep", - Version: "2.1.2", + ID: "cglib:cglib-nodep:2.1.2", + Name: "cglib:cglib-nodep", + Version: "2.1.2", + Indirect: true, Locations: []types.Location{ { StartLine: 4, @@ -32,9 +33,10 @@ func TestParser_Parse(t *testing.T) { }, }, { - ID: "org.springframework:spring-asm:3.1.3.RELEASE", - Name: "org.springframework:spring-asm", - Version: "3.1.3.RELEASE", + ID: "org.springframework:spring-asm:3.1.3.RELEASE", + Name: "org.springframework:spring-asm", + Version: "3.1.3.RELEASE", + Indirect: true, Locations: []types.Location{ { StartLine: 5, @@ -43,9 +45,10 @@ func TestParser_Parse(t *testing.T) { }, }, { - ID: "org.springframework:spring-beans:5.0.5.RELEASE", - Name: "org.springframework:spring-beans", - Version: "5.0.5.RELEASE", + ID: "org.springframework:spring-beans:5.0.5.RELEASE", + Name: "org.springframework:spring-beans", + Version: "5.0.5.RELEASE", + Indirect: true, Locations: []types.Location{ { StartLine: 6, diff --git a/pkg/fanal/analyzer/language/java/gradle/lockfile.go b/pkg/fanal/analyzer/language/java/gradle/lockfile.go index 55661782fb66..5dddb0b49c3c 100644 --- a/pkg/fanal/analyzer/language/java/gradle/lockfile.go +++ b/pkg/fanal/analyzer/language/java/gradle/lockfile.go @@ -2,36 +2,104 @@ package gradle import ( "context" + "fmt" + "io" + "io/fs" "os" + "sort" "strings" + "github.com/samber/lo" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/dependency/parser/gradle/lockfile" + godeptypes "github.com/aquasecurity/trivy/pkg/dependency/types" 
"github.com/aquasecurity/trivy/pkg/fanal/analyzer" "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language" "github.com/aquasecurity/trivy/pkg/fanal/types" + "github.com/aquasecurity/trivy/pkg/log" + "github.com/aquasecurity/trivy/pkg/utils/fsutils" ) func init() { - analyzer.RegisterAnalyzer(&gradleLockAnalyzer{}) + analyzer.RegisterPostAnalyzer(analyzer.TypeGradleLock, newGradleLockAnalyzer) } const ( - version = 1 + version = 2 fileNameSuffix = "gradle.lockfile" ) // gradleLockAnalyzer analyzes '*gradle.lockfile' -type gradleLockAnalyzer struct{} +type gradleLockAnalyzer struct { + parser godeptypes.Parser +} + +func newGradleLockAnalyzer(_ analyzer.AnalyzerOptions) (analyzer.PostAnalyzer, error) { + return &gradleLockAnalyzer{ + parser: lockfile.NewParser(), + }, nil +} -func (a gradleLockAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) { - p := lockfile.NewParser() - res, err := language.Analyze(types.Gradle, input.FilePath, input.Content, p) +func (a gradleLockAnalyzer) PostAnalyze(_ context.Context, input analyzer.PostAnalysisInput) (*analyzer.AnalysisResult, error) { + poms, err := parsePoms() if err != nil { - return nil, xerrors.Errorf("%s parse error: %w", input.FilePath, err) + log.Logger.Warnf("Unable to get licenses and dependsOn: %s", err) + } + + required := func(path string, d fs.DirEntry) bool { + return a.Required(path, nil) } - return res, nil + + var apps []types.Application + err = fsutils.WalkDir(input.FS, ".", required, func(filePath string, _ fs.DirEntry, r io.Reader) error { + var app *types.Application + app, err = language.Parse(types.Gradle, filePath, r, a.parser) + if err != nil { + return xerrors.Errorf("%s parse error: %w", filePath, err) + } + + if app == nil { + return nil + } + + libs := lo.SliceToMap(app.Libraries, func(lib types.Package) (string, struct{}) { + return lib.ID, struct{}{} + }) + + for i, lib := range app.Libraries { + pom := poms[lib.ID] + + // Fill licenses 
from pom file + if len(pom.Licenses.License) > 0 { + app.Libraries[i].Licenses = lo.Map(pom.Licenses.License, func(license License, _ int) string { + return license.Name + }) + } + + // File child deps from pom file + var deps []string + for _, dep := range pom.Dependencies.Dependency { + id := packageID(dep.GroupID, dep.ArtifactID, dep.Version) + if _, ok := libs[id]; ok { + deps = append(deps, id) + } + } + sort.Strings(deps) + app.Libraries[i].DependsOn = deps + } + + sort.Sort(app.Libraries) + apps = append(apps, *app) + return nil + }) + if err != nil { + return nil, xerrors.Errorf("walk error: %w", err) + } + + return &analyzer.AnalysisResult{ + Applications: apps, + }, nil } func (a gradleLockAnalyzer) Required(filePath string, _ os.FileInfo) bool { @@ -45,3 +113,7 @@ func (a gradleLockAnalyzer) Type() analyzer.Type { func (a gradleLockAnalyzer) Version() int { return version } + +func packageID(groupId, artifactId, ver string) string { + return fmt.Sprintf("%s:%s:%s", groupId, artifactId, ver) +} diff --git a/pkg/fanal/analyzer/language/java/gradle/lockfile_test.go b/pkg/fanal/analyzer/language/java/gradle/lockfile_test.go index e48ce885865b..b1868fecb936 100644 --- a/pkg/fanal/analyzer/language/java/gradle/lockfile_test.go +++ b/pkg/fanal/analyzer/language/java/gradle/lockfile_test.go @@ -1,6 +1,7 @@ package gradle import ( + "context" "os" "testing" @@ -13,23 +14,70 @@ import ( func Test_gradleLockAnalyzer_Analyze(t *testing.T) { tests := []struct { - name string - inputFile string - want *analyzer.AnalysisResult + name string + dir string + cacheDir string + want *analyzer.AnalysisResult }{ { - name: "happy path", - inputFile: "testdata/happy.lockfile", + name: "happy path", + dir: "testdata/lockfiles/happy", + cacheDir: "testdata/cache", + want: &analyzer.AnalysisResult{ + Applications: []types.Application{ + { + Type: types.Gradle, + FilePath: "gradle.lockfile", + Libraries: types.Packages{ + { + ID: "junit:junit:4.13", + Name: "junit:junit", + 
Version: "4.13", + Indirect: true, + Locations: []types.Location{ + { + StartLine: 4, + EndLine: 4, + }, + }, + Licenses: []string{ + "Eclipse Public License 1.0", + }, + DependsOn: []string{ + "org.hamcrest:hamcrest-core:1.3", + }, + }, + { + ID: "org.hamcrest:hamcrest-core:1.3", + Name: "org.hamcrest:hamcrest-core", + Version: "1.3", + Indirect: true, + Locations: []types.Location{ + { + StartLine: 5, + EndLine: 5, + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "happy path without cache", + dir: "testdata/lockfiles/happy", want: &analyzer.AnalysisResult{ Applications: []types.Application{ { Type: types.Gradle, - FilePath: "testdata/happy.lockfile", + FilePath: "gradle.lockfile", Libraries: types.Packages{ { - ID: "com.example:example:0.0.1", - Name: "com.example:example", - Version: "0.0.1", + ID: "junit:junit:4.13", + Name: "junit:junit", + Version: "4.13", + Indirect: true, Locations: []types.Location{ { StartLine: 4, @@ -37,30 +85,41 @@ func Test_gradleLockAnalyzer_Analyze(t *testing.T) { }, }, }, + { + ID: "org.hamcrest:hamcrest-core:1.3", + Name: "org.hamcrest:hamcrest-core", + Version: "1.3", + Indirect: true, + Locations: []types.Location{ + { + StartLine: 5, + EndLine: 5, + }, + }, + }, }, }, }, }, }, { - name: "empty file", - inputFile: "testdata/empty.lockfile", + name: "empty file", + dir: "testdata/lockfiles/empty", + want: &analyzer.AnalysisResult{}, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - f, err := os.Open(tt.inputFile) + if tt.cacheDir != "" { + t.Setenv("GRADLE_USER_HOME", tt.cacheDir) + } + + a, err := newGradleLockAnalyzer(analyzer.AnalyzerOptions{}) require.NoError(t, err) - defer func() { - err = f.Close() - assert.NoError(t, err) - }() - a := gradleLockAnalyzer{} - got, err := a.Analyze(nil, analyzer.AnalysisInput{ - FilePath: tt.inputFile, - Content: f, + got, err := a.PostAnalyze(context.Background(), analyzer.PostAnalysisInput{ + FS: os.DirFS(tt.dir), }) assert.NoError(t, err) diff --git 
a/pkg/fanal/analyzer/language/java/gradle/pom.go b/pkg/fanal/analyzer/language/java/gradle/pom.go new file mode 100644 index 000000000000..638b5c9fd61b --- /dev/null +++ b/pkg/fanal/analyzer/language/java/gradle/pom.go @@ -0,0 +1,166 @@ +package gradle + +import ( + "encoding/xml" + "io" + "io/fs" + "os" + "path/filepath" + "runtime" + "strings" + + "golang.org/x/net/html/charset" + "golang.org/x/xerrors" + + "github.com/aquasecurity/trivy/pkg/fanal/log" + "github.com/aquasecurity/trivy/pkg/utils/fsutils" +) + +type pomXML struct { + GroupId string `xml:"groupId"` + ArtifactId string `xml:"artifactId"` + Version string `xml:"version"` + Properties Properties `xml:"properties"` + Dependencies Dependencies `xml:"dependencies"` + Licenses Licenses `xml:"licenses"` +} +type Dependencies struct { + Dependency []Dependency `xml:"dependency"` +} + +type Dependency struct { + GroupID string `xml:"groupId"` + ArtifactID string `xml:"artifactId"` + Version string `xml:"version"` +} + +type Licenses struct { + License []License `xml:"license"` +} + +type License struct { + Name string `xml:"name"` +} + +type Properties map[string]string + +type property struct { + XMLName xml.Name + Value string `xml:",chardata"` +} + +func (props *Properties) UnmarshalXML(d *xml.Decoder, _ xml.StartElement) error { + *props = Properties{} + for { + var p property + err := d.Decode(&p) + if err == io.EOF { + break + } else if err != nil { + return xerrors.Errorf("XML decode error: %w", err) + } + + (*props)[p.XMLName.Local] = p.Value + } + return nil +} + +func parsePoms() (map[string]pomXML, error) { + cacheDir := detectCacheDir() + // Cache dir is not found + if cacheDir == "" { + return nil, nil + } + + required := func(path string, d fs.DirEntry) bool { + return filepath.Ext(path) == ".pom" + } + + var poms = make(map[string]pomXML) + err := fsutils.WalkDir(os.DirFS(cacheDir), ".", required, func(path string, _ fs.DirEntry, r io.Reader) error { + pom, err := parsePom(r, path) + if err != 
nil { + log.Logger.Debugf("Unable to parse %q: %s", path, err) + return nil + } + + if pom.ArtifactId != "" { + poms[packageID(pom.GroupId, pom.ArtifactId, pom.Version)] = pom + } + return nil + }) + if err != nil { + return nil, xerrors.Errorf("gradle licenses walk error: %w", err) + } + + return poms, nil +} + +func parsePom(r io.Reader, path string) (pomXML, error) { + pom := pomXML{} + decoder := xml.NewDecoder(r) + decoder.CharsetReader = charset.NewReaderLabel + if err := decoder.Decode(&pom); err != nil { + return pomXML{}, xerrors.Errorf("xml decode error: %w", err) + } + + // We only need pom's with licenses or dependencies + if len(pom.Licenses.License) == 0 && len(pom.Dependencies.Dependency) == 0 { + return pomXML{}, nil + } + + // If pom file doesn't contain GroupID or Version: + // find these values from filepath + // e.g. caches/modules-2/files-2.1/com.google.code.gson/gson/2.9.1/f0cf3edcef8dcb74d27cb427544a309eb718d772/gson-2.9.1.pom + dirs := strings.Split(filepath.ToSlash(path), "/") + if pom.GroupId == "" { + pom.GroupId = dirs[len(dirs)-5] + } + if pom.Version == "" { + pom.Version = dirs[len(dirs)-3] + } + + if err := pom.resolveDependencyVersions(); err != nil { + return pomXML{}, xerrors.Errorf("unable to resolve dependency version: %w", err) + } + + return pom, nil +} + +// resolveDependencyVersions resolves versions from properties +func (pom *pomXML) resolveDependencyVersions() error { + for i, dep := range pom.Dependencies.Dependency { + if strings.HasPrefix(dep.Version, "${") && strings.HasSuffix(dep.Version, "}") { + dep.Version = strings.TrimPrefix(strings.TrimSuffix(dep.Version, "}"), "${") + if resolvedVer, ok := pom.Properties[dep.Version]; ok { + pom.Dependencies.Dependency[i].Version = resolvedVer + } else if dep.Version == "${project.version}" { + pom.Dependencies.Dependency[i].Version = dep.Version + } else { + // We use simplified logic to resolve properties. 
+ // If necessary, update and use the logic for maven pom's + return xerrors.Errorf("Unable to resolve %q version. Please open a new discussion to update the Trivy logic.", dep.Version) + } + } + } + return nil +} + +func detectCacheDir() string { + // https://docs.gradle.org/current/userguide/directory_layout.html + dir := os.Getenv("GRADLE_USER_HOME") + if dir == "" { + if runtime.GOOS == "windows" { + dir = filepath.Join(os.Getenv("%HOMEPATH%"), ".gradle") + } else { + dir = filepath.Join(os.Getenv("HOME"), ".gradle") + } + } + dir = filepath.Join(dir, "caches") + + if !fsutils.DirExists(dir) { + log.Logger.Debug("Unable to get licenses and dependsOn. Gradle cache dir doesn't exist.") + return "" + } + return dir +} diff --git a/pkg/fanal/analyzer/language/java/gradle/pom_test.go b/pkg/fanal/analyzer/language/java/gradle/pom_test.go new file mode 100644 index 000000000000..4ca85c647e2e --- /dev/null +++ b/pkg/fanal/analyzer/language/java/gradle/pom_test.go @@ -0,0 +1,101 @@ +package gradle + +import ( + "github.com/stretchr/testify/require" + "os" + "path/filepath" + "testing" +) + +func Test_parsePom(t *testing.T) { + tests := []struct { + name string + inputFile string + inputPath string + want pomXML + }{ + { + name: "happy path", + inputFile: filepath.Join("testdata", "poms", "happy.pom"), + inputPath: "cache/caches/modules-2/files-2.1/org.example/example-core/1.0/872e413497b906e7c9fa85ccc96046c5d1ef7ece/example-core-1.0.pom", + want: pomXML{ + GroupId: "org.example", + ArtifactId: "example-core", + Version: "1.0.0", + Licenses: Licenses{ + License: []License{ + { + Name: "Apache License, Version 2.0", + }, + }, + }, + Dependencies: Dependencies{ + Dependency: []Dependency{ + { + GroupID: "org.example", + ArtifactID: "example-api", + Version: "2.0.0", + }, + }, + }, + }, + }, + { + name: "happy path. 
Take GroupID and Version from path", + inputFile: filepath.Join("testdata", "poms", "without-groupid-and-version.pom"), + inputPath: "cache/caches/modules-2/files-2.1/org.example/example-core/1.0.0/872e413497b906e7c9fa85ccc96046c5d1ef7ece/example-core-1.0.pom", + want: pomXML{ + GroupId: "org.example", + ArtifactId: "example-core", + Version: "1.0.0", + Licenses: Licenses{ + License: []License{ + { + Name: "Apache License, Version 2.0", + }, + }, + }, + }, + }, + { + name: "happy path. Dependency version as property.", + inputFile: filepath.Join("testdata", "poms", "dep-version-as-property.pom"), + inputPath: "cache/caches/modules-2/files-2.1/org.example/example-core/1.0.0/872e413497b906e7c9fa85ccc96046c5d1ef7ece/example-core-1.0.pom", + want: pomXML{ + GroupId: "org.example", + ArtifactId: "example-core", + Version: "1.0.0", + Properties: Properties{ + "coreVersion": "2.0.1", + }, + Dependencies: Dependencies{ + Dependency: []Dependency{ + { + GroupID: "org.example", + ArtifactID: "example-api", + Version: "2.0.1", + }, + }, + }, + }, + }, + { + name: "happy path. 
Dependency version as property.", + inputFile: filepath.Join("testdata", "poms", "without-licenses-and-deps.pom"), + inputPath: "cache/caches/modules-2/files-2.1/org.example/example-core/1.0.0/872e413497b906e7c9fa85ccc96046c5d1ef7ece/example-core-1.0.pom", + want: pomXML{}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + f, err := os.Open(tt.inputFile) + require.NoError(t, err) + + pom, err := parsePom(f, tt.inputPath) + require.NoError(t, err) + + require.Equal(t, tt.want, pom) + }) + } +} diff --git a/pkg/fanal/analyzer/language/java/gradle/testdata/cache/caches/modules-2/files-2.1/junit/junit/4.13/5c17760663fae422643fc859fd352c68a1d91bfc/junit-4.13.pom b/pkg/fanal/analyzer/language/java/gradle/testdata/cache/caches/modules-2/files-2.1/junit/junit/4.13/5c17760663fae422643fc859fd352c68a1d91bfc/junit-4.13.pom new file mode 100644 index 000000000000..40d49278c416 --- /dev/null +++ b/pkg/fanal/analyzer/language/java/gradle/testdata/cache/caches/modules-2/files-2.1/junit/junit/4.13/5c17760663fae422643fc859fd352c68a1d91bfc/junit-4.13.pom @@ -0,0 +1,587 @@ + + + 4.0.0 + + junit + junit + 4.13 + + JUnit + JUnit is a unit testing framework for Java, created by Erich Gamma and Kent Beck. 
+ http://junit.org + 2002 + + JUnit + http://www.junit.org + + + + Eclipse Public License 1.0 + http://www.eclipse.org/legal/epl-v10.html + repo + + + + + + dsaff + David Saff + david@saff.net + + + kcooney + Kevin Cooney + kcooney@google.com + + + stefanbirkner + Stefan Birkner + mail@stefan-birkner.de + + + marcphilipp + Marc Philipp + mail@marcphilipp.de + + + + + JUnit contributors + JUnit + team@junit.org + https://github.com/junit-team/junit4/graphs/contributors + + developers + + + + + + 3.0.4 + + + + scm:git:git://github.com/junit-team/junit4.git + scm:git:git@github.com:junit-team/junit4.git + http://github.com/junit-team/junit4/tree/master + r4.13 + + + github + https://github.com/junit-team/junit4/issues + + + travis + https://travis-ci.org/junit-team/junit4 + + + https://github.com/junit-team/junit4/wiki/Download-and-Install + + junit-snapshot-repo + Nexus Snapshot Repository + https://oss.sonatype.org/content/repositories/snapshots/ + + + junit-releases-repo + Nexus Release Repository + https://oss.sonatype.org/service/local/staging/deploy/maven2/ + + + junit.github.io + gitsite:git@github.com/junit-team/junit4.git + + + + + 1.5 + 2.19.1 + 1.3 + ISO-8859-1 + + 67893CC4 + + + + + org.hamcrest + hamcrest-core + ${hamcrestVersion} + + + + org.hamcrest + hamcrest-library + ${hamcrestVersion} + test + + + + + + + ${project.basedir}/src/main/resources + + + ${project.basedir} + + LICENSE-junit.txt + + + + + + + + maven-enforcer-plugin + 1.4 + + + enforce-versions + initialize + + enforce + + + true + + + + Current version of Maven ${maven.version} required to build the project + should be ${project.prerequisites.maven}, or higher! + + [${project.prerequisites.maven},) + + + Current JDK version ${java.version} should be ${jdkVersion}, or higher! + + ${jdkVersion} + + + Best Practice is to never define repositories in pom.xml (use a repository + manager instead). + + + + No Snapshots Dependencies Allowed! 
+ + + + + + + + + com.google.code.maven-replacer-plugin + replacer + 1.5.3 + + + process-sources + + replace + + + + + false + ${project.build.sourceDirectory}/junit/runner/Version.java.template + ${project.build.sourceDirectory}/junit/runner/Version.java + false + @version@ + ${project.version} + + + + + maven-compiler-plugin + 3.3 + + ${project.build.sourceEncoding} + ${jdkVersion} + ${jdkVersion} + ${jdkVersion} + ${jdkVersion} + 1.5 + true + true + true + true + + -Xlint:unchecked + + 128m + + + + org.codehaus.mojo + animal-sniffer-maven-plugin + 1.14 + + + signature-check + test + + check + + + + org.codehaus.mojo.signature + java15 + 1.0 + + + + + + + + maven-surefire-plugin + ${surefireVersion} + + org/junit/tests/AllTests.java + true + false + + + + org.apache.maven.surefire + surefire-junit47 + ${surefireVersion} + + + + + + maven-source-plugin + 2.4 + + + + maven-javadoc-plugin + 2.10.3 + + ${basedir}/src/main/javadoc/stylesheet.css + protected + false + false + false + true + true + true + JUnit API + UTF-8 + en + ${jdkVersion} + + + api_${jdkVersion} + http://docs.oracle.com/javase/${jdkVersion}.0/docs/api/ + + + *.internal.* + true + 32m + 128m + true + true + + org.hamcrest:hamcrest-core:* + + + + + maven-release-plugin + 2.5.2 + + forked-path + false + -Pgenerate-docs,junit-release ${arguments} + r@{project.version} + + + + maven-site-plugin + 3.4 + + + com.github.stephenc.wagon + wagon-gitsite + 0.4.1 + + + org.apache.maven.doxia + doxia-module-markdown + 1.5 + + + + + maven-jar-plugin + 2.6 + + + false + + true + + + junit + + + + + + maven-clean-plugin + 2.6.1 + + + maven-deploy-plugin + 2.8.2 + + + maven-install-plugin + 2.5.2 + + + maven-resources-plugin + 2.7 + + + + + + + + maven-project-info-reports-plugin + 2.8 + + false + + + + + + index + dependency-info + modules + license + project-team + scm + issue-tracking + mailing-list + dependency-management + dependencies + dependency-convergence + cim + distribution-management + + + + + + 
maven-javadoc-plugin + 2.10.3 + + javadoc/latest + ${basedir}/src/main/javadoc/stylesheet.css + protected + false + false + false + true + true + true + JUnit API + UTF-8 + en + ${jdkVersion} + + + api_${jdkVersion} + http://docs.oracle.com/javase/${jdkVersion}.0/docs/api/ + + + junit.*,*.internal.* + true + 32m + 128m + true + true + + org.hamcrest:hamcrest-core:* + + + + + + javadoc + + + + + + + + + + junit-release + + + + + + maven-gpg-plugin + 1.6 + + + gpg-sign + verify + + sign + + + + + + + + + generate-docs + + + + + maven-source-plugin + + + attach-sources + prepare-package + + jar-no-fork + + + + + + maven-javadoc-plugin + + + attach-javadoc + package + + jar + + + + + + + + + restrict-doclint + + + [1.8,) + + + + + maven-compiler-plugin + + + -Xlint:unchecked + -Xdoclint:accessibility,reference,syntax + + + + + maven-javadoc-plugin + + -Xdoclint:accessibility -Xdoclint:reference + + + + + + + + maven-javadoc-plugin + + -Xdoclint:accessibility -Xdoclint:reference + + + + + + + java9 + + [1.9,) + + + + 1.6 + + + + + maven-javadoc-plugin + + 1.6 + + + + + + + + maven-javadoc-plugin + + 1.6 + + + + + + + diff --git a/pkg/fanal/analyzer/language/java/gradle/testdata/cache/caches/modules-2/files-2.1/org.hamcrest/hamcrest-core/1.3/872e413497b906e7c9fa85ccc96046c5d1ef7ece/hamcrest-core-1.3.pom b/pkg/fanal/analyzer/language/java/gradle/testdata/cache/caches/modules-2/files-2.1/org.hamcrest/hamcrest-core/1.3/872e413497b906e7c9fa85ccc96046c5d1ef7ece/hamcrest-core-1.3.pom new file mode 100644 index 000000000000..0721781c99a0 --- /dev/null +++ b/pkg/fanal/analyzer/language/java/gradle/testdata/cache/caches/modules-2/files-2.1/org.hamcrest/hamcrest-core/1.3/872e413497b906e7c9fa85ccc96046c5d1ef7ece/hamcrest-core-1.3.pom @@ -0,0 +1,18 @@ + + + 4.0.0 + + + org.hamcrest + hamcrest-parent + 1.3 + + + hamcrest-core + jar + Hamcrest Core + + This is the core API of hamcrest matcher framework to be used by third-party framework providers. 
This includes the a foundation set of matcher implementations for common operations. + + diff --git a/pkg/fanal/analyzer/language/java/gradle/testdata/happy.lockfile b/pkg/fanal/analyzer/language/java/gradle/testdata/happy.lockfile deleted file mode 100644 index 3b965af31665..000000000000 --- a/pkg/fanal/analyzer/language/java/gradle/testdata/happy.lockfile +++ /dev/null @@ -1,5 +0,0 @@ -# This is a Gradle generated file for dependency locking. -# Manual edits can break the build and are not advised. -# This file is expected to be part of source control. -com.example:example:0.0.1=classpath -empty= \ No newline at end of file diff --git a/pkg/fanal/analyzer/language/java/gradle/testdata/empty.lockfile b/pkg/fanal/analyzer/language/java/gradle/testdata/lockfiles/empty/gradle.lockfile similarity index 100% rename from pkg/fanal/analyzer/language/java/gradle/testdata/empty.lockfile rename to pkg/fanal/analyzer/language/java/gradle/testdata/lockfiles/empty/gradle.lockfile diff --git a/pkg/fanal/analyzer/language/java/gradle/testdata/lockfiles/happy/gradle.lockfile b/pkg/fanal/analyzer/language/java/gradle/testdata/lockfiles/happy/gradle.lockfile new file mode 100644 index 000000000000..957bb968cc8f --- /dev/null +++ b/pkg/fanal/analyzer/language/java/gradle/testdata/lockfiles/happy/gradle.lockfile @@ -0,0 +1,6 @@ +# This is a Gradle generated file for dependency locking. +# Manual edits can break the build and are not advised. +# This file is expected to be part of source control. 
+junit:junit:4.13=compileClasspath,runtimeClasspath,testCompileClasspath,testRuntimeClasspath +org.hamcrest:hamcrest-core:1.3=compileClasspath,runtimeClasspath,testCompileClasspath,testRuntimeClasspath +empty=annotationProcessor,testAnnotationProcessor \ No newline at end of file diff --git a/pkg/fanal/analyzer/language/java/gradle/testdata/poms/dep-version-as-property.pom b/pkg/fanal/analyzer/language/java/gradle/testdata/poms/dep-version-as-property.pom new file mode 100644 index 000000000000..7b2cd75b39f1 --- /dev/null +++ b/pkg/fanal/analyzer/language/java/gradle/testdata/poms/dep-version-as-property.pom @@ -0,0 +1,21 @@ + + + 4.0.0 + + org.example + example-core + 1.0.0 + + + 2.0.1 + + + + + org.example + example-api + ${coreVersion} + + + diff --git a/pkg/fanal/analyzer/language/java/gradle/testdata/poms/happy.pom b/pkg/fanal/analyzer/language/java/gradle/testdata/poms/happy.pom new file mode 100644 index 000000000000..896fb1df5981 --- /dev/null +++ b/pkg/fanal/analyzer/language/java/gradle/testdata/poms/happy.pom @@ -0,0 +1,23 @@ + + + 4.0.0 + + org.example + example-core + 1.0.0 + + + + Apache License, Version 2.0 + + + + + + org.example + example-api + 2.0.0 + + + diff --git a/pkg/fanal/analyzer/language/java/gradle/testdata/poms/without-groupid-and-version.pom b/pkg/fanal/analyzer/language/java/gradle/testdata/poms/without-groupid-and-version.pom new file mode 100644 index 000000000000..e94fcbaaaca2 --- /dev/null +++ b/pkg/fanal/analyzer/language/java/gradle/testdata/poms/without-groupid-and-version.pom @@ -0,0 +1,19 @@ + + + 4.0.0 + + + org.example + example-parent + 1.3 + + + example-core + + + + Apache License, Version 2.0 + + + diff --git a/pkg/fanal/analyzer/language/java/gradle/testdata/poms/without-licenses-and-deps.pom b/pkg/fanal/analyzer/language/java/gradle/testdata/poms/without-licenses-and-deps.pom new file mode 100644 index 000000000000..5c83a401353d --- /dev/null +++ 
b/pkg/fanal/analyzer/language/java/gradle/testdata/poms/without-licenses-and-deps.pom @@ -0,0 +1,10 @@ + + + 4.0.0 + + org.example + example-core + 1.0.0 + + From 8ec3938e01a93855503e3400eae9831abbb5de4a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 19 Mar 2024 05:04:16 +0400 Subject: [PATCH 07/57] chore(deps): bump google.golang.org/protobuf from 1.32.0 to 1.33.0 (#6321) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 1b887bb6e57f..4cea01b959e2 100644 --- a/go.mod +++ b/go.mod @@ -112,7 +112,7 @@ require ( golang.org/x/term v0.17.0 golang.org/x/text v0.14.0 golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 - google.golang.org/protobuf v1.32.0 + google.golang.org/protobuf v1.33.0 gopkg.in/yaml.v3 v3.0.1 k8s.io/api v0.29.1 k8s.io/utils v0.0.0-20231127182322-b307cd553661 diff --git a/go.sum b/go.sum index f1e1c352ea6c..bdd0da3db721 100644 --- a/go.sum +++ b/go.sum @@ -2380,8 +2380,8 @@ google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQ google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I= -google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= +google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= +google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= gopkg.in/airbrake/gobrake.v2 v2.0.9/go.mod h1:/h5ZAUhDkGaJfjzjKLSjv6zCL6O0LLBxU4K+aSYdM/U= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod 
h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= From 6c81e5505e628dce8175d8e126d7b498d7c12481 Mon Sep 17 00:00:00 2001 From: Stefan Mayr Date: Sun, 24 Mar 2024 08:26:49 +0100 Subject: [PATCH 08/57] chore(ubuntu): Add Ubuntu 22.04 EOL date (#6371) --- misc/eol/data/ubuntu.csv | 1 + 1 file changed, 1 insertion(+) diff --git a/misc/eol/data/ubuntu.csv b/misc/eol/data/ubuntu.csv index 98a9646d6142..bd9e81a95bbc 100644 --- a/misc/eol/data/ubuntu.csv +++ b/misc/eol/data/ubuntu.csv @@ -30,3 +30,4 @@ 19.04,Disco Dingo,disco,2018-10-18,2019-04-18,2020-01-18 19.10,Eoan Ermine,eoan,2019-04-18,2019-10-17,2020-07-17 20.04 LTS,Focal Fossa,focal,2020-04-23,2025-04-23,2030-04-23 +22.04 LTS,Jammy Jellyfish,jammy,2022-04-21,2027-04-21,2032-04-21 From 1870f28461c0faa9c1cf2a7b1d756356a16af4fc Mon Sep 17 00:00:00 2001 From: DmitriyLewen <91113035+DmitriyLewen@users.noreply.github.com> Date: Sun, 24 Mar 2024 15:44:40 +0600 Subject: [PATCH 09/57] fix(report): don't include empty strings in `.vulnerabilities[].identifiers[].url` when `gitlab.tpl` is used (#6348) --- contrib/gitlab.tpl | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/contrib/gitlab.tpl b/contrib/gitlab.tpl index 187438776b72..744c0c9394cb 100644 --- a/contrib/gitlab.tpl +++ b/contrib/gitlab.tpl @@ -73,8 +73,11 @@ {{- /* TODO: Type not extractable - https://github.com/aquasecurity/trivy-db/pull/24 */}} "type": "cve", "name": "{{ .VulnerabilityID }}", - "value": "{{ .VulnerabilityID }}", + "value": "{{ .VulnerabilityID }}" + {{- /* cf. https://gitlab.com/gitlab-org/security-products/security-report-schemas/-/blob/e3d280d7f0862ca66a1555ea8b24016a004bb914/dist/container-scanning-report-format.json#L157-179 */}} + {{- if .PrimaryURL | regexMatch "^(https?|ftp)://.+" -}}, "url": "{{ .PrimaryURL }}" + {{- end }} } ], "links": [ @@ -85,9 +88,13 @@ {{- else -}} , {{- end -}} + {{- if . 
| regexMatch "^(https?|ftp)://.+" -}} { - "url": "{{ regexFind "[^ ]+" . }}" + "url": "{{ . }}" } + {{- else -}} + {{- $l_first = true }} + {{- end -}} {{- end }} ] } From e866bd5b5d4a654c13436f516ef9c258a8367e86 Mon Sep 17 00:00:00 2001 From: DmitriyLewen <91113035+DmitriyLewen@users.noreply.github.com> Date: Sun, 24 Mar 2024 15:45:45 +0600 Subject: [PATCH 10/57] fix(sbom): add check for `CreationInfo` to nil when detecting SPDX created using Trivy (#6346) --- pkg/sbom/spdx/unmarshal.go | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pkg/sbom/spdx/unmarshal.go b/pkg/sbom/spdx/unmarshal.go index 5b1d4138e7cb..14b34c1c0caf 100644 --- a/pkg/sbom/spdx/unmarshal.go +++ b/pkg/sbom/spdx/unmarshal.go @@ -255,6 +255,10 @@ func (s *SPDX) parseExternalReferences(refs []*spdx.PackageExternalReference) (* } func (s *SPDX) isTrivySBOM(spdxDocument *spdx.Document) bool { + if spdxDocument == nil || spdxDocument.CreationInfo == nil || spdxDocument.CreationInfo.Creators == nil { + return false + } + for _, c := range spdxDocument.CreationInfo.Creators { if c.CreatorType == "Tool" && strings.HasPrefix(c.Creator, "trivy") { return true From a2482c14e12df8e242b83361a9a3379691f65d95 Mon Sep 17 00:00:00 2001 From: DmitriyLewen <91113035+DmitriyLewen@users.noreply.github.com> Date: Sun, 24 Mar 2024 15:46:56 +0600 Subject: [PATCH 11/57] fix(server): add Locations for `Packages` in client/server mode (#6366) --- integration/client_server_test.go | 14 + pkg/rpc/convert.go | 24 + pkg/rpc/convert_test.go | 40 ++ rpc/common/service.pb.go | 1036 ++++++++++++++++------------- rpc/common/service.proto | 28 +- 5 files changed, 655 insertions(+), 487 deletions(-) diff --git a/integration/client_server_test.go b/integration/client_server_test.go index 2b21479e0f16..f217021658ae 100644 --- a/integration/client_server_test.go +++ b/integration/client_server_test.go @@ -242,6 +242,16 @@ func TestClientServer(t *testing.T) { }, golden: "testdata/pom.json.golden", }, + { + name: "scan 
package-lock.json with repo command in client/server mode", + args: csArgs{ + Command: "repo", + RemoteAddrOption: "--server", + Target: "testdata/fixtures/repo/npm/", + ListAllPackages: true, + }, + golden: "testdata/npm.json.golden", + }, { name: "scan sample.pem with repo command in client/server mode", args: csArgs{ @@ -588,6 +598,10 @@ func setupClient(t *testing.T, c csArgs, addr string, cacheDir string, golden st osArgs = append(osArgs, "--format", "json") } + if c.ListAllPackages { + osArgs = append(osArgs, "--list-all-pkgs") + } + if c.IgnoreUnfixed { osArgs = append(osArgs, "--ignore-unfixed") } diff --git a/pkg/rpc/convert.go b/pkg/rpc/convert.go index 2bf7ba3e5d96..4edec4f1f24e 100644 --- a/pkg/rpc/convert.go +++ b/pkg/rpc/convert.go @@ -65,6 +65,7 @@ func ConvertToRPCPkgs(pkgs []ftypes.Package) []*common.Package { SrcRelease: pkg.SrcRelease, SrcEpoch: int32(pkg.SrcEpoch), Licenses: pkg.Licenses, + Locations: ConvertToRPCLocations(pkg.Locations), Layer: ConvertToRPCLayer(pkg.Layer), FilePath: pkg.FilePath, DependsOn: pkg.DependsOn, @@ -90,6 +91,17 @@ func ConvertToRPCPkgIdentifier(pkg ftypes.PkgIdentifier) *common.PkgIdentifier { } } +func ConvertToRPCLocations(pkgLocs []ftypes.Location) []*common.Location { + var locations []*common.Location + for _, pkgLoc := range pkgLocs { + locations = append(locations, &common.Location{ + StartLine: int32(pkgLoc.StartLine), + EndLine: int32(pkgLoc.EndLine), + }) + } + return locations +} + func ConvertToRPCCustomResources(resources []ftypes.CustomResource) []*common.CustomResource { var rpcResources []*common.CustomResource for _, r := range resources { @@ -207,6 +219,7 @@ func ConvertFromRPCPkgs(rpcPkgs []*common.Package) []ftypes.Package { SrcRelease: pkg.SrcRelease, SrcEpoch: int(pkg.SrcEpoch), Licenses: pkg.Licenses, + Locations: ConvertFromRPCLocation(pkg.Locations), Layer: ConvertFromRPCLayer(pkg.Layer), FilePath: pkg.FilePath, DependsOn: pkg.DependsOn, @@ -237,6 +250,17 @@ func 
ConvertFromRPCPkgIdentifier(pkg *common.PkgIdentifier) ftypes.PkgIdentifier return pkgID } +func ConvertFromRPCLocation(locs []*common.Location) []ftypes.Location { + var pkgLocs []ftypes.Location + for _, loc := range locs { + pkgLocs = append(pkgLocs, ftypes.Location{ + StartLine: int(loc.StartLine), + EndLine: int(loc.EndLine), + }) + } + return pkgLocs +} + // ConvertToRPCVulns returns common.Vulnerability func ConvertToRPCVulns(vulns []types.DetectedVulnerability) []*common.Vulnerability { var rpcVulns []*common.Vulnerability diff --git a/pkg/rpc/convert_test.go b/pkg/rpc/convert_test.go index e804e50bcb08..a74f8eecb99b 100644 --- a/pkg/rpc/convert_test.go +++ b/pkg/rpc/convert_test.go @@ -39,6 +39,16 @@ func TestConvertToRpcPkgs(t *testing.T) { SrcRelease: "1", SrcEpoch: 2, Licenses: []string{"MIT"}, + Locations: []ftypes.Location{ + { + StartLine: 10, + EndLine: 20, + }, + { + StartLine: 22, + EndLine: 32, + }, + }, Layer: ftypes.Layer{ Digest: "sha256:6a428f9f83b0a29f1fdd2ccccca19a9bab805a925b8eddf432a5a3d3da04afbc", DiffID: "sha256:39982b2a789afc156fff00c707d0ff1c6ab4af8f1666a8df4787714059ce24e7", @@ -60,6 +70,16 @@ func TestConvertToRpcPkgs(t *testing.T) { SrcRelease: "1", SrcEpoch: 2, Licenses: []string{"MIT"}, + Locations: []*common.Location{ + { + StartLine: 10, + EndLine: 20, + }, + { + StartLine: 22, + EndLine: 32, + }, + }, Layer: &common.Layer{ Digest: "sha256:6a428f9f83b0a29f1fdd2ccccca19a9bab805a925b8eddf432a5a3d3da04afbc", DiffId: "sha256:39982b2a789afc156fff00c707d0ff1c6ab4af8f1666a8df4787714059ce24e7", @@ -101,6 +121,16 @@ func TestConvertFromRpcPkgs(t *testing.T) { SrcRelease: "1", SrcEpoch: 2, Licenses: []string{"MIT"}, + Locations: []*common.Location{ + { + StartLine: 10, + EndLine: 20, + }, + { + StartLine: 22, + EndLine: 32, + }, + }, Layer: &common.Layer{ Digest: "sha256:6a428f9f83b0a29f1fdd2ccccca19a9bab805a925b8eddf432a5a3d3da04afbc", DiffId: "sha256:39982b2a789afc156fff00c707d0ff1c6ab4af8f1666a8df4787714059ce24e7", @@ -122,6 +152,16 
@@ func TestConvertFromRpcPkgs(t *testing.T) { SrcRelease: "1", SrcEpoch: 2, Licenses: []string{"MIT"}, + Locations: []ftypes.Location{ + { + StartLine: 10, + EndLine: 20, + }, + { + StartLine: 22, + EndLine: 32, + }, + }, Layer: ftypes.Layer{ Digest: "sha256:6a428f9f83b0a29f1fdd2ccccca19a9bab805a925b8eddf432a5a3d3da04afbc", DiffID: "sha256:39982b2a789afc156fff00c707d0ff1c6ab4af8f1666a8df4787714059ce24e7", diff --git a/rpc/common/service.pb.go b/rpc/common/service.pb.go index 33e8f40c7c2f..0174ab2c7f5f 100644 --- a/rpc/common/service.pb.go +++ b/rpc/common/service.pb.go @@ -138,7 +138,7 @@ func (x LicenseCategory_Enum) Number() protoreflect.EnumNumber { // Deprecated: Use LicenseCategory_Enum.Descriptor instead. func (LicenseCategory_Enum) EnumDescriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{23, 0} + return file_rpc_common_service_proto_rawDescGZIP(), []int{24, 0} } type LicenseType_Enum int32 @@ -190,7 +190,7 @@ func (x LicenseType_Enum) Number() protoreflect.EnumNumber { // Deprecated: Use LicenseType_Enum.Descriptor instead. func (LicenseType_Enum) EnumDescriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{24, 0} + return file_rpc_common_service_proto_rawDescGZIP(), []int{25, 0} } type OS struct { @@ -453,17 +453,18 @@ type Package struct { Arch string `protobuf:"bytes,5,opt,name=arch,proto3" json:"arch,omitempty"` // src package containing some binary packages // e.g. 
bind - SrcName string `protobuf:"bytes,6,opt,name=src_name,json=srcName,proto3" json:"src_name,omitempty"` - SrcVersion string `protobuf:"bytes,7,opt,name=src_version,json=srcVersion,proto3" json:"src_version,omitempty"` - SrcRelease string `protobuf:"bytes,8,opt,name=src_release,json=srcRelease,proto3" json:"src_release,omitempty"` - SrcEpoch int32 `protobuf:"varint,9,opt,name=src_epoch,json=srcEpoch,proto3" json:"src_epoch,omitempty"` - Licenses []string `protobuf:"bytes,15,rep,name=licenses,proto3" json:"licenses,omitempty"` - Layer *Layer `protobuf:"bytes,11,opt,name=layer,proto3" json:"layer,omitempty"` - FilePath string `protobuf:"bytes,12,opt,name=file_path,json=filePath,proto3" json:"file_path,omitempty"` - DependsOn []string `protobuf:"bytes,14,rep,name=depends_on,json=dependsOn,proto3" json:"depends_on,omitempty"` - Digest string `protobuf:"bytes,16,opt,name=digest,proto3" json:"digest,omitempty"` - Dev bool `protobuf:"varint,17,opt,name=dev,proto3" json:"dev,omitempty"` - Indirect bool `protobuf:"varint,18,opt,name=indirect,proto3" json:"indirect,omitempty"` + SrcName string `protobuf:"bytes,6,opt,name=src_name,json=srcName,proto3" json:"src_name,omitempty"` + SrcVersion string `protobuf:"bytes,7,opt,name=src_version,json=srcVersion,proto3" json:"src_version,omitempty"` + SrcRelease string `protobuf:"bytes,8,opt,name=src_release,json=srcRelease,proto3" json:"src_release,omitempty"` + SrcEpoch int32 `protobuf:"varint,9,opt,name=src_epoch,json=srcEpoch,proto3" json:"src_epoch,omitempty"` + Licenses []string `protobuf:"bytes,15,rep,name=licenses,proto3" json:"licenses,omitempty"` + Locations []*Location `protobuf:"bytes,20,rep,name=locations,proto3" json:"locations,omitempty"` + Layer *Layer `protobuf:"bytes,11,opt,name=layer,proto3" json:"layer,omitempty"` + FilePath string `protobuf:"bytes,12,opt,name=file_path,json=filePath,proto3" json:"file_path,omitempty"` + DependsOn []string `protobuf:"bytes,14,rep,name=depends_on,json=dependsOn,proto3" 
json:"depends_on,omitempty"` + Digest string `protobuf:"bytes,16,opt,name=digest,proto3" json:"digest,omitempty"` + Dev bool `protobuf:"varint,17,opt,name=dev,proto3" json:"dev,omitempty"` + Indirect bool `protobuf:"varint,18,opt,name=indirect,proto3" json:"indirect,omitempty"` } func (x *Package) Reset() { @@ -582,6 +583,13 @@ func (x *Package) GetLicenses() []string { return nil } +func (x *Package) GetLocations() []*Location { + if x != nil { + return x.Locations + } + return nil +} + func (x *Package) GetLayer() *Layer { if x != nil { return x.Layer @@ -679,6 +687,61 @@ func (x *PkgIdentifier) GetBomRef() string { return "" } +type Location struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + StartLine int32 `protobuf:"varint,1,opt,name=start_line,json=startLine,proto3" json:"start_line,omitempty"` + EndLine int32 `protobuf:"varint,2,opt,name=end_line,json=endLine,proto3" json:"end_line,omitempty"` +} + +func (x *Location) Reset() { + *x = Location{} + if protoimpl.UnsafeEnabled { + mi := &file_rpc_common_service_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Location) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Location) ProtoMessage() {} + +func (x *Location) ProtoReflect() protoreflect.Message { + mi := &file_rpc_common_service_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Location.ProtoReflect.Descriptor instead. 
+func (*Location) Descriptor() ([]byte, []int) { + return file_rpc_common_service_proto_rawDescGZIP(), []int{6} +} + +func (x *Location) GetStartLine() int32 { + if x != nil { + return x.StartLine + } + return 0 +} + +func (x *Location) GetEndLine() int32 { + if x != nil { + return x.EndLine + } + return 0 +} + type Misconfiguration struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -695,7 +758,7 @@ type Misconfiguration struct { func (x *Misconfiguration) Reset() { *x = Misconfiguration{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[6] + mi := &file_rpc_common_service_proto_msgTypes[7] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -708,7 +771,7 @@ func (x *Misconfiguration) String() string { func (*Misconfiguration) ProtoMessage() {} func (x *Misconfiguration) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[6] + mi := &file_rpc_common_service_proto_msgTypes[7] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -721,7 +784,7 @@ func (x *Misconfiguration) ProtoReflect() protoreflect.Message { // Deprecated: Use Misconfiguration.ProtoReflect.Descriptor instead. 
func (*Misconfiguration) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{6} + return file_rpc_common_service_proto_rawDescGZIP(), []int{7} } func (x *Misconfiguration) GetFileType() string { @@ -780,7 +843,7 @@ type MisconfResult struct { func (x *MisconfResult) Reset() { *x = MisconfResult{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[7] + mi := &file_rpc_common_service_proto_msgTypes[8] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -793,7 +856,7 @@ func (x *MisconfResult) String() string { func (*MisconfResult) ProtoMessage() {} func (x *MisconfResult) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[7] + mi := &file_rpc_common_service_proto_msgTypes[8] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -806,7 +869,7 @@ func (x *MisconfResult) ProtoReflect() protoreflect.Message { // Deprecated: Use MisconfResult.ProtoReflect.Descriptor instead. 
func (*MisconfResult) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{7} + return file_rpc_common_service_proto_rawDescGZIP(), []int{8} } func (x *MisconfResult) GetNamespace() string { @@ -855,7 +918,7 @@ type PolicyMetadata struct { func (x *PolicyMetadata) Reset() { *x = PolicyMetadata{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[8] + mi := &file_rpc_common_service_proto_msgTypes[9] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -868,7 +931,7 @@ func (x *PolicyMetadata) String() string { func (*PolicyMetadata) ProtoMessage() {} func (x *PolicyMetadata) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[8] + mi := &file_rpc_common_service_proto_msgTypes[9] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -881,7 +944,7 @@ func (x *PolicyMetadata) ProtoReflect() protoreflect.Message { // Deprecated: Use PolicyMetadata.ProtoReflect.Descriptor instead. 
func (*PolicyMetadata) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{8} + return file_rpc_common_service_proto_rawDescGZIP(), []int{9} } func (x *PolicyMetadata) GetId() string { @@ -965,7 +1028,7 @@ type DetectedMisconfiguration struct { func (x *DetectedMisconfiguration) Reset() { *x = DetectedMisconfiguration{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[9] + mi := &file_rpc_common_service_proto_msgTypes[10] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -978,7 +1041,7 @@ func (x *DetectedMisconfiguration) String() string { func (*DetectedMisconfiguration) ProtoMessage() {} func (x *DetectedMisconfiguration) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[9] + mi := &file_rpc_common_service_proto_msgTypes[10] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -991,7 +1054,7 @@ func (x *DetectedMisconfiguration) ProtoReflect() protoreflect.Message { // Deprecated: Use DetectedMisconfiguration.ProtoReflect.Descriptor instead. 
func (*DetectedMisconfiguration) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{9} + return file_rpc_common_service_proto_rawDescGZIP(), []int{10} } func (x *DetectedMisconfiguration) GetType() string { @@ -1133,7 +1196,7 @@ type Vulnerability struct { func (x *Vulnerability) Reset() { *x = Vulnerability{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[10] + mi := &file_rpc_common_service_proto_msgTypes[11] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1146,7 +1209,7 @@ func (x *Vulnerability) String() string { func (*Vulnerability) ProtoMessage() {} func (x *Vulnerability) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[10] + mi := &file_rpc_common_service_proto_msgTypes[11] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1159,7 +1222,7 @@ func (x *Vulnerability) ProtoReflect() protoreflect.Message { // Deprecated: Use Vulnerability.ProtoReflect.Descriptor instead. 
func (*Vulnerability) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{10} + return file_rpc_common_service_proto_rawDescGZIP(), []int{11} } func (x *Vulnerability) GetVulnerabilityId() string { @@ -1343,7 +1406,7 @@ type DataSource struct { func (x *DataSource) Reset() { *x = DataSource{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[11] + mi := &file_rpc_common_service_proto_msgTypes[12] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1356,7 +1419,7 @@ func (x *DataSource) String() string { func (*DataSource) ProtoMessage() {} func (x *DataSource) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[11] + mi := &file_rpc_common_service_proto_msgTypes[12] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1369,7 +1432,7 @@ func (x *DataSource) ProtoReflect() protoreflect.Message { // Deprecated: Use DataSource.ProtoReflect.Descriptor instead. 
func (*DataSource) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{11} + return file_rpc_common_service_proto_rawDescGZIP(), []int{12} } func (x *DataSource) GetId() string { @@ -1406,7 +1469,7 @@ type Layer struct { func (x *Layer) Reset() { *x = Layer{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[12] + mi := &file_rpc_common_service_proto_msgTypes[13] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1419,7 +1482,7 @@ func (x *Layer) String() string { func (*Layer) ProtoMessage() {} func (x *Layer) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[12] + mi := &file_rpc_common_service_proto_msgTypes[13] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1432,7 +1495,7 @@ func (x *Layer) ProtoReflect() protoreflect.Message { // Deprecated: Use Layer.ProtoReflect.Descriptor instead. 
func (*Layer) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{12} + return file_rpc_common_service_proto_rawDescGZIP(), []int{13} } func (x *Layer) GetDigest() string { @@ -1472,7 +1535,7 @@ type CauseMetadata struct { func (x *CauseMetadata) Reset() { *x = CauseMetadata{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[13] + mi := &file_rpc_common_service_proto_msgTypes[14] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1485,7 +1548,7 @@ func (x *CauseMetadata) String() string { func (*CauseMetadata) ProtoMessage() {} func (x *CauseMetadata) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[13] + mi := &file_rpc_common_service_proto_msgTypes[14] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1498,7 +1561,7 @@ func (x *CauseMetadata) ProtoReflect() protoreflect.Message { // Deprecated: Use CauseMetadata.ProtoReflect.Descriptor instead. 
func (*CauseMetadata) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{13} + return file_rpc_common_service_proto_rawDescGZIP(), []int{14} } func (x *CauseMetadata) GetResource() string { @@ -1557,7 +1620,7 @@ type CVSS struct { func (x *CVSS) Reset() { *x = CVSS{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[14] + mi := &file_rpc_common_service_proto_msgTypes[15] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1570,7 +1633,7 @@ func (x *CVSS) String() string { func (*CVSS) ProtoMessage() {} func (x *CVSS) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[14] + mi := &file_rpc_common_service_proto_msgTypes[15] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1583,7 +1646,7 @@ func (x *CVSS) ProtoReflect() protoreflect.Message { // Deprecated: Use CVSS.ProtoReflect.Descriptor instead. 
func (*CVSS) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{14} + return file_rpc_common_service_proto_rawDescGZIP(), []int{15} } func (x *CVSS) GetV2Vector() string { @@ -1628,7 +1691,7 @@ type CustomResource struct { func (x *CustomResource) Reset() { *x = CustomResource{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[15] + mi := &file_rpc_common_service_proto_msgTypes[16] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1641,7 +1704,7 @@ func (x *CustomResource) String() string { func (*CustomResource) ProtoMessage() {} func (x *CustomResource) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[15] + mi := &file_rpc_common_service_proto_msgTypes[16] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1654,7 +1717,7 @@ func (x *CustomResource) ProtoReflect() protoreflect.Message { // Deprecated: Use CustomResource.ProtoReflect.Descriptor instead. 
func (*CustomResource) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{15} + return file_rpc_common_service_proto_rawDescGZIP(), []int{16} } func (x *CustomResource) GetType() string { @@ -1703,7 +1766,7 @@ type Line struct { func (x *Line) Reset() { *x = Line{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[16] + mi := &file_rpc_common_service_proto_msgTypes[17] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1716,7 +1779,7 @@ func (x *Line) String() string { func (*Line) ProtoMessage() {} func (x *Line) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[16] + mi := &file_rpc_common_service_proto_msgTypes[17] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1729,7 +1792,7 @@ func (x *Line) ProtoReflect() protoreflect.Message { // Deprecated: Use Line.ProtoReflect.Descriptor instead. 
func (*Line) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{16} + return file_rpc_common_service_proto_rawDescGZIP(), []int{17} } func (x *Line) GetNumber() int32 { @@ -1799,7 +1862,7 @@ type Code struct { func (x *Code) Reset() { *x = Code{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[17] + mi := &file_rpc_common_service_proto_msgTypes[18] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1812,7 +1875,7 @@ func (x *Code) String() string { func (*Code) ProtoMessage() {} func (x *Code) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[17] + mi := &file_rpc_common_service_proto_msgTypes[18] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1825,7 +1888,7 @@ func (x *Code) ProtoReflect() protoreflect.Message { // Deprecated: Use Code.ProtoReflect.Descriptor instead. 
func (*Code) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{17} + return file_rpc_common_service_proto_rawDescGZIP(), []int{18} } func (x *Code) GetLines() []*Line { @@ -1854,7 +1917,7 @@ type SecretFinding struct { func (x *SecretFinding) Reset() { *x = SecretFinding{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[18] + mi := &file_rpc_common_service_proto_msgTypes[19] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1867,7 +1930,7 @@ func (x *SecretFinding) String() string { func (*SecretFinding) ProtoMessage() {} func (x *SecretFinding) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[18] + mi := &file_rpc_common_service_proto_msgTypes[19] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1880,7 +1943,7 @@ func (x *SecretFinding) ProtoReflect() protoreflect.Message { // Deprecated: Use SecretFinding.ProtoReflect.Descriptor instead. 
func (*SecretFinding) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{18} + return file_rpc_common_service_proto_rawDescGZIP(), []int{19} } func (x *SecretFinding) GetRuleId() string { @@ -1958,7 +2021,7 @@ type Secret struct { func (x *Secret) Reset() { *x = Secret{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[19] + mi := &file_rpc_common_service_proto_msgTypes[20] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1971,7 +2034,7 @@ func (x *Secret) String() string { func (*Secret) ProtoMessage() {} func (x *Secret) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[19] + mi := &file_rpc_common_service_proto_msgTypes[20] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1984,7 +2047,7 @@ func (x *Secret) ProtoReflect() protoreflect.Message { // Deprecated: Use Secret.ProtoReflect.Descriptor instead. 
func (*Secret) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{19} + return file_rpc_common_service_proto_rawDescGZIP(), []int{20} } func (x *Secret) GetFilepath() string { @@ -2018,7 +2081,7 @@ type DetectedLicense struct { func (x *DetectedLicense) Reset() { *x = DetectedLicense{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[20] + mi := &file_rpc_common_service_proto_msgTypes[21] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2031,7 +2094,7 @@ func (x *DetectedLicense) String() string { func (*DetectedLicense) ProtoMessage() {} func (x *DetectedLicense) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[20] + mi := &file_rpc_common_service_proto_msgTypes[21] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2044,7 +2107,7 @@ func (x *DetectedLicense) ProtoReflect() protoreflect.Message { // Deprecated: Use DetectedLicense.ProtoReflect.Descriptor instead. 
func (*DetectedLicense) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{20} + return file_rpc_common_service_proto_rawDescGZIP(), []int{21} } func (x *DetectedLicense) GetSeverity() Severity { @@ -2111,7 +2174,7 @@ type LicenseFile struct { func (x *LicenseFile) Reset() { *x = LicenseFile{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[21] + mi := &file_rpc_common_service_proto_msgTypes[22] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2124,7 +2187,7 @@ func (x *LicenseFile) String() string { func (*LicenseFile) ProtoMessage() {} func (x *LicenseFile) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[21] + mi := &file_rpc_common_service_proto_msgTypes[22] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2137,7 +2200,7 @@ func (x *LicenseFile) ProtoReflect() protoreflect.Message { // Deprecated: Use LicenseFile.ProtoReflect.Descriptor instead. 
func (*LicenseFile) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{21} + return file_rpc_common_service_proto_rawDescGZIP(), []int{22} } func (x *LicenseFile) GetLicenseType() LicenseType_Enum { @@ -2189,7 +2252,7 @@ type LicenseFinding struct { func (x *LicenseFinding) Reset() { *x = LicenseFinding{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[22] + mi := &file_rpc_common_service_proto_msgTypes[23] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2202,7 +2265,7 @@ func (x *LicenseFinding) String() string { func (*LicenseFinding) ProtoMessage() {} func (x *LicenseFinding) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[22] + mi := &file_rpc_common_service_proto_msgTypes[23] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2215,7 +2278,7 @@ func (x *LicenseFinding) ProtoReflect() protoreflect.Message { // Deprecated: Use LicenseFinding.ProtoReflect.Descriptor instead. 
func (*LicenseFinding) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{22} + return file_rpc_common_service_proto_rawDescGZIP(), []int{23} } func (x *LicenseFinding) GetCategory() LicenseCategory_Enum { @@ -2258,7 +2321,7 @@ type LicenseCategory struct { func (x *LicenseCategory) Reset() { *x = LicenseCategory{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[23] + mi := &file_rpc_common_service_proto_msgTypes[24] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2271,7 +2334,7 @@ func (x *LicenseCategory) String() string { func (*LicenseCategory) ProtoMessage() {} func (x *LicenseCategory) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[23] + mi := &file_rpc_common_service_proto_msgTypes[24] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2284,7 +2347,7 @@ func (x *LicenseCategory) ProtoReflect() protoreflect.Message { // Deprecated: Use LicenseCategory.ProtoReflect.Descriptor instead. 
func (*LicenseCategory) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{23} + return file_rpc_common_service_proto_rawDescGZIP(), []int{24} } type LicenseType struct { @@ -2296,7 +2359,7 @@ type LicenseType struct { func (x *LicenseType) Reset() { *x = LicenseType{} if protoimpl.UnsafeEnabled { - mi := &file_rpc_common_service_proto_msgTypes[24] + mi := &file_rpc_common_service_proto_msgTypes[25] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2309,7 +2372,7 @@ func (x *LicenseType) String() string { func (*LicenseType) ProtoMessage() {} func (x *LicenseType) ProtoReflect() protoreflect.Message { - mi := &file_rpc_common_service_proto_msgTypes[24] + mi := &file_rpc_common_service_proto_msgTypes[25] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2322,7 +2385,7 @@ func (x *LicenseType) ProtoReflect() protoreflect.Message { // Deprecated: Use LicenseType.ProtoReflect.Descriptor instead. 
func (*LicenseType) Descriptor() ([]byte, []int) { - return file_rpc_common_service_proto_rawDescGZIP(), []int{24} + return file_rpc_common_service_proto_rawDescGZIP(), []int{25} } var File_rpc_common_service_proto protoreflect.FileDescriptor @@ -2357,7 +2420,7 @@ var file_rpc_common_service_proto_rawDesc = []byte{ 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x12, 0x33, 0x0a, 0x09, 0x6c, 0x69, 0x62, 0x72, 0x61, 0x72, 0x69, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x50, 0x61, 0x63, 0x6b, 0x61, - 0x67, 0x65, 0x52, 0x09, 0x6c, 0x69, 0x62, 0x72, 0x61, 0x72, 0x69, 0x65, 0x73, 0x22, 0x8b, 0x04, + 0x67, 0x65, 0x52, 0x09, 0x6c, 0x69, 0x62, 0x72, 0x61, 0x72, 0x69, 0x65, 0x73, 0x22, 0xc1, 0x04, 0x0a, 0x07, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x18, 0x0a, @@ -2379,330 +2442,337 @@ var file_rpc_common_service_proto_rawDesc = []byte{ 0x6c, 0x65, 0x61, 0x73, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x73, 0x72, 0x63, 0x5f, 0x65, 0x70, 0x6f, 0x63, 0x68, 0x18, 0x09, 0x20, 0x01, 0x28, 0x05, 0x52, 0x08, 0x73, 0x72, 0x63, 0x45, 0x70, 0x6f, 0x63, 0x68, 0x12, 0x1a, 0x0a, 0x08, 0x6c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x73, 0x18, 0x0f, - 0x20, 0x03, 0x28, 0x09, 0x52, 0x08, 0x6c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x73, 0x12, 0x29, - 0x0a, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, - 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4c, 0x61, 0x79, - 0x65, 0x72, 0x52, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x12, 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, - 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x66, 0x69, - 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x12, 0x1d, 
0x0a, 0x0a, 0x64, 0x65, 0x70, 0x65, 0x6e, 0x64, - 0x73, 0x5f, 0x6f, 0x6e, 0x18, 0x0e, 0x20, 0x03, 0x28, 0x09, 0x52, 0x09, 0x64, 0x65, 0x70, 0x65, - 0x6e, 0x64, 0x73, 0x4f, 0x6e, 0x12, 0x16, 0x0a, 0x06, 0x64, 0x69, 0x67, 0x65, 0x73, 0x74, 0x18, - 0x10, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x64, 0x69, 0x67, 0x65, 0x73, 0x74, 0x12, 0x10, 0x0a, - 0x03, 0x64, 0x65, 0x76, 0x18, 0x11, 0x20, 0x01, 0x28, 0x08, 0x52, 0x03, 0x64, 0x65, 0x76, 0x12, - 0x1a, 0x0a, 0x08, 0x69, 0x6e, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x18, 0x12, 0x20, 0x01, 0x28, - 0x08, 0x52, 0x08, 0x69, 0x6e, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x22, 0x3c, 0x0a, 0x0d, 0x50, - 0x6b, 0x67, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x12, 0x12, 0x0a, 0x04, - 0x70, 0x75, 0x72, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x75, 0x72, 0x6c, - 0x12, 0x17, 0x0a, 0x07, 0x62, 0x6f, 0x6d, 0x5f, 0x72, 0x65, 0x66, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x06, 0x62, 0x6f, 0x6d, 0x52, 0x65, 0x66, 0x22, 0xb6, 0x02, 0x0a, 0x10, 0x4d, 0x69, - 0x73, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1b, - 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x66, - 0x69, 0x6c, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, - 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x12, 0x39, 0x0a, 0x09, 0x73, 0x75, 0x63, 0x63, - 0x65, 0x73, 0x73, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x74, 0x72, - 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4d, 0x69, 0x73, 0x63, 0x6f, - 0x6e, 0x66, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x52, 0x09, 0x73, 0x75, 0x63, 0x63, 0x65, 0x73, - 0x73, 0x65, 0x73, 0x12, 0x37, 0x0a, 0x08, 0x77, 0x61, 0x72, 0x6e, 0x69, 0x6e, 0x67, 0x73, 0x18, - 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 
0x63, 0x6f, + 0x20, 0x03, 0x28, 0x09, 0x52, 0x08, 0x6c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x73, 0x12, 0x34, + 0x0a, 0x09, 0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x14, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x16, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, + 0x2e, 0x4c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x09, 0x6c, 0x6f, 0x63, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x29, 0x0a, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x18, 0x0b, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, + 0x6f, 0x6e, 0x2e, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x52, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x12, + 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x0c, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x12, 0x1d, 0x0a, 0x0a, + 0x64, 0x65, 0x70, 0x65, 0x6e, 0x64, 0x73, 0x5f, 0x6f, 0x6e, 0x18, 0x0e, 0x20, 0x03, 0x28, 0x09, + 0x52, 0x09, 0x64, 0x65, 0x70, 0x65, 0x6e, 0x64, 0x73, 0x4f, 0x6e, 0x12, 0x16, 0x0a, 0x06, 0x64, + 0x69, 0x67, 0x65, 0x73, 0x74, 0x18, 0x10, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x64, 0x69, 0x67, + 0x65, 0x73, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x64, 0x65, 0x76, 0x18, 0x11, 0x20, 0x01, 0x28, 0x08, + 0x52, 0x03, 0x64, 0x65, 0x76, 0x12, 0x1a, 0x0a, 0x08, 0x69, 0x6e, 0x64, 0x69, 0x72, 0x65, 0x63, + 0x74, 0x18, 0x12, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x69, 0x6e, 0x64, 0x69, 0x72, 0x65, 0x63, + 0x74, 0x22, 0x3c, 0x0a, 0x0d, 0x50, 0x6b, 0x67, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, + 0x65, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x75, 0x72, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x04, 0x70, 0x75, 0x72, 0x6c, 0x12, 0x17, 0x0a, 0x07, 0x62, 0x6f, 0x6d, 0x5f, 0x72, 0x65, + 0x66, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x62, 0x6f, 0x6d, 0x52, 0x65, 0x66, 0x22, + 0x44, 0x0a, 0x08, 0x4c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1d, 0x0a, 0x0a, 0x73, + 0x74, 0x61, 0x72, 0x74, 
0x5f, 0x6c, 0x69, 0x6e, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, + 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x4c, 0x69, 0x6e, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x65, 0x6e, + 0x64, 0x5f, 0x6c, 0x69, 0x6e, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x07, 0x65, 0x6e, + 0x64, 0x4c, 0x69, 0x6e, 0x65, 0x22, 0xb6, 0x02, 0x0a, 0x10, 0x4d, 0x69, 0x73, 0x63, 0x6f, 0x6e, + 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1b, 0x0a, 0x09, 0x66, 0x69, + 0x6c, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x66, + 0x69, 0x6c, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, + 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, + 0x50, 0x61, 0x74, 0x68, 0x12, 0x39, 0x0a, 0x09, 0x73, 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x65, + 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, + 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4d, 0x69, 0x73, 0x63, 0x6f, 0x6e, 0x66, 0x52, 0x65, + 0x73, 0x75, 0x6c, 0x74, 0x52, 0x09, 0x73, 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x65, 0x73, 0x12, + 0x37, 0x0a, 0x08, 0x77, 0x61, 0x72, 0x6e, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x1b, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, + 0x2e, 0x4d, 0x69, 0x73, 0x63, 0x6f, 0x6e, 0x66, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x52, 0x08, + 0x77, 0x61, 0x72, 0x6e, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x37, 0x0a, 0x08, 0x66, 0x61, 0x69, 0x6c, + 0x75, 0x72, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x74, 0x72, 0x69, + 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4d, 0x69, 0x73, 0x63, 0x6f, 0x6e, + 0x66, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x52, 0x08, 0x66, 0x61, 0x69, 0x6c, 0x75, 0x72, 0x65, + 0x73, 0x12, 0x3b, 0x0a, 0x0a, 0x65, 0x78, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, + 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x74, 0x72, 
0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4d, 0x69, 0x73, 0x63, 0x6f, 0x6e, 0x66, 0x52, 0x65, 0x73, 0x75, - 0x6c, 0x74, 0x52, 0x08, 0x77, 0x61, 0x72, 0x6e, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x37, 0x0a, 0x08, - 0x66, 0x61, 0x69, 0x6c, 0x75, 0x72, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, - 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4d, 0x69, - 0x73, 0x63, 0x6f, 0x6e, 0x66, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x52, 0x08, 0x66, 0x61, 0x69, - 0x6c, 0x75, 0x72, 0x65, 0x73, 0x12, 0x3b, 0x0a, 0x0a, 0x65, 0x78, 0x63, 0x65, 0x70, 0x74, 0x69, - 0x6f, 0x6e, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x74, 0x72, 0x69, 0x76, - 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4d, 0x69, 0x73, 0x63, 0x6f, 0x6e, 0x66, - 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x52, 0x0a, 0x65, 0x78, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x22, 0xf3, 0x01, 0x0a, 0x0d, 0x4d, 0x69, 0x73, 0x63, 0x6f, 0x6e, 0x66, 0x52, 0x65, - 0x73, 0x75, 0x6c, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, - 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, - 0x63, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x45, 0x0a, 0x0f, - 0x70, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, - 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, - 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x4d, 0x65, 0x74, 0x61, 0x64, - 0x61, 0x74, 0x61, 0x52, 0x0e, 0x70, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x4d, 0x65, 0x74, 0x61, 0x64, - 0x61, 0x74, 0x61, 0x12, 0x42, 0x0a, 0x0e, 0x63, 0x61, 0x75, 0x73, 0x65, 0x5f, 0x6d, 0x65, 0x74, - 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x74, 0x72, - 
0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x43, 0x61, 0x75, 0x73, 0x65, - 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x0d, 0x63, 0x61, 0x75, 0x73, 0x65, 0x4d, - 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x4a, 0x04, 0x08, 0x03, 0x10, 0x07, 0x52, 0x04, 0x74, - 0x79, 0x70, 0x65, 0x52, 0x02, 0x69, 0x64, 0x52, 0x05, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x52, 0x08, - 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x22, 0xf0, 0x01, 0x0a, 0x0e, 0x50, 0x6f, 0x6c, - 0x69, 0x63, 0x79, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x0e, 0x0a, 0x02, 0x69, - 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x15, 0x0a, 0x06, 0x61, - 0x64, 0x76, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x61, 0x64, 0x76, - 0x49, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x18, - 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x12, 0x20, 0x0a, 0x0b, - 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1a, - 0x0a, 0x08, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x08, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x2f, 0x0a, 0x13, 0x72, 0x65, - 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x65, 0x64, 0x5f, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x73, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, - 0x6e, 0x64, 0x65, 0x64, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x1e, 0x0a, 0x0a, 0x72, - 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x09, 0x52, - 0x0a, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x22, 0xf7, 0x03, 0x0a, 0x18, - 0x44, 0x65, 0x74, 0x65, 0x63, 0x74, 
0x65, 0x64, 0x4d, 0x69, 0x73, 0x63, 0x6f, 0x6e, 0x66, 0x69, - 0x67, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x0e, 0x0a, 0x02, - 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x14, 0x0a, 0x05, - 0x74, 0x69, 0x74, 0x6c, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, 0x69, 0x74, - 0x6c, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, - 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, - 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, - 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x1c, - 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x1e, 0x0a, 0x0a, - 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x0a, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x32, 0x0a, 0x08, - 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, - 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x53, 0x65, - 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x52, 0x08, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, - 0x12, 0x1f, 0x0a, 0x0b, 0x70, 0x72, 0x69, 0x6d, 0x61, 0x72, 0x79, 0x5f, 0x75, 0x72, 0x6c, 0x18, - 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x70, 0x72, 0x69, 0x6d, 0x61, 0x72, 0x79, 0x55, 0x72, - 0x6c, 0x12, 0x1e, 0x0a, 0x0a, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x18, - 0x0a, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0a, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, - 0x73, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 
0x0b, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x29, 0x0a, 0x05, 0x6c, 0x61, 0x79, - 0x65, 0x72, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, - 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x52, 0x05, 0x6c, - 0x61, 0x79, 0x65, 0x72, 0x12, 0x42, 0x0a, 0x0e, 0x63, 0x61, 0x75, 0x73, 0x65, 0x5f, 0x6d, 0x65, - 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x74, - 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x43, 0x61, 0x75, 0x73, - 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x0d, 0x63, 0x61, 0x75, 0x73, 0x65, - 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x15, 0x0a, 0x06, 0x61, 0x76, 0x64, 0x5f, - 0x69, 0x64, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x61, 0x76, 0x64, 0x49, 0x64, 0x12, - 0x14, 0x0a, 0x05, 0x71, 0x75, 0x65, 0x72, 0x79, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, - 0x71, 0x75, 0x65, 0x72, 0x79, 0x22, 0xff, 0x09, 0x0a, 0x0d, 0x56, 0x75, 0x6c, 0x6e, 0x65, 0x72, - 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x12, 0x29, 0x0a, 0x10, 0x76, 0x75, 0x6c, 0x6e, 0x65, - 0x72, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x0f, 0x76, 0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, - 0x49, 0x64, 0x12, 0x19, 0x0a, 0x08, 0x70, 0x6b, 0x67, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x6b, 0x67, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2b, 0x0a, - 0x11, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6c, 0x6c, 0x65, 0x64, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, - 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6c, - 0x6c, 0x65, 0x64, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x23, 0x0a, 0x0d, 0x66, 0x69, - 0x78, 0x65, 0x64, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, - 0x09, 0x52, 
0x0c, 0x66, 0x69, 0x78, 0x65, 0x64, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, - 0x14, 0x0a, 0x05, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, - 0x74, 0x69, 0x74, 0x6c, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, - 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, - 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x32, 0x0a, 0x08, 0x73, 0x65, 0x76, 0x65, 0x72, - 0x69, 0x74, 0x79, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x74, 0x72, 0x69, 0x76, - 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, - 0x79, 0x52, 0x08, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x1e, 0x0a, 0x0a, 0x72, - 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x09, 0x52, - 0x0a, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x12, 0x42, 0x0a, 0x0e, 0x70, - 0x6b, 0x67, 0x5f, 0x69, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x18, 0x19, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, - 0x6f, 0x6e, 0x2e, 0x50, 0x6b, 0x67, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, - 0x52, 0x0d, 0x70, 0x6b, 0x67, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x12, - 0x29, 0x0a, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, - 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4c, 0x61, - 0x79, 0x65, 0x72, 0x52, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x12, 0x27, 0x0a, 0x0f, 0x73, 0x65, - 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x0b, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x0e, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x53, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x12, 0x39, 0x0a, 0x04, 0x63, 0x76, 0x73, 0x73, 0x18, 0x0c, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x25, 0x2e, 0x74, 0x72, 0x69, 0x76, 
0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, - 0x2e, 0x56, 0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x2e, 0x43, - 0x76, 0x73, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x04, 0x63, 0x76, 0x73, 0x73, 0x12, 0x17, - 0x0a, 0x07, 0x63, 0x77, 0x65, 0x5f, 0x69, 0x64, 0x73, 0x18, 0x0d, 0x20, 0x03, 0x28, 0x09, 0x52, - 0x06, 0x63, 0x77, 0x65, 0x49, 0x64, 0x73, 0x12, 0x1f, 0x0a, 0x0b, 0x70, 0x72, 0x69, 0x6d, 0x61, - 0x72, 0x79, 0x5f, 0x75, 0x72, 0x6c, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x70, 0x72, - 0x69, 0x6d, 0x61, 0x72, 0x79, 0x55, 0x72, 0x6c, 0x12, 0x41, 0x0a, 0x0e, 0x70, 0x75, 0x62, 0x6c, - 0x69, 0x73, 0x68, 0x65, 0x64, 0x5f, 0x64, 0x61, 0x74, 0x65, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, - 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0d, 0x70, 0x75, - 0x62, 0x6c, 0x69, 0x73, 0x68, 0x65, 0x64, 0x44, 0x61, 0x74, 0x65, 0x12, 0x48, 0x0a, 0x12, 0x6c, - 0x61, 0x73, 0x74, 0x5f, 0x6d, 0x6f, 0x64, 0x69, 0x66, 0x69, 0x65, 0x64, 0x5f, 0x64, 0x61, 0x74, - 0x65, 0x18, 0x10, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, - 0x61, 0x6d, 0x70, 0x52, 0x10, 0x6c, 0x61, 0x73, 0x74, 0x4d, 0x6f, 0x64, 0x69, 0x66, 0x69, 0x65, - 0x64, 0x44, 0x61, 0x74, 0x65, 0x12, 0x48, 0x0a, 0x14, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x5f, - 0x61, 0x64, 0x76, 0x69, 0x73, 0x6f, 0x72, 0x79, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0x11, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x12, 0x63, 0x75, 0x73, - 0x74, 0x6f, 0x6d, 0x41, 0x64, 0x76, 0x69, 0x73, 0x6f, 0x72, 0x79, 0x44, 0x61, 0x74, 0x61, 0x12, - 0x40, 0x0a, 0x10, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x5f, 0x76, 0x75, 0x6c, 0x6e, 
0x5f, 0x64, - 0x61, 0x74, 0x61, 0x18, 0x12, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, - 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x56, 0x61, 0x6c, 0x75, - 0x65, 0x52, 0x0e, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x56, 0x75, 0x6c, 0x6e, 0x44, 0x61, 0x74, - 0x61, 0x12, 0x1d, 0x0a, 0x0a, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x5f, 0x69, 0x64, 0x73, 0x18, - 0x13, 0x20, 0x03, 0x28, 0x09, 0x52, 0x09, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x49, 0x64, 0x73, - 0x12, 0x39, 0x0a, 0x0b, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, - 0x14, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, - 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, - 0x0a, 0x64, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x58, 0x0a, 0x0f, 0x76, - 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x5f, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x18, 0x15, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, - 0x6d, 0x6f, 0x6e, 0x2e, 0x56, 0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, - 0x79, 0x2e, 0x56, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, - 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0e, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x53, 0x65, 0x76, - 0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x19, 0x0a, 0x08, 0x70, 0x6b, 0x67, 0x5f, 0x70, 0x61, 0x74, - 0x68, 0x18, 0x16, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x6b, 0x67, 0x50, 0x61, 0x74, 0x68, - 0x12, 0x15, 0x0a, 0x06, 0x70, 0x6b, 0x67, 0x5f, 0x69, 0x64, 0x18, 0x17, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x05, 0x70, 0x6b, 0x67, 0x49, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, - 0x73, 0x18, 0x18, 0x20, 0x01, 0x28, 0x05, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x1a, - 0x4b, 0x0a, 0x09, 0x43, 0x76, 0x73, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, - 0x6b, 0x65, 0x79, 0x18, 
0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x28, - 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, - 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x43, 0x56, 0x53, - 0x53, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x59, 0x0a, 0x13, - 0x56, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x45, 0x6e, - 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2c, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, - 0x6d, 0x6f, 0x6e, 0x2e, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x52, 0x05, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x42, 0x0a, 0x0a, 0x44, 0x61, 0x74, 0x61, 0x53, - 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, 0x6c, - 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x22, 0x57, 0x0a, 0x05, 0x4c, - 0x61, 0x79, 0x65, 0x72, 0x12, 0x16, 0x0a, 0x06, 0x64, 0x69, 0x67, 0x65, 0x73, 0x74, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x64, 0x69, 0x67, 0x65, 0x73, 0x74, 0x12, 0x17, 0x0a, 0x07, - 0x64, 0x69, 0x66, 0x66, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x64, - 0x69, 0x66, 0x66, 0x49, 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, - 0x5f, 0x62, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, - 0x65, 0x64, 0x42, 0x79, 0x22, 0xc3, 0x01, 0x0a, 0x0d, 0x43, 0x61, 0x75, 0x73, 0x65, 0x4d, 0x65, - 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x1a, 0x0a, 0x08, 
0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, - 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, - 0x63, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x12, 0x18, - 0x0a, 0x07, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x07, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x72, - 0x74, 0x5f, 0x6c, 0x69, 0x6e, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x73, 0x74, - 0x61, 0x72, 0x74, 0x4c, 0x69, 0x6e, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x65, 0x6e, 0x64, 0x5f, 0x6c, - 0x69, 0x6e, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x05, 0x52, 0x07, 0x65, 0x6e, 0x64, 0x4c, 0x69, - 0x6e, 0x65, 0x12, 0x26, 0x0a, 0x04, 0x63, 0x6f, 0x64, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x12, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, - 0x43, 0x6f, 0x64, 0x65, 0x52, 0x04, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x76, 0x0a, 0x04, 0x43, 0x56, - 0x53, 0x53, 0x12, 0x1b, 0x0a, 0x09, 0x76, 0x32, 0x5f, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x76, 0x32, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x12, - 0x1b, 0x0a, 0x09, 0x76, 0x33, 0x5f, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x08, 0x76, 0x33, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x19, 0x0a, 0x08, - 0x76, 0x32, 0x5f, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x01, 0x52, 0x07, - 0x76, 0x32, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x76, 0x33, 0x5f, 0x73, 0x63, - 0x6f, 0x72, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x01, 0x52, 0x07, 0x76, 0x33, 0x53, 0x63, 0x6f, - 0x72, 0x65, 0x22, 0x98, 0x01, 0x0a, 0x0e, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x52, 0x65, 0x73, - 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, - 
0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, - 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x66, 0x69, - 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x12, 0x29, 0x0a, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x18, - 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, - 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x52, 0x05, 0x6c, 0x61, 0x79, 0x65, - 0x72, 0x12, 0x2a, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x6c, 0x74, 0x52, 0x0a, 0x65, 0x78, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0xf3, + 0x01, 0x0a, 0x0d, 0x4d, 0x69, 0x73, 0x63, 0x6f, 0x6e, 0x66, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, + 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x18, + 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x45, 0x0a, 0x0f, 0x70, 0x6f, 0x6c, 0x69, + 0x63, 0x79, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x07, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x1c, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, + 0x2e, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, + 0x0e, 0x70, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, + 0x42, 0x0a, 0x0e, 0x63, 0x61, 0x75, 0x73, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, + 0x61, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, + 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x43, 0x61, 0x75, 0x73, 0x65, 0x4d, 0x65, 0x74, 0x61, + 0x64, 0x61, 0x74, 0x61, 0x52, 0x0d, 0x63, 0x61, 0x75, 0x73, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, + 0x61, 0x74, 0x61, 0x4a, 0x04, 0x08, 
0x03, 0x10, 0x07, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x52, + 0x02, 0x69, 0x64, 0x52, 0x05, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x52, 0x08, 0x73, 0x65, 0x76, 0x65, + 0x72, 0x69, 0x74, 0x79, 0x22, 0xf0, 0x01, 0x0a, 0x0e, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x4d, + 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x15, 0x0a, 0x06, 0x61, 0x64, 0x76, 0x5f, 0x69, + 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x61, 0x64, 0x76, 0x49, 0x64, 0x12, 0x12, + 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, + 0x70, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x05, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, + 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, + 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1a, 0x0a, 0x08, 0x73, 0x65, + 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x73, 0x65, + 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x2f, 0x0a, 0x13, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, + 0x65, 0x6e, 0x64, 0x65, 0x64, 0x5f, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x07, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x12, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x65, 0x64, + 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x1e, 0x0a, 0x0a, 0x72, 0x65, 0x66, 0x65, 0x72, + 0x65, 0x6e, 0x63, 0x65, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0a, 0x72, 0x65, 0x66, + 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x22, 0xf7, 0x03, 0x0a, 0x18, 0x44, 0x65, 0x74, 0x65, + 0x63, 0x74, 0x65, 0x64, 0x4d, 0x69, 0x73, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x0e, 0x0a, 0x02, 
0x69, 0x64, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x69, 0x74, 0x6c, + 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x12, 0x20, + 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, + 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, + 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, + 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x72, 0x65, 0x73, 0x6f, + 0x6c, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x72, 0x65, + 0x73, 0x6f, 0x6c, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x32, 0x0a, 0x08, 0x73, 0x65, 0x76, 0x65, + 0x72, 0x69, 0x74, 0x79, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x74, 0x72, 0x69, + 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, + 0x74, 0x79, 0x52, 0x08, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x1f, 0x0a, 0x0b, + 0x70, 0x72, 0x69, 0x6d, 0x61, 0x72, 0x79, 0x5f, 0x75, 0x72, 0x6c, 0x18, 0x09, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x0a, 0x70, 0x72, 0x69, 0x6d, 0x61, 0x72, 0x79, 0x55, 0x72, 0x6c, 0x12, 0x1e, 0x0a, + 0x0a, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x18, 0x0a, 0x20, 0x03, 0x28, + 0x09, 0x52, 0x0a, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x12, 0x16, 0x0a, + 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, + 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x29, 0x0a, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x18, 0x0c, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, + 0x6d, 0x6f, 
0x6e, 0x2e, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x52, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, + 0x12, 0x42, 0x0a, 0x0e, 0x63, 0x61, 0x75, 0x73, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, + 0x74, 0x61, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, + 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x43, 0x61, 0x75, 0x73, 0x65, 0x4d, 0x65, 0x74, + 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x0d, 0x63, 0x61, 0x75, 0x73, 0x65, 0x4d, 0x65, 0x74, 0x61, + 0x64, 0x61, 0x74, 0x61, 0x12, 0x15, 0x0a, 0x06, 0x61, 0x76, 0x64, 0x5f, 0x69, 0x64, 0x18, 0x0e, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x61, 0x76, 0x64, 0x49, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x71, + 0x75, 0x65, 0x72, 0x79, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x71, 0x75, 0x65, 0x72, + 0x79, 0x22, 0xff, 0x09, 0x0a, 0x0d, 0x56, 0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61, 0x62, 0x69, 0x6c, + 0x69, 0x74, 0x79, 0x12, 0x29, 0x0a, 0x10, 0x76, 0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61, 0x62, 0x69, + 0x6c, 0x69, 0x74, 0x79, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x76, + 0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x49, 0x64, 0x12, 0x19, + 0x0a, 0x08, 0x70, 0x6b, 0x67, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x07, 0x70, 0x6b, 0x67, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2b, 0x0a, 0x11, 0x69, 0x6e, 0x73, + 0x74, 0x61, 0x6c, 0x6c, 0x65, 0x64, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6c, 0x6c, 0x65, 0x64, 0x56, + 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x23, 0x0a, 0x0d, 0x66, 0x69, 0x78, 0x65, 0x64, 0x5f, + 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x66, + 0x69, 0x78, 0x65, 0x64, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x14, 0x0a, 0x05, 0x74, + 0x69, 0x74, 0x6c, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, 0x69, 0x74, 0x6c, + 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 
0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, + 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, + 0x69, 0x6f, 0x6e, 0x12, 0x32, 0x0a, 0x08, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x18, + 0x07, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, + 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x52, 0x08, 0x73, + 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x1e, 0x0a, 0x0a, 0x72, 0x65, 0x66, 0x65, 0x72, + 0x65, 0x6e, 0x63, 0x65, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0a, 0x72, 0x65, 0x66, + 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x12, 0x42, 0x0a, 0x0e, 0x70, 0x6b, 0x67, 0x5f, 0x69, + 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x18, 0x19, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x1b, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x50, + 0x6b, 0x67, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x0d, 0x70, 0x6b, + 0x67, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x12, 0x29, 0x0a, 0x05, 0x6c, + 0x61, 0x79, 0x65, 0x72, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x72, 0x69, + 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x52, + 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x12, 0x27, 0x0a, 0x0f, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, + 0x74, 0x79, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x0e, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, + 0x39, 0x0a, 0x04, 0x63, 0x76, 0x73, 0x73, 0x18, 0x0c, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x25, 0x2e, + 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x56, 0x75, 0x6c, + 0x6e, 0x65, 0x72, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x2e, 0x43, 0x76, 0x73, 0x73, 0x45, + 0x6e, 0x74, 0x72, 0x79, 0x52, 0x04, 0x63, 0x76, 0x73, 0x73, 0x12, 0x17, 0x0a, 0x07, 
0x63, 0x77, + 0x65, 0x5f, 0x69, 0x64, 0x73, 0x18, 0x0d, 0x20, 0x03, 0x28, 0x09, 0x52, 0x06, 0x63, 0x77, 0x65, + 0x49, 0x64, 0x73, 0x12, 0x1f, 0x0a, 0x0b, 0x70, 0x72, 0x69, 0x6d, 0x61, 0x72, 0x79, 0x5f, 0x75, + 0x72, 0x6c, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x70, 0x72, 0x69, 0x6d, 0x61, 0x72, + 0x79, 0x55, 0x72, 0x6c, 0x12, 0x41, 0x0a, 0x0e, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x65, + 0x64, 0x5f, 0x64, 0x61, 0x74, 0x65, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, + 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, + 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0d, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x73, + 0x68, 0x65, 0x64, 0x44, 0x61, 0x74, 0x65, 0x12, 0x48, 0x0a, 0x12, 0x6c, 0x61, 0x73, 0x74, 0x5f, + 0x6d, 0x6f, 0x64, 0x69, 0x66, 0x69, 0x65, 0x64, 0x5f, 0x64, 0x61, 0x74, 0x65, 0x18, 0x10, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, + 0x10, 0x6c, 0x61, 0x73, 0x74, 0x4d, 0x6f, 0x64, 0x69, 0x66, 0x69, 0x65, 0x64, 0x44, 0x61, 0x74, + 0x65, 0x12, 0x48, 0x0a, 0x14, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x5f, 0x61, 0x64, 0x76, 0x69, + 0x73, 0x6f, 0x72, 0x79, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0x11, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, - 0x66, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0xf3, 0x01, - 0x0a, 0x04, 0x4c, 0x69, 0x6e, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x06, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x12, 0x18, - 0x0a, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x12, 0x19, 0x0a, 0x08, 0x69, 0x73, 0x5f, 0x63, - 0x61, 0x75, 0x73, 0x65, 
0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x69, 0x73, 0x43, 0x61, - 0x75, 0x73, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x12, 0x1c, 0x0a, 0x09, 0x74, 0x72, 0x75, 0x6e, 0x63, 0x61, 0x74, 0x65, 0x64, - 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x74, 0x72, 0x75, 0x6e, 0x63, 0x61, 0x74, 0x65, - 0x64, 0x12, 0x20, 0x0a, 0x0b, 0x68, 0x69, 0x67, 0x68, 0x6c, 0x69, 0x67, 0x68, 0x74, 0x65, 0x64, - 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x68, 0x69, 0x67, 0x68, 0x6c, 0x69, 0x67, 0x68, - 0x74, 0x65, 0x64, 0x12, 0x1f, 0x0a, 0x0b, 0x66, 0x69, 0x72, 0x73, 0x74, 0x5f, 0x63, 0x61, 0x75, - 0x73, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x66, 0x69, 0x72, 0x73, 0x74, 0x43, - 0x61, 0x75, 0x73, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x63, 0x61, 0x75, - 0x73, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x6c, 0x61, 0x73, 0x74, 0x43, 0x61, - 0x75, 0x73, 0x65, 0x22, 0x30, 0x0a, 0x04, 0x43, 0x6f, 0x64, 0x65, 0x12, 0x28, 0x0a, 0x05, 0x6c, - 0x69, 0x6e, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x74, 0x72, 0x69, - 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x6e, 0x65, 0x52, 0x05, - 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x22, 0x9f, 0x02, 0x0a, 0x0d, 0x53, 0x65, 0x63, 0x72, 0x65, 0x74, - 0x46, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x12, 0x17, 0x0a, 0x07, 0x72, 0x75, 0x6c, 0x65, 0x5f, - 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x72, 0x75, 0x6c, 0x65, 0x49, 0x64, - 0x12, 0x1a, 0x0a, 0x08, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x08, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x12, 0x1a, 0x0a, 0x08, - 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, - 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x14, 
0x0a, 0x05, 0x74, 0x69, 0x74, 0x6c, - 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x12, 0x1d, - 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x6c, 0x69, 0x6e, 0x65, 0x18, 0x05, 0x20, 0x01, - 0x28, 0x05, 0x52, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x4c, 0x69, 0x6e, 0x65, 0x12, 0x19, 0x0a, - 0x08, 0x65, 0x6e, 0x64, 0x5f, 0x6c, 0x69, 0x6e, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x05, 0x52, - 0x07, 0x65, 0x6e, 0x64, 0x4c, 0x69, 0x6e, 0x65, 0x12, 0x26, 0x0a, 0x04, 0x63, 0x6f, 0x64, 0x65, - 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, - 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x43, 0x6f, 0x64, 0x65, 0x52, 0x04, 0x63, 0x6f, 0x64, 0x65, - 0x12, 0x14, 0x0a, 0x05, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x05, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x12, 0x29, 0x0a, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x18, - 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, - 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x52, 0x05, 0x6c, 0x61, 0x79, 0x65, - 0x72, 0x4a, 0x04, 0x08, 0x09, 0x10, 0x0a, 0x22, 0x5d, 0x0a, 0x06, 0x53, 0x65, 0x63, 0x72, 0x65, - 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x70, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x70, 0x61, 0x74, 0x68, 0x12, 0x37, 0x0a, - 0x08, 0x66, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x1b, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x53, - 0x65, 0x63, 0x72, 0x65, 0x74, 0x46, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x52, 0x08, 0x66, 0x69, - 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x73, 0x22, 0x85, 0x02, 0x0a, 0x0f, 0x44, 0x65, 0x74, 0x65, 0x63, - 0x74, 0x65, 0x64, 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x12, 0x32, 0x0a, 0x08, 0x73, 0x65, - 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x74, - 
0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x53, 0x65, 0x76, 0x65, - 0x72, 0x69, 0x74, 0x79, 0x52, 0x08, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x3e, - 0x0a, 0x08, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, - 0x32, 0x22, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, - 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x43, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x2e, - 0x45, 0x6e, 0x75, 0x6d, 0x52, 0x08, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x12, 0x19, - 0x0a, 0x08, 0x70, 0x6b, 0x67, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x07, 0x70, 0x6b, 0x67, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, - 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x66, 0x69, - 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x05, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x63, 0x6f, - 0x6e, 0x66, 0x69, 0x64, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x02, 0x52, 0x0a, - 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x64, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6c, 0x69, - 0x6e, 0x6b, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6c, 0x69, 0x6e, 0x6b, 0x22, 0xed, - 0x01, 0x0a, 0x0b, 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x12, 0x41, - 0x0a, 0x0c, 0x6c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1e, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, - 0x6d, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x54, 0x79, 0x70, 0x65, 0x2e, - 0x45, 0x6e, 0x75, 0x6d, 0x52, 0x0b, 0x6c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x54, 0x79, 0x70, - 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 
0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x12, 0x19, - 0x0a, 0x08, 0x70, 0x6b, 0x67, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x07, 0x70, 0x6b, 0x67, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x38, 0x0a, 0x08, 0x66, 0x69, 0x6e, - 0x67, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x74, 0x72, - 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x63, 0x65, 0x6e, - 0x73, 0x65, 0x46, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x52, 0x08, 0x66, 0x69, 0x6e, 0x67, 0x69, - 0x6e, 0x67, 0x73, 0x12, 0x29, 0x0a, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x18, 0x05, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, - 0x6e, 0x2e, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x52, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x22, 0x98, - 0x01, 0x0a, 0x0e, 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x46, 0x69, 0x6e, 0x64, 0x69, 0x6e, - 0x67, 0x12, 0x3e, 0x0a, 0x08, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x0e, 0x32, 0x22, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, - 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x43, 0x61, 0x74, 0x65, 0x67, 0x6f, - 0x72, 0x79, 0x2e, 0x45, 0x6e, 0x75, 0x6d, 0x52, 0x08, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, - 0x79, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x64, 0x65, - 0x6e, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x02, 0x52, 0x0a, 0x63, 0x6f, 0x6e, 0x66, 0x69, - 0x64, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6c, 0x69, 0x6e, 0x6b, 0x18, 0x04, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x04, 0x6c, 0x69, 0x6e, 0x6b, 0x22, 0x95, 0x01, 0x0a, 0x0f, 0x4c, 0x69, - 0x63, 0x65, 0x6e, 0x73, 0x65, 0x43, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x22, 0x81, 0x01, - 0x0a, 0x04, 0x45, 0x6e, 0x75, 0x6d, 0x12, 0x0f, 0x0a, 0x0b, 0x55, 0x4e, 
0x53, 0x50, 0x45, 0x43, - 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0d, 0x0a, 0x09, 0x46, 0x4f, 0x52, 0x42, 0x49, - 0x44, 0x44, 0x45, 0x4e, 0x10, 0x01, 0x12, 0x0e, 0x0a, 0x0a, 0x52, 0x45, 0x53, 0x54, 0x52, 0x49, - 0x43, 0x54, 0x45, 0x44, 0x10, 0x02, 0x12, 0x0e, 0x0a, 0x0a, 0x52, 0x45, 0x43, 0x49, 0x50, 0x52, - 0x4f, 0x43, 0x41, 0x4c, 0x10, 0x03, 0x12, 0x0a, 0x0a, 0x06, 0x4e, 0x4f, 0x54, 0x49, 0x43, 0x45, - 0x10, 0x04, 0x12, 0x0e, 0x0a, 0x0a, 0x50, 0x45, 0x52, 0x4d, 0x49, 0x53, 0x53, 0x49, 0x56, 0x45, - 0x10, 0x05, 0x12, 0x10, 0x0a, 0x0c, 0x55, 0x4e, 0x45, 0x4e, 0x43, 0x55, 0x4d, 0x42, 0x45, 0x52, - 0x45, 0x44, 0x10, 0x06, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, - 0x07, 0x22, 0x4e, 0x0a, 0x0b, 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x54, 0x79, 0x70, 0x65, - 0x22, 0x3f, 0x0a, 0x04, 0x45, 0x6e, 0x75, 0x6d, 0x12, 0x0f, 0x0a, 0x0b, 0x55, 0x4e, 0x53, 0x50, - 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x08, 0x0a, 0x04, 0x44, 0x50, 0x4b, - 0x47, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x48, 0x45, 0x41, 0x44, 0x45, 0x52, 0x10, 0x02, 0x12, - 0x10, 0x0a, 0x0c, 0x4c, 0x49, 0x43, 0x45, 0x4e, 0x53, 0x45, 0x5f, 0x46, 0x49, 0x4c, 0x45, 0x10, - 0x03, 0x2a, 0x44, 0x0a, 0x08, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x0b, 0x0a, - 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x07, 0x0a, 0x03, 0x4c, 0x4f, - 0x57, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x4d, 0x45, 0x44, 0x49, 0x55, 0x4d, 0x10, 0x02, 0x12, - 0x08, 0x0a, 0x04, 0x48, 0x49, 0x47, 0x48, 0x10, 0x03, 0x12, 0x0c, 0x0a, 0x08, 0x43, 0x52, 0x49, - 0x54, 0x49, 0x43, 0x41, 0x4c, 0x10, 0x04, 0x42, 0x31, 0x5a, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, - 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x61, 0x71, 0x75, 0x61, 0x73, 0x65, 0x63, 0x75, 0x72, 0x69, - 0x74, 0x79, 0x2f, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2f, 0x72, 0x70, 0x63, 0x2f, 0x63, 0x6f, 0x6d, - 0x6d, 0x6f, 0x6e, 0x3b, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x33, + 
0x66, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x12, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x41, + 0x64, 0x76, 0x69, 0x73, 0x6f, 0x72, 0x79, 0x44, 0x61, 0x74, 0x61, 0x12, 0x40, 0x0a, 0x10, 0x63, + 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x5f, 0x76, 0x75, 0x6c, 0x6e, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, + 0x12, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0e, 0x63, + 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x56, 0x75, 0x6c, 0x6e, 0x44, 0x61, 0x74, 0x61, 0x12, 0x1d, 0x0a, + 0x0a, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x5f, 0x69, 0x64, 0x73, 0x18, 0x13, 0x20, 0x03, 0x28, + 0x09, 0x52, 0x09, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x49, 0x64, 0x73, 0x12, 0x39, 0x0a, 0x0b, + 0x64, 0x61, 0x74, 0x61, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x14, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x18, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, + 0x2e, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x0a, 0x64, 0x61, 0x74, + 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x58, 0x0a, 0x0f, 0x76, 0x65, 0x6e, 0x64, 0x6f, + 0x72, 0x5f, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x18, 0x15, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x2f, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, + 0x56, 0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x2e, 0x56, 0x65, + 0x6e, 0x64, 0x6f, 0x72, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x45, 0x6e, 0x74, 0x72, + 0x79, 0x52, 0x0e, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, + 0x79, 0x12, 0x19, 0x0a, 0x08, 0x70, 0x6b, 0x67, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x16, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x6b, 0x67, 0x50, 0x61, 0x74, 0x68, 0x12, 0x15, 0x0a, 0x06, + 0x70, 0x6b, 0x67, 0x5f, 0x69, 0x64, 0x18, 0x17, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x70, 0x6b, + 0x67, 0x49, 0x64, 0x12, 0x16, 0x0a, 
0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x18, 0x20, + 0x01, 0x28, 0x05, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x1a, 0x4b, 0x0a, 0x09, 0x43, + 0x76, 0x73, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x28, 0x0a, 0x05, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x74, 0x72, 0x69, 0x76, + 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x43, 0x56, 0x53, 0x53, 0x52, 0x05, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x59, 0x0a, 0x13, 0x56, 0x65, 0x6e, 0x64, + 0x6f, 0x72, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, + 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, + 0x79, 0x12, 0x2c, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, + 0x32, 0x16, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, + 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, + 0x02, 0x38, 0x01, 0x22, 0x42, 0x0a, 0x0a, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, + 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, + 0x64, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, 0x6c, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x22, 0x57, 0x0a, 0x05, 0x4c, 0x61, 0x79, 0x65, 0x72, + 0x12, 0x16, 0x0a, 0x06, 0x64, 0x69, 0x67, 0x65, 0x73, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x06, 0x64, 0x69, 0x67, 0x65, 0x73, 0x74, 0x12, 0x17, 0x0a, 0x07, 0x64, 0x69, 0x66, 0x66, + 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x64, 0x69, 0x66, 0x66, 0x49, + 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 
0x5f, 0x62, 0x79, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x42, 0x79, + 0x22, 0xc3, 0x01, 0x0a, 0x0d, 0x43, 0x61, 0x75, 0x73, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, + 0x74, 0x61, 0x12, 0x1a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x1a, + 0x0a, 0x08, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x08, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x65, + 0x72, 0x76, 0x69, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, 0x65, 0x72, + 0x76, 0x69, 0x63, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x6c, 0x69, + 0x6e, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x4c, + 0x69, 0x6e, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x65, 0x6e, 0x64, 0x5f, 0x6c, 0x69, 0x6e, 0x65, 0x18, + 0x05, 0x20, 0x01, 0x28, 0x05, 0x52, 0x07, 0x65, 0x6e, 0x64, 0x4c, 0x69, 0x6e, 0x65, 0x12, 0x26, + 0x0a, 0x04, 0x63, 0x6f, 0x64, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x74, + 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x43, 0x6f, 0x64, 0x65, + 0x52, 0x04, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x76, 0x0a, 0x04, 0x43, 0x56, 0x53, 0x53, 0x12, 0x1b, + 0x0a, 0x09, 0x76, 0x32, 0x5f, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x08, 0x76, 0x32, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x1b, 0x0a, 0x09, 0x76, + 0x33, 0x5f, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, + 0x76, 0x33, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x19, 0x0a, 0x08, 0x76, 0x32, 0x5f, 0x73, + 0x63, 0x6f, 0x72, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x01, 0x52, 0x07, 0x76, 0x32, 0x53, 0x63, + 0x6f, 0x72, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x76, 0x33, 0x5f, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x18, + 0x04, 0x20, 
0x01, 0x28, 0x01, 0x52, 0x07, 0x76, 0x33, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x22, 0x98, + 0x01, 0x0a, 0x0e, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, + 0x65, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x70, 0x61, + 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, + 0x74, 0x68, 0x12, 0x29, 0x0a, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, + 0x2e, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x52, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x12, 0x2a, 0x0a, + 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x67, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x56, 0x61, + 0x6c, 0x75, 0x65, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0xf3, 0x01, 0x0a, 0x04, 0x4c, 0x69, + 0x6e, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x05, 0x52, 0x06, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x63, 0x6f, 0x6e, + 0x74, 0x65, 0x6e, 0x74, 0x12, 0x19, 0x0a, 0x08, 0x69, 0x73, 0x5f, 0x63, 0x61, 0x75, 0x73, 0x65, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x69, 0x73, 0x43, 0x61, 0x75, 0x73, 0x65, 0x12, + 0x1e, 0x0a, 0x0a, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x0a, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, + 0x1c, 0x0a, 0x09, 0x74, 0x72, 0x75, 0x6e, 0x63, 0x61, 0x74, 0x65, 0x64, 0x18, 0x05, 0x20, 0x01, + 0x28, 0x08, 0x52, 0x09, 0x74, 0x72, 0x75, 0x6e, 0x63, 0x61, 0x74, 0x65, 0x64, 0x12, 0x20, 0x0a, + 0x0b, 0x68, 0x69, 0x67, 0x68, 0x6c, 0x69, 0x67, 
0x68, 0x74, 0x65, 0x64, 0x18, 0x06, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x0b, 0x68, 0x69, 0x67, 0x68, 0x6c, 0x69, 0x67, 0x68, 0x74, 0x65, 0x64, 0x12, + 0x1f, 0x0a, 0x0b, 0x66, 0x69, 0x72, 0x73, 0x74, 0x5f, 0x63, 0x61, 0x75, 0x73, 0x65, 0x18, 0x07, + 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x66, 0x69, 0x72, 0x73, 0x74, 0x43, 0x61, 0x75, 0x73, 0x65, + 0x12, 0x1d, 0x0a, 0x0a, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x63, 0x61, 0x75, 0x73, 0x65, 0x18, 0x08, + 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x6c, 0x61, 0x73, 0x74, 0x43, 0x61, 0x75, 0x73, 0x65, 0x22, + 0x30, 0x0a, 0x04, 0x43, 0x6f, 0x64, 0x65, 0x12, 0x28, 0x0a, 0x05, 0x6c, 0x69, 0x6e, 0x65, 0x73, + 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, + 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x6e, 0x65, 0x52, 0x05, 0x6c, 0x69, 0x6e, 0x65, + 0x73, 0x22, 0x9f, 0x02, 0x0a, 0x0d, 0x53, 0x65, 0x63, 0x72, 0x65, 0x74, 0x46, 0x69, 0x6e, 0x64, + 0x69, 0x6e, 0x67, 0x12, 0x17, 0x0a, 0x07, 0x72, 0x75, 0x6c, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x72, 0x75, 0x6c, 0x65, 0x49, 0x64, 0x12, 0x1a, 0x0a, 0x08, + 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, + 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x12, 0x1a, 0x0a, 0x08, 0x73, 0x65, 0x76, 0x65, + 0x72, 0x69, 0x74, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x73, 0x65, 0x76, 0x65, + 0x72, 0x69, 0x74, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x18, 0x04, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x74, + 0x61, 0x72, 0x74, 0x5f, 0x6c, 0x69, 0x6e, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, + 0x73, 0x74, 0x61, 0x72, 0x74, 0x4c, 0x69, 0x6e, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x65, 0x6e, 0x64, + 0x5f, 0x6c, 0x69, 0x6e, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x05, 0x52, 0x07, 0x65, 0x6e, 0x64, + 0x4c, 0x69, 0x6e, 0x65, 0x12, 0x26, 0x0a, 0x04, 0x63, 0x6f, 0x64, 0x65, 0x18, 0x07, 
0x20, 0x01, + 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, + 0x6e, 0x2e, 0x43, 0x6f, 0x64, 0x65, 0x52, 0x04, 0x63, 0x6f, 0x64, 0x65, 0x12, 0x14, 0x0a, 0x05, + 0x6d, 0x61, 0x74, 0x63, 0x68, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6d, 0x61, 0x74, + 0x63, 0x68, 0x12, 0x29, 0x0a, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x18, 0x0a, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, + 0x2e, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x52, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x4a, 0x04, 0x08, + 0x09, 0x10, 0x0a, 0x22, 0x5d, 0x0a, 0x06, 0x53, 0x65, 0x63, 0x72, 0x65, 0x74, 0x12, 0x1a, 0x0a, + 0x08, 0x66, 0x69, 0x6c, 0x65, 0x70, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x08, 0x66, 0x69, 0x6c, 0x65, 0x70, 0x61, 0x74, 0x68, 0x12, 0x37, 0x0a, 0x08, 0x66, 0x69, 0x6e, + 0x64, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x74, 0x72, + 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x53, 0x65, 0x63, 0x72, 0x65, + 0x74, 0x46, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x52, 0x08, 0x66, 0x69, 0x6e, 0x64, 0x69, 0x6e, + 0x67, 0x73, 0x22, 0x85, 0x02, 0x0a, 0x0f, 0x44, 0x65, 0x74, 0x65, 0x63, 0x74, 0x65, 0x64, 0x4c, + 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x12, 0x32, 0x0a, 0x08, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, + 0x74, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, + 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, + 0x52, 0x08, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x3e, 0x0a, 0x08, 0x63, 0x61, + 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x22, 0x2e, 0x74, + 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x63, 0x65, + 0x6e, 0x73, 0x65, 0x43, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x2e, 0x45, 0x6e, 0x75, 0x6d, + 0x52, 0x08, 0x63, 0x61, 
0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x12, 0x19, 0x0a, 0x08, 0x70, 0x6b, + 0x67, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x6b, + 0x67, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x70, 0x61, + 0x74, 0x68, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, + 0x74, 0x68, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x64, + 0x65, 0x6e, 0x63, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x02, 0x52, 0x0a, 0x63, 0x6f, 0x6e, 0x66, + 0x69, 0x64, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6c, 0x69, 0x6e, 0x6b, 0x18, 0x07, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6c, 0x69, 0x6e, 0x6b, 0x22, 0xed, 0x01, 0x0a, 0x0b, 0x4c, + 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x12, 0x41, 0x0a, 0x0c, 0x6c, 0x69, + 0x63, 0x65, 0x6e, 0x73, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, + 0x32, 0x1e, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, + 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x45, 0x6e, 0x75, 0x6d, + 0x52, 0x0b, 0x6c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1b, 0x0a, + 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x12, 0x19, 0x0a, 0x08, 0x70, 0x6b, + 0x67, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x6b, + 0x67, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x38, 0x0a, 0x08, 0x66, 0x69, 0x6e, 0x67, 0x69, 0x6e, 0x67, + 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, + 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x46, 0x69, + 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x52, 0x08, 0x66, 0x69, 0x6e, 
0x67, 0x69, 0x6e, 0x67, 0x73, 0x12, + 0x29, 0x0a, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, + 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4c, 0x61, + 0x79, 0x65, 0x72, 0x52, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x22, 0x98, 0x01, 0x0a, 0x0e, 0x4c, + 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x46, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x12, 0x3e, 0x0a, + 0x08, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, + 0x22, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4c, + 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x43, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x2e, 0x45, + 0x6e, 0x75, 0x6d, 0x52, 0x08, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x12, 0x12, 0x0a, + 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, + 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x64, 0x65, 0x6e, 0x63, 0x65, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x02, 0x52, 0x0a, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x64, 0x65, 0x6e, 0x63, + 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6c, 0x69, 0x6e, 0x6b, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x04, 0x6c, 0x69, 0x6e, 0x6b, 0x22, 0x95, 0x01, 0x0a, 0x0f, 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, + 0x65, 0x43, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x22, 0x81, 0x01, 0x0a, 0x04, 0x45, 0x6e, + 0x75, 0x6d, 0x12, 0x0f, 0x0a, 0x0b, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, + 0x44, 0x10, 0x00, 0x12, 0x0d, 0x0a, 0x09, 0x46, 0x4f, 0x52, 0x42, 0x49, 0x44, 0x44, 0x45, 0x4e, + 0x10, 0x01, 0x12, 0x0e, 0x0a, 0x0a, 0x52, 0x45, 0x53, 0x54, 0x52, 0x49, 0x43, 0x54, 0x45, 0x44, + 0x10, 0x02, 0x12, 0x0e, 0x0a, 0x0a, 0x52, 0x45, 0x43, 0x49, 0x50, 0x52, 0x4f, 0x43, 0x41, 0x4c, + 0x10, 0x03, 0x12, 0x0a, 0x0a, 0x06, 0x4e, 0x4f, 0x54, 0x49, 0x43, 0x45, 0x10, 0x04, 0x12, 0x0e, + 0x0a, 0x0a, 0x50, 0x45, 0x52, 0x4d, 0x49, 0x53, 0x53, 0x49, 0x56, 0x45, 0x10, 0x05, 0x12, 0x10, + 
0x0a, 0x0c, 0x55, 0x4e, 0x45, 0x4e, 0x43, 0x55, 0x4d, 0x42, 0x45, 0x52, 0x45, 0x44, 0x10, 0x06, + 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x07, 0x22, 0x4e, 0x0a, + 0x0b, 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x54, 0x79, 0x70, 0x65, 0x22, 0x3f, 0x0a, 0x04, + 0x45, 0x6e, 0x75, 0x6d, 0x12, 0x0f, 0x0a, 0x0b, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, + 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x08, 0x0a, 0x04, 0x44, 0x50, 0x4b, 0x47, 0x10, 0x01, 0x12, + 0x0a, 0x0a, 0x06, 0x48, 0x45, 0x41, 0x44, 0x45, 0x52, 0x10, 0x02, 0x12, 0x10, 0x0a, 0x0c, 0x4c, + 0x49, 0x43, 0x45, 0x4e, 0x53, 0x45, 0x5f, 0x46, 0x49, 0x4c, 0x45, 0x10, 0x03, 0x2a, 0x44, 0x0a, + 0x08, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, + 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x07, 0x0a, 0x03, 0x4c, 0x4f, 0x57, 0x10, 0x01, 0x12, + 0x0a, 0x0a, 0x06, 0x4d, 0x45, 0x44, 0x49, 0x55, 0x4d, 0x10, 0x02, 0x12, 0x08, 0x0a, 0x04, 0x48, + 0x49, 0x47, 0x48, 0x10, 0x03, 0x12, 0x0c, 0x0a, 0x08, 0x43, 0x52, 0x49, 0x54, 0x49, 0x43, 0x41, + 0x4c, 0x10, 0x04, 0x42, 0x31, 0x5a, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, + 0x6d, 0x2f, 0x61, 0x71, 0x75, 0x61, 0x73, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x2f, 0x74, + 0x72, 0x69, 0x76, 0x79, 0x2f, 0x72, 0x70, 0x63, 0x2f, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x3b, + 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -2718,7 +2788,7 @@ func file_rpc_common_service_proto_rawDescGZIP() []byte { } var file_rpc_common_service_proto_enumTypes = make([]protoimpl.EnumInfo, 3) -var file_rpc_common_service_proto_msgTypes = make([]protoimpl.MessageInfo, 27) +var file_rpc_common_service_proto_msgTypes = make([]protoimpl.MessageInfo, 28) var file_rpc_common_service_proto_goTypes = []interface{}{ (Severity)(0), // 0: trivy.common.Severity (LicenseCategory_Enum)(0), // 1: trivy.common.LicenseCategory.Enum @@ -2729,74 +2799,76 @@ var 
file_rpc_common_service_proto_goTypes = []interface{}{ (*Application)(nil), // 6: trivy.common.Application (*Package)(nil), // 7: trivy.common.Package (*PkgIdentifier)(nil), // 8: trivy.common.PkgIdentifier - (*Misconfiguration)(nil), // 9: trivy.common.Misconfiguration - (*MisconfResult)(nil), // 10: trivy.common.MisconfResult - (*PolicyMetadata)(nil), // 11: trivy.common.PolicyMetadata - (*DetectedMisconfiguration)(nil), // 12: trivy.common.DetectedMisconfiguration - (*Vulnerability)(nil), // 13: trivy.common.Vulnerability - (*DataSource)(nil), // 14: trivy.common.DataSource - (*Layer)(nil), // 15: trivy.common.Layer - (*CauseMetadata)(nil), // 16: trivy.common.CauseMetadata - (*CVSS)(nil), // 17: trivy.common.CVSS - (*CustomResource)(nil), // 18: trivy.common.CustomResource - (*Line)(nil), // 19: trivy.common.Line - (*Code)(nil), // 20: trivy.common.Code - (*SecretFinding)(nil), // 21: trivy.common.SecretFinding - (*Secret)(nil), // 22: trivy.common.Secret - (*DetectedLicense)(nil), // 23: trivy.common.DetectedLicense - (*LicenseFile)(nil), // 24: trivy.common.LicenseFile - (*LicenseFinding)(nil), // 25: trivy.common.LicenseFinding - (*LicenseCategory)(nil), // 26: trivy.common.LicenseCategory - (*LicenseType)(nil), // 27: trivy.common.LicenseType - nil, // 28: trivy.common.Vulnerability.CvssEntry - nil, // 29: trivy.common.Vulnerability.VendorSeverityEntry - (*timestamppb.Timestamp)(nil), // 30: google.protobuf.Timestamp - (*structpb.Value)(nil), // 31: google.protobuf.Value + (*Location)(nil), // 9: trivy.common.Location + (*Misconfiguration)(nil), // 10: trivy.common.Misconfiguration + (*MisconfResult)(nil), // 11: trivy.common.MisconfResult + (*PolicyMetadata)(nil), // 12: trivy.common.PolicyMetadata + (*DetectedMisconfiguration)(nil), // 13: trivy.common.DetectedMisconfiguration + (*Vulnerability)(nil), // 14: trivy.common.Vulnerability + (*DataSource)(nil), // 15: trivy.common.DataSource + (*Layer)(nil), // 16: trivy.common.Layer + (*CauseMetadata)(nil), 
// 17: trivy.common.CauseMetadata + (*CVSS)(nil), // 18: trivy.common.CVSS + (*CustomResource)(nil), // 19: trivy.common.CustomResource + (*Line)(nil), // 20: trivy.common.Line + (*Code)(nil), // 21: trivy.common.Code + (*SecretFinding)(nil), // 22: trivy.common.SecretFinding + (*Secret)(nil), // 23: trivy.common.Secret + (*DetectedLicense)(nil), // 24: trivy.common.DetectedLicense + (*LicenseFile)(nil), // 25: trivy.common.LicenseFile + (*LicenseFinding)(nil), // 26: trivy.common.LicenseFinding + (*LicenseCategory)(nil), // 27: trivy.common.LicenseCategory + (*LicenseType)(nil), // 28: trivy.common.LicenseType + nil, // 29: trivy.common.Vulnerability.CvssEntry + nil, // 30: trivy.common.Vulnerability.VendorSeverityEntry + (*timestamppb.Timestamp)(nil), // 31: google.protobuf.Timestamp + (*structpb.Value)(nil), // 32: google.protobuf.Value } var file_rpc_common_service_proto_depIdxs = []int32{ 7, // 0: trivy.common.PackageInfo.packages:type_name -> trivy.common.Package 7, // 1: trivy.common.Application.libraries:type_name -> trivy.common.Package 8, // 2: trivy.common.Package.identifier:type_name -> trivy.common.PkgIdentifier - 15, // 3: trivy.common.Package.layer:type_name -> trivy.common.Layer - 10, // 4: trivy.common.Misconfiguration.successes:type_name -> trivy.common.MisconfResult - 10, // 5: trivy.common.Misconfiguration.warnings:type_name -> trivy.common.MisconfResult - 10, // 6: trivy.common.Misconfiguration.failures:type_name -> trivy.common.MisconfResult - 10, // 7: trivy.common.Misconfiguration.exceptions:type_name -> trivy.common.MisconfResult - 11, // 8: trivy.common.MisconfResult.policy_metadata:type_name -> trivy.common.PolicyMetadata - 16, // 9: trivy.common.MisconfResult.cause_metadata:type_name -> trivy.common.CauseMetadata - 0, // 10: trivy.common.DetectedMisconfiguration.severity:type_name -> trivy.common.Severity - 15, // 11: trivy.common.DetectedMisconfiguration.layer:type_name -> trivy.common.Layer - 16, // 12: 
trivy.common.DetectedMisconfiguration.cause_metadata:type_name -> trivy.common.CauseMetadata - 0, // 13: trivy.common.Vulnerability.severity:type_name -> trivy.common.Severity - 8, // 14: trivy.common.Vulnerability.pkg_identifier:type_name -> trivy.common.PkgIdentifier - 15, // 15: trivy.common.Vulnerability.layer:type_name -> trivy.common.Layer - 28, // 16: trivy.common.Vulnerability.cvss:type_name -> trivy.common.Vulnerability.CvssEntry - 30, // 17: trivy.common.Vulnerability.published_date:type_name -> google.protobuf.Timestamp - 30, // 18: trivy.common.Vulnerability.last_modified_date:type_name -> google.protobuf.Timestamp - 31, // 19: trivy.common.Vulnerability.custom_advisory_data:type_name -> google.protobuf.Value - 31, // 20: trivy.common.Vulnerability.custom_vuln_data:type_name -> google.protobuf.Value - 14, // 21: trivy.common.Vulnerability.data_source:type_name -> trivy.common.DataSource - 29, // 22: trivy.common.Vulnerability.vendor_severity:type_name -> trivy.common.Vulnerability.VendorSeverityEntry - 20, // 23: trivy.common.CauseMetadata.code:type_name -> trivy.common.Code - 15, // 24: trivy.common.CustomResource.layer:type_name -> trivy.common.Layer - 31, // 25: trivy.common.CustomResource.data:type_name -> google.protobuf.Value - 19, // 26: trivy.common.Code.lines:type_name -> trivy.common.Line - 20, // 27: trivy.common.SecretFinding.code:type_name -> trivy.common.Code - 15, // 28: trivy.common.SecretFinding.layer:type_name -> trivy.common.Layer - 21, // 29: trivy.common.Secret.findings:type_name -> trivy.common.SecretFinding - 0, // 30: trivy.common.DetectedLicense.severity:type_name -> trivy.common.Severity - 1, // 31: trivy.common.DetectedLicense.category:type_name -> trivy.common.LicenseCategory.Enum - 2, // 32: trivy.common.LicenseFile.license_type:type_name -> trivy.common.LicenseType.Enum - 25, // 33: trivy.common.LicenseFile.fingings:type_name -> trivy.common.LicenseFinding - 15, // 34: trivy.common.LicenseFile.layer:type_name -> 
trivy.common.Layer - 1, // 35: trivy.common.LicenseFinding.category:type_name -> trivy.common.LicenseCategory.Enum - 17, // 36: trivy.common.Vulnerability.CvssEntry.value:type_name -> trivy.common.CVSS - 0, // 37: trivy.common.Vulnerability.VendorSeverityEntry.value:type_name -> trivy.common.Severity - 38, // [38:38] is the sub-list for method output_type - 38, // [38:38] is the sub-list for method input_type - 38, // [38:38] is the sub-list for extension type_name - 38, // [38:38] is the sub-list for extension extendee - 0, // [0:38] is the sub-list for field type_name + 9, // 3: trivy.common.Package.locations:type_name -> trivy.common.Location + 16, // 4: trivy.common.Package.layer:type_name -> trivy.common.Layer + 11, // 5: trivy.common.Misconfiguration.successes:type_name -> trivy.common.MisconfResult + 11, // 6: trivy.common.Misconfiguration.warnings:type_name -> trivy.common.MisconfResult + 11, // 7: trivy.common.Misconfiguration.failures:type_name -> trivy.common.MisconfResult + 11, // 8: trivy.common.Misconfiguration.exceptions:type_name -> trivy.common.MisconfResult + 12, // 9: trivy.common.MisconfResult.policy_metadata:type_name -> trivy.common.PolicyMetadata + 17, // 10: trivy.common.MisconfResult.cause_metadata:type_name -> trivy.common.CauseMetadata + 0, // 11: trivy.common.DetectedMisconfiguration.severity:type_name -> trivy.common.Severity + 16, // 12: trivy.common.DetectedMisconfiguration.layer:type_name -> trivy.common.Layer + 17, // 13: trivy.common.DetectedMisconfiguration.cause_metadata:type_name -> trivy.common.CauseMetadata + 0, // 14: trivy.common.Vulnerability.severity:type_name -> trivy.common.Severity + 8, // 15: trivy.common.Vulnerability.pkg_identifier:type_name -> trivy.common.PkgIdentifier + 16, // 16: trivy.common.Vulnerability.layer:type_name -> trivy.common.Layer + 29, // 17: trivy.common.Vulnerability.cvss:type_name -> trivy.common.Vulnerability.CvssEntry + 31, // 18: trivy.common.Vulnerability.published_date:type_name -> 
google.protobuf.Timestamp + 31, // 19: trivy.common.Vulnerability.last_modified_date:type_name -> google.protobuf.Timestamp + 32, // 20: trivy.common.Vulnerability.custom_advisory_data:type_name -> google.protobuf.Value + 32, // 21: trivy.common.Vulnerability.custom_vuln_data:type_name -> google.protobuf.Value + 15, // 22: trivy.common.Vulnerability.data_source:type_name -> trivy.common.DataSource + 30, // 23: trivy.common.Vulnerability.vendor_severity:type_name -> trivy.common.Vulnerability.VendorSeverityEntry + 21, // 24: trivy.common.CauseMetadata.code:type_name -> trivy.common.Code + 16, // 25: trivy.common.CustomResource.layer:type_name -> trivy.common.Layer + 32, // 26: trivy.common.CustomResource.data:type_name -> google.protobuf.Value + 20, // 27: trivy.common.Code.lines:type_name -> trivy.common.Line + 21, // 28: trivy.common.SecretFinding.code:type_name -> trivy.common.Code + 16, // 29: trivy.common.SecretFinding.layer:type_name -> trivy.common.Layer + 22, // 30: trivy.common.Secret.findings:type_name -> trivy.common.SecretFinding + 0, // 31: trivy.common.DetectedLicense.severity:type_name -> trivy.common.Severity + 1, // 32: trivy.common.DetectedLicense.category:type_name -> trivy.common.LicenseCategory.Enum + 2, // 33: trivy.common.LicenseFile.license_type:type_name -> trivy.common.LicenseType.Enum + 26, // 34: trivy.common.LicenseFile.fingings:type_name -> trivy.common.LicenseFinding + 16, // 35: trivy.common.LicenseFile.layer:type_name -> trivy.common.Layer + 1, // 36: trivy.common.LicenseFinding.category:type_name -> trivy.common.LicenseCategory.Enum + 18, // 37: trivy.common.Vulnerability.CvssEntry.value:type_name -> trivy.common.CVSS + 0, // 38: trivy.common.Vulnerability.VendorSeverityEntry.value:type_name -> trivy.common.Severity + 39, // [39:39] is the sub-list for method output_type + 39, // [39:39] is the sub-list for method input_type + 39, // [39:39] is the sub-list for extension type_name + 39, // [39:39] is the sub-list for extension 
extendee + 0, // [0:39] is the sub-list for field type_name } func init() { file_rpc_common_service_proto_init() } @@ -2878,7 +2950,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Misconfiguration); i { + switch v := v.(*Location); i { case 0: return &v.state case 1: @@ -2890,7 +2962,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*MisconfResult); i { + switch v := v.(*Misconfiguration); i { case 0: return &v.state case 1: @@ -2902,7 +2974,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PolicyMetadata); i { + switch v := v.(*MisconfResult); i { case 0: return &v.state case 1: @@ -2914,7 +2986,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DetectedMisconfiguration); i { + switch v := v.(*PolicyMetadata); i { case 0: return &v.state case 1: @@ -2926,7 +2998,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Vulnerability); i { + switch v := v.(*DetectedMisconfiguration); i { case 0: return &v.state case 1: @@ -2938,7 +3010,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DataSource); i { + switch v := v.(*Vulnerability); i { case 0: return &v.state case 1: @@ -2950,7 +3022,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Layer); i { + switch v := v.(*DataSource); i 
{ case 0: return &v.state case 1: @@ -2962,7 +3034,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CauseMetadata); i { + switch v := v.(*Layer); i { case 0: return &v.state case 1: @@ -2974,7 +3046,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CVSS); i { + switch v := v.(*CauseMetadata); i { case 0: return &v.state case 1: @@ -2986,7 +3058,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CustomResource); i { + switch v := v.(*CVSS); i { case 0: return &v.state case 1: @@ -2998,7 +3070,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Line); i { + switch v := v.(*CustomResource); i { case 0: return &v.state case 1: @@ -3010,7 +3082,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Code); i { + switch v := v.(*Line); i { case 0: return &v.state case 1: @@ -3022,7 +3094,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*SecretFinding); i { + switch v := v.(*Code); i { case 0: return &v.state case 1: @@ -3034,7 +3106,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Secret); i { + switch v := v.(*SecretFinding); i { case 0: return &v.state case 1: @@ -3046,7 +3118,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[20].Exporter 
= func(v interface{}, i int) interface{} { - switch v := v.(*DetectedLicense); i { + switch v := v.(*Secret); i { case 0: return &v.state case 1: @@ -3058,7 +3130,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*LicenseFile); i { + switch v := v.(*DetectedLicense); i { case 0: return &v.state case 1: @@ -3070,7 +3142,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*LicenseFinding); i { + switch v := v.(*LicenseFile); i { case 0: return &v.state case 1: @@ -3082,7 +3154,7 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*LicenseCategory); i { + switch v := v.(*LicenseFinding); i { case 0: return &v.state case 1: @@ -3094,6 +3166,18 @@ func file_rpc_common_service_proto_init() { } } file_rpc_common_service_proto_msgTypes[24].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*LicenseCategory); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_rpc_common_service_proto_msgTypes[25].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*LicenseType); i { case 0: return &v.state @@ -3112,7 +3196,7 @@ func file_rpc_common_service_proto_init() { GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_rpc_common_service_proto_rawDesc, NumEnums: 3, - NumMessages: 27, + NumMessages: 28, NumExtensions: 0, NumServices: 0, }, diff --git a/rpc/common/service.proto b/rpc/common/service.proto index f35d22051deb..d5c1472b4aef 100644 --- a/rpc/common/service.proto +++ b/rpc/common/service.proto @@ -42,17 +42,18 @@ message Package { string arch = 5; // src package containing some binary packages // e.g. 
bind - string src_name = 6; - string src_version = 7; - string src_release = 8; - int32 src_epoch = 9; - repeated string licenses = 15; - Layer layer = 11; - string file_path = 12; - repeated string depends_on = 14; - string digest = 16; - bool dev = 17; - bool indirect = 18; + string src_name = 6; + string src_version = 7; + string src_release = 8; + int32 src_epoch = 9; + repeated string licenses = 15; + repeated Location locations = 20; + Layer layer = 11; + string file_path = 12; + repeated string depends_on = 14; + string digest = 16; + bool dev = 17; + bool indirect = 18; } message PkgIdentifier { @@ -60,6 +61,11 @@ message PkgIdentifier { string bom_ref = 2; } +message Location { + int32 start_line = 1; + int32 end_line = 2; +} + message Misconfiguration { string file_type = 1; string file_path = 2; From 1b68327b656be327586ad7899ba0b4b5d1bc8979 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 24 Mar 2024 13:55:08 +0400 Subject: [PATCH 12/57] chore(deps): bump github.com/docker/docker from 25.0.3+incompatible to 25.0.5+incompatible (#6364) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 4cea01b959e2..ea85b6e33f29 100644 --- a/go.mod +++ b/go.mod @@ -40,7 +40,7 @@ require ( github.com/cheggaaa/pb/v3 v3.1.4 github.com/containerd/containerd v1.7.13 github.com/csaf-poc/csaf_distribution/v3 v3.0.0 - github.com/docker/docker v25.0.3+incompatible + github.com/docker/docker v25.0.5+incompatible github.com/docker/go-connections v0.5.0 github.com/fatih/color v1.16.0 github.com/go-git/go-git/v5 v5.11.0 diff --git a/go.sum b/go.sum index bdd0da3db721..a1af99060dc5 100644 --- a/go.sum +++ b/go.sum @@ -717,8 +717,8 @@ github.com/docker/distribution v2.8.3+incompatible h1:AtKxIZ36LoNK51+Z6RpzLpddBi github.com/docker/distribution 
v2.8.3+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= github.com/docker/docker v20.10.7+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= github.com/docker/docker v23.0.0-rc.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= -github.com/docker/docker v25.0.3+incompatible h1:D5fy/lYmY7bvZa0XTZ5/UJPljor41F+vdyJG5luQLfQ= -github.com/docker/docker v25.0.3+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/docker v25.0.5+incompatible h1:UmQydMduGkrD5nQde1mecF/YnSbTOaPeFIeP5C4W+DE= +github.com/docker/docker v25.0.5+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= github.com/docker/docker-credential-helpers v0.6.3/go.mod h1:WRaJzqw3CTB9bk10avuGsjVBZsD05qeibJ1/TYlvc0Y= github.com/docker/docker-credential-helpers v0.7.0 h1:xtCHsjxogADNZcdv1pKUHXryefjlVRqWqIhk/uXJp0A= github.com/docker/docker-credential-helpers v0.7.0/go.mod h1:rETQfLdHNT3foU5kuNkFR1R1V12OJRRO5lzt2D1b5X0= From 7c409fd270bd71a24da8a06d7be2aa9f5a70321f Mon Sep 17 00:00:00 2001 From: DmitriyLewen <91113035+DmitriyLewen@users.noreply.github.com> Date: Sun, 24 Mar 2024 15:57:32 +0600 Subject: [PATCH 13/57] fix(java): parse modules from `pom.xml` files once (#6312) --- pkg/dependency/parser/java/pom/parse.go | 11 ++++-- pkg/dependency/parser/java/pom/parse_test.go | 37 +++++++++++++++++++ .../module-1/module-2/pom.xml | 16 ++++++++ .../modules-infinity-loop/module-1/pom.xml | 12 ++++++ .../testdata/modules-infinity-loop/pom.xml | 13 +++++++ 5 files changed, 86 insertions(+), 3 deletions(-) create mode 100644 pkg/dependency/parser/java/pom/testdata/modules-infinity-loop/module-1/module-2/pom.xml create mode 100644 pkg/dependency/parser/java/pom/testdata/modules-infinity-loop/module-1/pom.xml create mode 100644 pkg/dependency/parser/java/pom/testdata/modules-infinity-loop/pom.xml diff --git a/pkg/dependency/parser/java/pom/parse.go b/pkg/dependency/parser/java/pom/parse.go index 8abecc5df36c..955f8cfd9e33 
100644 --- a/pkg/dependency/parser/java/pom/parse.go +++ b/pkg/dependency/parser/java/pom/parse.go @@ -105,10 +105,10 @@ func (p *parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, // Cache root POM p.cache.put(result.artifact, result) - return p.parseRoot(root.artifact()) + return p.parseRoot(root.artifact(), make(map[string]struct{})) } -func (p *parser) parseRoot(root artifact) ([]types.Library, []types.Dependency, error) { +func (p *parser) parseRoot(root artifact, uniqModules map[string]struct{}) ([]types.Library, []types.Dependency, error) { // Prepare a queue for dependencies queue := newArtifactQueue() @@ -132,7 +132,12 @@ func (p *parser) parseRoot(root artifact) ([]types.Library, []types.Dependency, // Modules should be handled separately so that they can have independent dependencies. // It means multi-module allows for duplicate dependencies. if art.Module { - moduleLibs, moduleDeps, err := p.parseRoot(art) + if _, ok := uniqModules[art.String()]; ok { + continue + } + uniqModules[art.String()] = struct{}{} + + moduleLibs, moduleDeps, err := p.parseRoot(art, uniqModules) if err != nil { return nil, nil, err } diff --git a/pkg/dependency/parser/java/pom/parse_test.go b/pkg/dependency/parser/java/pom/parse_test.go index 4123d1dde960..b73e40511507 100644 --- a/pkg/dependency/parser/java/pom/parse_test.go +++ b/pkg/dependency/parser/java/pom/parse_test.go @@ -959,6 +959,43 @@ func TestPom_Parse(t *testing.T) { }, }, }, + { + name: "Infinity loop for modules", + inputFile: filepath.Join("testdata", "modules-infinity-loop", "pom.xml"), + local: true, + want: []types.Library{ + // as module + { + ID: "org.example:module-1:2.0.0", + Name: "org.example:module-1", + Version: "2.0.0", + }, + // as dependency + { + ID: "org.example:module-1:2.0.0", + Name: "org.example:module-1", + Version: "2.0.0", + }, + { + ID: "org.example:module-2:3.0.0", + Name: "org.example:module-2", + Version: "3.0.0", + }, + { + ID: "org.example:root:1.0.0", + Name: 
"org.example:root", + Version: "1.0.0", + }, + }, + wantDeps: []types.Dependency{ + { + ID: "org.example:module-2:3.0.0", + DependsOn: []string{ + "org.example:module-1:2.0.0", + }, + }, + }, + }, { name: "multi module soft requirement", inputFile: filepath.Join("testdata", "multi-module-soft-requirement", "pom.xml"), diff --git a/pkg/dependency/parser/java/pom/testdata/modules-infinity-loop/module-1/module-2/pom.xml b/pkg/dependency/parser/java/pom/testdata/modules-infinity-loop/module-1/module-2/pom.xml new file mode 100644 index 000000000000..37f39009ce97 --- /dev/null +++ b/pkg/dependency/parser/java/pom/testdata/modules-infinity-loop/module-1/module-2/pom.xml @@ -0,0 +1,16 @@ + + 4.0.0 + + module-2 + org.example + 3.0.0 + + + + org.example + module-1 + 2.0.0 + + + diff --git a/pkg/dependency/parser/java/pom/testdata/modules-infinity-loop/module-1/pom.xml b/pkg/dependency/parser/java/pom/testdata/modules-infinity-loop/module-1/pom.xml new file mode 100644 index 000000000000..9952a80dc685 --- /dev/null +++ b/pkg/dependency/parser/java/pom/testdata/modules-infinity-loop/module-1/pom.xml @@ -0,0 +1,12 @@ + + 4.0.0 + + module-1 + org.example + 2.0.0 + + + module-2 + + diff --git a/pkg/dependency/parser/java/pom/testdata/modules-infinity-loop/pom.xml b/pkg/dependency/parser/java/pom/testdata/modules-infinity-loop/pom.xml new file mode 100644 index 000000000000..372fefd3fce9 --- /dev/null +++ b/pkg/dependency/parser/java/pom/testdata/modules-infinity-loop/pom.xml @@ -0,0 +1,13 @@ + + 4.0.0 + + root + org.example + 1.0.0 + + + module-1 + module-2 + + From abd62ae74e6b3d7c785717643bb254ecfef0fdac Mon Sep 17 00:00:00 2001 From: Nikita Pivkin Date: Tue, 26 Mar 2024 06:31:28 +0300 Subject: [PATCH 14/57] =?UTF-8?q?fix(terraform):=20=D1=81hecking=20SSE=20e?= =?UTF-8?q?ncryption=20algorithm=20validity=20(#6341)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../adapters/terraform/aws/s3/adapt_test.go | 29 ++++++++++++++++++- 
pkg/iac/adapters/terraform/aws/s3/bucket.go | 10 +++++-- 2 files changed, 36 insertions(+), 3 deletions(-) diff --git a/pkg/iac/adapters/terraform/aws/s3/adapt_test.go b/pkg/iac/adapters/terraform/aws/s3/adapt_test.go index 1d347d3520fe..65394abd3ea7 100644 --- a/pkg/iac/adapters/terraform/aws/s3/adapt_test.go +++ b/pkg/iac/adapters/terraform/aws/s3/adapt_test.go @@ -36,7 +36,7 @@ resource "aws_s3_bucket_public_access_block" "example_access_block"{ hasPublicAccess: true, }, { - desc: "public access block is found when using the bucket name as the lookup", + desc: "public access block is found when using the bucket id as the lookup", source: ` resource "aws_s3_bucket" "example" { bucket = "bucketname" @@ -254,6 +254,33 @@ func Test_Adapt(t *testing.T) { }, }, }, + { + name: "non-valid SSE algorithm", + terraform: ` +resource "aws_s3_bucket" "this" { + bucket = "test" +} + +resource "aws_s3_bucket_server_side_encryption_configuration" "this" { + bucket = aws_s3_bucket.this.id + rule { + apply_server_side_encryption_by_default { + sse_algorithm = "" + } + } +}`, + expected: s3.S3{ + Buckets: []s3.Bucket{ + { + Name: iacTypes.String("test", iacTypes.NewTestMetadata()), + Encryption: s3.Encryption{ + Enabled: iacTypes.Bool(false, iacTypes.NewTestMetadata()), + }, + ACL: iacTypes.String("private", iacTypes.NewTestMetadata()), + }, + }, + }, + }, } for _, test := range tests { diff --git a/pkg/iac/adapters/terraform/aws/s3/bucket.go b/pkg/iac/adapters/terraform/aws/s3/bucket.go index ae5b2ddb2f4d..5ecf7e9ba21b 100644 --- a/pkg/iac/adapters/terraform/aws/s3/bucket.go +++ b/pkg/iac/adapters/terraform/aws/s3/bucket.go @@ -1,6 +1,10 @@ package s3 import ( + "slices" + + s3types "github.com/aws/aws-sdk-go-v2/service/s3/types" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/s3" "github.com/aquasecurity/trivy/pkg/iac/terraform" iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" @@ -194,11 +198,13 @@ func isEncrypted(sseConfgihuration *terraform.Block) 
iacTypes.BoolValue { sseConfgihuration, "rule.apply_server_side_encryption_by_default.sse_algorithm", func(attr *terraform.Attribute, parent *terraform.Block) iacTypes.BoolValue { - if attr.IsNil() { + if attr.IsNil() || !attr.IsString() { return iacTypes.BoolDefault(false, parent.GetMetadata()) } + algoVal := attr.Value().AsString() + isValidAlgo := slices.Contains(s3types.ServerSideEncryption("").Values(), s3types.ServerSideEncryption(algoVal)) return iacTypes.Bool( - true, + isValidAlgo, attr.GetMetadata(), ) }, From 97f95c4ddfb1d47ab6571f218d30e0bc33f54382 Mon Sep 17 00:00:00 2001 From: Nikita Pivkin Date: Tue, 26 Mar 2024 20:04:40 +0300 Subject: [PATCH 15/57] docs(terraform): add file patterns for Terraform Plan (#6393) --- docs/docs/coverage/iac/index.md | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/docs/docs/coverage/iac/index.md b/docs/docs/coverage/iac/index.md index 21f0209f2fad..168c3dd650fa 100644 --- a/docs/docs/coverage/iac/index.md +++ b/docs/docs/coverage/iac/index.md @@ -8,14 +8,15 @@ Trivy scans Infrastructure as Code (IaC) files for ## Supported configurations -| Config type | File patterns | -|-------------------------------------|-------------------------------| -| [Kubernetes](kubernetes.md) | *.yml, *.yaml, *.json | -| [Docker](docker.md) | Dockerfile, Containerfile | -| [Terraform](terraform.md) | *.tf, *.tf.json, *.tfvars, | -| [CloudFormation](cloudformation.md) | *.yml, *.yaml, *.json | -| [Azure ARM Template](azure-arm.md) | *.json | -| [Helm](helm.md) | *.yaml, *.tpl, *.tar.gz, etc. 
| +| Config type | File patterns | +|-------------------------------------|-----------------------------------------------| +| [Kubernetes](kubernetes.md) | \*.yml, \*.yaml, \*.json | +| [Docker](docker.md) | Dockerfile, Containerfile | +| [Terraform](terraform.md) | \*.tf, \*.tf.json, \*.tfvars | +| [Terraform Plan](terraform.md) | tfplan, \*.tfplan, \*.tfplan.json, \*.tf.json | +| [CloudFormation](cloudformation.md) | \*.yml, \*.yaml, \*.json | +| [Azure ARM Template](azure-arm.md) | \*.json | +| [Helm](helm.md) | \*.yaml, \*.tpl, \*.tar.gz, etc. | [misconf]: ../../scanner/misconfiguration/index.md [secret]: ../../scanner/secret.md From f148eb10f25b4daaf97b38ef523e6f16e9b118a1 Mon Sep 17 00:00:00 2001 From: Nikita Pivkin Date: Tue, 26 Mar 2024 20:10:16 +0300 Subject: [PATCH 16/57] fix(helm): scan the subcharts once (#6382) --- pkg/iac/scanners/helm/scanner.go | 33 +++++++++++---- pkg/iac/scanners/helm/test/scanner_test.go | 41 +++++++++++++++++++ .../test/testdata/with-subchart/Chart.yaml | 6 +++ .../with-subchart/charts/nginx/Chart.yaml | 6 +++ .../charts/nginx/templates/pod.yaml | 12 ++++++ .../with-subchart/charts/nginx/values.yaml | 1 + .../test/testdata/with-subchart/values.yaml | 2 + 7 files changed, 92 insertions(+), 9 deletions(-) create mode 100644 pkg/iac/scanners/helm/test/testdata/with-subchart/Chart.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/with-subchart/charts/nginx/Chart.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/with-subchart/charts/nginx/templates/pod.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/with-subchart/charts/nginx/values.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/with-subchart/values.yaml diff --git a/pkg/iac/scanners/helm/scanner.go b/pkg/iac/scanners/helm/scanner.go index e655932e01fd..3f1a0d2fbb65 100644 --- a/pkg/iac/scanners/helm/scanner.go +++ b/pkg/iac/scanners/helm/scanner.go @@ -7,6 +7,7 @@ import ( "io/fs" "path/filepath" "strings" + "sync" 
"github.com/liamg/memoryfs" @@ -38,6 +39,8 @@ type Scanner struct { skipRequired bool frameworks []framework.Framework spec string + regoScanner *rego.Scanner + mu sync.Mutex } func (s *Scanner) SetSpec(spec string) { @@ -120,6 +123,10 @@ func (s *Scanner) SetRegoErrorLimit(_ int) {} func (s *Scanner) ScanFS(ctx context.Context, target fs.FS, path string) (scan.Results, error) { + if err := s.initRegoScanner(target); err != nil { + return nil, fmt.Errorf("failed to init rego scanner: %w", err) + } + var results []scan.Result if err := fs.WalkDir(target, path, func(path string, d fs.DirEntry, err error) error { select { @@ -150,6 +157,7 @@ func (s *Scanner) ScanFS(ctx context.Context, target fs.FS, path string) (scan.R } else { results = append(results, scanResults...) } + return fs.SkipDir } return nil @@ -174,14 +182,6 @@ func (s *Scanner) getScanResults(path string, ctx context.Context, target fs.FS) return nil, nil } - regoScanner := rego.NewScanner(types.SourceKubernetes, s.options...) 
- policyFS := target - if s.policyFS != nil { - policyFS = s.policyFS - } - if err := regoScanner.LoadPolicies(s.loadEmbeddedLibraries, s.loadEmbeddedPolicies, policyFS, s.policyDirs, s.policyReaders); err != nil { - return nil, fmt.Errorf("policies load: %w", err) - } for _, file := range chartFiles { file := file s.debug.Log("Processing rendered chart file: %s", file.TemplateFilePath) @@ -191,7 +191,7 @@ func (s *Scanner) getScanResults(path string, ctx context.Context, target fs.FS) return nil, fmt.Errorf("unmarshal yaml: %w", err) } for _, manifest := range manifests { - fileResults, err := regoScanner.ScanInput(ctx, rego.Input{ + fileResults, err := s.regoScanner.ScanInput(ctx, rego.Input{ Path: file.TemplateFilePath, Contents: manifest, FS: target, @@ -219,3 +219,18 @@ func (s *Scanner) getScanResults(path string, ctx context.Context, target fs.FS) } return results, nil } + +func (s *Scanner) initRegoScanner(srcFS fs.FS) error { + s.mu.Lock() + defer s.mu.Unlock() + if s.regoScanner != nil { + return nil + } + regoScanner := rego.NewScanner(types.SourceKubernetes, s.options...) 
+ regoScanner.SetParentDebugLogger(s.debug) + if err := regoScanner.LoadPolicies(s.loadEmbeddedLibraries, s.loadEmbeddedPolicies, srcFS, s.policyDirs, s.policyReaders); err != nil { + return err + } + s.regoScanner = regoScanner + return nil +} diff --git a/pkg/iac/scanners/helm/test/scanner_test.go b/pkg/iac/scanners/helm/test/scanner_test.go index 67099af2bb36..a46031a8fb98 100644 --- a/pkg/iac/scanners/helm/test/scanner_test.go +++ b/pkg/iac/scanners/helm/test/scanner_test.go @@ -318,3 +318,44 @@ deny[res] { require.NoError(t, err) assert.NotNil(t, code) } + +func TestScanSubchartOnce(t *testing.T) { + check := `# METADATA +# title: "Test rego" +# description: "Test rego" +# scope: package +# schemas: +# - input: schema["kubernetes"] +# custom: +# id: ID001 +# avd_id: AVD-USR-ID001 +# severity: LOW +# input: +# selector: +# - type: kubernetes +# subtypes: +# - kind: pod +package user.kubernetes.ID001 + +import data.lib.kubernetes + +deny[res] { + container := kubernetes.containers[_] + container.securityContext.readOnlyRootFilesystem == false + res := result.new("set 'securityContext.readOnlyRootFilesystem' to true", container) +} +` + + scanner := helm.New( + options.ScannerWithEmbeddedPolicies(false), + options.ScannerWithEmbeddedLibraries(true), + options.ScannerWithPolicyNamespaces("user"), + options.ScannerWithPolicyReader(strings.NewReader(check)), + ) + + results, err := scanner.ScanFS(context.TODO(), os.DirFS("testdata/with-subchart"), ".") + require.NoError(t, err) + require.Len(t, results, 1) + + assert.Len(t, results.GetFailed(), 0) +} diff --git a/pkg/iac/scanners/helm/test/testdata/with-subchart/Chart.yaml b/pkg/iac/scanners/helm/test/testdata/with-subchart/Chart.yaml new file mode 100644 index 000000000000..3c8c9b71ae45 --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/with-subchart/Chart.yaml @@ -0,0 +1,6 @@ +apiVersion: v2 +name: test +description: A Helm chart for Kubernetes +type: application +version: 0.1.0 +appVersion: "1.16.0" diff 
--git a/pkg/iac/scanners/helm/test/testdata/with-subchart/charts/nginx/Chart.yaml b/pkg/iac/scanners/helm/test/testdata/with-subchart/charts/nginx/Chart.yaml new file mode 100644 index 000000000000..45cdc636218e --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/with-subchart/charts/nginx/Chart.yaml @@ -0,0 +1,6 @@ +apiVersion: v2 +name: nginx +description: A Helm chart for Kubernetes +type: application +version: 0.1.0 +appVersion: "1.16.0" diff --git a/pkg/iac/scanners/helm/test/testdata/with-subchart/charts/nginx/templates/pod.yaml b/pkg/iac/scanners/helm/test/testdata/with-subchart/charts/nginx/templates/pod.yaml new file mode 100644 index 000000000000..70b3a84a8130 --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/with-subchart/charts/nginx/templates/pod.yaml @@ -0,0 +1,12 @@ +apiVersion: v1 +kind: Pod +metadata: + name: nginx +spec: + containers: + - name: nginx + image: nginx:1.14.2 + ports: + - containerPort: 8080 + securityContext: + readOnlyRootFilesystem: {{ .Values.readOnlyFs }} diff --git a/pkg/iac/scanners/helm/test/testdata/with-subchart/charts/nginx/values.yaml b/pkg/iac/scanners/helm/test/testdata/with-subchart/charts/nginx/values.yaml new file mode 100644 index 000000000000..ff3cff9db1e3 --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/with-subchart/charts/nginx/values.yaml @@ -0,0 +1 @@ +readOnlyFs: false diff --git a/pkg/iac/scanners/helm/test/testdata/with-subchart/values.yaml b/pkg/iac/scanners/helm/test/testdata/with-subchart/values.yaml new file mode 100644 index 000000000000..1e51a8fed1da --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/with-subchart/values.yaml @@ -0,0 +1,2 @@ +nginx: + readOnlyFs: true From 9d7f5c948e30af7b76cbe32b8e53070fb5bfd16b Mon Sep 17 00:00:00 2001 From: DmitriyLewen <91113035+DmitriyLewen@users.noreply.github.com> Date: Wed, 27 Mar 2024 11:32:22 +0600 Subject: [PATCH 17/57] fix: use `0600` perms for tmp files for post analyzers (#6386) --- pkg/fanal/analyzer/fs.go | 3 ++- 1 file changed, 2 
insertions(+), 1 deletion(-) diff --git a/pkg/fanal/analyzer/fs.go b/pkg/fanal/analyzer/fs.go index d578d6e6d06d..28880b6b0339 100644 --- a/pkg/fanal/analyzer/fs.go +++ b/pkg/fanal/analyzer/fs.go @@ -55,7 +55,8 @@ func (c *CompositeFS) CopyFileToTemp(opener Opener, info os.FileInfo) (string, e return "", xerrors.Errorf("copy error: %w", err) } - if err = os.Chmod(f.Name(), info.Mode()); err != nil { + // Use 0600 instead of file permissions to avoid errors when a file uses incorrect permissions (e.g. 0044). + if err = os.Chmod(f.Name(), 0600); err != nil { return "", xerrors.Errorf("chmod error: %w", err) } From f85c9fac6f522d9a5f139e7de9bc7b5037692877 Mon Sep 17 00:00:00 2001 From: DmitriyLewen <91113035+DmitriyLewen@users.noreply.github.com> Date: Wed, 27 Mar 2024 11:46:25 +0600 Subject: [PATCH 18/57] fix(nodejs): add support for parsing `workspaces` from `package.json` as an object (#6231) Co-authored-by: Teppei Fukuda --- .../parser/nodejs/packagejson/parse.go | 28 +++++++++++++++++-- .../parser/nodejs/packagejson/parse_test.go | 14 ++++++++++ .../testdata/workspace_as_map_package.json | 8 ++++++ 3 files changed, 48 insertions(+), 2 deletions(-) create mode 100644 pkg/dependency/parser/nodejs/packagejson/testdata/workspace_as_map_package.json diff --git a/pkg/dependency/parser/nodejs/packagejson/parse.go b/pkg/dependency/parser/nodejs/packagejson/parse.go index f4bf258f7aae..19a53679f2d0 100644 --- a/pkg/dependency/parser/nodejs/packagejson/parse.go +++ b/pkg/dependency/parser/nodejs/packagejson/parse.go @@ -5,6 +5,7 @@ import ( "io" "regexp" + "github.com/samber/lo" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/dependency" @@ -21,7 +22,7 @@ type packageJSON struct { Dependencies map[string]string `json:"dependencies"` OptionalDependencies map[string]string `json:"optionalDependencies"` DevDependencies map[string]string `json:"devDependencies"` - Workspaces []string `json:"workspaces"` + Workspaces any `json:"workspaces"` } type Package struct { @@ 
-65,7 +66,7 @@ func (p *Parser) Parse(r io.Reader) (Package, error) { Dependencies: pkgJSON.Dependencies, OptionalDependencies: pkgJSON.OptionalDependencies, DevDependencies: pkgJSON.DevDependencies, - Workspaces: pkgJSON.Workspaces, + Workspaces: parseWorkspaces(pkgJSON.Workspaces), }, nil } @@ -82,6 +83,29 @@ func parseLicense(val interface{}) string { return "" } +// parseWorkspaces returns slice of workspaces +func parseWorkspaces(val any) []string { + // Workspaces support 2 types - https://github.com/SchemaStore/schemastore/blob/d9516961f8a5b0e65a457808070147b5a866f60b/src/schemas/json/package.json#L777 + switch ws := val.(type) { + // Workspace as object (map[string][]string) + // e.g. "workspaces": {"packages": ["packages/*", "plugins/*"]}, + case map[string]interface{}: + // Take only workspaces for `packages` - https://classic.yarnpkg.com/blog/2018/02/15/nohoist/ + if pkgsWorkspaces, ok := ws["packages"]; ok { + return lo.Map(pkgsWorkspaces.([]interface{}), func(workspace interface{}, _ int) string { + return workspace.(string) + }) + } + // Workspace as string array + // e.g. 
"workspaces": ["packages/*", "backend"], + case []interface{}: + return lo.Map(ws, func(workspace interface{}, _ int) string { + return workspace.(string) + }) + } + return nil +} + func IsValidName(name string) bool { // Name is optional field // https://docs.npmjs.com/cli/v9/configuring-npm/package-json#name diff --git a/pkg/dependency/parser/nodejs/packagejson/parse_test.go b/pkg/dependency/parser/nodejs/packagejson/parse_test.go index 4f04cebcc1ee..97a0027d22ef 100644 --- a/pkg/dependency/parser/nodejs/packagejson/parse_test.go +++ b/pkg/dependency/parser/nodejs/packagejson/parse_test.go @@ -76,6 +76,20 @@ func TestParse(t *testing.T) { }, }, }, + { + name: "happy path - workspace as struct", + inputFile: "testdata/workspace_as_map_package.json", + want: packagejson.Package{ + Library: types.Library{ + ID: "example@1.0.0", + Name: "example", + Version: "1.0.0", + }, + Workspaces: []string{ + "packages/*", + }, + }, + }, { name: "invalid package name", inputFile: "testdata/invalid_name.json", diff --git a/pkg/dependency/parser/nodejs/packagejson/testdata/workspace_as_map_package.json b/pkg/dependency/parser/nodejs/packagejson/testdata/workspace_as_map_package.json new file mode 100644 index 000000000000..21a198e8bc6e --- /dev/null +++ b/pkg/dependency/parser/nodejs/packagejson/testdata/workspace_as_map_package.json @@ -0,0 +1,8 @@ +{ + "name": "example", + "version": "1.0.0", + "workspaces": { + "packages": ["packages/*"], + "nohoist": ["**/react-native", "**/react-native/**"] + } +} From ade033a837a528864f51b448a3bf256aafea50dd Mon Sep 17 00:00:00 2001 From: DmitriyLewen <91113035+DmitriyLewen@users.noreply.github.com> Date: Wed, 27 Mar 2024 11:51:09 +0600 Subject: [PATCH 19/57] docs: add info about support for package license detection in `fs`/`repo` modes (#6381) --- docs/docs/scanner/license.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/docs/scanner/license.md b/docs/docs/scanner/license.md index 6033542e4bea..7472011af671 
100644 --- a/docs/docs/scanner/license.md +++ b/docs/docs/scanner/license.md @@ -22,17 +22,15 @@ Check out [the coverage document][coverage] for details. To enable extended license scanning, you can use `--license-full`. In addition to package licenses, Trivy scans source code files, Markdown documents, text files and `LICENSE` documents to identify license usage within the image or filesystem. -By default, Trivy only classifies licenses that are matched with a confidence level of 0.9 or more by the classifer. +By default, Trivy only classifies licenses that are matched with a confidence level of 0.9 or more by the classifier. To configure the confidence level, you can use `--license-confidence-level`. This enables us to classify licenses that might be matched with a lower confidence level by the classifer. !!! note The full license scanning is expensive. It takes a while. -Currently, the standard license scanning doesn't support filesystem and repository scanning. - | License scanning | Image | Rootfs | Filesystem | Repository | SBOM | |:---------------------:|:-----:|:------:|:----------:|:----------:|:----:| -| Standard | ✅ | ✅ | - | - | ✅ | +| Standard | ✅ | ✅ | ✅[^1][^2] | ✅[^1][^2] | ✅ | | Full (--license-full) | ✅ | ✅ | ✅ | ✅ | - | License checking classifies the identified licenses and map the classification to severity. @@ -344,6 +342,8 @@ license: permissive: [] ``` +[^1]: See the list of supported language files [here](../coverage/language/index.md). +[^2]: Some lock files require additional files (e.g. files from the cache directory) to detect licenses. Check [coverage][coverage] for more information. 
[coverage]: ../coverage/index.md [google-license-classification]: https://opensource.google/documentation/reference/thirdparty/licenses From 258d1534614a98811de657eb196051728529defd Mon Sep 17 00:00:00 2001 From: DmitriyLewen <91113035+DmitriyLewen@users.noreply.github.com> Date: Wed, 27 Mar 2024 12:08:58 +0600 Subject: [PATCH 20/57] fix(nodejs): merge `Indirect`, `Dev`, `ExternalReferences` fields for same deps from `package-lock.json` files v2 or later (#6356) Signed-off-by: knqyf263 Co-authored-by: knqyf263 --- .../go.sum | 1 + pkg/dependency/parser/nodejs/npm/parse.go | 48 ++++++++--- .../parser/nodejs/npm/parse_test.go | 6 ++ .../parser/nodejs/npm/parse_testcase.go | 85 +++++++++++++++++++ ...age-lock_v3_with-same-dev-and-non-dev.json | 74 ++++++++++++++++ 5 files changed, 202 insertions(+), 12 deletions(-) create mode 100644 pkg/dependency/parser/nodejs/npm/testdata/package-lock_v3_with-same-dev-and-non-dev.json diff --git a/pkg/dependency/parser/golang/mod/testdata/replaced-with-local-path-and-version-mismatch/go.sum b/pkg/dependency/parser/golang/mod/testdata/replaced-with-local-path-and-version-mismatch/go.sum index 3d1d7c0e3913..8a219a39d474 100644 --- a/pkg/dependency/parser/golang/mod/testdata/replaced-with-local-path-and-version-mismatch/go.sum +++ b/pkg/dependency/parser/golang/mod/testdata/replaced-with-local-path-and-version-mismatch/go.sum @@ -50,6 +50,7 @@ golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgw golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod 
h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= diff --git a/pkg/dependency/parser/nodejs/npm/parse.go b/pkg/dependency/parser/nodejs/npm/parse.go index e289720b89a2..b74cfa5ce2f5 100644 --- a/pkg/dependency/parser/nodejs/npm/parse.go +++ b/pkg/dependency/parser/nodejs/npm/parse.go @@ -4,6 +4,7 @@ import ( "fmt" "io" "path" + "slices" "sort" "strings" @@ -115,28 +116,42 @@ func (p *Parser) parseV2(packages map[string]Package) ([]types.Library, []types. EndLine: pkg.EndLine, } + var ref types.ExternalRef + if pkg.Resolved != "" { + ref = types.ExternalRef{ + Type: types.RefOther, + URL: pkg.Resolved, + } + } + + pkgIndirect := isIndirectLib(pkgPath, directDeps) + // There are cases when similar libraries use same dependencies // we need to add location for each these dependencies if savedLib, ok := libs[pkgID]; ok { + savedLib.Dev = savedLib.Dev && pkg.Dev + savedLib.Indirect = savedLib.Indirect && pkgIndirect + + if ref.URL != "" && !slices.Contains(savedLib.ExternalReferences, ref) { + savedLib.ExternalReferences = append(savedLib.ExternalReferences, ref) + sortExternalReferences(savedLib.ExternalReferences) + } + savedLib.Locations = append(savedLib.Locations, location) sort.Sort(savedLib.Locations) + libs[pkgID] = savedLib continue } lib := types.Library{ - ID: pkgID, - Name: pkgName, - Version: pkg.Version, - Indirect: isIndirectLib(pkgPath, directDeps), - Dev: pkg.Dev, - ExternalReferences: []types.ExternalRef{ - { - Type: types.RefOther, - URL: pkg.Resolved, - }, - }, - Locations: []types.Location{location}, + ID: pkgID, + Name: pkgName, + Version: pkg.Version, + Indirect: pkgIndirect, + Dev: pkg.Dev, + ExternalReferences: lo.Ternary(ref.URL != "", []types.ExternalRef{ref}, nil), + Locations: []types.Location{location}, } libs[pkgID] = lib @@ -385,3 +400,12 @@ func (t 
*Package) UnmarshalJSONWithMetadata(node jfather.Node) error { func packageID(name, version string) string { return dependency.ID(ftypes.Npm, name, version) } + +func sortExternalReferences(refs []types.ExternalRef) { + sort.Slice(refs, func(i, j int) bool { + if refs[i].Type != refs[j].Type { + return refs[i].Type < refs[j].Type + } + return refs[i].URL < refs[j].URL + }) +} diff --git a/pkg/dependency/parser/nodejs/npm/parse_test.go b/pkg/dependency/parser/nodejs/npm/parse_test.go index c67055a71628..786fe643dfde 100644 --- a/pkg/dependency/parser/nodejs/npm/parse_test.go +++ b/pkg/dependency/parser/nodejs/npm/parse_test.go @@ -41,6 +41,12 @@ func TestParse(t *testing.T) { want: npmV3WithWorkspaceLibs, wantDeps: npmV3WithWorkspaceDeps, }, + { + name: "lock file v3 contains same dev and non-dev dependencies", + file: "testdata/package-lock_v3_with-same-dev-and-non-dev.json", + want: npmV3WithSameDevAndNonDevLibs, + wantDeps: npmV3WithSameDevAndNonDevDeps, + }, { name: "lock version v3 with workspace and without direct deps field", file: "testdata/package-lock_v3_without_root_deps_field.json", diff --git a/pkg/dependency/parser/nodejs/npm/parse_testcase.go b/pkg/dependency/parser/nodejs/npm/parse_testcase.go index e68addd15219..29b9e63d8f1c 100644 --- a/pkg/dependency/parser/nodejs/npm/parse_testcase.go +++ b/pkg/dependency/parser/nodejs/npm/parse_testcase.go @@ -1516,4 +1516,89 @@ var ( DependsOn: []string{"debug@2.6.9"}, }, } + + npmV3WithSameDevAndNonDevLibs = []types.Library{ + { + ID: "fsevents@1.2.9", + Name: "fsevents", + Version: "1.2.9", + Dev: true, + ExternalReferences: []types.ExternalRef{ + { + Type: types.RefOther, + URL: "https://registry.npmjs.org/fsevents/-/fsevents-1.2.9.tgz", + }, + }, + Locations: []types.Location{ + { + StartLine: 18, + EndLine: 37, + }, + }, + }, + { + ID: "minimist@0.0.8", + Name: "minimist", + Version: "0.0.8", + Indirect: false, + Dev: false, + ExternalReferences: []types.ExternalRef{ + { + Type: types.RefOther, + URL: 
"https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", + }, + }, + Locations: []types.Location{ + { + StartLine: 38, + EndLine: 43, + }, + { + StartLine: 68, + EndLine: 72, + }, + }, + }, + { + ID: "mkdirp@0.5.1", + Name: "mkdirp", + Version: "0.5.1", + Indirect: true, + Dev: true, + Locations: []types.Location{ + { + StartLine: 44, + EndLine: 55, + }, + }, + }, + { + ID: "node-pre-gyp@0.12.0", + Name: "node-pre-gyp", + Version: "0.12.0", + Indirect: true, + Dev: true, + Locations: []types.Location{ + { + StartLine: 56, + EndLine: 67, + }, + }, + }, + } + + npmV3WithSameDevAndNonDevDeps = []types.Dependency{ + { + ID: "fsevents@1.2.9", + DependsOn: []string{"node-pre-gyp@0.12.0"}, + }, + { + ID: "mkdirp@0.5.1", + DependsOn: []string{"minimist@0.0.8"}, + }, + { + ID: "node-pre-gyp@0.12.0", + DependsOn: []string{"mkdirp@0.5.1"}, + }, + } ) diff --git a/pkg/dependency/parser/nodejs/npm/testdata/package-lock_v3_with-same-dev-and-non-dev.json b/pkg/dependency/parser/nodejs/npm/testdata/package-lock_v3_with-same-dev-and-non-dev.json new file mode 100644 index 000000000000..4fe518b82bcd --- /dev/null +++ b/pkg/dependency/parser/nodejs/npm/testdata/package-lock_v3_with-same-dev-and-non-dev.json @@ -0,0 +1,74 @@ +{ + "name": "5139", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "5139", + "version": "1.0.0", + "license": "ISC", + "dependencies": { + "minimist": "^0.0.8" + }, + "devDependencies": { + "fsevents": "^1.2.9" + } + }, + "node_modules/fsevents": { + "version": "1.2.9", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-1.2.9.tgz", + "integrity": "sha512-oeyj2H3EjjonWcFjD5NvZNE9Rqe4UW+nQBU2HNeKw0koVLEFIhtyETyAakeAM3de7Z/SW5kcA+fZUait9EApnw==", + "bundleDependencies": [ + "node-pre-gyp" + ], + "deprecated": "The v1 package contains DANGEROUS / INSECURE binaries. 
Upgrade to safe fsevents v2", + "dev": true, + "hasInstallScript": true, + "os": [ + "darwin" + ], + "dependencies": { + "node-pre-gyp": "^0.12.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/fsevents/node_modules/minimist": { + "version": "0.0.8", + "dev": true, + "inBundle": true, + "license": "MIT" + }, + "node_modules/fsevents/node_modules/mkdirp": { + "version": "0.5.1", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "minimist": "0.0.8" + }, + "bin": { + "mkdirp": "bin/cmd.js" + } + }, + "node_modules/fsevents/node_modules/node-pre-gyp": { + "version": "0.12.0", + "dev": true, + "inBundle": true, + "license": "BSD-3-Clause", + "dependencies": { + "mkdirp": "^0.5.1" + }, + "bin": { + "node-pre-gyp": "bin/node-pre-gyp" + } + }, + "node_modules/minimist": { + "version": "0.0.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", + "integrity": "sha512-miQKw5Hv4NS1Psg2517mV4e4dYNaO3++hjAvLOAzKqZ61rH8NS1SK+vbfBWZ5PY/Me/bEWhUwqMghEW5Fb9T7Q==" + } + } +} From 5f69937cc6986912925a8a1b0801810ea850ba79 Mon Sep 17 00:00:00 2001 From: DmitriyLewen <91113035+DmitriyLewen@users.noreply.github.com> Date: Wed, 27 Mar 2024 13:07:12 +0600 Subject: [PATCH 21/57] fix(sbom): fix error when parent of SPDX Relationships is not a package. 
(#6399) --- pkg/sbom/core/bom.go | 10 +++- .../with-file-as-relationship-parent.json | 54 +++++++++++++++++++ pkg/sbom/spdx/unmarshal.go | 12 ++++- pkg/sbom/spdx/unmarshal_test.go | 5 ++ 4 files changed, 78 insertions(+), 3 deletions(-) create mode 100644 pkg/sbom/spdx/testdata/happy/with-file-as-relationship-parent.json diff --git a/pkg/sbom/core/bom.go b/pkg/sbom/core/bom.go index 54755a81e6c8..893796a0fe0d 100644 --- a/pkg/sbom/core/bom.go +++ b/pkg/sbom/core/bom.go @@ -238,12 +238,20 @@ func (b *BOM) AddComponent(c *Component) { } func (b *BOM) AddRelationship(parent, child *Component, relationshipType RelationshipType) { + // Check the wrong parent to avoid `panic` + if parent == nil { + return + } if parent.id == uuid.Nil { b.AddComponent(parent) } if child == nil { - b.relationships[parent.id] = nil // Meaning no dependencies + // It is possible that `relationships` already contains this parent. + // Check this to avoid overwriting. + if _, ok := b.relationships[parent.id]; !ok { + b.relationships[parent.id] = nil // Meaning no dependencies + } return } diff --git a/pkg/sbom/spdx/testdata/happy/with-file-as-relationship-parent.json b/pkg/sbom/spdx/testdata/happy/with-file-as-relationship-parent.json new file mode 100644 index 000000000000..798e75d0a52a --- /dev/null +++ b/pkg/sbom/spdx/testdata/happy/with-file-as-relationship-parent.json @@ -0,0 +1,54 @@ +{ + "files": [ + { + "fileName": "./Modules/Microsoft.PowerShell.PSResourceGet/_manifest/spdx_2.2/manifest.spdx.json", + "SPDXID": "SPDXRef-File--Modules-Microsoft.PowerShell.PSResourceGet--manifest-spdx-2.2-manifest.spdx.json-2B9FB98F5CA97DC84FD382A8F8E68F663C003362", + "checksums": [ + { + "algorithm": "SHA256", + "checksumValue": "4201b0989938842ef8c11a006184e0b1466bd7f9bb2af61d89a4c8318d43466e" + }, + { + "algorithm": "SHA1", + "checksumValue": "2b9fb98f5ca97dc84fd382a8f8e68f663c003362" + } + ], + "licenseConcluded": "NOASSERTION", + "licenseInfoInFiles": [ + "NOASSERTION" + ], + "copyrightText": 
"NOASSERTION", + "fileTypes": [ + "SPDX" + ] + } + ], + "externalDocumentRefs": [], + "relationships": [ + { + "relationshipType": "DESCRIBES", + "relatedSpdxElement": "SPDXRef-RootPackage", + "spdxElementId": "SPDXRef-DOCUMENT" + }, + { + "relationshipType": "DESCRIBED_BY", + "relatedSpdxElement": "SPDXRef-DOCUMENT", + "spdxElementId": "SPDXRef-File--Modules-Microsoft.PowerShell.PSResourceGet--manifest-spdx-2.2-manifest.spdx.json-2B9FB98F5CA97DC84FD382A8F8E68F663C003362" + } + ], + "spdxVersion": "SPDX-2.2", + "dataLicense": "CC0-1.0", + "SPDXID": "SPDXRef-DOCUMENT", + "name": "PowerShell Linux Arm32 7.5.0-preview.2", + "documentNamespace": "https://sbom.microsoft/1:2QSF7qZlbE-F7QrUJlEo7g:pHp_nUFvDUijZ4LrJ4RhoQ/696:458654/PowerShell%20Linux%20Arm32:7.5.0-preview.2:pDkyTHXmgUOdzSXIq9CiqA", + "creationInfo": { + "created": "2024-02-22T00:43:53Z", + "creators": [ + "Organization: Microsoft", + "Tool: Microsoft.SBOMTool-2.2.3" + ] + }, + "documentDescribes": [ + "SPDXRef-RootPackage" + ] +} \ No newline at end of file diff --git a/pkg/sbom/spdx/unmarshal.go b/pkg/sbom/spdx/unmarshal.go index 14b34c1c0caf..bda18c16980a 100644 --- a/pkg/sbom/spdx/unmarshal.go +++ b/pkg/sbom/spdx/unmarshal.go @@ -87,8 +87,16 @@ func (s *SPDX) unmarshal(spdxDocument *spdx.Document) error { continue } - compA := components[rel.RefA.ElementRefID] - compB := components[rel.RefB.ElementRefID] + compA, ok := components[rel.RefA.ElementRefID] + if !ok { // Skip if parent is not Package + continue + } + + compB, ok := components[rel.RefB.ElementRefID] + if !ok { // Skip if child is not Package + continue + } + s.BOM.AddRelationship(compA, compB, s.parseRelationshipType(rel.Relationship)) } diff --git a/pkg/sbom/spdx/unmarshal_test.go b/pkg/sbom/spdx/unmarshal_test.go index f65294020728..73f7d2dc934f 100644 --- a/pkg/sbom/spdx/unmarshal_test.go +++ b/pkg/sbom/spdx/unmarshal_test.go @@ -314,6 +314,11 @@ func TestUnmarshaler_Unmarshal(t *testing.T) { }, }, }, + { + name: "happy path with file as 
parent of relationship", + inputFile: "testdata/happy/with-file-as-relationship-parent.json", + want: types.SBOM{}, + }, { name: "happy path only os component", inputFile: "testdata/happy/os-only-bom.json", From 6a2f6fde4f97f254eb4ef3b79cab99f574abf72a Mon Sep 17 00:00:00 2001 From: Nikita Pivkin Date: Fri, 29 Mar 2024 00:50:36 +0300 Subject: [PATCH 22/57] fix(cloudformation): infer type after resolving a function (#6406) --- .../scanners/cloudformation/cftypes/types.go | 24 +++++++++++++++++++ .../parser/fn_find_in_map_test.go | 23 ++++++++++++++++++ .../cloudformation/parser/intrinsics.go | 9 +++++-- .../cloudformation/parser/property.go | 8 +++++++ .../cloudformation/parser/resource.go | 7 ++---- .../scanners/cloudformation/parser/util.go | 4 +++- 6 files changed, 67 insertions(+), 8 deletions(-) diff --git a/pkg/iac/scanners/cloudformation/cftypes/types.go b/pkg/iac/scanners/cloudformation/cftypes/types.go index 44d9c1fd2a93..0dc3b8b586a2 100644 --- a/pkg/iac/scanners/cloudformation/cftypes/types.go +++ b/pkg/iac/scanners/cloudformation/cftypes/types.go @@ -1,5 +1,7 @@ package cftypes +import "reflect" + type CfType string const ( @@ -9,4 +11,26 @@ const ( Bool CfType = "bool" Map CfType = "map" List CfType = "list" + Unknown CfType = "unknown" ) + +func TypeFromGoValue(value interface{}) CfType { + switch reflect.TypeOf(value).Kind() { + case reflect.String: + return String + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return Int + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + return Int + case reflect.Float32, reflect.Float64: + return Float64 + case reflect.Bool: + return Bool + case reflect.Map: + return Map + case reflect.Slice: + return List + default: + return Unknown + } +} diff --git a/pkg/iac/scanners/cloudformation/parser/fn_find_in_map_test.go b/pkg/iac/scanners/cloudformation/parser/fn_find_in_map_test.go index bbfa372b7121..6063c39fc006 100644 --- 
a/pkg/iac/scanners/cloudformation/parser/fn_find_in_map_test.go +++ b/pkg/iac/scanners/cloudformation/parser/fn_find_in_map_test.go @@ -98,3 +98,26 @@ Resources: nodeTypeProp := testRes.GetStringProperty("CacheNodeType", "") assert.Equal(t, "cache.t2.micro", nodeTypeProp.Value()) } + +func Test_InferType(t *testing.T) { + source := `--- +Mappings: + ApiDB: + MultiAZ: + development: False +Resources: + ApiDB: + Type: AWS::RDS::DBInstance + Properties: + MultiAZ: !FindInMap [ApiDB, MultiAZ, development] +` + + ctx := createTestFileContext(t, source) + require.NotNil(t, ctx) + + testRes := ctx.GetResourceByLogicalID("ApiDB") + require.NotNil(t, testRes) + + nodeTypeProp := testRes.GetBoolProperty("MultiAZ") + assert.False(t, nodeTypeProp.Value()) +} diff --git a/pkg/iac/scanners/cloudformation/parser/intrinsics.go b/pkg/iac/scanners/cloudformation/parser/intrinsics.go index d455fd3d5c6e..1dadc4f6d6fd 100644 --- a/pkg/iac/scanners/cloudformation/parser/intrinsics.go +++ b/pkg/iac/scanners/cloudformation/parser/intrinsics.go @@ -78,8 +78,13 @@ func ResolveIntrinsicFunc(property *Property) (*Property, bool) { for funcName := range property.AsMap() { if fn := intrinsicFuncs[funcName]; fn != nil { - // - return fn(property) + prop, resolved := fn(property) + if prop == nil || !resolved { + return prop, false + } + + prop.inferType() + return prop, true } } return property, false diff --git a/pkg/iac/scanners/cloudformation/parser/property.go b/pkg/iac/scanners/cloudformation/parser/property.go index 3cdbbb36b58a..e667c7844f44 100644 --- a/pkg/iac/scanners/cloudformation/parser/property.go +++ b/pkg/iac/scanners/cloudformation/parser/property.go @@ -425,3 +425,11 @@ func convert(input interface{}) interface{} { } return input } + +func (p *Property) inferType() { + typ := cftypes.TypeFromGoValue(p.Inner.Value) + if typ == cftypes.Unknown { + return + } + p.Inner.Type = typ +} diff --git a/pkg/iac/scanners/cloudformation/parser/resource.go 
b/pkg/iac/scanners/cloudformation/parser/resource.go index 69a864ad7cdf..bd1351f234df 100644 --- a/pkg/iac/scanners/cloudformation/parser/resource.go +++ b/pkg/iac/scanners/cloudformation/parser/resource.go @@ -100,11 +100,8 @@ func (r *Resource) GetProperty(path string) *Property { first := pathParts[0] property := &Property{} - for n, p := range r.properties() { - if n == first { - property = p - break - } + if p, exists := r.properties()[first]; exists { + property = p } if len(pathParts) == 1 || property.IsNil() { diff --git a/pkg/iac/scanners/cloudformation/parser/util.go b/pkg/iac/scanners/cloudformation/parser/util.go index a0792cf32865..03b9bf8da837 100644 --- a/pkg/iac/scanners/cloudformation/parser/util.go +++ b/pkg/iac/scanners/cloudformation/parser/util.go @@ -66,13 +66,15 @@ func setPropertyValueFromYaml(node *yaml.Node, propertyData *PropertyInner) erro if node.Content == nil { switch node.Tag { - case "!!int": propertyData.Type = cftypes.Int propertyData.Value, _ = strconv.Atoi(node.Value) case "!!bool": propertyData.Type = cftypes.Bool propertyData.Value, _ = strconv.ParseBool(node.Value) + case "!!float": + propertyData.Type = cftypes.Float64 + propertyData.Value, _ = strconv.ParseFloat(node.Value, 64) case "!!str", "!!string": propertyData.Type = cftypes.String propertyData.Value = node.Value From 625f22b81956aa2dde7c2aa6669b5beba895b830 Mon Sep 17 00:00:00 2001 From: Nikita Pivkin Date: Fri, 29 Mar 2024 04:08:06 +0300 Subject: [PATCH 23/57] test(cloudformation): add CF tests (#6315) --- .../aws/accessanalyzer/accessanalyzer_test.go | 54 +++++ .../aws/apigateway/apigateway_test.go | 84 +++++++ .../cloudformation/aws/apigateway/stage.go | 12 +- .../cloudformation/aws/athena/athena_test.go | 61 +++++ .../aws/cloudfront/cloudfront_test.go | 68 ++++++ .../aws/cloudfront/distribution.go | 22 +- .../aws/cloudtrail/cloudtrail_test.go | 64 ++++++ .../aws/cloudwatch/cloudwatch.go | 1 - .../aws/cloudwatch/cloudwatch_test.go | 57 +++++ 
.../aws/cloudwatch/log_group.go | 5 +- .../aws/codebuild/codebuild_test.go | 68 ++++++ .../cloudformation/aws/codebuild/project.go | 8 +- .../cloudformation/aws/config/adapt_test.go | 21 +- .../cloudformation/aws/config/aggregator.go | 6 +- .../cloudformation/aws/documentdb/cluster.go | 10 +- .../aws/documentdb/documentdb_test.go | 79 +++++++ .../aws/dynamodb/dynamodb_test.go | 55 +++++ .../cloudformation/aws/ec2/adapt_test.go | 184 +++++++++++---- .../cloudformation/aws/ec2/instance.go | 10 +- .../aws/ec2/launch_configuration.go | 4 +- .../cloudformation/aws/ec2/launch_template.go | 6 +- .../cloudformation/aws/ec2/security_group.go | 8 +- .../cloudformation/aws/ecr/ecr_test.go | 102 +++++++++ .../cloudformation/aws/ecr/repository.go | 13 +- .../cloudformation/aws/ecs/cluster.go | 8 +- .../cloudformation/aws/ecs/ecs_test.go | 108 +++++++++ .../cloudformation/aws/ecs/task_definition.go | 26 +-- .../cloudformation/aws/efs/efs_test.go | 52 +++++ .../cloudformation/aws/eks/cluster.go | 10 +- .../cloudformation/aws/eks/eks_test.go | 45 ++++ .../aws/elasticache/elasticache_test.go | 82 +++++++ .../aws/elasticsearch/domain.go | 20 +- .../aws/elasticsearch/elasticsearch_test.go | 88 ++++++++ .../cloudformation/aws/elb/adapt_test.go | 47 ++-- .../cloudformation/aws/elb/loadbalancer.go | 14 +- .../cloudformation/aws/iam/iam_test.go | 189 ++++++++++++++++ .../adapters/cloudformation/aws/iam/policy.go | 17 +- .../aws/kinesis/kinesis_test.go | 57 +++++ .../cloudformation/aws/kinesis/stream.go | 6 - .../cloudformation/aws/lambda/function.go | 15 +- .../cloudformation/aws/lambda/lambda_test.go | 76 +++++++ .../adapters/cloudformation/aws/mq/mq_test.go | 59 +++++ .../cloudformation/aws/msk/msk_test.go | 87 +++++++ .../cloudformation/aws/neptune/cluster.go | 6 +- .../aws/neptune/neptune_test.go | 59 +++++ .../cloudformation/aws/rds/adapt_test.go | 124 +++++----- .../cloudformation/aws/rds/instance.go | 19 +- .../aws/rds/parameter_groups.go | 8 +- 
.../cloudformation/aws/redshift/cluster.go | 13 +- .../aws/redshift/redshift_test.go | 111 +++++++++ .../adapters/cloudformation/aws/s3/bucket.go | 13 +- .../adapters/cloudformation/aws/s3/s3_test.go | 60 +++-- .../adapters/cloudformation/aws/sam/api.go | 22 +- .../cloudformation/aws/sam/function.go | 8 +- .../cloudformation/aws/sam/http_api.go | 11 +- .../cloudformation/aws/sam/sam_test.go | 213 ++++++++++++++++++ .../cloudformation/aws/sam/state_machines.go | 9 +- .../adapters/cloudformation/aws/sam/tables.go | 23 +- .../cloudformation/aws/sns/sns_test.go | 54 +++++ .../adapters/cloudformation/aws/sqs/queue.go | 1 - .../cloudformation/aws/sqs/sqs_test.go | 86 +++++++ .../cloudformation/aws/ssm/ssm_test.go | 53 +++++ .../aws/workspaces/workspaces_test.go | 62 +++++ .../cloudformation/testutil/testutil.go | 25 ++ pkg/iac/providers/aws/ecs/ecs.go | 8 +- pkg/iac/types/bool.go | 4 + pkg/iac/types/int.go | 4 + pkg/iac/types/string.go | 5 + 68 files changed, 2660 insertions(+), 349 deletions(-) create mode 100644 pkg/iac/adapters/cloudformation/aws/accessanalyzer/accessanalyzer_test.go create mode 100644 pkg/iac/adapters/cloudformation/aws/apigateway/apigateway_test.go create mode 100644 pkg/iac/adapters/cloudformation/aws/athena/athena_test.go create mode 100644 pkg/iac/adapters/cloudformation/aws/cloudfront/cloudfront_test.go create mode 100644 pkg/iac/adapters/cloudformation/aws/cloudtrail/cloudtrail_test.go create mode 100644 pkg/iac/adapters/cloudformation/aws/cloudwatch/cloudwatch_test.go create mode 100644 pkg/iac/adapters/cloudformation/aws/codebuild/codebuild_test.go create mode 100644 pkg/iac/adapters/cloudformation/aws/documentdb/documentdb_test.go create mode 100644 pkg/iac/adapters/cloudformation/aws/dynamodb/dynamodb_test.go create mode 100644 pkg/iac/adapters/cloudformation/aws/ecr/ecr_test.go create mode 100644 pkg/iac/adapters/cloudformation/aws/ecs/ecs_test.go create mode 100644 pkg/iac/adapters/cloudformation/aws/efs/efs_test.go create mode 100644 
pkg/iac/adapters/cloudformation/aws/eks/eks_test.go create mode 100644 pkg/iac/adapters/cloudformation/aws/elasticache/elasticache_test.go create mode 100644 pkg/iac/adapters/cloudformation/aws/elasticsearch/elasticsearch_test.go create mode 100644 pkg/iac/adapters/cloudformation/aws/iam/iam_test.go create mode 100644 pkg/iac/adapters/cloudformation/aws/kinesis/kinesis_test.go create mode 100644 pkg/iac/adapters/cloudformation/aws/lambda/lambda_test.go create mode 100644 pkg/iac/adapters/cloudformation/aws/mq/mq_test.go create mode 100644 pkg/iac/adapters/cloudformation/aws/msk/msk_test.go create mode 100644 pkg/iac/adapters/cloudformation/aws/neptune/neptune_test.go create mode 100644 pkg/iac/adapters/cloudformation/aws/redshift/redshift_test.go create mode 100644 pkg/iac/adapters/cloudformation/aws/sam/sam_test.go create mode 100644 pkg/iac/adapters/cloudformation/aws/sns/sns_test.go create mode 100644 pkg/iac/adapters/cloudformation/aws/sqs/sqs_test.go create mode 100644 pkg/iac/adapters/cloudformation/aws/ssm/ssm_test.go create mode 100644 pkg/iac/adapters/cloudformation/aws/workspaces/workspaces_test.go create mode 100644 pkg/iac/adapters/cloudformation/testutil/testutil.go diff --git a/pkg/iac/adapters/cloudformation/aws/accessanalyzer/accessanalyzer_test.go b/pkg/iac/adapters/cloudformation/aws/accessanalyzer/accessanalyzer_test.go new file mode 100644 index 000000000000..04e67c2b6818 --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/accessanalyzer/accessanalyzer_test.go @@ -0,0 +1,54 @@ +package accessanalyzer + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/accessanalyzer" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected accessanalyzer.AccessAnalyzer + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + 
Analyzer: + Type: 'AWS::AccessAnalyzer::Analyzer' + Properties: + AnalyzerName: MyAccountAnalyzer +`, + expected: accessanalyzer.AccessAnalyzer{ + Analyzers: []accessanalyzer.Analyzer{ + { + Name: types.StringTest("MyAccountAnalyzer"), + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + Analyzer: + Type: 'AWS::AccessAnalyzer::Analyzer' +`, + expected: accessanalyzer.AccessAnalyzer{ + Analyzers: []accessanalyzer.Analyzer{ + {}, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/apigateway/apigateway_test.go b/pkg/iac/adapters/cloudformation/aws/apigateway/apigateway_test.go new file mode 100644 index 000000000000..8f9e55ef8abd --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/apigateway/apigateway_test.go @@ -0,0 +1,84 @@ +package apigateway + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/apigateway" + v2 "github.com/aquasecurity/trivy/pkg/iac/providers/aws/apigateway/v2" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected apigateway.APIGateway + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + MyApi: + Type: 'AWS::ApiGatewayV2::Api' + Properties: + Name: MyApi + ProtocolType: WEBSOCKET + MyStage: + Type: 'AWS::ApiGatewayV2::Stage' + Properties: + StageName: Prod + ApiId: !Ref MyApi + AccessLogSettings: + DestinationArn: some-arn +`, + expected: apigateway.APIGateway{ + V2: v2.APIGateway{ + APIs: []v2.API{ + { + Name: types.StringTest("MyApi"), + ProtocolType: types.StringTest("WEBSOCKET"), + Stages: []v2.Stage{ + { + Name: types.StringTest("Prod"), + AccessLogging: v2.AccessLogging{ + CloudwatchLogGroupARN: 
types.StringTest("some-arn"), + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + MyApi: + Type: 'AWS::ApiGatewayV2::Api' + MyStage: + Type: 'AWS::ApiGatewayV2::Stage' + MyStage2: + Type: 'AWS::ApiGatewayV2::Stage' + Properties: + ApiId: !Ref MyApi +`, + expected: apigateway.APIGateway{ + V2: v2.APIGateway{ + APIs: []v2.API{ + { + Stages: []v2.Stage{{}}, + }, + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/apigateway/stage.go b/pkg/iac/adapters/cloudformation/aws/apigateway/stage.go index c79f89fda5ea..8e9497a91ec3 100644 --- a/pkg/iac/adapters/cloudformation/aws/apigateway/stage.go +++ b/pkg/iac/adapters/cloudformation/aws/apigateway/stage.go @@ -2,18 +2,18 @@ package apigateway import ( v2 "github.com/aquasecurity/trivy/pkg/iac/providers/aws/apigateway/v2" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getApis(cfFile parser2.FileContext) (apis []v2.API) { +func getApis(cfFile parser.FileContext) (apis []v2.API) { apiResources := cfFile.GetResourcesByType("AWS::ApiGatewayV2::Api") for _, apiRes := range apiResources { api := v2.API{ Metadata: apiRes.Metadata(), - Name: types.StringDefault("", apiRes.Metadata()), - ProtocolType: types.StringDefault("", apiRes.Metadata()), + Name: apiRes.GetStringProperty("Name"), + ProtocolType: apiRes.GetStringProperty("ProtocolType"), Stages: getStages(apiRes.ID(), cfFile), } apis = append(apis, api) @@ -22,7 +22,7 @@ func getApis(cfFile parser2.FileContext) (apis []v2.API) { return apis } -func getStages(apiId string, cfFile parser2.FileContext) []v2.Stage { +func getStages(apiId string, cfFile parser.FileContext) []v2.Stage { var 
apiStages []v2.Stage stageResources := cfFile.GetResourcesByType("AWS::ApiGatewayV2::Stage") @@ -43,7 +43,7 @@ func getStages(apiId string, cfFile parser2.FileContext) []v2.Stage { return apiStages } -func getAccessLogging(r *parser2.Resource) v2.AccessLogging { +func getAccessLogging(r *parser.Resource) v2.AccessLogging { loggingProp := r.GetProperty("AccessLogSettings") if loggingProp.IsNil() { diff --git a/pkg/iac/adapters/cloudformation/aws/athena/athena_test.go b/pkg/iac/adapters/cloudformation/aws/athena/athena_test.go new file mode 100644 index 000000000000..097de6fa303d --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/athena/athena_test.go @@ -0,0 +1,61 @@ +package athena + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/athena" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected athena.Athena + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + MyAthenaWorkGroup: + Type: AWS::Athena::WorkGroup + Properties: + Name: MyCustomWorkGroup + WorkGroupConfiguration: + EnforceWorkGroupConfiguration: true + ResultConfiguration: + EncryptionOption: SSE_KMS +`, + expected: athena.Athena{ + Workgroups: []athena.Workgroup{ + { + Name: types.StringTest("MyCustomWorkGroup"), + EnforceConfiguration: types.BoolTest(true), + Encryption: athena.EncryptionConfiguration{ + Type: types.StringTest("SSE_KMS"), + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + MyAthenaWorkGroup: + Type: AWS::Athena::WorkGroup +`, + expected: athena.Athena{ + Workgroups: []athena.Workgroup{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } + +} diff --git 
a/pkg/iac/adapters/cloudformation/aws/cloudfront/cloudfront_test.go b/pkg/iac/adapters/cloudformation/aws/cloudfront/cloudfront_test.go new file mode 100644 index 000000000000..6c0ec7348b33 --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/cloudfront/cloudfront_test.go @@ -0,0 +1,68 @@ +package cloudfront + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/cloudfront" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected cloudfront.Cloudfront + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + cloudfrontdistribution: + Type: AWS::CloudFront::Distribution + Properties: + DistributionConfig: + WebACLId: "a1b2c3d4-5678-90ab-cdef-EXAMPLE11111" + Logging: + Bucket: "myawslogbucket.s3.amazonaws.com" + ViewerCertificate: + MinimumProtocolVersion: SSLv3 + DefaultCacheBehavior: + ViewerProtocolPolicy: "redirect-to-https" +`, + expected: cloudfront.Cloudfront{ + Distributions: []cloudfront.Distribution{ + { + WAFID: types.StringTest("a1b2c3d4-5678-90ab-cdef-EXAMPLE11111"), + Logging: cloudfront.Logging{ + Bucket: types.StringTest("myawslogbucket.s3.amazonaws.com"), + }, + ViewerCertificate: cloudfront.ViewerCertificate{ + MinimumProtocolVersion: types.StringTest("SSLv3"), + }, + DefaultCacheBehaviour: cloudfront.CacheBehaviour{ + ViewerProtocolPolicy: types.StringTest("redirect-to-https"), + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + cloudfrontdistribution: + Type: AWS::CloudFront::Distribution +`, + expected: cloudfront.Cloudfront{ + Distributions: []cloudfront.Distribution{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git 
a/pkg/iac/adapters/cloudformation/aws/cloudfront/distribution.go b/pkg/iac/adapters/cloudformation/aws/cloudfront/distribution.go index 0364dc82d052..70c5052bcd55 100644 --- a/pkg/iac/adapters/cloudformation/aws/cloudfront/distribution.go +++ b/pkg/iac/adapters/cloudformation/aws/cloudfront/distribution.go @@ -2,11 +2,10 @@ package cloudfront import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/cloudfront" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" - "github.com/aquasecurity/trivy/pkg/iac/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -func getDistributions(ctx parser2.FileContext) (distributions []cloudfront.Distribution) { +func getDistributions(ctx parser.FileContext) (distributions []cloudfront.Distribution) { distributionResources := ctx.GetResourcesByType("AWS::CloudFront::Distribution") @@ -32,24 +31,15 @@ func getDistributions(ctx parser2.FileContext) (distributions []cloudfront.Distr return distributions } -func getDefaultCacheBehaviour(r *parser2.Resource) cloudfront.CacheBehaviour { +func getDefaultCacheBehaviour(r *parser.Resource) cloudfront.CacheBehaviour { defaultCache := r.GetProperty("DistributionConfig.DefaultCacheBehavior") if defaultCache.IsNil() { return cloudfront.CacheBehaviour{ - Metadata: r.Metadata(), - ViewerProtocolPolicy: types.StringDefault("allow-all", r.Metadata()), - } - } - protoProp := r.GetProperty("DistributionConfig.DefaultCacheBehavior.ViewerProtocolPolicy") - if protoProp.IsNotString() { - return cloudfront.CacheBehaviour{ - Metadata: r.Metadata(), - ViewerProtocolPolicy: types.StringDefault("allow-all", r.Metadata()), + Metadata: r.Metadata(), } } - return cloudfront.CacheBehaviour{ - Metadata: r.Metadata(), - ViewerProtocolPolicy: protoProp.AsStringValue(), + Metadata: defaultCache.Metadata(), + ViewerProtocolPolicy: defaultCache.GetStringProperty("ViewerProtocolPolicy"), } } diff --git 
a/pkg/iac/adapters/cloudformation/aws/cloudtrail/cloudtrail_test.go b/pkg/iac/adapters/cloudformation/aws/cloudtrail/cloudtrail_test.go new file mode 100644 index 000000000000..5dcebb291035 --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/cloudtrail/cloudtrail_test.go @@ -0,0 +1,64 @@ +package cloudtrail + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/cloudtrail" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected cloudtrail.CloudTrail + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + Trail: + Type: AWS::CloudTrail::Trail + Properties: + S3BucketName: MyBucket + IsLogging: true + TrailName: MyTrail + EnableLogFileValidation: true + IsMultiRegionTrail: true + CloudWatchLogsLogGroupArn: cw-arn + KmsKeyId: my-kms-key +`, + expected: cloudtrail.CloudTrail{ + Trails: []cloudtrail.Trail{ + { + Name: types.StringTest("MyTrail"), + BucketName: types.StringTest("MyBucket"), + IsLogging: types.BoolTest(true), + IsMultiRegion: types.BoolTest(true), + EnableLogFileValidation: types.BoolTest(true), + CloudWatchLogsLogGroupArn: types.StringTest("cw-arn"), + KMSKeyID: types.StringTest("my-kms-key"), + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + Trail: + Type: AWS::CloudTrail::Trail + `, + expected: cloudtrail.CloudTrail{ + Trails: []cloudtrail.Trail{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/cloudwatch/cloudwatch.go b/pkg/iac/adapters/cloudformation/aws/cloudwatch/cloudwatch.go index 1c6efa85a891..0c4a59e43189 100644 --- a/pkg/iac/adapters/cloudformation/aws/cloudwatch/cloudwatch.go +++ 
b/pkg/iac/adapters/cloudformation/aws/cloudwatch/cloudwatch.go @@ -9,6 +9,5 @@ import ( func Adapt(cfFile parser.FileContext) cloudwatch.CloudWatch { return cloudwatch.CloudWatch{ LogGroups: getLogGroups(cfFile), - Alarms: nil, } } diff --git a/pkg/iac/adapters/cloudformation/aws/cloudwatch/cloudwatch_test.go b/pkg/iac/adapters/cloudformation/aws/cloudwatch/cloudwatch_test.go new file mode 100644 index 000000000000..c8a7bd95c9a3 --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/cloudwatch/cloudwatch_test.go @@ -0,0 +1,57 @@ +package cloudwatch + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/cloudwatch" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected cloudwatch.CloudWatch + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + myLogGroup: + Type: AWS::Logs::LogGroup + Properties: + LogGroupName: my-log-group + RetentionInDays: 7 + KmsKeyId: my-kms + +`, + expected: cloudwatch.CloudWatch{ + LogGroups: []cloudwatch.LogGroup{ + { + Name: types.StringTest("my-log-group"), + RetentionInDays: types.IntTest(7), + KMSKeyID: types.StringTest("my-kms"), + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + myLogGroup: + Type: AWS::Logs::LogGroup + `, + expected: cloudwatch.CloudWatch{ + LogGroups: []cloudwatch.LogGroup{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/cloudwatch/log_group.go b/pkg/iac/adapters/cloudformation/aws/cloudwatch/log_group.go index 81730f050ecf..09e039129781 100644 --- a/pkg/iac/adapters/cloudformation/aws/cloudwatch/log_group.go +++ b/pkg/iac/adapters/cloudformation/aws/cloudwatch/log_group.go @@ 
-3,7 +3,6 @@ package cloudwatch import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/cloudwatch" "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" - "github.com/aquasecurity/trivy/pkg/iac/types" ) func getLogGroups(ctx parser.FileContext) (logGroups []cloudwatch.LogGroup) { @@ -13,11 +12,9 @@ func getLogGroups(ctx parser.FileContext) (logGroups []cloudwatch.LogGroup) { for _, r := range logGroupResources { group := cloudwatch.LogGroup{ Metadata: r.Metadata(), - Arn: types.StringDefault("", r.Metadata()), Name: r.GetStringProperty("LogGroupName"), KMSKeyID: r.GetStringProperty("KmsKeyId"), - RetentionInDays: r.GetIntProperty("RetentionInDays", 0), - MetricFilters: nil, + RetentionInDays: r.GetIntProperty("RetentionInDays"), } logGroups = append(logGroups, group) } diff --git a/pkg/iac/adapters/cloudformation/aws/codebuild/codebuild_test.go b/pkg/iac/adapters/cloudformation/aws/codebuild/codebuild_test.go new file mode 100644 index 000000000000..06eaa19402e6 --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/codebuild/codebuild_test.go @@ -0,0 +1,68 @@ +package codebuild + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/codebuild" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected codebuild.CodeBuild + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + Project: + Type: AWS::CodeBuild::Project + Properties: + Artifacts: + EncryptionDisabled: true + SecondaryArtifacts: + - EncryptionDisabled: true +`, + expected: codebuild.CodeBuild{ + Projects: []codebuild.Project{ + { + ArtifactSettings: codebuild.ArtifactSettings{ + EncryptionEnabled: types.BoolTest(false), + }, + SecondaryArtifactSettings: []codebuild.ArtifactSettings{ + { + EncryptionEnabled: types.BoolTest(false), + }, + }, + }, + }, + }, + 
}, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + Project: + Type: AWS::CodeBuild::Project + `, + expected: codebuild.CodeBuild{ + Projects: []codebuild.Project{ + { + ArtifactSettings: codebuild.ArtifactSettings{ + EncryptionEnabled: types.BoolTest(true), + }, + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/codebuild/project.go b/pkg/iac/adapters/cloudformation/aws/codebuild/project.go index 9c0541831223..554fc8afecea 100644 --- a/pkg/iac/adapters/cloudformation/aws/codebuild/project.go +++ b/pkg/iac/adapters/cloudformation/aws/codebuild/project.go @@ -2,11 +2,11 @@ package codebuild import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/codebuild" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getProjects(ctx parser2.FileContext) (projects []codebuild.Project) { +func getProjects(ctx parser.FileContext) (projects []codebuild.Project) { projectResources := ctx.GetResourcesByType("AWS::CodeBuild::Project") @@ -23,7 +23,7 @@ func getProjects(ctx parser2.FileContext) (projects []codebuild.Project) { return projects } -func getSecondaryArtifactSettings(r *parser2.Resource) (secondaryArtifacts []codebuild.ArtifactSettings) { +func getSecondaryArtifactSettings(r *parser.Resource) (secondaryArtifacts []codebuild.ArtifactSettings) { secondaryArtifactsList := r.GetProperty("SecondaryArtifacts") if secondaryArtifactsList.IsNil() || !secondaryArtifactsList.IsList() { return @@ -44,7 +44,7 @@ func getSecondaryArtifactSettings(r *parser2.Resource) (secondaryArtifacts []cod return secondaryArtifacts } -func getArtifactSettings(r *parser2.Resource) codebuild.ArtifactSettings { +func getArtifactSettings(r 
*parser.Resource) codebuild.ArtifactSettings { settings := codebuild.ArtifactSettings{ Metadata: r.Metadata(), diff --git a/pkg/iac/adapters/cloudformation/aws/config/adapt_test.go b/pkg/iac/adapters/cloudformation/aws/config/adapt_test.go index 1a8f30e018f6..e6dc652da7b1 100644 --- a/pkg/iac/adapters/cloudformation/aws/config/adapt_test.go +++ b/pkg/iac/adapters/cloudformation/aws/config/adapt_test.go @@ -1,14 +1,11 @@ package config import ( - "context" "testing" - "github.com/aquasecurity/trivy/internal/testutil" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" "github.com/aquasecurity/trivy/pkg/iac/providers/aws/config" - "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/types" - "github.com/stretchr/testify/require" ) func TestAdapt(t *testing.T) { @@ -29,8 +26,7 @@ Resources: `, expected: config.Config{ ConfigurationAggregrator: config.ConfigurationAggregrator{ - Metadata: types.NewTestMetadata(), - SourceAllRegions: types.Bool(true, types.NewTestMetadata()), + SourceAllRegions: types.BoolTest(true), }, }, }, @@ -46,8 +42,7 @@ Resources: `, expected: config.Config{ ConfigurationAggregrator: config.ConfigurationAggregrator{ - Metadata: types.NewTestMetadata(), - SourceAllRegions: types.Bool(true, types.NewTestMetadata()), + SourceAllRegions: types.BoolTest(true), }, }, }, @@ -55,15 +50,7 @@ Resources: for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - fs := testutil.CreateFS(t, map[string]string{ - "template.yaml": tt.source, - }) - - p := parser.New() - fctx, err := p.ParseFile(context.TODO(), fs, "template.yaml") - require.NoError(t, err) - - testutil.AssertDefsecEqual(t, tt.expected, Adapt(*fctx)) + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) }) } diff --git a/pkg/iac/adapters/cloudformation/aws/config/aggregator.go b/pkg/iac/adapters/cloudformation/aws/config/aggregator.go index 1f34c21591b0..72447398b80f 100644 --- 
a/pkg/iac/adapters/cloudformation/aws/config/aggregator.go +++ b/pkg/iac/adapters/cloudformation/aws/config/aggregator.go @@ -2,11 +2,11 @@ package config import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/config" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getConfigurationAggregator(ctx parser2.FileContext) config.ConfigurationAggregrator { +func getConfigurationAggregator(ctx parser.FileContext) config.ConfigurationAggregrator { aggregator := config.ConfigurationAggregrator{ Metadata: iacTypes.NewUnmanagedMetadata(), @@ -25,7 +25,7 @@ func getConfigurationAggregator(ctx parser2.FileContext) config.ConfigurationAgg } } -func isSourcingAllRegions(r *parser2.Resource) iacTypes.BoolValue { +func isSourcingAllRegions(r *parser.Resource) iacTypes.BoolValue { accountProp := r.GetProperty("AccountAggregationSources") if accountProp.IsNotNil() && accountProp.IsList() { diff --git a/pkg/iac/adapters/cloudformation/aws/documentdb/cluster.go b/pkg/iac/adapters/cloudformation/aws/documentdb/cluster.go index 568fcfb44f72..f37467dc4100 100644 --- a/pkg/iac/adapters/cloudformation/aws/documentdb/cluster.go +++ b/pkg/iac/adapters/cloudformation/aws/documentdb/cluster.go @@ -2,11 +2,11 @@ package documentdb import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/documentdb" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getClusters(ctx parser2.FileContext) (clusters []documentdb.Cluster) { +func getClusters(ctx parser.FileContext) (clusters []documentdb.Cluster) { clusterResources := ctx.GetResourcesByType("AWS::DocDB::DBCluster") @@ -28,13 +28,13 @@ func getClusters(ctx parser2.FileContext) (clusters []documentdb.Cluster) { return 
clusters } -func updateInstancesOnCluster(cluster *documentdb.Cluster, ctx parser2.FileContext) { +func updateInstancesOnCluster(cluster *documentdb.Cluster, ctx parser.FileContext) { instanceResources := ctx.GetResourcesByType("AWS::DocDB::DBInstance") for _, r := range instanceResources { clusterIdentifier := r.GetStringProperty("DBClusterIdentifier") - if clusterIdentifier == cluster.Identifier { + if cluster.Identifier.EqualTo(clusterIdentifier.Value()) { cluster.Instances = append(cluster.Instances, documentdb.Instance{ Metadata: r.Metadata(), KMSKeyID: cluster.KMSKeyID, @@ -43,7 +43,7 @@ func updateInstancesOnCluster(cluster *documentdb.Cluster, ctx parser2.FileConte } } -func getLogExports(r *parser2.Resource) (logExports []types.StringValue) { +func getLogExports(r *parser.Resource) (logExports []types.StringValue) { exportsList := r.GetProperty("EnableCloudwatchLogsExports") diff --git a/pkg/iac/adapters/cloudformation/aws/documentdb/documentdb_test.go b/pkg/iac/adapters/cloudformation/aws/documentdb/documentdb_test.go new file mode 100644 index 000000000000..3e60155e9dfb --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/documentdb/documentdb_test.go @@ -0,0 +1,79 @@ +package documentdb + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/documentdb" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected documentdb.DocumentDB + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + myDBCluster: + Type: 'AWS::DocDB::DBCluster' + Properties: + BackupRetentionPeriod: 8 + DBClusterIdentifier: sample-cluster + KmsKeyId: your-kms-key-id + StorageEncrypted: true + EnableCloudwatchLogsExports: + - audit + - general + myDBInstance: + Type: 'AWS::DocDB::DBInstance' + Properties: + DBClusterIdentifier: sample-cluster + KmsKeyId: 
your-kms-key-id +`, + expected: documentdb.DocumentDB{ + Clusters: []documentdb.Cluster{ + { + Identifier: types.StringTest("sample-cluster"), + BackupRetentionPeriod: types.IntTest(8), + KMSKeyID: types.StringTest("your-kms-key-id"), + StorageEncrypted: types.BoolTest(true), + EnabledLogExports: []types.StringValue{ + types.StringTest("audit"), + types.StringTest("general"), + }, + Instances: []documentdb.Instance{ + { + KMSKeyID: types.StringTest("your-kms-key-id"), + }, + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + myDBCluster: + Type: 'AWS::DocDB::DBCluster' + `, + expected: documentdb.DocumentDB{ + Clusters: []documentdb.Cluster{ + { + BackupRetentionPeriod: types.IntTest(1), + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/dynamodb/dynamodb_test.go b/pkg/iac/adapters/cloudformation/aws/dynamodb/dynamodb_test.go new file mode 100644 index 000000000000..ce62e85cde5e --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/dynamodb/dynamodb_test.go @@ -0,0 +1,55 @@ +package dynamodb + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/dynamodb" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected dynamodb.DynamoDB + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + daxCluster: + Type: AWS::DAX::Cluster + Properties: + SSESpecification: + SSEEnabled: true +`, + expected: dynamodb.DynamoDB{ + DAXClusters: []dynamodb.DAXCluster{ + { + ServerSideEncryption: dynamodb.ServerSideEncryption{ + Enabled: types.BoolTest(true), + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 
2010-09-09 +Resources: + daxCluster: + Type: AWS::DAX::Cluster + `, + expected: dynamodb.DynamoDB{ + DAXClusters: []dynamodb.DAXCluster{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/ec2/adapt_test.go b/pkg/iac/adapters/cloudformation/aws/ec2/adapt_test.go index 7e7ece3df765..ac05f8f7b263 100644 --- a/pkg/iac/adapters/cloudformation/aws/ec2/adapt_test.go +++ b/pkg/iac/adapters/cloudformation/aws/ec2/adapt_test.go @@ -1,14 +1,11 @@ package ec2 import ( - "context" "testing" - "github.com/aquasecurity/trivy/internal/testutil" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" "github.com/aquasecurity/trivy/pkg/iac/providers/aws/ec2" - "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/types" - "github.com/stretchr/testify/require" ) func TestAdapt(t *testing.T) { @@ -18,7 +15,7 @@ func TestAdapt(t *testing.T) { expected ec2.EC2 }{ { - name: "ec2 instance", + name: "complete", source: `AWSTemplateFormatVersion: 2010-09-09 Resources: MyEC2Instance: @@ -36,27 +33,155 @@ Resources: Encrypted: true - DeviceName: "/dev/sdk" NoDevice: {} + NewVolume: + Type: AWS::EC2::Volume + Properties: + KmsKeyId: alias/my_cmk + Encrypted: true + mySubnet: + Type: AWS::EC2::Subnet + Properties: + MapPublicIpOnLaunch: true + InstanceSecurityGroup: + Type: AWS::EC2::SecurityGroup + Properties: + GroupName: default + GroupDescription: Allow http to client host + VpcId: vpc-id + SecurityGroupIngress: + - IpProtocol: tcp + Description: ingress + FromPort: 80 + ToPort: 80 + CidrIp: 0.0.0.0/0 + SecurityGroupEgress: + - IpProtocol: tcp + Description: egress + FromPort: 80 + ToPort: 80 + CidrIp: 0.0.0.0/0 + myNetworkAcl: + Type: AWS::EC2::NetworkAcl + Properties: + VpcId: vpc-1122334455aabbccd + InboundRule: + Type: AWS::EC2::NetworkAclEntry + Properties: 
+ NetworkAclId: + Ref: myNetworkAcl + Egress: true + Protocol: 6 + RuleAction: allow + CidrBlock: 172.16.0.0/24 + myLaunchConfig: + Type: AWS::AutoScaling::LaunchConfiguration + Properties: + LaunchConfigurationName: test-cfg + InstanceId: !Ref MyEC2Instance + AssociatePublicIpAddress: true + SecurityGroups: + - !Ref InstanceSecurityGroup + UserData: test + BlockDeviceMappings: + - DeviceName: /dev/sda1 + Ebs: + VolumeSize: '30' + VolumeType: gp3 + Encrypted: true + - DeviceName: /dev/sdm + Ebs: + VolumeSize: '100' + DeleteOnTermination: false + MetadataOptions: + HttpTokens: required + HttpEndpoint: disabled `, expected: ec2.EC2{ Instances: []ec2.Instance{ { - Metadata: types.NewTestMetadata(), MetadataOptions: ec2.MetadataOptions{ HttpEndpoint: types.StringDefault("enabled", types.NewTestMetadata()), HttpTokens: types.StringDefault("optional", types.NewTestMetadata()), }, RootBlockDevice: &ec2.BlockDevice{ - Metadata: types.NewTestMetadata(), Encrypted: types.BoolDefault(true, types.NewTestMetadata()), }, EBSBlockDevices: []*ec2.BlockDevice{ { - Metadata: types.NewTestMetadata(), Encrypted: types.BoolDefault(false, types.NewTestMetadata()), }, }, }, }, + Volumes: []ec2.Volume{ + { + Encryption: ec2.Encryption{ + KMSKeyID: types.StringTest("alias/my_cmk"), + Enabled: types.BoolTest(true), + }, + }, + }, + Subnets: []ec2.Subnet{ + { + MapPublicIpOnLaunch: types.BoolTest(true), + }, + }, + SecurityGroups: []ec2.SecurityGroup{ + { + IsDefault: types.BoolTest(true), + Description: types.StringTest("Allow http to client host"), + VPCID: types.StringTest("vpc-id"), + IngressRules: []ec2.SecurityGroupRule{ + { + Description: types.StringTest("ingress"), + CIDRs: []types.StringValue{ + types.StringTest("0.0.0.0/0"), + }, + }, + }, + EgressRules: []ec2.SecurityGroupRule{ + { + Description: types.StringTest("egress"), + CIDRs: []types.StringValue{ + types.StringTest("0.0.0.0/0"), + }, + }, + }, + }, + }, + NetworkACLs: []ec2.NetworkACL{ + { + Rules: []ec2.NetworkACLRule{ + 
{ + Type: types.StringTest(ec2.TypeEgress), + Action: types.StringTest(ec2.ActionAllow), + Protocol: types.StringTest("6"), + CIDRs: []types.StringValue{ + types.StringTest("172.16.0.0/24"), + }, + }, + }, + }, + }, + LaunchConfigurations: []ec2.LaunchConfiguration{ + { + Name: types.StringTest("test-cfg"), + AssociatePublicIP: types.BoolTest(true), + RootBlockDevice: &ec2.BlockDevice{ + Encrypted: types.BoolTest(true), + }, + EBSBlockDevices: []*ec2.BlockDevice{ + { + Encrypted: types.BoolTest(false), + }, + }, + UserData: types.StringTest("test"), + MetadataOptions: ec2.MetadataOptions{ + HttpTokens: types.StringTest("required"), + HttpEndpoint: types.StringTest("disabled"), + }, + }, + }, }, }, { @@ -81,27 +206,23 @@ Resources: expected: ec2.EC2{ LaunchTemplates: []ec2.LaunchTemplate{ { - Metadata: types.NewTestMetadata(), - Name: types.String("MyTemplate", types.NewTestMetadata()), + Name: types.StringTest("MyTemplate"), Instance: ec2.Instance{ - Metadata: types.NewTestMetadata(), MetadataOptions: ec2.MetadataOptions{ - HttpEndpoint: types.String("enabled", types.NewTestMetadata()), - HttpTokens: types.String("required", types.NewTestMetadata()), + HttpEndpoint: types.StringTest("enabled"), + HttpTokens: types.StringTest("required"), }, }, }, }, Instances: []ec2.Instance{ { - Metadata: types.NewTestMetadata(), MetadataOptions: ec2.MetadataOptions{ - HttpEndpoint: types.String("enabled", types.NewTestMetadata()), - HttpTokens: types.String("required", types.NewTestMetadata()), + HttpEndpoint: types.StringTest("enabled"), + HttpTokens: types.StringTest("required"), }, RootBlockDevice: &ec2.BlockDevice{ - Metadata: types.NewTestMetadata(), - Encrypted: types.Bool(false, types.NewTestMetadata()), + Encrypted: types.BoolTest(false), }, }, }, @@ -129,27 +250,23 @@ Resources: expected: ec2.EC2{ LaunchTemplates: []ec2.LaunchTemplate{ { - Metadata: types.NewTestMetadata(), - Name: types.String("MyTemplate", types.NewTestMetadata()), + Name: 
types.StringTest("MyTemplate"), Instance: ec2.Instance{ - Metadata: types.NewTestMetadata(), MetadataOptions: ec2.MetadataOptions{ - HttpEndpoint: types.String("enabled", types.NewTestMetadata()), - HttpTokens: types.String("required", types.NewTestMetadata()), + HttpEndpoint: types.StringTest("enabled"), + HttpTokens: types.StringTest("required"), }, }, }, }, Instances: []ec2.Instance{ { - Metadata: types.NewTestMetadata(), MetadataOptions: ec2.MetadataOptions{ - HttpEndpoint: types.String("enabled", types.NewTestMetadata()), - HttpTokens: types.String("required", types.NewTestMetadata()), + HttpEndpoint: types.StringTest("enabled"), + HttpTokens: types.StringTest("required"), }, RootBlockDevice: &ec2.BlockDevice{ - Metadata: types.NewTestMetadata(), - Encrypted: types.Bool(false, types.NewTestMetadata()), + Encrypted: types.BoolTest(false), }, }, }, @@ -159,16 +276,7 @@ Resources: for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - - fsys := testutil.CreateFS(t, map[string]string{ - "main.yaml": tt.source, - }) - - fctx, err := parser.New().ParseFile(context.TODO(), fsys, "main.yaml") - require.NoError(t, err) - - adapted := Adapt(*fctx) - testutil.AssertDefsecEqual(t, tt.expected, adapted) + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) }) } diff --git a/pkg/iac/adapters/cloudformation/aws/ec2/instance.go b/pkg/iac/adapters/cloudformation/aws/ec2/instance.go index 8a7952f9b809..7b6f149e0168 100644 --- a/pkg/iac/adapters/cloudformation/aws/ec2/instance.go +++ b/pkg/iac/adapters/cloudformation/aws/ec2/instance.go @@ -2,11 +2,11 @@ package ec2 import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/ec2" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getInstances(ctx parser2.FileContext) (instances []ec2.Instance) { +func getInstances(ctx parser.FileContext) 
(instances []ec2.Instance) { instanceResources := ctx.GetResourcesByType("AWS::EC2::Instance") for _, r := range instanceResources { @@ -48,7 +48,7 @@ func getInstances(ctx parser2.FileContext) (instances []ec2.Instance) { return instances } -func findRelatedLaunchTemplate(fctx parser2.FileContext, r *parser2.Resource) (ec2.LaunchTemplate, bool) { +func findRelatedLaunchTemplate(fctx parser.FileContext, r *parser.Resource) (ec2.LaunchTemplate, bool) { launchTemplateRef := r.GetProperty("LaunchTemplate.LaunchTemplateName") if launchTemplateRef.IsString() { res := findLaunchTemplateByName(fctx, launchTemplateRef) @@ -69,7 +69,7 @@ func findRelatedLaunchTemplate(fctx parser2.FileContext, r *parser2.Resource) (e return adaptLaunchTemplate(resource), true } -func findLaunchTemplateByName(fctx parser2.FileContext, prop *parser2.Property) *parser2.Resource { +func findLaunchTemplateByName(fctx parser.FileContext, prop *parser.Property) *parser.Resource { for _, res := range fctx.GetResourcesByType("AWS::EC2::LaunchTemplate") { templateName := res.GetProperty("LaunchTemplateName") if templateName.IsNotString() { @@ -84,7 +84,7 @@ func findLaunchTemplateByName(fctx parser2.FileContext, prop *parser2.Property) return nil } -func getBlockDevices(r *parser2.Resource) []*ec2.BlockDevice { +func getBlockDevices(r *parser.Resource) []*ec2.BlockDevice { var blockDevices []*ec2.BlockDevice devicesProp := r.GetProperty("BlockDeviceMappings") diff --git a/pkg/iac/adapters/cloudformation/aws/ec2/launch_configuration.go b/pkg/iac/adapters/cloudformation/aws/ec2/launch_configuration.go index 9dcd80f5d47f..e99459b5d4f0 100644 --- a/pkg/iac/adapters/cloudformation/aws/ec2/launch_configuration.go +++ b/pkg/iac/adapters/cloudformation/aws/ec2/launch_configuration.go @@ -13,14 +13,14 @@ func getLaunchConfigurations(file parser.FileContext) (launchConfigurations []ec launchConfig := ec2.LaunchConfiguration{ Metadata: r.Metadata(), - Name: r.GetStringProperty("Name"), + Name: 
r.GetStringProperty("LaunchConfigurationName"), AssociatePublicIP: r.GetBoolProperty("AssociatePublicIpAddress"), MetadataOptions: ec2.MetadataOptions{ Metadata: r.Metadata(), HttpTokens: types.StringDefault("optional", r.Metadata()), HttpEndpoint: types.StringDefault("enabled", r.Metadata()), }, - UserData: r.GetStringProperty("UserData", ""), + UserData: r.GetStringProperty("UserData"), } if opts := r.GetProperty("MetadataOptions"); opts.IsNotNil() { diff --git a/pkg/iac/adapters/cloudformation/aws/ec2/launch_template.go b/pkg/iac/adapters/cloudformation/aws/ec2/launch_template.go index e22ac9abed3d..c138ed3284e1 100644 --- a/pkg/iac/adapters/cloudformation/aws/ec2/launch_template.go +++ b/pkg/iac/adapters/cloudformation/aws/ec2/launch_template.go @@ -2,11 +2,11 @@ package ec2 import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/ec2" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getLaunchTemplates(file parser2.FileContext) (templates []ec2.LaunchTemplate) { +func getLaunchTemplates(file parser.FileContext) (templates []ec2.LaunchTemplate) { launchConfigResources := file.GetResourcesByType("AWS::EC2::LaunchTemplate") for _, r := range launchConfigResources { @@ -15,7 +15,7 @@ func getLaunchTemplates(file parser2.FileContext) (templates []ec2.LaunchTemplat return templates } -func adaptLaunchTemplate(r *parser2.Resource) ec2.LaunchTemplate { +func adaptLaunchTemplate(r *parser.Resource) ec2.LaunchTemplate { launchTemplate := ec2.LaunchTemplate{ Metadata: r.Metadata(), Name: r.GetStringProperty("LaunchTemplateName", ""), diff --git a/pkg/iac/adapters/cloudformation/aws/ec2/security_group.go b/pkg/iac/adapters/cloudformation/aws/ec2/security_group.go index 687fd12d4366..72546aa116e0 100644 --- a/pkg/iac/adapters/cloudformation/aws/ec2/security_group.go +++ 
b/pkg/iac/adapters/cloudformation/aws/ec2/security_group.go @@ -2,11 +2,11 @@ package ec2 import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/ec2" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getSecurityGroups(ctx parser2.FileContext) (groups []ec2.SecurityGroup) { +func getSecurityGroups(ctx parser.FileContext) (groups []ec2.SecurityGroup) { for _, r := range ctx.GetResourcesByType("AWS::EC2::SecurityGroup") { group := ec2.SecurityGroup{ Metadata: r.Metadata(), @@ -22,7 +22,7 @@ func getSecurityGroups(ctx parser2.FileContext) (groups []ec2.SecurityGroup) { return groups } -func getIngressRules(r *parser2.Resource) (sgRules []ec2.SecurityGroupRule) { +func getIngressRules(r *parser.Resource) (sgRules []ec2.SecurityGroupRule) { if ingressProp := r.GetProperty("SecurityGroupIngress"); ingressProp.IsList() { for _, ingress := range ingressProp.AsList() { rule := ec2.SecurityGroupRule{ @@ -45,7 +45,7 @@ func getIngressRules(r *parser2.Resource) (sgRules []ec2.SecurityGroupRule) { return sgRules } -func getEgressRules(r *parser2.Resource) (sgRules []ec2.SecurityGroupRule) { +func getEgressRules(r *parser.Resource) (sgRules []ec2.SecurityGroupRule) { if egressProp := r.GetProperty("SecurityGroupEgress"); egressProp.IsList() { for _, egress := range egressProp.AsList() { rule := ec2.SecurityGroupRule{ diff --git a/pkg/iac/adapters/cloudformation/aws/ecr/ecr_test.go b/pkg/iac/adapters/cloudformation/aws/ecr/ecr_test.go new file mode 100644 index 000000000000..cb3e4b6b4b8d --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/ecr/ecr_test.go @@ -0,0 +1,102 @@ +package ecr + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/ecr" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/iam" + 
"github.com/aquasecurity/trivy/pkg/iac/types" + "github.com/liamg/iamgo" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected ecr.ECR + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + +`, + expected: ecr.ECR{}, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + MyRepository: + Type: AWS::ECR::Repository + Properties: + RepositoryName: "test-repository" + ImageScanningConfiguration: + ScanOnPush: true + EncryptionConfiguration: + EncryptionType: KMS + KmsKey: mykey + ImageTagMutability: IMMUTABLE + RepositoryPolicyText: + Version: "2012-10-17" + Statement: + - + Sid: AllowPushPull + Effect: Allow + Principal: + AWS: + - "arn:aws:iam::123456789012:user/Alice" + Action: + - "ecr:GetDownloadUrlForLayer" + - "ecr:BatchGetImage" + `, + expected: ecr.ECR{ + Repositories: []ecr.Repository{ + { + ImageTagsImmutable: types.BoolTest(true), + ImageScanning: ecr.ImageScanning{ + ScanOnPush: types.BoolTest(true), + }, + Encryption: ecr.Encryption{ + Type: types.StringTest("KMS"), + KMSKeyID: types.StringTest("mykey"), + }, + Policies: []iam.Policy{ + { + Document: func() iam.Document { + return iam.Document{ + Parsed: iamgo.NewPolicyBuilder(). + WithVersion("2012-10-17"). + WithStatement( + iamgo.NewStatementBuilder(). + WithSid("AllowPushPull"). + WithEffect("Allow"). + WithAWSPrincipals( + []string{"arn:aws:iam::123456789012:user/Alice"}, + ). + WithActions( + []string{ + "ecr:GetDownloadUrlForLayer", + "ecr:BatchGetImage", + }, + ). + Build(), + ). 
+ Build(), + } + }(), + }, + }, + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/ecr/repository.go b/pkg/iac/adapters/cloudformation/aws/ecr/repository.go index 886be64037a3..2c08d57a29c6 100644 --- a/pkg/iac/adapters/cloudformation/aws/ecr/repository.go +++ b/pkg/iac/adapters/cloudformation/aws/ecr/repository.go @@ -7,11 +7,11 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/ecr" "github.com/aquasecurity/trivy/pkg/iac/providers/aws/iam" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getRepositories(ctx parser2.FileContext) (repositories []ecr.Repository) { +func getRepositories(ctx parser.FileContext) (repositories []ecr.Repository) { repositoryResources := ctx.GetResourcesByType("AWS::ECR::Repository") @@ -57,7 +57,7 @@ func getRepositories(ctx parser2.FileContext) (repositories []ecr.Repository) { return repositories } -func getPolicy(r *parser2.Resource) (*iam.Policy, error) { +func getPolicy(r *parser.Resource) (*iam.Policy, error) { policyProp := r.GetProperty("RepositoryPolicyText") if policyProp.IsNil() { return nil, fmt.Errorf("missing policy") @@ -79,13 +79,10 @@ func getPolicy(r *parser2.Resource) (*iam.Policy, error) { }, nil } -func hasImmutableImageTags(r *parser2.Resource) iacTypes.BoolValue { +func hasImmutableImageTags(r *parser.Resource) iacTypes.BoolValue { mutabilityProp := r.GetProperty("ImageTagMutability") if mutabilityProp.IsNil() { return iacTypes.BoolDefault(false, r.Metadata()) } - if !mutabilityProp.EqualTo("IMMUTABLE") { - return iacTypes.Bool(false, mutabilityProp.Metadata()) - } - return iacTypes.Bool(true, mutabilityProp.Metadata()) + return 
iacTypes.Bool(mutabilityProp.EqualTo("IMMUTABLE"), mutabilityProp.Metadata()) } diff --git a/pkg/iac/adapters/cloudformation/aws/ecs/cluster.go b/pkg/iac/adapters/cloudformation/aws/ecs/cluster.go index 6359dbc4cc93..e3964076d25e 100644 --- a/pkg/iac/adapters/cloudformation/aws/ecs/cluster.go +++ b/pkg/iac/adapters/cloudformation/aws/ecs/cluster.go @@ -2,11 +2,11 @@ package ecs import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/ecs" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getClusters(ctx parser2.FileContext) (clusters []ecs.Cluster) { +func getClusters(ctx parser.FileContext) (clusters []ecs.Cluster) { clusterResources := ctx.GetResourcesByType("AWS::ECS::Cluster") @@ -24,7 +24,7 @@ func getClusters(ctx parser2.FileContext) (clusters []ecs.Cluster) { return clusters } -func getClusterSettings(r *parser2.Resource) ecs.ClusterSettings { +func getClusterSettings(r *parser.Resource) ecs.ClusterSettings { clusterSettings := ecs.ClusterSettings{ Metadata: r.Metadata(), @@ -45,7 +45,7 @@ func getClusterSettings(r *parser2.Resource) ecs.ClusterSettings { return clusterSettings } -func checkProperty(setting *parser2.Property, clusterSettings *ecs.ClusterSettings) { +func checkProperty(setting *parser.Property, clusterSettings *ecs.ClusterSettings) { settingMap := setting.AsMap() name := settingMap["Name"] if name.IsNotNil() && name.EqualTo("containerInsights") { diff --git a/pkg/iac/adapters/cloudformation/aws/ecs/ecs_test.go b/pkg/iac/adapters/cloudformation/aws/ecs/ecs_test.go new file mode 100644 index 000000000000..c6323a1df926 --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/ecs/ecs_test.go @@ -0,0 +1,108 @@ +package ecs + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/ecs" + 
"github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected ecs.ECS + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + ECSCluster: + Type: 'AWS::ECS::Cluster' + Properties: + ClusterName: MyFargateCluster + ClusterSettings: + - Name: containerInsights + Value: enabled + taskdefinition: + Type: AWS::ECS::TaskDefinition + Properties: + ContainerDefinitions: + - + Name: "busybox" + Image: "busybox" + Cpu: 256 + Memory: 512 + Essential: true + Privileged: true + Environment: + - Name: entryPoint + Value: 'sh, -c' + Volumes: + - + Host: + SourcePath: "/var/lib/docker/vfs/dir/" + Name: "my-vol" + EFSVolumeConfiguration: + TransitEncryption: enabled +`, + expected: ecs.ECS{ + Clusters: []ecs.Cluster{ + { + Settings: ecs.ClusterSettings{ + ContainerInsightsEnabled: types.BoolTest(true), + }, + }, + }, + TaskDefinitions: []ecs.TaskDefinition{ + { + Volumes: []ecs.Volume{ + { + EFSVolumeConfiguration: ecs.EFSVolumeConfiguration{ + TransitEncryptionEnabled: types.BoolTest(true), + }, + }, + }, + ContainerDefinitions: []ecs.ContainerDefinition{ + { + Name: types.StringTest("busybox"), + Image: types.StringTest("busybox"), + CPU: types.IntTest(256), + Memory: types.IntTest(512), + Essential: types.BoolTest(true), + Privileged: types.BoolTest(true), + Environment: []ecs.EnvVar{ + { + Name: "entryPoint", + Value: "sh, -c", + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + ECSCluster: + Type: 'AWS::ECS::Cluster' + taskdefinition: + Type: AWS::ECS::TaskDefinition + `, + expected: ecs.ECS{ + Clusters: []ecs.Cluster{{}}, + TaskDefinitions: []ecs.TaskDefinition{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git 
a/pkg/iac/adapters/cloudformation/aws/ecs/task_definition.go b/pkg/iac/adapters/cloudformation/aws/ecs/task_definition.go index cdb9ae08ab45..9c2e342bb6f3 100644 --- a/pkg/iac/adapters/cloudformation/aws/ecs/task_definition.go +++ b/pkg/iac/adapters/cloudformation/aws/ecs/task_definition.go @@ -2,11 +2,11 @@ package ecs import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/ecs" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getTaskDefinitions(ctx parser2.FileContext) (taskDefinitions []ecs.TaskDefinition) { +func getTaskDefinitions(ctx parser.FileContext) (taskDefinitions []ecs.TaskDefinition) { taskDefResources := ctx.GetResourcesByType("AWS::ECS::TaskDefinition") @@ -23,7 +23,7 @@ func getTaskDefinitions(ctx parser2.FileContext) (taskDefinitions []ecs.TaskDefi return taskDefinitions } -func getContainerDefinitions(r *parser2.Resource) ([]ecs.ContainerDefinition, error) { +func getContainerDefinitions(r *parser.Resource) ([]ecs.ContainerDefinition, error) { var definitions []ecs.ContainerDefinition containerDefs := r.GetProperty("ContainerDefinitions") if containerDefs.IsNil() || containerDefs.IsNotList() { @@ -36,19 +36,19 @@ func getContainerDefinitions(r *parser2.Resource) ([]ecs.ContainerDefinition, er if envVarsList.IsNotNil() && envVarsList.IsList() { for _, envVar := range envVarsList.AsList() { envVars = append(envVars, ecs.EnvVar{ - Name: envVar.GetStringProperty("Name", "").Value(), - Value: envVar.GetStringProperty("Value", "").Value(), + Name: envVar.GetStringProperty("Name").Value(), + Value: envVar.GetStringProperty("Value").Value(), }) } } definition := ecs.ContainerDefinition{ Metadata: containerDef.Metadata(), - Name: containerDef.GetStringProperty("Name", ""), - Image: containerDef.GetStringProperty("Image", ""), - CPU: containerDef.GetIntProperty("CPU", 1), - Memory: 
containerDef.GetIntProperty("Memory", 128), - Essential: containerDef.GetBoolProperty("Essential", false), - Privileged: containerDef.GetBoolProperty("Privileged", false), + Name: containerDef.GetStringProperty("Name"), + Image: containerDef.GetStringProperty("Image"), + CPU: containerDef.GetIntProperty("Cpu"), + Memory: containerDef.GetIntProperty("Memory"), + Essential: containerDef.GetBoolProperty("Essential"), + Privileged: containerDef.GetBoolProperty("Privileged"), Environment: envVars, PortMappings: nil, } @@ -60,7 +60,7 @@ func getContainerDefinitions(r *parser2.Resource) ([]ecs.ContainerDefinition, er return definitions, nil } -func getVolumes(r *parser2.Resource) (volumes []ecs.Volume) { +func getVolumes(r *parser.Resource) (volumes []ecs.Volume) { volumesList := r.GetProperty("Volumes") if volumesList.IsNil() || volumesList.IsNotList() { @@ -76,7 +76,7 @@ func getVolumes(r *parser2.Resource) (volumes []ecs.Volume) { }, } transitProp := v.GetProperty("EFSVolumeConfiguration.TransitEncryption") - if transitProp.IsNotNil() && transitProp.EqualTo("enabled", parser2.IgnoreCase) { + if transitProp.IsNotNil() && transitProp.EqualTo("enabled", parser.IgnoreCase) { volume.EFSVolumeConfiguration.TransitEncryptionEnabled = types.Bool(true, transitProp.Metadata()) } diff --git a/pkg/iac/adapters/cloudformation/aws/efs/efs_test.go b/pkg/iac/adapters/cloudformation/aws/efs/efs_test.go new file mode 100644 index 000000000000..a22d769020b6 --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/efs/efs_test.go @@ -0,0 +1,52 @@ +package efs + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/efs" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected efs.EFS + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + FileSystemResource: + Type: 
'AWS::EFS::FileSystem' + Properties: + Encrypted: true +`, + expected: efs.EFS{ + FileSystems: []efs.FileSystem{ + { + Encrypted: types.BoolTest(true), + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + FileSystemResource: + Type: 'AWS::EFS::FileSystem' + `, + expected: efs.EFS{ + FileSystems: []efs.FileSystem{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/eks/cluster.go b/pkg/iac/adapters/cloudformation/aws/eks/cluster.go index 07adedf06c21..d4c80e72dbd4 100644 --- a/pkg/iac/adapters/cloudformation/aws/eks/cluster.go +++ b/pkg/iac/adapters/cloudformation/aws/eks/cluster.go @@ -2,11 +2,11 @@ package eks import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/eks" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getClusters(ctx parser2.FileContext) (clusters []eks.Cluster) { +func getClusters(ctx parser.FileContext) (clusters []eks.Cluster) { clusterResources := ctx.GetResourcesByType("AWS::EKS::Cluster") @@ -14,6 +14,7 @@ func getClusters(ctx parser2.FileContext) (clusters []eks.Cluster) { cluster := eks.Cluster{ Metadata: r.Metadata(), // Logging not supported for cloudformation https://github.com/aws/containers-roadmap/issues/242 + // TODO: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eks-cluster.html#cfn-eks-cluster-logging Logging: eks.Logging{ Metadata: r.Metadata(), API: iacTypes.BoolUnresolvable(r.Metadata()), @@ -24,6 +25,7 @@ func getClusters(ctx parser2.FileContext) (clusters []eks.Cluster) { }, Encryption: getEncryptionConfig(r), // endpoint protection not supported - https://github.com/aws/containers-roadmap/issues/242 + // TODO: 
https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eks-cluster.html#cfn-eks-cluster-resourcesvpcconfig PublicAccessEnabled: iacTypes.BoolUnresolvable(r.Metadata()), PublicAccessCIDRs: nil, } @@ -33,7 +35,7 @@ func getClusters(ctx parser2.FileContext) (clusters []eks.Cluster) { return clusters } -func getEncryptionConfig(r *parser2.Resource) eks.Encryption { +func getEncryptionConfig(r *parser.Resource) eks.Encryption { encryption := eks.Encryption{ Metadata: r.Metadata(), @@ -41,6 +43,8 @@ func getEncryptionConfig(r *parser2.Resource) eks.Encryption { KMSKeyID: iacTypes.StringDefault("", r.Metadata()), } + // TODO: EncryptionConfig is a list + // https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eks-cluster.html#cfn-eks-cluster-encryptionconfig if encProp := r.GetProperty("EncryptionConfig"); encProp.IsNotNil() { encryption.Metadata = encProp.Metadata() encryption.KMSKeyID = encProp.GetStringProperty("Provider.KeyArn") diff --git a/pkg/iac/adapters/cloudformation/aws/eks/eks_test.go b/pkg/iac/adapters/cloudformation/aws/eks/eks_test.go new file mode 100644 index 000000000000..84095c3b6592 --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/eks/eks_test.go @@ -0,0 +1,45 @@ +package eks + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/eks" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected eks.EKS + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + EKSCluster: + Type: AWS::EKS::Cluster +`, + expected: eks.EKS{ + Clusters: []eks.Cluster{{}}, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + EKSCluster: + Type: AWS::EKS::Cluster + `, + expected: eks.EKS{ + Clusters: []eks.Cluster{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + 
testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/elasticache/elasticache_test.go b/pkg/iac/adapters/cloudformation/aws/elasticache/elasticache_test.go new file mode 100644 index 000000000000..e7e3d018b14c --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/elasticache/elasticache_test.go @@ -0,0 +1,82 @@ +package elasticache + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/elasticache" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected elasticache.ElastiCache + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + ElasticacheCluster: + Type: 'AWS::ElastiCache::CacheCluster' + Properties: + Engine: memcached + CacheNodeType: cache.t2.micro + SnapshotRetentionLimit: 5 + myReplicationGroup: + Type: 'AWS::ElastiCache::ReplicationGroup' + Properties: + TransitEncryptionEnabled: true + AtRestEncryptionEnabled: true + mySecGroup: + Type: AWS::ElastiCache::SecurityGroup + Properties: + Description: test +`, + expected: elasticache.ElastiCache{ + Clusters: []elasticache.Cluster{ + { + Engine: types.StringTest("memcached"), + NodeType: types.StringTest("cache.t2.micro"), + SnapshotRetentionLimit: types.IntTest(5), + }, + }, + ReplicationGroups: []elasticache.ReplicationGroup{ + { + TransitEncryptionEnabled: types.BoolTest(true), + AtRestEncryptionEnabled: types.BoolTest(true), + }, + }, + SecurityGroups: []elasticache.SecurityGroup{ + { + Description: types.StringTest("test"), + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + ElasticacheCluster: + Type: 'AWS::ElastiCache::CacheCluster' + myReplicationGroup: + Type: 'AWS::ElastiCache::ReplicationGroup' + mySecGroup: + Type: AWS::ElastiCache::SecurityGroup + `, + 
expected: elasticache.ElastiCache{ + Clusters: []elasticache.Cluster{{}}, + ReplicationGroups: []elasticache.ReplicationGroup{{}}, + SecurityGroups: []elasticache.SecurityGroup{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/elasticsearch/domain.go b/pkg/iac/adapters/cloudformation/aws/elasticsearch/domain.go index 5ff46bc41cbd..26747e44999b 100644 --- a/pkg/iac/adapters/cloudformation/aws/elasticsearch/domain.go +++ b/pkg/iac/adapters/cloudformation/aws/elasticsearch/domain.go @@ -13,9 +13,10 @@ func getDomains(ctx parser.FileContext) (domains []elasticsearch.Domain) { for _, r := range domainResources { domain := elasticsearch.Domain{ - Metadata: r.Metadata(), - DomainName: r.GetStringProperty("DomainName"), - AccessPolicies: r.GetStringProperty("AccessPolicies"), + Metadata: r.Metadata(), + DomainName: r.GetStringProperty("DomainName"), + AccessPolicies: r.GetStringProperty("AccessPolicies"), + // TODO: ElasticsearchClusterConfig changed to ClusterConfig DedicatedMasterEnabled: r.GetBoolProperty("ElasticsearchClusterConfig.DedicatedMasterEnabled"), VpcId: iacTypes.String("", r.Metadata()), LogPublishing: elasticsearch.LogPublishing{ @@ -35,7 +36,6 @@ func getDomains(ctx parser.FileContext) (domains []elasticsearch.Domain) { Endpoint: elasticsearch.Endpoint{ Metadata: r.Metadata(), EnforceHTTPS: iacTypes.BoolDefault(false, r.Metadata()), - TLSPolicy: iacTypes.StringDefault("Policy-Min-TLS-1-0-2019-07", r.Metadata()), }, ServiceSoftwareOptions: elasticsearch.ServiceSoftwareOptions{ Metadata: r.Metadata(), @@ -49,22 +49,22 @@ func getDomains(ctx parser.FileContext) (domains []elasticsearch.Domain) { if prop := r.GetProperty("LogPublishingOptions"); prop.IsNotNil() { domain.LogPublishing = elasticsearch.LogPublishing{ Metadata: prop.Metadata(), - AuditEnabled: prop.GetBoolProperty("AUDIT_LOGS.Enabled", 
false), - CloudWatchLogGroupArn: prop.GetStringProperty("CloudWatchLogsLogGroupArn"), + AuditEnabled: prop.GetBoolProperty("AUDIT_LOGS.Enabled"), + CloudWatchLogGroupArn: prop.GetStringProperty("AUDIT_LOGS.CloudWatchLogsLogGroupArn"), } } if prop := r.GetProperty("NodeToNodeEncryptionOptions"); prop.IsNotNil() { domain.TransitEncryption = elasticsearch.TransitEncryption{ Metadata: prop.Metadata(), - Enabled: prop.GetBoolProperty("Enabled", false), + Enabled: prop.GetBoolProperty("Enabled"), } } if prop := r.GetProperty("EncryptionAtRestOptions"); prop.IsNotNil() { domain.AtRestEncryption = elasticsearch.AtRestEncryption{ Metadata: prop.Metadata(), - Enabled: prop.GetBoolProperty("Enabled", false), + Enabled: prop.GetBoolProperty("Enabled"), KmsKeyId: prop.GetStringProperty("KmsKeyId"), } } @@ -72,8 +72,8 @@ func getDomains(ctx parser.FileContext) (domains []elasticsearch.Domain) { if prop := r.GetProperty("DomainEndpointOptions"); prop.IsNotNil() { domain.Endpoint = elasticsearch.Endpoint{ Metadata: prop.Metadata(), - EnforceHTTPS: prop.GetBoolProperty("EnforceHTTPS", false), - TLSPolicy: prop.GetStringProperty("TLSSecurityPolicy", "Policy-Min-TLS-1-0-2019-07"), + EnforceHTTPS: prop.GetBoolProperty("EnforceHTTPS"), + TLSPolicy: prop.GetStringProperty("TLSSecurityPolicy"), } } diff --git a/pkg/iac/adapters/cloudformation/aws/elasticsearch/elasticsearch_test.go b/pkg/iac/adapters/cloudformation/aws/elasticsearch/elasticsearch_test.go new file mode 100644 index 000000000000..514c689b8d28 --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/elasticsearch/elasticsearch_test.go @@ -0,0 +1,88 @@ +package elasticsearch + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/elasticsearch" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected elasticsearch.Elasticsearch + }{ + { + 
name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + OpenSearchServiceDomain: + Type: AWS::OpenSearchService::Domain + Properties: + DomainName: 'test' + NodeToNodeEncryptionOptions: + Enabled: true + EncryptionAtRestOptions: + Enabled: true + KmsKeyId: mykey + DomainEndpointOptions: + EnforceHTTPS: true + TLSSecurityPolicy: Policy-Min-TLS-1-0-2019-07 + AccessPolicies: + Version: '2012-10-17' + Statement: + - + Effect: 'Allow' + Principal: + AWS: 'arn:aws:iam::123456789012:user/opensearch-user' + Action: 'es:*' + Resource: 'arn:aws:es:us-east-1:846973539254:domain/test/*' + LogPublishingOptions: + AUDIT_LOGS: + CloudWatchLogsLogGroupArn: 'arn:aws:logs:us-east-1:123456789012:log-group:/aws/opensearch/domains/opensearch-application-logs' + Enabled: true +`, + expected: elasticsearch.Elasticsearch{ + Domains: []elasticsearch.Domain{ + { + DomainName: types.StringTest("test"), + LogPublishing: elasticsearch.LogPublishing{ + AuditEnabled: types.BoolTest(true), + CloudWatchLogGroupArn: types.StringTest("arn:aws:logs:us-east-1:123456789012:log-group:/aws/opensearch/domains/opensearch-application-logs"), + }, + TransitEncryption: elasticsearch.TransitEncryption{ + Enabled: types.BoolTest(true), + }, + AtRestEncryption: elasticsearch.AtRestEncryption{ + Enabled: types.BoolTest(true), + KmsKeyId: types.StringTest("mykey"), + }, + Endpoint: elasticsearch.Endpoint{ + EnforceHTTPS: types.BoolTest(true), + TLSPolicy: types.StringTest("Policy-Min-TLS-1-0-2019-07"), + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + OpenSearchServiceDomain: + Type: AWS::OpenSearchService::Domain + `, + expected: elasticsearch.Elasticsearch{ + Domains: []elasticsearch.Domain{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/elb/adapt_test.go 
b/pkg/iac/adapters/cloudformation/aws/elb/adapt_test.go index ca8fd631fa39..2c5ee494e66d 100644 --- a/pkg/iac/adapters/cloudformation/aws/elb/adapt_test.go +++ b/pkg/iac/adapters/cloudformation/aws/elb/adapt_test.go @@ -1,14 +1,11 @@ package elb import ( - "context" "testing" - "github.com/aquasecurity/trivy/internal/testutil" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" "github.com/aquasecurity/trivy/pkg/iac/providers/aws/elb" - "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/types" - "github.com/stretchr/testify/require" ) func TestAdapt(t *testing.T) { @@ -18,7 +15,7 @@ func TestAdapt(t *testing.T) { expected elb.ELB }{ { - name: "LoadBalancer", + name: "complete", source: `AWSTemplateFormatVersion: "2010-09-09" Resources: LoadBalancer: @@ -27,6 +24,7 @@ Resources: - ALBLogsBucketPermission Properties: Name: "k8s-dev" + Scheme: internal IpAddressType: ipv4 LoadBalancerAttributes: - Key: routing.http2.enabled @@ -43,13 +41,36 @@ Resources: - Key: elbv2.k8s.aws/cluster Value: "biomage-dev" Type: application + Listener: + Type: AWS::ElasticLoadBalancingV2::Listener + Properties: + DefaultActions: + - Type: 'redirect' + RedirectConfig: + Port: 443 + Protocol: HTTPS + StatusCode: HTTP_302 + LoadBalancerArn: !Ref LoadBalancer + Protocol: HTTPS + SslPolicy: "ELBSecurityPolicy-TLS-1-2-2017-01" `, expected: elb.ELB{ LoadBalancers: []elb.LoadBalancer{ { - Metadata: types.NewTestMetadata(), - Type: types.String("application", types.NewTestMetadata()), - DropInvalidHeaderFields: types.Bool(true, types.NewTestMetadata()), + Type: types.StringTest("application"), + DropInvalidHeaderFields: types.BoolTest(true), + Internal: types.Bool(true, types.NewTestMetadata()), + Listeners: []elb.Listener{ + { + Protocol: types.StringTest("HTTPS"), + TLSPolicy: types.StringTest("ELBSecurityPolicy-TLS-1-2-2017-01"), + DefaultActions: []elb.Action{ + { + Type: types.StringTest("redirect"), + }, + }, 
+ }, + }, }, }, }, @@ -58,15 +79,7 @@ Resources: for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - fs := testutil.CreateFS(t, map[string]string{ - "template.yaml": tt.source, - }) - - p := parser.New() - fctx, err := p.ParseFile(context.TODO(), fs, "template.yaml") - require.NoError(t, err) - - testutil.AssertDefsecEqual(t, tt.expected, Adapt(*fctx)) + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) }) } } diff --git a/pkg/iac/adapters/cloudformation/aws/elb/loadbalancer.go b/pkg/iac/adapters/cloudformation/aws/elb/loadbalancer.go index 50b8f26275d5..002b6487ba43 100644 --- a/pkg/iac/adapters/cloudformation/aws/elb/loadbalancer.go +++ b/pkg/iac/adapters/cloudformation/aws/elb/loadbalancer.go @@ -2,11 +2,11 @@ package elb import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/elb" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getLoadBalancers(ctx parser2.FileContext) (loadbalancers []elb.LoadBalancer) { +func getLoadBalancers(ctx parser.FileContext) (loadbalancers []elb.LoadBalancer) { loadBalanacerResources := ctx.GetResourcesByType("AWS::ElasticLoadBalancingV2::LoadBalancer") @@ -24,7 +24,7 @@ func getLoadBalancers(ctx parser2.FileContext) (loadbalancers []elb.LoadBalancer return loadbalancers } -func getListeners(lbr *parser2.Resource, ctx parser2.FileContext) (listeners []elb.Listener) { +func getListeners(lbr *parser.Resource, ctx parser.FileContext) (listeners []elb.Listener) { listenerResources := ctx.GetResourcesByType("AWS::ElasticLoadBalancingV2::Listener") @@ -43,7 +43,7 @@ func getListeners(lbr *parser2.Resource, ctx parser2.FileContext) (listeners []e return listeners } -func getDefaultListenerActions(r *parser2.Resource) (actions []elb.Action) { +func getDefaultListenerActions(r *parser.Resource) (actions []elb.Action) { defaultActionsProp := 
r.GetProperty("DefaultActions") if defaultActionsProp.IsNotList() { return actions @@ -57,15 +57,15 @@ func getDefaultListenerActions(r *parser2.Resource) (actions []elb.Action) { return actions } -func isInternal(r *parser2.Resource) types.BoolValue { +func isInternal(r *parser.Resource) types.BoolValue { schemeProp := r.GetProperty("Scheme") if schemeProp.IsNotString() { return r.BoolDefault(false) } - return types.Bool(schemeProp.EqualTo("internal", parser2.IgnoreCase), schemeProp.Metadata()) + return types.Bool(schemeProp.EqualTo("internal", parser.IgnoreCase), schemeProp.Metadata()) } -func checkForDropInvalidHeaders(r *parser2.Resource) types.BoolValue { +func checkForDropInvalidHeaders(r *parser.Resource) types.BoolValue { attributesProp := r.GetProperty("LoadBalancerAttributes") if attributesProp.IsNotList() { return types.BoolDefault(false, r.Metadata()) diff --git a/pkg/iac/adapters/cloudformation/aws/iam/iam_test.go b/pkg/iac/adapters/cloudformation/aws/iam/iam_test.go new file mode 100644 index 000000000000..3e548dec0cf7 --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/iam/iam_test.go @@ -0,0 +1,189 @@ +package iam + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/iam" + "github.com/aquasecurity/trivy/pkg/iac/types" + "github.com/liamg/iamgo" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected iam.IAM + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + myIAMPolicy: + Type: 'AWS::IAM::Policy' + Properties: + PolicyName: TestPolicy + PolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Action: + - 'cloudformation:Describe*' + Resource: '*' + Groups: + - !Ref MyGroup + Users: + - !Ref PublishUser + Roles: + - !Ref MyRole + MyGroup: + Type: AWS::IAM::Group + Properties: + GroupName: TestGroup + Policies: + - PolicyName: TestGroupPolicy + 
PolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Resource: arn:*:cloudfront::*:distribution/* + Action: + - cloudfront:CreateDistribution + MyUser: + Type: AWS::IAM::User + Properties: + UserName: TestUser + Policies: + - PolicyName: TestUserPolicy + PolicyDocument: + Statement: + - Action: 's3:*' + Effect: Allow + Resource: + - 'arn:aws:s3:::testbucket' + MyRole: + Type: 'AWS::IAM::Role' + Properties: + RoleName: TestRole + Policies: + - PolicyName: TestRolePolicy + PolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Action: + - 'sts:AssumeRole' + AccessKey: + Type: AWS::IAM::AccessKey + Properties: + UserName: !Ref MyUser + Status: Active +`, + expected: iam.IAM{ + Policies: []iam.Policy{ + { + Name: types.StringTest("TestPolicy"), + Document: func() iam.Document { + return iam.Document{ + Parsed: iamgo.NewPolicyBuilder(). + WithVersion("2012-10-17"). + WithStatement( + iamgo.NewStatementBuilder(). + WithEffect("Allow"). + WithActions([]string{"cloudformation:Describe*"}). + WithResources([]string{"*"}). + Build(), + ). + Build(), + } + }(), + }, + }, + Users: []iam.User{ + { + Name: types.StringTest("TestUser"), + Policies: []iam.Policy{ + { + Name: types.StringTest("TestUserPolicy"), + Document: func() iam.Document { + return iam.Document{ + Parsed: iamgo.NewPolicyBuilder(). + WithStatement( + iamgo.NewStatementBuilder(). + WithEffect("Allow"). + WithActions([]string{"s3:*"}). + WithResources([]string{"arn:aws:s3:::testbucket"}). + Build(), + ). + Build(), + } + }(), + }, + }, + }, + }, + Groups: []iam.Group{ + { + Name: types.StringTest("TestGroup"), + Policies: []iam.Policy{ + { + Name: types.StringTest("TestGroupPolicy"), + Document: func() iam.Document { + return iam.Document{ + Parsed: iamgo.NewPolicyBuilder(). + WithVersion("2012-10-17"). + WithStatement( + iamgo.NewStatementBuilder(). + WithEffect("Allow"). + WithActions([]string{"cloudfront:CreateDistribution"}). 
+ WithResources([]string{"arn:*:cloudfront::*:distribution/*"}). + Build(), + ). + Build(), + } + }(), + }, + }, + }, + }, + Roles: []iam.Role{ + { + Name: types.StringTest("TestRole"), + Policies: []iam.Policy{ + { + Name: types.StringTest("TestRolePolicy"), + Document: func() iam.Document { + return iam.Document{ + Parsed: iamgo.NewPolicyBuilder(). + WithVersion("2012-10-17"). + WithStatement( + iamgo.NewStatementBuilder(). + WithEffect("Allow"). + WithActions([]string{"sts:AssumeRole"}). + Build(), + ). + Build(), + } + }(), + }, + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + + `, + expected: iam.IAM{}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/iam/policy.go b/pkg/iac/adapters/cloudformation/aws/iam/policy.go index 9843c8cdaa43..f83771f882d2 100644 --- a/pkg/iac/adapters/cloudformation/aws/iam/policy.go +++ b/pkg/iac/adapters/cloudformation/aws/iam/policy.go @@ -4,11 +4,11 @@ import ( "github.com/liamg/iamgo" "github.com/aquasecurity/trivy/pkg/iac/providers/aws/iam" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getPolicies(ctx parser2.FileContext) (policies []iam.Policy) { +func getPolicies(ctx parser.FileContext) (policies []iam.Policy) { for _, policyResource := range ctx.GetResourcesByType("AWS::IAM::Policy") { policy := iam.Policy{ @@ -34,7 +34,7 @@ func getPolicies(ctx parser2.FileContext) (policies []iam.Policy) { return policies } -func getRoles(ctx parser2.FileContext) (roles []iam.Role) { +func getRoles(ctx parser.FileContext) (roles []iam.Role) { for _, roleResource := range ctx.GetResourcesByType("AWS::IAM::Role") { policyProp := roleResource.GetProperty("Policies") 
roleName := roleResource.GetStringProperty("RoleName") @@ -48,10 +48,10 @@ func getRoles(ctx parser2.FileContext) (roles []iam.Role) { return roles } -func getUsers(ctx parser2.FileContext) (users []iam.User) { +func getUsers(ctx parser.FileContext) (users []iam.User) { for _, userResource := range ctx.GetResourcesByType("AWS::IAM::User") { policyProp := userResource.GetProperty("Policies") - userName := userResource.GetStringProperty("GroupName") + userName := userResource.GetStringProperty("UserName") users = append(users, iam.User{ Metadata: userResource.Metadata(), @@ -64,7 +64,8 @@ func getUsers(ctx parser2.FileContext) (users []iam.User) { return users } -func getAccessKeys(ctx parser2.FileContext, username string) (accessKeys []iam.AccessKey) { +func getAccessKeys(ctx parser.FileContext, username string) (accessKeys []iam.AccessKey) { + // TODO: also search for a key by the logical id of the resource for _, keyResource := range ctx.GetResourcesByType("AWS::IAM::AccessKey") { keyUsername := keyResource.GetStringProperty("UserName") if !keyUsername.EqualTo(username) { @@ -86,7 +87,7 @@ func getAccessKeys(ctx parser2.FileContext, username string) (accessKeys []iam.A return accessKeys } -func getGroups(ctx parser2.FileContext) (groups []iam.Group) { +func getGroups(ctx parser.FileContext) (groups []iam.Group) { for _, groupResource := range ctx.GetResourcesByType("AWS::IAM::Group") { policyProp := groupResource.GetProperty("Policies") groupName := groupResource.GetStringProperty("GroupName") @@ -100,7 +101,7 @@ func getGroups(ctx parser2.FileContext) (groups []iam.Group) { return groups } -func getPoliciesDocs(policiesProp *parser2.Property) []iam.Policy { +func getPoliciesDocs(policiesProp *parser.Property) []iam.Policy { var policies []iam.Policy for _, policy := range policiesProp.AsList() { diff --git a/pkg/iac/adapters/cloudformation/aws/kinesis/kinesis_test.go b/pkg/iac/adapters/cloudformation/aws/kinesis/kinesis_test.go new file mode 100644 index 
000000000000..ce38afadf806 --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/kinesis/kinesis_test.go @@ -0,0 +1,57 @@ +package kinesis + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/kinesis" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected kinesis.Kinesis + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + MyStream: + Type: 'AWS::Kinesis::Stream' + Properties: + StreamEncryption: + EncryptionType: KMS + KeyId: key +`, + expected: kinesis.Kinesis{ + Streams: []kinesis.Stream{ + { + Encryption: kinesis.Encryption{ + Type: types.StringTest("KMS"), + KMSKeyID: types.StringTest("key"), + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + MyStream: + Type: 'AWS::Kinesis::Stream' + `, + expected: kinesis.Kinesis{ + Streams: []kinesis.Stream{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/kinesis/stream.go b/pkg/iac/adapters/cloudformation/aws/kinesis/stream.go index b2bc8bac3411..6c10ec6134c0 100644 --- a/pkg/iac/adapters/cloudformation/aws/kinesis/stream.go +++ b/pkg/iac/adapters/cloudformation/aws/kinesis/stream.go @@ -3,7 +3,6 @@ package kinesis import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/kinesis" "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" - "github.com/aquasecurity/trivy/pkg/iac/types" ) func getStreams(ctx parser.FileContext) (streams []kinesis.Stream) { @@ -14,11 +13,6 @@ func getStreams(ctx parser.FileContext) (streams []kinesis.Stream) { stream := kinesis.Stream{ Metadata: r.Metadata(), - Encryption: kinesis.Encryption{ - Metadata: r.Metadata(), - Type: 
types.StringDefault("KMS", r.Metadata()), - KMSKeyID: types.StringDefault("", r.Metadata()), - }, } if prop := r.GetProperty("StreamEncryption"); prop.IsNotNil() { diff --git a/pkg/iac/adapters/cloudformation/aws/lambda/function.go b/pkg/iac/adapters/cloudformation/aws/lambda/function.go index 02bde4b903ff..f91a565d193a 100644 --- a/pkg/iac/adapters/cloudformation/aws/lambda/function.go +++ b/pkg/iac/adapters/cloudformation/aws/lambda/function.go @@ -2,29 +2,24 @@ package lambda import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/lambda" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" - "github.com/aquasecurity/trivy/pkg/iac/types" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" ) -func getFunctions(ctx parser2.FileContext) (functions []lambda.Function) { +func getFunctions(ctx parser.FileContext) (functions []lambda.Function) { functionResources := ctx.GetResourcesByType("AWS::Lambda::Function") for _, r := range functionResources { function := lambda.Function{ - Metadata: r.Metadata(), - Tracing: lambda.Tracing{ - Metadata: r.Metadata(), - Mode: types.StringDefault("PassThrough", r.Metadata()), - }, + Metadata: r.Metadata(), Permissions: getPermissions(r, ctx), } if prop := r.GetProperty("TracingConfig"); prop.IsNotNil() { function.Tracing = lambda.Tracing{ Metadata: prop.Metadata(), - Mode: prop.GetStringProperty("Mode", "PassThrough"), + Mode: prop.GetStringProperty("Mode"), } } @@ -34,7 +29,7 @@ func getFunctions(ctx parser2.FileContext) (functions []lambda.Function) { return functions } -func getPermissions(funcR *parser2.Resource, ctx parser2.FileContext) (perms []lambda.Permission) { +func getPermissions(funcR *parser.Resource, ctx parser.FileContext) (perms []lambda.Permission) { permissionResources := ctx.GetResourcesByType("AWS::Lambda::Permission") diff --git a/pkg/iac/adapters/cloudformation/aws/lambda/lambda_test.go b/pkg/iac/adapters/cloudformation/aws/lambda/lambda_test.go new file 
mode 100644 index 000000000000..4262181f89ee --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/lambda/lambda_test.go @@ -0,0 +1,76 @@ +package lambda + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/lambda" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected lambda.Lambda + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + lambdaFunction: + Type: AWS::Lambda::Function + Properties: + TracingConfig: + Mode: Active + permission: + Type: AWS::Lambda::Permission + Properties: + FunctionName: !Ref lambdaFunction + Action: lambda:InvokeFunction + Principal: s3.amazonaws.com + SourceArn: arn +`, + expected: lambda.Lambda{ + Functions: []lambda.Function{ + { + Tracing: lambda.Tracing{ + Mode: types.StringTest("Active"), + }, + Permissions: []lambda.Permission{ + { + Principal: types.StringTest("s3.amazonaws.com"), + SourceARN: types.StringTest("arn"), + }, + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + lambdaFunction: + Type: AWS::Lambda::Function + permission: + Type: AWS::Lambda::Permission + Properties: + FunctionName: !Ref lambdaFunction + `, + expected: lambda.Lambda{ + Functions: []lambda.Function{ + { + Permissions: []lambda.Permission{{}}, + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/mq/mq_test.go b/pkg/iac/adapters/cloudformation/aws/mq/mq_test.go new file mode 100644 index 000000000000..b4f1d5048898 --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/mq/mq_test.go @@ -0,0 +1,59 @@ +package mq + +import ( + "testing" + + 
"github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/mq" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected mq.MQ + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + BasicBroker: + Type: "AWS::AmazonMQ::Broker" + Properties: + PubliclyAccessible: true + Logs: + Audit: true + General: true +`, + expected: mq.MQ{ + Brokers: []mq.Broker{ + { + PublicAccess: types.BoolTest(true), + Logging: mq.Logging{ + Audit: types.BoolTest(true), + General: types.BoolTest(true), + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + BasicBroker: + Type: "AWS::AmazonMQ::Broker" + `, + expected: mq.MQ{ + Brokers: []mq.Broker{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/msk/msk_test.go b/pkg/iac/adapters/cloudformation/aws/msk/msk_test.go new file mode 100644 index 000000000000..2cc5a6cd0945 --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/msk/msk_test.go @@ -0,0 +1,87 @@ +package msk + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/msk" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected msk.MSK + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + cluster: + Type: AWS::MSK::Cluster + Properties: + EncryptionInfo: + EncryptionInTransit: + ClientBroker: 'PLAINTEXT' + EncryptionAtRest: + DataVolumeKMSKeyId: key + LoggingInfo: + BrokerLogs: + S3: + Enabled: true + CloudWatchLogs: + Enabled: true + Firehose: + Enabled: 
true +`, + expected: msk.MSK{ + Clusters: []msk.Cluster{ + { + EncryptionInTransit: msk.EncryptionInTransit{ + ClientBroker: types.StringTest("PLAINTEXT"), + }, + EncryptionAtRest: msk.EncryptionAtRest{ + KMSKeyARN: types.StringTest("key"), + Enabled: types.BoolTest(true), + }, + Logging: msk.Logging{ + Broker: msk.BrokerLogging{ + S3: msk.S3Logging{ + Enabled: types.BoolTest(true), + }, + Firehose: msk.FirehoseLogging{ + Enabled: types.BoolTest(true), + }, + Cloudwatch: msk.CloudwatchLogging{ + Enabled: types.BoolTest(true), + }, + }, + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + cluster: + Type: AWS::MSK::Cluster + `, + expected: msk.MSK{ + Clusters: []msk.Cluster{{ + EncryptionInTransit: msk.EncryptionInTransit{ + ClientBroker: types.StringTest("TLS"), + }, + }}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/neptune/cluster.go b/pkg/iac/adapters/cloudformation/aws/neptune/cluster.go index 3685a655aee2..33012cbd2236 100644 --- a/pkg/iac/adapters/cloudformation/aws/neptune/cluster.go +++ b/pkg/iac/adapters/cloudformation/aws/neptune/cluster.go @@ -2,11 +2,11 @@ package neptune import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/neptune" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getClusters(ctx parser2.FileContext) (clusters []neptune.Cluster) { +func getClusters(ctx parser.FileContext) (clusters []neptune.Cluster) { for _, r := range ctx.GetResourcesByType("AWS::Neptune::DBCluster") { cluster := neptune.Cluster{ @@ -23,7 +23,7 @@ func getClusters(ctx parser2.FileContext) (clusters []neptune.Cluster) { return clusters } -func getAuditLog(r *parser2.Resource) types.BoolValue { 
+func getAuditLog(r *parser.Resource) types.BoolValue { if logsProp := r.GetProperty("EnableCloudwatchLogsExports"); logsProp.IsList() { if logsProp.Contains("audit") { return types.Bool(true, logsProp.Metadata()) diff --git a/pkg/iac/adapters/cloudformation/aws/neptune/neptune_test.go b/pkg/iac/adapters/cloudformation/aws/neptune/neptune_test.go new file mode 100644 index 000000000000..8e63a481ff2e --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/neptune/neptune_test.go @@ -0,0 +1,59 @@ +package neptune + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/neptune" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected neptune.Neptune + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + cluster: + Type: AWS::Neptune::DBCluster + Properties: + StorageEncrypted: true + KmsKeyId: key + EnableCloudwatchLogsExports: + - audit +`, + expected: neptune.Neptune{ + Clusters: []neptune.Cluster{ + { + StorageEncrypted: types.BoolTest(true), + KMSKeyID: types.StringTest("key"), + Logging: neptune.Logging{ + Audit: types.BoolTest(true), + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + cluster: + Type: AWS::Neptune::DBCluster + `, + expected: neptune.Neptune{ + Clusters: []neptune.Cluster{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/rds/adapt_test.go b/pkg/iac/adapters/cloudformation/aws/rds/adapt_test.go index 7685c3118a0e..4875395c8506 100644 --- a/pkg/iac/adapters/cloudformation/aws/rds/adapt_test.go +++ b/pkg/iac/adapters/cloudformation/aws/rds/adapt_test.go @@ -1,14 +1,11 @@ package rds import ( - "context" "testing" 
- "github.com/aquasecurity/trivy/internal/testutil" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" "github.com/aquasecurity/trivy/pkg/iac/providers/aws/rds" - "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/types" - "github.com/stretchr/testify/require" ) func TestAdapt(t *testing.T) { @@ -18,7 +15,7 @@ func TestAdapt(t *testing.T) { expected rds.RDS }{ { - name: "cluster with instances", + name: "complete", source: `AWSTemplateFormatVersion: 2010-09-09 Resources: RDSCluster: @@ -65,92 +62,113 @@ Resources: Properties: Description: "CloudFormation Sample MySQL Parameter Group" DBParameterGroupName: "testgroup" + Parameters: + sql_mode: IGNORE_SPACE + DbSecurityByEC2SecurityGroup: + Type: AWS::RDS::DBSecurityGroup + Properties: + GroupDescription: "Ingress for Amazon EC2 security group" `, expected: rds.RDS{ + Classic: rds.Classic{ + DBSecurityGroups: []rds.DBSecurityGroup{{}}, + }, ParameterGroups: []rds.ParameterGroups{ { - Metadata: types.NewTestMetadata(), - DBParameterGroupName: types.String("testgroup", types.NewTestMetadata()), + DBParameterGroupName: types.StringTest("testgroup"), }, }, Clusters: []rds.Cluster{ { - Metadata: types.NewTestMetadata(), - BackupRetentionPeriodDays: types.Int(2, types.NewTestMetadata()), - Engine: types.String("aurora-postgresql", types.NewTestMetadata()), + BackupRetentionPeriodDays: types.IntTest(2), + Engine: types.StringTest("aurora-postgresql"), Encryption: rds.Encryption{ - EncryptStorage: types.Bool(true, types.NewTestMetadata()), - KMSKeyID: types.String("your-kms-key-id", types.NewTestMetadata()), + EncryptStorage: types.BoolTest(true), + KMSKeyID: types.StringTest("your-kms-key-id"), }, PerformanceInsights: rds.PerformanceInsights{ - Metadata: types.NewTestMetadata(), - Enabled: types.Bool(true, types.NewTestMetadata()), - KMSKeyID: types.String("test-kms-key-id", types.NewTestMetadata()), + Enabled: types.BoolTest(true), + 
KMSKeyID: types.StringTest("test-kms-key-id"), }, - PublicAccess: types.Bool(false, types.NewTestMetadata()), - DeletionProtection: types.Bool(true, types.NewTestMetadata()), + PublicAccess: types.BoolTest(false), + DeletionProtection: types.BoolTest(true), Instances: []rds.ClusterInstance{ { Instance: rds.Instance{ - Metadata: types.NewTestMetadata(), - StorageEncrypted: types.Bool(true, types.NewTestMetadata()), + StorageEncrypted: types.BoolTest(true), Encryption: rds.Encryption{ - EncryptStorage: types.Bool(true, types.NewTestMetadata()), - KMSKeyID: types.String("your-kms-key-id", types.NewTestMetadata()), + EncryptStorage: types.BoolTest(true), + KMSKeyID: types.StringTest("your-kms-key-id"), }, - DBInstanceIdentifier: types.String("test", types.NewTestMetadata()), - PubliclyAccessible: types.Bool(false, types.NewTestMetadata()), - PublicAccess: types.BoolDefault(false, types.NewTestMetadata()), - BackupRetentionPeriodDays: types.IntDefault(1, types.NewTestMetadata()), - Engine: types.StringDefault("aurora-mysql", types.NewTestMetadata()), - EngineVersion: types.String("5.7.12", types.NewTestMetadata()), - MultiAZ: types.Bool(true, types.NewTestMetadata()), - AutoMinorVersionUpgrade: types.Bool(true, types.NewTestMetadata()), - DBInstanceArn: types.String("arn:aws:rds:us-east-2:123456789012:db:my-mysql-instance-1", types.NewTestMetadata()), - IAMAuthEnabled: types.Bool(true, types.NewTestMetadata()), + DBInstanceIdentifier: types.StringTest("test"), + PubliclyAccessible: types.BoolTest(false), + PublicAccess: types.BoolTest(false), + BackupRetentionPeriodDays: types.IntTest(1), + Engine: types.StringTest("aurora-mysql"), + EngineVersion: types.StringTest("5.7.12"), + MultiAZ: types.BoolTest(true), + AutoMinorVersionUpgrade: types.BoolTest(true), + DBInstanceArn: types.StringTest("arn:aws:rds:us-east-2:123456789012:db:my-mysql-instance-1"), + IAMAuthEnabled: types.BoolTest(true), PerformanceInsights: rds.PerformanceInsights{ - Metadata: 
types.NewTestMetadata(), - Enabled: types.Bool(true, types.NewTestMetadata()), - KMSKeyID: types.String("test-kms-key-id2", types.NewTestMetadata()), + Enabled: types.BoolTest(true), + KMSKeyID: types.StringTest("test-kms-key-id2"), }, EnabledCloudwatchLogsExports: []types.StringValue{ - types.String("error", types.NewTestMetadata()), - types.String("general", types.NewTestMetadata()), + types.StringTest("error"), + types.StringTest("general"), }, DBParameterGroups: []rds.DBParameterGroupsList{ { - DBParameterGroupName: types.String("testgroup", types.NewTestMetadata()), + DBParameterGroupName: types.StringTest("testgroup"), }, }, TagList: []rds.TagList{ - { - Metadata: types.NewTestMetadata(), - }, - { - Metadata: types.NewTestMetadata(), - }, + {}, + {}, }, }, - ClusterIdentifier: types.String("RDSCluster", types.NewTestMetadata()), + ClusterIdentifier: types.StringTest("RDSCluster"), }, }, }, }, }, }, + { + name: "complete", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + RDSCluster: + Type: 'AWS::RDS::DBCluster' + RDSDBInstance1: + Type: 'AWS::RDS::DBInstance' + RDSDBParameterGroup: + Type: 'AWS::RDS::DBParameterGroup' + DbSecurityByEC2SecurityGroup: + Type: AWS::RDS::DBSecurityGroup +`, + expected: rds.RDS{ + Classic: rds.Classic{ + DBSecurityGroups: []rds.DBSecurityGroup{{}}, + }, + ParameterGroups: []rds.ParameterGroups{{}}, + Clusters: []rds.Cluster{{ + Engine: types.StringTest("aurora"), + BackupRetentionPeriodDays: types.IntTest(1), + }}, + Instances: []rds.Instance{{ + BackupRetentionPeriodDays: types.IntTest(1), + PublicAccess: types.BoolTest(true), + DBParameterGroups: []rds.DBParameterGroupsList{{}}, + }}, + }, + }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - fs := testutil.CreateFS(t, map[string]string{ - "template.yaml": tt.source, - }) - - p := parser.New() - fctx, err := p.ParseFile(context.TODO(), fs, "template.yaml") - require.NoError(t, err) - - testutil.AssertDefsecEqual(t, tt.expected, Adapt(*fctx)) + 
testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) }) } diff --git a/pkg/iac/adapters/cloudformation/aws/rds/instance.go b/pkg/iac/adapters/cloudformation/aws/rds/instance.go index 6b4a39e7acf7..256eada02aac 100644 --- a/pkg/iac/adapters/cloudformation/aws/rds/instance.go +++ b/pkg/iac/adapters/cloudformation/aws/rds/instance.go @@ -2,11 +2,11 @@ package rds import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/rds" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getClustersAndInstances(ctx parser2.FileContext) ([]rds.Cluster, []rds.Instance) { +func getClustersAndInstances(ctx parser.FileContext) ([]rds.Cluster, []rds.Instance) { clusterMap := getClusters(ctx) @@ -68,12 +68,15 @@ func getClustersAndInstances(ctx parser2.FileContext) ([]rds.Cluster, []rds.Inst return clusters, orphans } -func getDBParameterGroups(ctx parser2.FileContext, r *parser2.Resource) (dbParameterGroup []rds.DBParameterGroupsList) { +func getDBParameterGroups(ctx parser.FileContext, r *parser.Resource) (dbParameterGroup []rds.DBParameterGroupsList) { + + var parameterGroupList []rds.DBParameterGroupsList dbParameterGroupName := r.GetStringProperty("DBParameterGroupName") for _, r := range ctx.GetResourcesByType("AWS::RDS::DBParameterGroup") { name := r.GetStringProperty("DBParameterGroupName") + // TODO: find by resource logical id if !dbParameterGroupName.EqualTo(name.Value()) { continue } @@ -82,13 +85,13 @@ func getDBParameterGroups(ctx parser2.FileContext, r *parser2.Resource) (dbParam DBParameterGroupName: name, KMSKeyID: types.StringUnresolvable(r.Metadata()), } - dbParameterGroup = append(dbParameterGroup, dbpmgl) + parameterGroupList = append(dbParameterGroup, dbpmgl) } - return dbParameterGroup + return parameterGroupList } -func getEnabledCloudwatchLogsExports(r *parser2.Resource) 
(enabledcloudwatchlogexportslist []types.StringValue) { +func getEnabledCloudwatchLogsExports(r *parser.Resource) (enabledcloudwatchlogexportslist []types.StringValue) { enabledCloudwatchLogExportList := r.GetProperty("EnableCloudwatchLogsExports") if enabledCloudwatchLogExportList.IsNil() || enabledCloudwatchLogExportList.IsNotList() { @@ -101,7 +104,7 @@ func getEnabledCloudwatchLogsExports(r *parser2.Resource) (enabledcloudwatchloge return enabledcloudwatchlogexportslist } -func getTagList(r *parser2.Resource) (taglist []rds.TagList) { +func getTagList(r *parser.Resource) (taglist []rds.TagList) { tagLists := r.GetProperty("Tags") if tagLists.IsNil() || tagLists.IsNotList() { @@ -116,7 +119,7 @@ func getTagList(r *parser2.Resource) (taglist []rds.TagList) { return taglist } -func getReadReplicaDBInstanceIdentifiers(r *parser2.Resource) (readreplicadbidentifier []types.StringValue) { +func getReadReplicaDBInstanceIdentifiers(r *parser.Resource) (readreplicadbidentifier []types.StringValue) { readReplicaDBIdentifier := r.GetProperty("SourceDBInstanceIdentifier") if readReplicaDBIdentifier.IsNil() || readReplicaDBIdentifier.IsNotList() { diff --git a/pkg/iac/adapters/cloudformation/aws/rds/parameter_groups.go b/pkg/iac/adapters/cloudformation/aws/rds/parameter_groups.go index 98df5187401b..f47c2f70a706 100644 --- a/pkg/iac/adapters/cloudformation/aws/rds/parameter_groups.go +++ b/pkg/iac/adapters/cloudformation/aws/rds/parameter_groups.go @@ -2,11 +2,11 @@ package rds import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/rds" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getParameterGroups(ctx parser2.FileContext) (parametergroups []rds.ParameterGroups) { +func getParameterGroups(ctx parser.FileContext) (parametergroups []rds.ParameterGroups) { for _, r := range 
ctx.GetResourcesByType("AWS::RDS::DBParameterGroup") { @@ -23,10 +23,12 @@ func getParameterGroups(ctx parser2.FileContext) (parametergroups []rds.Paramete return parametergroups } -func getParameters(r *parser2.Resource) (parameters []rds.Parameters) { +func getParameters(r *parser.Resource) (parameters []rds.Parameters) { dBParam := r.GetProperty("Parameters") + // TODO: parameters is JSON + // https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-rds-dbparametergroup.html#cfn-rds-dbparametergroup-parameters if dBParam.IsNil() || dBParam.IsNotList() { return parameters } diff --git a/pkg/iac/adapters/cloudformation/aws/redshift/cluster.go b/pkg/iac/adapters/cloudformation/aws/redshift/cluster.go index 6aac98978b94..c8acf8997af0 100644 --- a/pkg/iac/adapters/cloudformation/aws/redshift/cluster.go +++ b/pkg/iac/adapters/cloudformation/aws/redshift/cluster.go @@ -3,7 +3,6 @@ package redshift import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/redshift" "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" - "github.com/aquasecurity/trivy/pkg/iac/types" ) func getClusters(ctx parser.FileContext) (clusters []redshift.Cluster) { @@ -12,14 +11,12 @@ func getClusters(ctx parser.FileContext) (clusters []redshift.Cluster) { cluster := redshift.Cluster{ Metadata: r.Metadata(), ClusterIdentifier: r.GetStringProperty("ClusterIdentifier"), - AllowVersionUpgrade: r.GetBoolProperty("AllowVersionUpgrade"), + AllowVersionUpgrade: r.GetBoolProperty("AllowVersionUpgrade", true), NodeType: r.GetStringProperty("NodeType"), - NumberOfNodes: r.GetIntProperty("NumberOfNodes"), + NumberOfNodes: r.GetIntProperty("NumberOfNodes", 1), PubliclyAccessible: r.GetBoolProperty("PubliclyAccessible"), MasterUsername: r.GetStringProperty("MasterUsername"), - VpcId: types.String("", r.Metadata()), - LoggingEnabled: types.Bool(false, r.Metadata()), - AutomatedSnapshotRetentionPeriod: r.GetIntProperty("AutomatedSnapshotRetentionPeriod"), + 
AutomatedSnapshotRetentionPeriod: r.GetIntProperty("AutomatedSnapshotRetentionPeriod", 1), Encryption: redshift.Encryption{ Metadata: r.Metadata(), Enabled: r.GetBoolProperty("Encrypted"), @@ -38,10 +35,8 @@ func getClusters(ctx parser.FileContext) (clusters []redshift.Cluster) { } func getParameters(ctx parser.FileContext) (parameter []redshift.ClusterParameter) { - - paraRes := ctx.GetResourcesByType("AWS::Redshift::ClusterParameterGroup") var parameters []redshift.ClusterParameter - for _, r := range paraRes { + for _, r := range ctx.GetResourcesByType("AWS::Redshift::ClusterParameterGroup") { for _, par := range r.GetProperty("Parameters").AsList() { parameters = append(parameters, redshift.ClusterParameter{ Metadata: par.Metadata(), diff --git a/pkg/iac/adapters/cloudformation/aws/redshift/redshift_test.go b/pkg/iac/adapters/cloudformation/aws/redshift/redshift_test.go new file mode 100644 index 000000000000..a14117a21961 --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/redshift/redshift_test.go @@ -0,0 +1,111 @@ +package redshift + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/redshift" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected redshift.Redshift + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + myCluster: + Type: "AWS::Redshift::Cluster" + Properties: + DBName: "mydb" + ClusterIdentifier: myexamplecluster + AllowVersionUpgrade: false + MasterUsername: "master" + NodeType: "ds2.xlarge" + NumberOfNodes: 2 + PubliclyAccessible: true + AutomatedSnapshotRetentionPeriod: 2 + Encrypted: true + KmsKeyId: key + Endpoint: + Port: 2000 + ClusterSubnetGroupName: test + myClusterParameterGroup: + Type: "AWS::Redshift::ClusterParameterGroup" + Properties: + Parameters: + - + ParameterName: 
"enable_user_activity_logging" + ParameterValue: "true" + mySecGroup: + Type: AWS::Redshift::ClusterSecurityGroup + Properties: + Description: test + `, + expected: redshift.Redshift{ + Clusters: []redshift.Cluster{ + { + ClusterIdentifier: types.StringTest("myexamplecluster"), + AllowVersionUpgrade: types.BoolTest(false), + MasterUsername: types.StringTest("master"), + NodeType: types.StringTest("ds2.xlarge"), + NumberOfNodes: types.IntTest(2), + PubliclyAccessible: types.BoolTest(true), + AutomatedSnapshotRetentionPeriod: types.IntTest(2), + Encryption: redshift.Encryption{ + Enabled: types.BoolTest(true), + KMSKeyID: types.StringTest("key"), + }, + EndPoint: redshift.EndPoint{ + Port: types.IntTest(2000), + }, + SubnetGroupName: types.StringTest("test"), + }, + }, + ClusterParameters: []redshift.ClusterParameter{ + { + ParameterName: types.StringTest("enable_user_activity_logging"), + ParameterValue: types.StringTest("true"), + }, + }, + SecurityGroups: []redshift.SecurityGroup{ + { + Description: types.StringTest("test"), + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + myCluster: + Type: "AWS::Redshift::Cluster" + mySecGroup: + Type: AWS::Redshift::ClusterSecurityGroup + myClusterParameterGroup: + Type: "AWS::Redshift::ClusterParameterGroup" +`, + expected: redshift.Redshift{ + Clusters: []redshift.Cluster{ + { + AllowVersionUpgrade: types.BoolTest(true), + AutomatedSnapshotRetentionPeriod: types.IntTest(1), + NumberOfNodes: types.IntTest(1), + }, + }, + SecurityGroups: []redshift.SecurityGroup{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/s3/bucket.go b/pkg/iac/adapters/cloudformation/aws/s3/bucket.go index 35b95520fd9c..5f5329fc6714 100644 --- a/pkg/iac/adapters/cloudformation/aws/s3/bucket.go +++ 
b/pkg/iac/adapters/cloudformation/aws/s3/bucket.go @@ -50,16 +50,17 @@ func getBuckets(cfFile parser.FileContext) []s3.Bucket { } func getPublicAccessBlock(r *parser.Resource) *s3.PublicAccessBlock { - if block := r.GetProperty("PublicAccessBlockConfiguration"); block.IsNil() { + block := r.GetProperty("PublicAccessBlockConfiguration") + if block.IsNil() { return nil } return &s3.PublicAccessBlock{ - Metadata: r.Metadata(), - BlockPublicACLs: r.GetBoolProperty("PublicAccessBlockConfiguration.BlockPublicAcls"), - BlockPublicPolicy: r.GetBoolProperty("PublicAccessBlockConfiguration.BlockPublicPolicy"), - IgnorePublicACLs: r.GetBoolProperty("PublicAccessBlockConfiguration.IgnorePublicAcls"), - RestrictPublicBuckets: r.GetBoolProperty("PublicAccessBlockConfiguration.RestrictPublicBuckets"), + Metadata: block.Metadata(), + BlockPublicACLs: block.GetBoolProperty("BlockPublicAcls"), + BlockPublicPolicy: block.GetBoolProperty("BlockPublicPolicy"), + IgnorePublicACLs: block.GetBoolProperty("IgnorePublicAcls"), + RestrictPublicBuckets: block.GetBoolProperty("RestrictPublicBuckets"), } } diff --git a/pkg/iac/adapters/cloudformation/aws/s3/s3_test.go b/pkg/iac/adapters/cloudformation/aws/s3/s3_test.go index f4139c7ad15b..ee8fffb39f75 100644 --- a/pkg/iac/adapters/cloudformation/aws/s3/s3_test.go +++ b/pkg/iac/adapters/cloudformation/aws/s3/s3_test.go @@ -1,14 +1,11 @@ package s3 import ( - "context" "testing" - "github.com/aquasecurity/trivy/internal/testutil" + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" "github.com/aquasecurity/trivy/pkg/iac/providers/aws/s3" - "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/types" - "github.com/stretchr/testify/require" ) func TestAdapt(t *testing.T) { @@ -56,36 +53,44 @@ Resources: ExpirationInDays: 365 AccelerateConfiguration: AccelerationStatus: Enabled + VersioningConfiguration: + Status: Enabled + WebsiteConfiguration: + IndexDocument: index.html 
`, expected: s3.S3{ Buckets: []s3.Bucket{ { - Name: types.String("logging-bucket", types.NewTestMetadata()), + Name: types.StringTest("logging-bucket"), }, { - Name: types.String("test-bucket", types.NewTestMetadata()), + Name: types.StringTest("test-bucket"), Encryption: s3.Encryption{ - Enabled: types.Bool(true, types.NewTestMetadata()), - Algorithm: types.String("aws:kms", types.NewTestMetadata()), - KMSKeyId: types.String("Key", types.NewTestMetadata()), + Enabled: types.BoolTest(true), + Algorithm: types.StringTest("aws:kms"), + KMSKeyId: types.StringTest("Key"), }, - ACL: types.String("aws-exec-read", types.NewTestMetadata()), + ACL: types.StringTest("aws-exec-read"), PublicAccessBlock: &s3.PublicAccessBlock{ - BlockPublicACLs: types.Bool(true, types.NewTestMetadata()), - BlockPublicPolicy: types.Bool(true, types.NewTestMetadata()), - IgnorePublicACLs: types.Bool(true, types.NewTestMetadata()), - RestrictPublicBuckets: types.Bool(true, types.NewTestMetadata()), + BlockPublicACLs: types.BoolTest(true), + BlockPublicPolicy: types.BoolTest(true), + IgnorePublicACLs: types.BoolTest(true), + RestrictPublicBuckets: types.BoolTest(true), }, Logging: s3.Logging{ - TargetBucket: types.String("LoggingBucket", types.NewTestMetadata()), - Enabled: types.Bool(true, types.NewTestMetadata()), + TargetBucket: types.StringTest("LoggingBucket"), + Enabled: types.BoolTest(true), }, LifecycleConfiguration: []s3.Rules{ { - Status: types.String("Enabled", types.NewTestMetadata()), + Status: types.StringTest("Enabled"), }, }, - AccelerateConfigurationStatus: types.String("Enabled", types.NewTestMetadata()), + AccelerateConfigurationStatus: types.StringTest("Enabled"), + Versioning: s3.Versioning{ + Enabled: types.BoolTest(true), + }, + Website: &s3.Website{}, }, }, }, @@ -101,7 +106,7 @@ Resources: expected: s3.S3{ Buckets: []s3.Bucket{ { - Name: types.String("test-bucket", types.NewTestMetadata()), + Name: types.StringTest("test-bucket"), Encryption: s3.Encryption{ Enabled: 
types.BoolDefault(false, types.NewTestMetadata()), }, @@ -126,11 +131,11 @@ Resources: expected: s3.S3{ Buckets: []s3.Bucket{ { - Name: types.String("test-bucket", types.NewTestMetadata()), + Name: types.StringTest("test-bucket"), Encryption: s3.Encryption{ Enabled: types.BoolDefault(false, types.NewTestMetadata()), - KMSKeyId: types.String("alias/my-key", types.NewTestMetadata()), - Algorithm: types.String("aes256", types.NewTestMetadata()), + KMSKeyId: types.StringTest("alias/my-key"), + Algorithm: types.StringTest("aes256"), }, }, }, @@ -140,16 +145,7 @@ Resources: for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - - fsys := testutil.CreateFS(t, map[string]string{ - "main.yaml": tt.source, - }) - - fctx, err := parser.New().ParseFile(context.TODO(), fsys, "main.yaml") - require.NoError(t, err) - - adapted := Adapt(*fctx) - testutil.AssertDefsecEqual(t, tt.expected, adapted) + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) }) } diff --git a/pkg/iac/adapters/cloudformation/aws/sam/api.go b/pkg/iac/adapters/cloudformation/aws/sam/api.go index d42010166914..4d4f04e6e83a 100644 --- a/pkg/iac/adapters/cloudformation/aws/sam/api.go +++ b/pkg/iac/adapters/cloudformation/aws/sam/api.go @@ -2,11 +2,11 @@ package sam import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/sam" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getApis(cfFile parser2.FileContext) (apis []sam.API) { +func getApis(cfFile parser.FileContext) (apis []sam.API) { apiResources := cfFile.GetResourcesByType("AWS::Serverless::Api") for _, r := range apiResources { @@ -25,7 +25,7 @@ func getApis(cfFile parser2.FileContext) (apis []sam.API) { return apis } -func getRestMethodSettings(r *parser2.Resource) sam.RESTMethodSettings { +func getRestMethodSettings(r *parser.Resource) sam.RESTMethodSettings { 
settings := sam.RESTMethodSettings{ Metadata: r.Metadata(), @@ -35,6 +35,8 @@ func getRestMethodSettings(r *parser2.Resource) sam.RESTMethodSettings { MetricsEnabled: iacTypes.BoolDefault(false, r.Metadata()), } + // TODO: MethodSettings is list + // https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-apigateway-stage.html#cfn-apigateway-stage-methodsettings settingsProp := r.GetProperty("MethodSettings") if settingsProp.IsNotNil() { @@ -47,7 +49,7 @@ func getRestMethodSettings(r *parser2.Resource) sam.RESTMethodSettings { } if loggingLevel := settingsProp.GetProperty("LoggingLevel"); loggingLevel.IsNotNil() { - if loggingLevel.EqualTo("OFF", parser2.IgnoreCase) { + if loggingLevel.EqualTo("OFF", parser.IgnoreCase) { settings.LoggingEnabled = iacTypes.Bool(false, loggingLevel.Metadata()) } else { settings.LoggingEnabled = iacTypes.Bool(true, loggingLevel.Metadata()) @@ -58,7 +60,7 @@ func getRestMethodSettings(r *parser2.Resource) sam.RESTMethodSettings { return settings } -func getAccessLogging(r *parser2.Resource) sam.AccessLogging { +func getAccessLogging(r *parser.Resource) sam.AccessLogging { logging := sam.AccessLogging{ Metadata: r.Metadata(), @@ -75,19 +77,17 @@ func getAccessLogging(r *parser2.Resource) sam.AccessLogging { return logging } -func getDomainConfiguration(r *parser2.Resource) sam.DomainConfiguration { +func getDomainConfiguration(r *parser.Resource) sam.DomainConfiguration { domainConfig := sam.DomainConfiguration{ - Metadata: r.Metadata(), - Name: iacTypes.StringDefault("", r.Metadata()), - SecurityPolicy: iacTypes.StringDefault("TLS_1_0", r.Metadata()), + Metadata: r.Metadata(), } if domain := r.GetProperty("Domain"); domain.IsNotNil() { domainConfig = sam.DomainConfiguration{ Metadata: domain.Metadata(), - Name: domain.GetStringProperty("DomainName", ""), - SecurityPolicy: domain.GetStringProperty("SecurityPolicy", "TLS_1_0"), + Name: domain.GetStringProperty("DomainName"), + SecurityPolicy: 
domain.GetStringProperty("SecurityPolicy"), } } diff --git a/pkg/iac/adapters/cloudformation/aws/sam/function.go b/pkg/iac/adapters/cloudformation/aws/sam/function.go index f6f2cfd747a6..161b078bf681 100644 --- a/pkg/iac/adapters/cloudformation/aws/sam/function.go +++ b/pkg/iac/adapters/cloudformation/aws/sam/function.go @@ -5,18 +5,18 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/iam" "github.com/aquasecurity/trivy/pkg/iac/providers/aws/sam" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getFunctions(cfFile parser2.FileContext) (functions []sam.Function) { +func getFunctions(cfFile parser.FileContext) (functions []sam.Function) { functionResources := cfFile.GetResourcesByType("AWS::Serverless::Function") for _, r := range functionResources { function := sam.Function{ Metadata: r.Metadata(), FunctionName: r.GetStringProperty("FunctionName"), - Tracing: r.GetStringProperty("Tracing", sam.TracingModePassThrough), + Tracing: r.GetStringProperty("Tracing"), ManagedPolicies: nil, Policies: nil, } @@ -28,7 +28,7 @@ func getFunctions(cfFile parser2.FileContext) (functions []sam.Function) { return functions } -func setFunctionPolicies(r *parser2.Resource, function *sam.Function) { +func setFunctionPolicies(r *parser.Resource, function *sam.Function) { policies := r.GetProperty("Policies") if policies.IsNotNil() { if policies.IsString() { diff --git a/pkg/iac/adapters/cloudformation/aws/sam/http_api.go b/pkg/iac/adapters/cloudformation/aws/sam/http_api.go index c51c3efb8913..02f9ba6c5ef9 100644 --- a/pkg/iac/adapters/cloudformation/aws/sam/http_api.go +++ b/pkg/iac/adapters/cloudformation/aws/sam/http_api.go @@ -2,11 +2,11 @@ package sam import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/sam" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + 
"github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getHttpApis(cfFile parser2.FileContext) (apis []sam.HttpAPI) { +func getHttpApis(cfFile parser.FileContext) (apis []sam.HttpAPI) { apiResources := cfFile.GetResourcesByType("AWS::Serverless::HttpApi") for _, r := range apiResources { @@ -24,7 +24,7 @@ func getHttpApis(cfFile parser2.FileContext) (apis []sam.HttpAPI) { return apis } -func getAccessLoggingV2(r *parser2.Resource) sam.AccessLogging { +func getAccessLoggingV2(r *parser.Resource) sam.AccessLogging { logging := sam.AccessLogging{ Metadata: r.Metadata(), @@ -41,7 +41,7 @@ func getAccessLoggingV2(r *parser2.Resource) sam.AccessLogging { return logging } -func getRouteSettings(r *parser2.Resource) sam.RouteSettings { +func getRouteSettings(r *parser.Resource) sam.RouteSettings { routeSettings := sam.RouteSettings{ Metadata: r.Metadata(), @@ -52,7 +52,8 @@ func getRouteSettings(r *parser2.Resource) sam.RouteSettings { if route := r.GetProperty("DefaultRouteSettings"); route.IsNotNil() { routeSettings = sam.RouteSettings{ - Metadata: route.Metadata(), + Metadata: route.Metadata(), + // TODO: LoggingLevel is string LoggingEnabled: route.GetBoolProperty("LoggingLevel"), DataTraceEnabled: route.GetBoolProperty("DataTraceEnabled"), DetailedMetricsEnabled: route.GetBoolProperty("DetailedMetricsEnabled"), diff --git a/pkg/iac/adapters/cloudformation/aws/sam/sam_test.go b/pkg/iac/adapters/cloudformation/aws/sam/sam_test.go new file mode 100644 index 000000000000..ec2fed201ea6 --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/sam/sam_test.go @@ -0,0 +1,213 @@ +package sam + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/iam" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/sam" + "github.com/aquasecurity/trivy/pkg/iac/types" + "github.com/liamg/iamgo" +) + +func TestAdapt(t 
*testing.T) { + tests := []struct { + name string + source string + expected sam.SAM + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + ApiGatewayApi: + Type: AWS::Serverless::Api + Properties: + StageName: prod + Name: test + TracingEnabled: true + Domain: + DomainName: domain + SecurityPolicy: "TLS_1_2" + MethodSettings: + - DataTraceEnabled: true + CacheDataEncrypted: true + MetricsEnabled: true + LoggingLevel: INFO + AccessLogSetting: + DestinationArn: 'arn:aws:logs:us-east-1:123456789:log-group:my-log-group' + HttpApi: + Type: AWS::Serverless::HttpApi + Properties: + Name: test + Domain: + DomainName: test + SecurityPolicy: "TLS_1_2" + AccessLogSettings: + DestinationArn: 'arn:aws:logs:us-east-1:123456789:log-group:my-log-group' + DefaultRouteSettings: + LoggingLevel: INFO + DataTraceEnabled: true + DetailedMetricsEnabled: true + myFunction: + Type: AWS::Serverless::Function + Properties: + FunctionName: test + Tracing: Active + Policies: + - AWSLambdaExecute + - Version: '2012-10-17' + Statement: + - Effect: Allow + Action: + - s3:GetObject + Resource: 'arn:aws:s3:::my-bucket/*' + MySampleStateMachine: + Type: AWS::Serverless::StateMachine + Properties: + Logging: + Level: ALL + Tracing: + Enabled: true + Policies: + - Version: "2012-10-17" + Statement: + - Effect: Allow + Action: + - "cloudwatch:*" + Resource: "*" + myTable: + Type: AWS::Serverless::SimpleTable + Properties: + TableName: my-table + SSESpecification: + SSEEnabled: "true" + KMSMasterKeyId: "kmskey" +`, + expected: sam.SAM{ + APIs: []sam.API{ + { + Name: types.StringTest("test"), + TracingEnabled: types.BoolTest(true), + DomainConfiguration: sam.DomainConfiguration{ + Name: types.StringTest("domain"), + SecurityPolicy: types.StringTest("TLS_1_2"), + }, + AccessLogging: sam.AccessLogging{ + CloudwatchLogGroupARN: types.StringTest("arn:aws:logs:us-east-1:123456789:log-group:my-log-group"), + }, + }, + }, + HttpAPIs: []sam.HttpAPI{ + { + Name: 
types.StringTest("test"), + DomainConfiguration: sam.DomainConfiguration{ + Name: types.StringTest("test"), + SecurityPolicy: types.StringTest("TLS_1_2"), + }, + AccessLogging: sam.AccessLogging{ + CloudwatchLogGroupARN: types.StringTest("arn:aws:logs:us-east-1:123456789:log-group:my-log-group"), + }, + DefaultRouteSettings: sam.RouteSettings{ + DataTraceEnabled: types.BoolTest(true), + DetailedMetricsEnabled: types.BoolTest(true), + }, + }, + }, + Functions: []sam.Function{ + { + FunctionName: types.StringTest("test"), + Tracing: types.StringTest("Active"), + Policies: []iam.Policy{ + { + Document: func() iam.Document { + return iam.Document{ + Parsed: iamgo.NewPolicyBuilder(). + WithVersion("2012-10-17"). + WithStatement( + iamgo.NewStatementBuilder(). + WithEffect("Allow"). + WithActions([]string{"s3:GetObject"}). + WithResources([]string{"arn:aws:s3:::my-bucket/*"}). + Build(), + ). + Build(), + } + }(), + }, + }, + ManagedPolicies: []types.StringValue{ + types.StringTest("AWSLambdaExecute"), + }, + }, + }, + StateMachines: []sam.StateMachine{ + { + LoggingConfiguration: sam.LoggingConfiguration{ + LoggingEnabled: types.BoolTest(true), + }, + Tracing: sam.TracingConfiguration{ + Enabled: types.BoolTest(true), + }, + Policies: []iam.Policy{ + { + Document: func() iam.Document { + return iam.Document{ + Parsed: iamgo.NewPolicyBuilder(). + WithVersion("2012-10-17"). + WithStatement( + iamgo.NewStatementBuilder(). + WithEffect("Allow"). + WithActions([]string{"cloudwatch:*"}). + WithResources([]string{"*"}). + Build(), + ). 
+ Build(), + } + }(), + }, + }, + }, + }, + SimpleTables: []sam.SimpleTable{ + { + TableName: types.StringTest("my-table"), + SSESpecification: sam.SSESpecification{ + Enabled: types.BoolTest(true), + KMSMasterKeyID: types.StringTest("kmskey"), + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + ApiGatewayApi: + Type: AWS::Serverless::Api + HttpApi: + Type: AWS::Serverless::HttpApi + myFunction: + Type: AWS::Serverless::Function + MySampleStateMachine: + Type: AWS::Serverless::StateMachine + myTable: + Type: AWS::Serverless::SimpleTable +`, + expected: sam.SAM{ + APIs: []sam.API{{}}, + HttpAPIs: []sam.HttpAPI{{}}, + Functions: []sam.Function{{}}, + StateMachines: []sam.StateMachine{{}}, + SimpleTables: []sam.SimpleTable{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/sam/state_machines.go b/pkg/iac/adapters/cloudformation/aws/sam/state_machines.go index efcaf3772be4..2a57afd2bdb6 100644 --- a/pkg/iac/adapters/cloudformation/aws/sam/state_machines.go +++ b/pkg/iac/adapters/cloudformation/aws/sam/state_machines.go @@ -5,11 +5,11 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/iam" "github.com/aquasecurity/trivy/pkg/iac/providers/aws/sam" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getStateMachines(cfFile parser2.FileContext) (stateMachines []sam.StateMachine) { +func getStateMachines(cfFile parser.FileContext) (stateMachines []sam.StateMachine) { stateMachineResources := cfFile.GetResourcesByType("AWS::Serverless::StateMachine") for _, r := range stateMachineResources { @@ -25,6 +25,7 @@ func getStateMachines(cfFile parser2.FileContext) (stateMachines []sam.StateMach 
Tracing: getTracingConfiguration(r), } + // TODO: By default, the level is set to OFF if logging := r.GetProperty("Logging"); logging.IsNotNil() { stateMachine.LoggingConfiguration.Metadata = logging.Metadata() if level := logging.GetProperty("Level"); level.IsNotNil() { @@ -39,7 +40,7 @@ func getStateMachines(cfFile parser2.FileContext) (stateMachines []sam.StateMach return stateMachines } -func getTracingConfiguration(r *parser2.Resource) sam.TracingConfiguration { +func getTracingConfiguration(r *parser.Resource) sam.TracingConfiguration { tracing := r.GetProperty("Tracing") if tracing.IsNil() { return sam.TracingConfiguration{ @@ -54,7 +55,7 @@ func getTracingConfiguration(r *parser2.Resource) sam.TracingConfiguration { } } -func setStateMachinePolicies(r *parser2.Resource, stateMachine *sam.StateMachine) { +func setStateMachinePolicies(r *parser.Resource, stateMachine *sam.StateMachine) { policies := r.GetProperty("Policies") if policies.IsNotNil() { if policies.IsString() { diff --git a/pkg/iac/adapters/cloudformation/aws/sam/tables.go b/pkg/iac/adapters/cloudformation/aws/sam/tables.go index 713f723bf319..89e66acdf514 100644 --- a/pkg/iac/adapters/cloudformation/aws/sam/tables.go +++ b/pkg/iac/adapters/cloudformation/aws/sam/tables.go @@ -2,11 +2,11 @@ package sam import ( "github.com/aquasecurity/trivy/pkg/iac/providers/aws/sam" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" ) -func getSimpleTables(cfFile parser2.FileContext) (tables []sam.SimpleTable) { +func getSimpleTables(cfFile parser.FileContext) (tables []sam.SimpleTable) { tableResources := cfFile.GetResourcesByType("AWS::Serverless::SimpleTable") for _, r := range tableResources { @@ -22,21 +22,18 @@ func getSimpleTables(cfFile parser2.FileContext) (tables []sam.SimpleTable) { return tables } -func getSSESpecification(r *parser2.Resource) 
sam.SSESpecification { - - spec := sam.SSESpecification{ - Metadata: r.Metadata(), - Enabled: iacTypes.BoolDefault(false, r.Metadata()), - KMSMasterKeyID: iacTypes.StringDefault("", r.Metadata()), - } - +func getSSESpecification(r *parser.Resource) sam.SSESpecification { if sse := r.GetProperty("SSESpecification"); sse.IsNotNil() { - spec = sam.SSESpecification{ + return sam.SSESpecification{ Metadata: sse.Metadata(), Enabled: sse.GetBoolProperty("SSEEnabled"), - KMSMasterKeyID: sse.GetStringProperty("KMSMasterKeyID"), + KMSMasterKeyID: sse.GetStringProperty("KMSMasterKeyId"), } } - return spec + return sam.SSESpecification{ + Metadata: r.Metadata(), + Enabled: iacTypes.BoolDefault(false, r.Metadata()), + KMSMasterKeyID: iacTypes.StringDefault("", r.Metadata()), + } } diff --git a/pkg/iac/adapters/cloudformation/aws/sns/sns_test.go b/pkg/iac/adapters/cloudformation/aws/sns/sns_test.go new file mode 100644 index 000000000000..25f271db2073 --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/sns/sns_test.go @@ -0,0 +1,54 @@ +package sns + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/sns" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected sns.SNS + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + MySNSTopic: + Type: AWS::SNS::Topic + Properties: + KmsMasterKeyId: mykey +`, + expected: sns.SNS{ + Topics: []sns.Topic{ + { + Encryption: sns.Encryption{ + KMSKeyID: types.StringTest("mykey"), + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + MySNSTopic: + Type: AWS::SNS::Topic + `, + expected: sns.SNS{ + Topics: []sns.Topic{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) 
+ }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/sqs/queue.go b/pkg/iac/adapters/cloudformation/aws/sqs/queue.go index 2670dc299663..555fd54efd90 100644 --- a/pkg/iac/adapters/cloudformation/aws/sqs/queue.go +++ b/pkg/iac/adapters/cloudformation/aws/sqs/queue.go @@ -21,7 +21,6 @@ func getQueues(ctx parser.FileContext) (queues []sqs.Queue) { ManagedEncryption: iacTypes.Bool(false, r.Metadata()), KMSKeyID: r.GetStringProperty("KmsMasterKeyId"), }, - Policies: []iam.Policy{}, } if policy, err := getPolicy(r.ID(), ctx); err == nil { queue.Policies = append(queue.Policies, *policy) diff --git a/pkg/iac/adapters/cloudformation/aws/sqs/sqs_test.go b/pkg/iac/adapters/cloudformation/aws/sqs/sqs_test.go new file mode 100644 index 000000000000..8abeff2aca3e --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/sqs/sqs_test.go @@ -0,0 +1,86 @@ +package sqs + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/iam" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/sqs" + "github.com/aquasecurity/trivy/pkg/iac/types" + "github.com/liamg/iamgo" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected sqs.SQS + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + MyQueue: + Type: AWS::SQS::Queue + Properties: + QueueName: "SampleQueue" + KmsMasterKeyId: mykey + SampleSQSPolicy: + Type: AWS::SQS::QueuePolicy + Properties: + Queues: + - !Ref MyQueue + PolicyDocument: + Statement: + - + Action: + - "SQS:SendMessage" + Effect: "Allow" + Resource: "arn:aws:sqs:us-east-2:444455556666:queue2" +`, + expected: sqs.SQS{ + Queues: []sqs.Queue{ + { + Encryption: sqs.Encryption{ + KMSKeyID: types.StringTest("mykey"), + }, + Policies: []iam.Policy{ + { + Document: func() iam.Document { + return iam.Document{ + Parsed: iamgo.NewPolicyBuilder(). + WithStatement( + iamgo.NewStatementBuilder(). 
+ WithEffect("Allow"). + WithActions([]string{"SQS:SendMessage"}). + WithResources([]string{"arn:aws:sqs:us-east-2:444455556666:queue2"}). + Build(), + ). + Build(), + } + }(), + }, + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + MySNSTopic: + Type: AWS::SQS::Queue + `, + expected: sqs.SQS{ + Queues: []sqs.Queue{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/ssm/ssm_test.go b/pkg/iac/adapters/cloudformation/aws/ssm/ssm_test.go new file mode 100644 index 000000000000..9709207fec66 --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/ssm/ssm_test.go @@ -0,0 +1,53 @@ +package ssm + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/ssm" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected ssm.SSM + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + MySecretA: + Type: 'AWS::SecretsManager::Secret' + Properties: + Name: MySecretForAppA + KmsKeyId: alias/exampleAlias +`, + expected: ssm.SSM{ + Secrets: []ssm.Secret{ + { + KMSKeyID: types.StringTest("alias/exampleAlias"), + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + MySecretA: + Type: 'AWS::SecretsManager::Secret' + `, + expected: ssm.SSM{ + Secrets: []ssm.Secret{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/aws/workspaces/workspaces_test.go b/pkg/iac/adapters/cloudformation/aws/workspaces/workspaces_test.go new file mode 100644 index 
000000000000..41e821e6466d --- /dev/null +++ b/pkg/iac/adapters/cloudformation/aws/workspaces/workspaces_test.go @@ -0,0 +1,62 @@ +package workspaces + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" + "github.com/aquasecurity/trivy/pkg/iac/providers/aws/workspaces" + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +func TestAdapt(t *testing.T) { + tests := []struct { + name string + source string + expected workspaces.WorkSpaces + }{ + { + name: "complete", + source: `AWSTemplateFormatVersion: '2010-09-09' +Resources: + MyWorkSpace: + Type: AWS::WorkSpaces::Workspace + Properties: + RootVolumeEncryptionEnabled: true + UserVolumeEncryptionEnabled: true +`, + expected: workspaces.WorkSpaces{ + WorkSpaces: []workspaces.WorkSpace{ + { + RootVolume: workspaces.Volume{ + Encryption: workspaces.Encryption{ + Enabled: types.BoolTest(true), + }, + }, + UserVolume: workspaces.Volume{ + Encryption: workspaces.Encryption{ + Enabled: types.BoolTest(true), + }, + }, + }, + }, + }, + }, + { + name: "empty", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + MyWorkSpace: + Type: AWS::WorkSpaces::Workspace + `, + expected: workspaces.WorkSpaces{ + WorkSpaces: []workspaces.WorkSpace{{}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testutil.AdaptAndCompare(t, tt.source, tt.expected, Adapt) + }) + } +} diff --git a/pkg/iac/adapters/cloudformation/testutil/testutil.go b/pkg/iac/adapters/cloudformation/testutil/testutil.go new file mode 100644 index 000000000000..f908519d4106 --- /dev/null +++ b/pkg/iac/adapters/cloudformation/testutil/testutil.go @@ -0,0 +1,25 @@ +package testutil + +import ( + "context" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/aquasecurity/trivy/internal/testutil" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" +) + +type adaptFn[T any] func(fctx parser.FileContext) T + +func AdaptAndCompare[T any](t 
*testing.T, source string, expected any, fn adaptFn[T]) { + fsys := testutil.CreateFS(t, map[string]string{ + "main.yaml": source, + }) + + fctx, err := parser.New().ParseFile(context.TODO(), fsys, "main.yaml") + require.NoError(t, err) + + adapted := fn(*fctx) + testutil.AssertDefsecEqual(t, expected, adapted) +} diff --git a/pkg/iac/providers/aws/ecs/ecs.go b/pkg/iac/providers/aws/ecs/ecs.go index 181c4a2ac90a..b0728c2bbf7f 100755 --- a/pkg/iac/providers/aws/ecs/ecs.go +++ b/pkg/iac/providers/aws/ecs/ecs.go @@ -87,9 +87,11 @@ func (j containerDefinitionJSON) convert(metadata iacTypes.Metadata) ContainerDe } type ContainerDefinition struct { - Metadata iacTypes.Metadata - Name iacTypes.StringValue - Image iacTypes.StringValue + Metadata iacTypes.Metadata + Name iacTypes.StringValue + Image iacTypes.StringValue + // TODO: CPU and Memory are strings + // https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecs-taskdefinition.html#cfn-ecs-taskdefinition-cpu CPU iacTypes.IntValue Memory iacTypes.IntValue Essential iacTypes.BoolValue diff --git a/pkg/iac/types/bool.go b/pkg/iac/types/bool.go index 66179d2c1e5e..c897206b8b19 100755 --- a/pkg/iac/types/bool.go +++ b/pkg/iac/types/bool.go @@ -45,6 +45,10 @@ func Bool(value bool, metadata Metadata) BoolValue { } } +func BoolTest(value bool) BoolValue { + return Bool(value, NewTestMetadata()) +} + func BoolDefault(value bool, metadata Metadata) BoolValue { b := Bool(value, metadata) b.BaseAttribute.metadata.isDefault = true diff --git a/pkg/iac/types/int.go b/pkg/iac/types/int.go index 551c2e34bdf6..34ffc05c49c3 100755 --- a/pkg/iac/types/int.go +++ b/pkg/iac/types/int.go @@ -45,6 +45,10 @@ func Int(value int, m Metadata) IntValue { } } +func IntTest(value int) IntValue { + return Int(value, NewTestMetadata()) +} + func IntFromInt32(value int32, m Metadata) IntValue { return Int(int(value), m) } diff --git a/pkg/iac/types/string.go b/pkg/iac/types/string.go index 8c04d967be0b..1db865740cd3 100755 --- 
a/pkg/iac/types/string.go +++ b/pkg/iac/types/string.go @@ -19,6 +19,7 @@ func String(str string, m Metadata) StringValue { BaseAttribute: BaseAttribute{metadata: m}, } } + func StringDefault(value string, m Metadata) StringValue { b := String(value, m) b.BaseAttribute.metadata.isDefault = true @@ -37,6 +38,10 @@ func StringExplicit(value string, m Metadata) StringValue { return b } +func StringTest(value string) StringValue { + return String(value, NewTestMetadata()) +} + type StringValueList []StringValue type StringValue struct { From 712dcd30077dfdf7a5449d635ee38fff5165c422 Mon Sep 17 00:00:00 2001 From: Nikita Pivkin Date: Fri, 29 Mar 2024 04:10:06 +0300 Subject: [PATCH 24/57] fix(misconf): clear location URI for SARIF (#6405) --- pkg/report/sarif.go | 13 +++-- pkg/report/sarif_test.go | 123 ++++++++++++++++++++++++++++++++++++++- 2 files changed, 131 insertions(+), 5 deletions(-) diff --git a/pkg/report/sarif.go b/pkg/report/sarif.go index 25f1ba47bee5..2f9dd5891516 100644 --- a/pkg/report/sarif.go +++ b/pkg/report/sarif.go @@ -178,6 +178,7 @@ func (sw *SarifWriter) Write(ctx context.Context, report types.Report) error { }) } for _, misconf := range res.Misconfigurations { + locationURI := clearURI(res.Target) sw.addSarifResult(&sarifData{ title: "misconfiguration", vulnerabilityId: misconf.ID, @@ -185,8 +186,8 @@ func (sw *SarifWriter) Write(ctx context.Context, report types.Report) error { cvssScore: severityToScore(misconf.Severity), url: misconf.PrimaryURL, resourceClass: res.Class, - artifactLocation: target, - locationMessage: target, + artifactLocation: locationURI, + locationMessage: locationURI, locations: []location{ { startLine: misconf.CauseMetadata.StartLine, @@ -201,7 +202,7 @@ func (sw *SarifWriter) Write(ctx context.Context, report types.Report) error { helpMarkdown: fmt.Sprintf("**Misconfiguration %v**\n| Type | Severity | Check | Message | Link |\n| --- | --- | --- | --- | --- |\n|%v|%v|%v|%s|[%v](%v)|\n\n%v", misconf.ID, misconf.Type, 
misconf.Severity, misconf.Title, misconf.Message, misconf.ID, misconf.PrimaryURL, misconf.Description), message: fmt.Sprintf("Artifact: %v\nType: %v\nVulnerability %v\nSeverity: %v\nMessage: %v\nLink: [%v](%v)", - res.Target, res.Type, misconf.ID, misconf.Severity, misconf.Message, misconf.ID, misconf.PrimaryURL), + locationURI, res.Type, misconf.ID, misconf.Severity, misconf.Message, misconf.ID, misconf.PrimaryURL), }) } for _, secret := range res.Secrets { @@ -338,7 +339,11 @@ func ToPathUri(input string, resultClass types.ResultClass) string { input = ref.Context().RepositoryStr() } - return strings.ReplaceAll(strings.ReplaceAll(input, "\\", "/"), "git::https:/", "") + return clearURI(input) +} + +func clearURI(s string) string { + return strings.ReplaceAll(strings.ReplaceAll(s, "\\", "/"), "git::https:/", "") } func (sw *SarifWriter) getLocations(name, version, path string, pkgs []ftypes.Package) []location { diff --git a/pkg/report/sarif_test.go b/pkg/report/sarif_test.go index 9968fafbef34..fe46514002b6 100644 --- a/pkg/report/sarif_test.go +++ b/pkg/report/sarif_test.go @@ -2,6 +2,7 @@ package report_test import ( "bytes" + "context" "encoding/json" "testing" @@ -541,6 +542,126 @@ func TestReportWriter_Sarif(t *testing.T) { }, }, }, + { + name: "ref to github", + input: types.Report{ + Results: types.Results{ + { + Target: "git::https:/github.com/terraform-google-modules/terraform-google-kubernetes-engine?ref=c4809044b52b91505bfba5ef9f25526aa0361788/modules/workload-identity/main.tf", + Class: types.ClassConfig, + Type: ftypes.Terraform, + Misconfigurations: []types.DetectedMisconfiguration{ + { + Type: "Terraform Security Check", + ID: "AVD-GCP-0007", + AVDID: "AVD-GCP-0007", + Title: "Service accounts should not have roles assigned with excessive privileges", + Description: "Service accounts should have a minimal set of permissions assigned in order to do their job. 
They should never have excessive access as if compromised, an attacker can escalate privileges and take over the entire account.", + Message: "Service account is granted a privileged role.", + Query: "data..", + Resolution: "Limit service account access to minimal required set", + Severity: "HIGH", + PrimaryURL: "https://avd.aquasec.com/misconfig/avd-gcp-0007", + References: []string{ + "https://cloud.google.com/iam/docs/understanding-roles", + "https://avd.aquasec.com/misconfig/avd-gcp-0007", + }, + Status: "Fail", + CauseMetadata: ftypes.CauseMetadata{ + StartLine: 91, + EndLine: 91, + Occurrences: []ftypes.Occurrence{ + { + Resource: "google_project_iam_member.workload_identity_sa_bindings[\"roles/storage.admin\"]", + Filename: "git::https:/github.com/terraform-google-modules/terraform-google-kubernetes-engine?ref=c4809044b52b91505bfba5ef9f25526aa0361788/modules/workload-identity/main.tf", + Location: ftypes.Location{ + StartLine: 87, + EndLine: 93, + }, + }, + }, + }, + }, + }, + }, + }, + }, + want: &sarif.Report{ + Version: "2.1.0", + Schema: "https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json", + Runs: []*sarif.Run{ + { + Tool: *sarif.NewTool( + &sarif.ToolComponent{ + FullName: lo.ToPtr("Trivy Vulnerability Scanner"), + Name: "Trivy", + Version: lo.ToPtr(""), + InformationURI: lo.ToPtr("https://github.com/aquasecurity/trivy"), + Rules: []*sarif.ReportingDescriptor{ + { + ID: "AVD-GCP-0007", + Name: lo.ToPtr("Misconfiguration"), + ShortDescription: sarif.NewMultiformatMessageString("Service accounts should not have roles assigned with excessive privileges"), + FullDescription: sarif.NewMultiformatMessageString("Service accounts should have a minimal set of permissions assigned in order to do their job. 
They should never have excessive access as if compromised, an attacker can escalate privileges and take over the entire account."), + DefaultConfiguration: &sarif.ReportingConfiguration{ + Level: "error", + }, + HelpURI: lo.ToPtr("https://avd.aquasec.com/misconfig/avd-gcp-0007"), + Help: &sarif.MultiformatMessageString{ + Text: lo.ToPtr("Misconfiguration AVD-GCP-0007\nType: Terraform Security Check\nSeverity: HIGH\nCheck: Service accounts should not have roles assigned with excessive privileges\nMessage: Service account is granted a privileged role.\nLink: [AVD-GCP-0007](https://avd.aquasec.com/misconfig/avd-gcp-0007)\nService accounts should have a minimal set of permissions assigned in order to do their job. They should never have excessive access as if compromised, an attacker can escalate privileges and take over the entire account."), + Markdown: lo.ToPtr("**Misconfiguration AVD-GCP-0007**\n| Type | Severity | Check | Message | Link |\n| --- | --- | --- | --- | --- |\n|Terraform Security Check|HIGH|Service accounts should not have roles assigned with excessive privileges|Service account is granted a privileged role.|[AVD-GCP-0007](https://avd.aquasec.com/misconfig/avd-gcp-0007)|\n\nService accounts should have a minimal set of permissions assigned in order to do their job. 
They should never have excessive access as if compromised, an attacker can escalate privileges and take over the entire account."), + }, + Properties: sarif.Properties{ + "tags": []interface{}{ + "misconfiguration", + "security", + "HIGH", + }, + "precision": "very-high", + "security-severity": "8.0", + }, + }, + }, + }, + ), + Results: []*sarif.Result{ + { + RuleID: lo.ToPtr("AVD-GCP-0007"), + RuleIndex: lo.ToPtr(uint(0)), + Level: lo.ToPtr("error"), + Message: *sarif.NewTextMessage("Artifact: github.com/terraform-google-modules/terraform-google-kubernetes-engine?ref=c4809044b52b91505bfba5ef9f25526aa0361788/modules/workload-identity/main.tf\nType: terraform\nVulnerability AVD-GCP-0007\nSeverity: HIGH\nMessage: Service account is granted a privileged role.\nLink: [AVD-GCP-0007](https://avd.aquasec.com/misconfig/avd-gcp-0007)"), + Locations: []*sarif.Location{ + { + PhysicalLocation: sarif.NewPhysicalLocation(). + WithArtifactLocation( + &sarif.ArtifactLocation{ + URI: lo.ToPtr("github.com/terraform-google-modules/terraform-google-kubernetes-engine?ref=c4809044b52b91505bfba5ef9f25526aa0361788/modules/workload-identity/main.tf"), + URIBaseId: lo.ToPtr("ROOTPATH"), + }, + ). 
+ WithRegion( + &sarif.Region{ + StartLine: lo.ToPtr(91), + StartColumn: lo.ToPtr(1), + EndLine: lo.ToPtr(91), + EndColumn: lo.ToPtr(1), + }, + ), + Message: sarif.NewTextMessage("github.com/terraform-google-modules/terraform-google-kubernetes-engine?ref=c4809044b52b91505bfba5ef9f25526aa0361788/modules/workload-identity/main.tf"), + }, + }, + }, + }, + ColumnKind: "utf16CodeUnits", + OriginalUriBaseIDs: map[string]*sarif.ArtifactLocation{ + "ROOTPATH": { + URI: lo.ToPtr("file:///"), + }, + }, + }, + }, + }, + }, } for _, tt := range tests { @@ -549,7 +670,7 @@ func TestReportWriter_Sarif(t *testing.T) { w := report.SarifWriter{ Output: sarifWritten, } - err := w.Write(nil, tt.input) + err := w.Write(context.TODO(), tt.input) assert.NoError(t, err) result := &sarif.Report{} From 87a9aa60d13a7263e9fa4be01ec8693e17c9d4e3 Mon Sep 17 00:00:00 2001 From: Nikita Pivkin Date: Fri, 29 Mar 2024 04:11:27 +0300 Subject: [PATCH 25/57] feat(aws): quiet flag support (#6331) --- pkg/cloud/aws/commands/run.go | 4 ---- pkg/cloud/aws/scanner/progress.go | 10 +++++++--- pkg/cloud/aws/scanner/scanner.go | 8 +++++--- 3 files changed, 12 insertions(+), 10 deletions(-) diff --git a/pkg/cloud/aws/commands/run.go b/pkg/cloud/aws/commands/run.go index a4541e9f0544..23406aeafda5 100644 --- a/pkg/cloud/aws/commands/run.go +++ b/pkg/cloud/aws/commands/run.go @@ -132,10 +132,6 @@ func Run(ctx context.Context, opt flag.Options) error { ctx, cancel := context.WithTimeout(ctx, opt.GlobalOptions.Timeout) defer cancel() - if err := log.InitLogger(opt.Debug, false); err != nil { - return xerrors.Errorf("logger error: %w", err) - } - var err error defer func() { if errors.Is(err, context.DeadlineExceeded) { diff --git a/pkg/cloud/aws/scanner/progress.go b/pkg/cloud/aws/scanner/progress.go index 243e6eb47f39..a313dd482c6c 100644 --- a/pkg/cloud/aws/scanner/progress.go +++ b/pkg/cloud/aws/scanner/progress.go @@ -2,6 +2,7 @@ package scanner import ( "fmt" + "io" "os" 
"github.com/aquasecurity/loading/pkg/bar" @@ -12,15 +13,17 @@ type progressTracker struct { serviceTotal int serviceCurrent int isTTY bool + debugWriter io.Writer } -func newProgressTracker() *progressTracker { +func newProgressTracker(w io.Writer) *progressTracker { var isTTY bool if stat, err := os.Stdout.Stat(); err == nil { isTTY = stat.Mode()&os.ModeCharDevice == os.ModeCharDevice } return &progressTracker{ - isTTY: isTTY, + isTTY: isTTY, + debugWriter: w, } } @@ -69,7 +72,8 @@ func (m *progressTracker) StartService(name string) { if !m.isTTY { return } - fmt.Printf("[%d/%d] Scanning %s...\n", m.serviceCurrent+1, m.serviceTotal, name) + + fmt.Fprintf(m.debugWriter, "[%d/%d] Scanning %s...\n", m.serviceCurrent+1, m.serviceTotal, name) m.serviceBar = bar.New( bar.OptionHideOnFinish(true), bar.OptionWithAutoComplete(false), diff --git a/pkg/cloud/aws/scanner/scanner.go b/pkg/cloud/aws/scanner/scanner.go index d91721512505..97f544492747 100644 --- a/pkg/cloud/aws/scanner/scanner.go +++ b/pkg/cloud/aws/scanner/scanner.go @@ -31,9 +31,11 @@ func (s *AWSScanner) Scan(ctx context.Context, option flag.Options) (scan.Result awsCache := cache.New(option.CacheDir, option.MaxCacheAge, option.Account, option.Region) included, missing := awsCache.ListServices(option.Services) + prefixedLogger := &log.PrefixedLogger{Name: "aws"} + var scannerOpts []options.ScannerOption if !option.NoProgress { - tracker := newProgressTracker() + tracker := newProgressTracker(prefixedLogger) defer tracker.Finish() scannerOpts = append(scannerOpts, aws.ScannerWithProgressTracker(tracker)) } @@ -43,11 +45,11 @@ func (s *AWSScanner) Scan(ctx context.Context, option flag.Options) (scan.Result } if option.Debug { - scannerOpts = append(scannerOpts, options.ScannerWithDebug(&log.PrefixedLogger{Name: "aws"})) + scannerOpts = append(scannerOpts, options.ScannerWithDebug(prefixedLogger)) } if option.Trace { - scannerOpts = append(scannerOpts, options.ScannerWithTrace(&log.PrefixedLogger{Name: "aws"})) 
+ scannerOpts = append(scannerOpts, options.ScannerWithTrace(prefixedLogger)) } if option.Region != "" { From 09e37b7c67664ca28923d392dc33fb1ca2600d35 Mon Sep 17 00:00:00 2001 From: Nikita Pivkin Date: Fri, 29 Mar 2024 04:12:23 +0300 Subject: [PATCH 26/57] feat(aws): apply filter options to result (#6367) --- pkg/cloud/aws/commands/run_test.go | 81 ++++++++++++++++++++ pkg/cloud/aws/commands/testdata/.trivyignore | 8 ++ pkg/cloud/report/report.go | 10 ++- pkg/result/filter.go | 2 +- pkg/result/ignore.go | 2 +- 5 files changed, 97 insertions(+), 6 deletions(-) create mode 100644 pkg/cloud/aws/commands/testdata/.trivyignore diff --git a/pkg/cloud/aws/commands/run_test.go b/pkg/cloud/aws/commands/run_test.go index feacdcc5a762..fe25bf20098d 100644 --- a/pkg/cloud/aws/commands/run_test.go +++ b/pkg/cloud/aws/commands/run_test.go @@ -267,6 +267,63 @@ const expectedS3ScanResult = `{ } ` +const expectedS3ScanResultWithExceptions = `{ + "CreatedAt": "2021-08-25T12:20:30.000000005Z", + "ArtifactName": "12345678", + "ArtifactType": "aws_account", + "Metadata": { + "ImageConfig": { + "architecture": "", + "created": "0001-01-01T00:00:00Z", + "os": "", + "rootfs": { + "type": "", + "diff_ids": null + }, + "config": {} + } + }, + "Results": [ + { + "Target": "arn:aws:s3:::examplebucket", + "Class": "config", + "Type": "cloud", + "MisconfSummary": { + "Successes": 0, + "Failures": 1, + "Exceptions": 8 + }, + "Misconfigurations": [ + { + "Type": "AWS", + "ID": "AVD-AWS-0094", + "AVDID": "AVD-AWS-0094", + "Title": "S3 buckets should each define an aws_s3_bucket_public_access_block", + "Description": "The \"block public access\" settings in S3 override individual policies that apply to a given bucket, meaning that all public access can be controlled in one central types for that bucket. 
It is therefore good practice to define these settings for each bucket in order to clearly define the public access that can be allowed for it.", + "Message": "Bucket does not have a corresponding public access block.", + "Resolution": "Define a aws_s3_bucket_public_access_block for the given bucket to control public access policies", + "Severity": "LOW", + "PrimaryURL": "https://avd.aquasec.com/misconfig/avd-aws-0094", + "References": [ + "https://avd.aquasec.com/misconfig/avd-aws-0094" + ], + "Status": "FAIL", + "Layer": {}, + "CauseMetadata": { + "Resource": "arn:aws:s3:::examplebucket", + "Provider": "aws", + "Service": "s3", + "Code": { + "Lines": null + } + } + } + ] + } + ] +} +` + const expectedCustomScanResult = `{ "CreatedAt": "2021-08-25T12:20:30.000000005Z", "ArtifactName": "12345678", @@ -915,6 +972,7 @@ func Test_Run(t *testing.T) { regoPolicy string allServices []string inputData string + ignoreFile string }{ { name: "succeed with cached infra", @@ -1140,6 +1198,25 @@ Summary Report for compliance: my-custom-spec cacheContent: "testdata/s3andcloudtrailcache.json", expectErr: true, }, + { + name: "ignore findings with .trivyignore", + options: flag.Options{ + RegoOptions: flag.RegoOptions{SkipPolicyUpdate: true}, + AWSOptions: flag.AWSOptions{ + Region: "us-east-1", + Services: []string{"s3"}, + Account: "12345678", + }, + CloudOptions: flag.CloudOptions{ + MaxCacheAge: time.Hour * 24 * 365 * 100, + }, + MisconfOptions: flag.MisconfOptions{IncludeNonFailures: true}, + }, + cacheContent: "testdata/s3onlycache.json", + allServices: []string{"s3"}, + ignoreFile: "testdata/.trivyignore", + want: expectedS3ScanResultWithExceptions, + }, } ctx := clock.With(context.Background(), time.Date(2021, 8, 25, 12, 20, 30, 5, time.UTC)) @@ -1192,6 +1269,10 @@ Summary Report for compliance: my-custom-spec require.NoError(t, os.WriteFile(cacheFile, cacheData, 0600)) } + if test.ignoreFile != "" { + test.options.ReportOptions.IgnoreFile = test.ignoreFile + } + err := 
Run(ctx, test.options) if test.expectErr { assert.Error(t, err) diff --git a/pkg/cloud/aws/commands/testdata/.trivyignore b/pkg/cloud/aws/commands/testdata/.trivyignore new file mode 100644 index 000000000000..44ef395ee173 --- /dev/null +++ b/pkg/cloud/aws/commands/testdata/.trivyignore @@ -0,0 +1,8 @@ +AVD-AWS-0086 +AVD-AWS-0087 +AVD-AWS-0088 +AVD-AWS-0090 +AVD-AWS-0132 +AVD-AWS-0091 +AVD-AWS-0092 +AVD-AWS-0093 \ No newline at end of file diff --git a/pkg/cloud/report/report.go b/pkg/cloud/report/report.go index b2a9d50cf507..2b2f8f3f17ea 100644 --- a/pkg/cloud/report/report.go +++ b/pkg/cloud/report/report.go @@ -70,16 +70,18 @@ func Write(ctx context.Context, rep *Report, opt flag.Options, fromCache bool) e return writeCompliance(ctx, rep, opt, output) } + ignoreConf, err := result.ParseIgnoreFile(ctx, opt.IgnoreFile) + if err != nil { + return xerrors.Errorf("%s error: %w", opt.IgnoreFile, err) + } + var filtered []types.Result // filter results for _, resultsAtTime := range rep.Results { for _, res := range resultsAtTime.Results { resCopy := res - if err := result.FilterResult(ctx, &resCopy, result.IgnoreConfig{}, result.FilterOption{ - Severities: opt.Severities, - IncludeNonFailures: opt.IncludeNonFailures, - }); err != nil { + if err := result.FilterResult(ctx, &resCopy, ignoreConf, opt.FilterOpts()); err != nil { return err } sort.Slice(resCopy.Misconfigurations, func(i, j int) bool { diff --git a/pkg/result/filter.go b/pkg/result/filter.go index 6edcef72046a..dc92d8aff5ec 100644 --- a/pkg/result/filter.go +++ b/pkg/result/filter.go @@ -37,7 +37,7 @@ type FilterOption struct { // Filter filters out the report func Filter(ctx context.Context, report types.Report, opt FilterOption) error { - ignoreConf, err := parseIgnoreFile(ctx, opt.IgnoreFile) + ignoreConf, err := ParseIgnoreFile(ctx, opt.IgnoreFile) if err != nil { return xerrors.Errorf("%s error: %w", opt.IgnoreFile, err) } diff --git a/pkg/result/ignore.go b/pkg/result/ignore.go index 
58e7e3c109a8..25f7d03837d7 100644 --- a/pkg/result/ignore.go +++ b/pkg/result/ignore.go @@ -181,7 +181,7 @@ func (c *IgnoreConfig) MatchLicense(licenseID, filePath string) *IgnoreFinding { return c.Licenses.Match(licenseID, filePath, nil) } -func parseIgnoreFile(ctx context.Context, ignoreFile string) (IgnoreConfig, error) { +func ParseIgnoreFile(ctx context.Context, ignoreFile string) (IgnoreConfig, error) { var conf IgnoreConfig if _, err := os.Stat(ignoreFile); errors.Is(err, fs.ErrNotExist) { // .trivyignore doesn't necessarily exist From 1a67472d2bd6efaf0d0698365d877145f8bc7551 Mon Sep 17 00:00:00 2001 From: Nikita Pivkin Date: Fri, 29 Mar 2024 06:55:00 +0300 Subject: [PATCH 27/57] perf(helm): load in-memory files (#6383) --- pkg/iac/scanners/helm/parser/parser.go | 58 +++++++++-------------- pkg/iac/scanners/helm/test/parser_test.go | 5 +- 2 files changed, 24 insertions(+), 39 deletions(-) diff --git a/pkg/iac/scanners/helm/parser/parser.go b/pkg/iac/scanners/helm/parser/parser.go index 20228258e5d3..3123b04e4b9c 100644 --- a/pkg/iac/scanners/helm/parser/parser.go +++ b/pkg/iac/scanners/helm/parser/parser.go @@ -7,7 +7,6 @@ import ( "fmt" "io" "io/fs" - "os" "path/filepath" "regexp" "sort" @@ -192,17 +191,7 @@ func (p *Parser) extractChartName(chartPath string) error { } func (p *Parser) RenderedChartFiles() ([]ChartFile, error) { - - tempDir, err := os.MkdirTemp(os.TempDir(), "defsec") - if err != nil { - return nil, err - } - - if err := p.writeBuildFiles(tempDir); err != nil { - return nil, err - } - - workingChart, err := loadChart(tempDir) + workingChart, err := p.loadChart() if err != nil { return nil, err } @@ -246,19 +235,36 @@ func (p *Parser) getRelease(chrt *chart.Chart) (*release.Release, error) { return r, nil } -func loadChart(tempFs string) (*chart.Chart, error) { - loadedChart, err := loader.Load(tempFs) +func (p *Parser) loadChart() (*chart.Chart, error) { + + var files []*loader.BufferedFile + + for _, filePath := range p.filepaths { + b, 
err := fs.ReadFile(p.workingFS, filePath) + if err != nil { + return nil, err + } + + filePath = strings.TrimPrefix(filePath, p.rootPath+"/") + filePath = filepath.ToSlash(filePath) + files = append(files, &loader.BufferedFile{ + Name: filePath, + Data: b, + }) + } + + c, err := loader.LoadFiles(files) if err != nil { return nil, err } - if req := loadedChart.Metadata.Dependencies; req != nil { - if err := action.CheckDependencies(loadedChart, req); err != nil { + if req := c.Metadata.Dependencies; req != nil { + if err := action.CheckDependencies(c, req); err != nil { return nil, err } } - return loadedChart, nil + return c, nil } func (*Parser) getRenderedManifests(manifestsKeys []string, splitManifests map[string]string) []ChartFile { @@ -290,24 +296,6 @@ func getManifestPath(manifest string) string { return manifestFilePathParts[0] } -func (p *Parser) writeBuildFiles(tempFs string) error { - for _, path := range p.filepaths { - content, err := fs.ReadFile(p.workingFS, path) - if err != nil { - return err - } - workingPath := strings.TrimPrefix(path, p.rootPath) - workingPath = filepath.Join(tempFs, workingPath) - if err := os.MkdirAll(filepath.Dir(workingPath), os.ModePerm); err != nil { - return err - } - if err := os.WriteFile(workingPath, content, os.ModePerm); err != nil { - return err - } - } - return nil -} - func (p *Parser) required(path string, workingFS fs.FS) bool { if p.skipRequired { return true diff --git a/pkg/iac/scanners/helm/test/parser_test.go b/pkg/iac/scanners/helm/test/parser_test.go index 6d9f5ad0cff3..0d12f33fe827 100644 --- a/pkg/iac/scanners/helm/test/parser_test.go +++ b/pkg/iac/scanners/helm/test/parser_test.go @@ -32,11 +32,8 @@ func Test_helm_parser(t *testing.T) { for _, test := range tests { t.Run(test.testName, func(t *testing.T) { chartName := test.chartName - - t.Logf("Running test: %s", test.testName) - helmParser := parser.New(chartName) - err := helmParser.ParseFS(context.TODO(), os.DirFS(filepath.Join("testdata", 
chartName)), ".") + err := helmParser.ParseFS(context.TODO(), os.DirFS("testdata"), chartName) require.NoError(t, err) manifests, err := helmParser.RenderedChartFiles() require.NoError(t, err) From 29dee32814729f8ba2382f975582d1dbd092cf5c Mon Sep 17 00:00:00 2001 From: Nikita Pivkin Date: Fri, 29 Mar 2024 06:55:18 +0300 Subject: [PATCH 28/57] feat(terraform): ignore resources by nested attributes (#6302) --- docs/docs/scanner/misconfiguration/index.md | 17 +- pkg/iac/scanners/terraform/ignore_test.go | 458 +++++++++++++++----- pkg/iac/terraform/ignore.go | 2 +- 3 files changed, 354 insertions(+), 123 deletions(-) diff --git a/docs/docs/scanner/misconfiguration/index.md b/docs/docs/scanner/misconfiguration/index.md index b1107a530718..6cf4473d6652 100644 --- a/docs/docs/scanner/misconfiguration/index.md +++ b/docs/docs/scanner/misconfiguration/index.md @@ -494,8 +494,21 @@ resource "aws_security_group_rule" "example" { } ``` -!!! note - Currently nested attributes are not supported. For example you will not be able to reference the `each.key` attribute. +Checks can also be ignored by nested attributes, but certain restrictions apply: + +- You cannot access an individual block using indexes, for example when working with dynamic blocks. +- Special variables like [each](https://developer.hashicorp.com/terraform/language/meta-arguments/for_each#the-each-object) and [count](https://developer.hashicorp.com/terraform/language/meta-arguments/count#the-count-object) cannot be accessed. 
+ +```tf +#trivy:ignore:*[logging_config.prefix=myprefix] +resource "aws_cloudfront_distribution" "example" { + logging_config { + include_cookies = false + bucket = "mylogs.s3.amazonaws.com" + prefix = "myprefix" + } +} +``` #### Ignoring module issues diff --git a/pkg/iac/scanners/terraform/ignore_test.go b/pkg/iac/scanners/terraform/ignore_test.go index 6e561d256653..0e8c0c8bfdd5 100644 --- a/pkg/iac/scanners/terraform/ignore_test.go +++ b/pkg/iac/scanners/terraform/ignore_test.go @@ -24,12 +24,18 @@ var exampleRule = scan.Rule{ Terraform: &scan.TerraformCustomCheck{ RequiredLabels: []string{"bad"}, Check: func(resourceBlock *terraform.Block, _ *terraform.Module) (results scan.Results) { - attr := resourceBlock.GetAttribute("secure") - if attr.IsNil() { - results.Add("example problem", resourceBlock) - } - if attr.IsFalse() { - results.Add("example problem", attr) + if attr, _ := resourceBlock.GetNestedAttribute("secure_settings.enabled"); attr.IsNotNil() { + if attr.IsFalse() { + results.Add("example problem", attr) + } + } else { + attr := resourceBlock.GetAttribute("secure") + if attr.IsNil() { + results.Add("example problem", resourceBlock) + } + if attr.IsFalse() { + results.Add("example problem", attr) + } } return }, @@ -44,58 +50,92 @@ func Test_IgnoreAll(t *testing.T) { inputOptions string assertLength int }{ - {name: "IgnoreAll", inputOptions: ` + { + name: "IgnoreAll", + inputOptions: ` resource "bad" "my-rule" { secure = false // tfsec:ignore:* } -`, assertLength: 0}, - {name: "IgnoreLineAboveTheBlock", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "IgnoreLineAboveTheBlock", + inputOptions: ` // tfsec:ignore:* resource "bad" "my-rule" { secure = false } -`, assertLength: 0}, - {name: "IgnoreLineAboveTheBlockMatchingParamBool", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "IgnoreLineAboveTheBlockMatchingParamBool", + inputOptions: ` // tfsec:ignore:*[secure=false] resource "bad" "my-rule" { secure = false } -`, assertLength: 0}, - 
{name: "IgnoreLineAboveTheBlockNotMatchingParamBool", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "IgnoreLineAboveTheBlockNotMatchingParamBool", + inputOptions: ` // tfsec:ignore:*[secure=true] resource "bad" "my-rule" { secure = false } -`, assertLength: 1}, - {name: "IgnoreLineAboveTheBlockMatchingParamString", inputOptions: ` +`, + assertLength: 1, + }, + { + name: "IgnoreLineAboveTheBlockMatchingParamString", + inputOptions: ` // tfsec:ignore:*[name=myrule] resource "bad" "my-rule" { name = "myrule" secure = false } -`, assertLength: 0}, - {name: "IgnoreLineAboveTheBlockNotMatchingParamString", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "IgnoreLineAboveTheBlockNotMatchingParamString", + inputOptions: ` // tfsec:ignore:*[name=myrule2] resource "bad" "my-rule" { name = "myrule" secure = false } -`, assertLength: 1}, - {name: "IgnoreLineAboveTheBlockMatchingParamInt", inputOptions: ` +`, + assertLength: 1, + }, + { + name: "IgnoreLineAboveTheBlockMatchingParamInt", + inputOptions: ` // tfsec:ignore:*[port=123] resource "bad" "my-rule" { secure = false port = 123 } -`, assertLength: 0}, - {name: "IgnoreLineAboveTheBlockNotMatchingParamInt", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "IgnoreLineAboveTheBlockNotMatchingParamInt", + inputOptions: ` // tfsec:ignore:*[port=456] resource "bad" "my-rule" { secure = false port = 123 } -`, assertLength: 1}, - {name: "IgnoreLineStackedAboveTheBlock", inputOptions: ` +`, + assertLength: 1, + }, + { + name: "IgnoreLineStackedAboveTheBlock", + inputOptions: ` // tfsec:ignore:* // tfsec:ignore:a // tfsec:ignore:b @@ -104,8 +144,12 @@ resource "bad" "my-rule" { resource "bad" "my-rule" { secure = false } -`, assertLength: 0}, - {name: "IgnoreLineStackedAboveTheBlockWithoutMatch", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "IgnoreLineStackedAboveTheBlockWithoutMatch", + inputOptions: ` #tfsec:ignore:* #tfsec:ignore:x @@ -116,8 +160,12 @@ resource "bad" "my-rule" { resource "bad" 
"my-rule" { secure = false } -`, assertLength: 1}, - {name: "IgnoreLineStackedAboveTheBlockWithHashesWithoutSpaces", inputOptions: ` +`, + assertLength: 1, + }, + { + name: "IgnoreLineStackedAboveTheBlockWithHashesWithoutSpaces", + inputOptions: ` #tfsec:ignore:* #tfsec:ignore:a #tfsec:ignore:b @@ -126,8 +174,12 @@ resource "bad" "my-rule" { resource "bad" "my-rule" { secure = false } -`, assertLength: 0}, - {name: "IgnoreLineStackedAboveTheBlockWithoutSpaces", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "IgnoreLineStackedAboveTheBlockWithoutSpaces", + inputOptions: ` //tfsec:ignore:* //tfsec:ignore:a //tfsec:ignore:b @@ -136,135 +188,261 @@ resource "bad" "my-rule" { resource "bad" "my-rule" { secure = false } -`, assertLength: 0}, - {name: "IgnoreLineAboveTheLine", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "IgnoreLineAboveTheLine", + inputOptions: ` resource "bad" "my-rule" { # tfsec:ignore:aws-service-abc123 secure = false } -`, assertLength: 0}, - {name: "IgnoreWithExpDateIfDateBreachedThenDontIgnore", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "IgnoreWithExpDateIfDateBreachedThenDontIgnore", + inputOptions: ` resource "bad" "my-rule" { secure = false # tfsec:ignore:aws-service-abc123:exp:2000-01-02 } -`, assertLength: 1}, - {name: "IgnoreWithExpDateIfDateNotBreachedThenIgnoreIgnore", inputOptions: ` +`, + assertLength: 1, + }, + { + name: "IgnoreWithExpDateIfDateNotBreachedThenIgnoreIgnore", + inputOptions: ` resource "bad" "my-rule" { secure = false # tfsec:ignore:aws-service-abc123:exp:2221-01-02 } -`, assertLength: 0}, - {name: "IgnoreWithExpDateIfDateInvalidThenDropTheIgnore", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "IgnoreWithExpDateIfDateInvalidThenDropTheIgnore", + inputOptions: ` resource "bad" "my-rule" { secure = false # tfsec:ignore:aws-service-abc123:exp:2221-13-02 } -`, assertLength: 1}, - {name: "IgnoreAboveResourceBlockWithExpDateIfDateNotBreachedThenIgnoreIgnore", inputOptions: ` +`, + 
assertLength: 1, + }, + { + name: "IgnoreAboveResourceBlockWithExpDateIfDateNotBreachedThenIgnoreIgnore", + inputOptions: ` #tfsec:ignore:aws-service-abc123:exp:2221-01-02 resource "bad" "my-rule" { } -`, assertLength: 0}, - {name: "IgnoreAboveResourceBlockWithExpDateAndMultipleIgnoresIfDateNotBreachedThenIgnoreIgnore", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "IgnoreAboveResourceBlockWithExpDateAndMultipleIgnoresIfDateNotBreachedThenIgnoreIgnore", + inputOptions: ` # tfsec:ignore:aws-service-abc123:exp:2221-01-02 resource "bad" "my-rule" { } -`, assertLength: 0}, - {name: "IgnoreForImpliedIAMResource", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "IgnoreForImpliedIAMResource", + inputOptions: ` terraform { -required_version = "~> 1.1.6" + required_version = "~> 1.1.6" -required_providers { -aws = { -source = "hashicorp/aws" -version = "~> 3.48" -} -} + required_providers { + aws = { + source = "hashicorp/aws" + version = "~> 3.48" + } + } } # Retrieve an IAM group defined outside of this Terraform config. # tfsec:ignore:aws-iam-enforce-mfa data "aws_iam_group" "externally_defined_group" { -group_name = "group-name" # tfsec:ignore:aws-iam-enforce-mfa + group_name = "group-name" # tfsec:ignore:aws-iam-enforce-mfa } # Create an IAM policy and attach it to the group. 
# tfsec:ignore:aws-iam-enforce-mfa resource "aws_iam_policy" "test_policy" { -name = "test-policy" # tfsec:ignore:aws-iam-enforce-mfa -policy = data.aws_iam_policy_document.test_policy.json # tfsec:ignore:aws-iam-enforce-mfa + name = "test-policy" # tfsec:ignore:aws-iam-enforce-mfa + policy = data.aws_iam_policy_document.test_policy.json # tfsec:ignore:aws-iam-enforce-mfa } # tfsec:ignore:aws-iam-enforce-mfa resource "aws_iam_group_policy_attachment" "test_policy_attachment" { -group = data.aws_iam_group.externally_defined_group.group_name # tfsec:ignore:aws-iam-enforce-mfa -policy_arn = aws_iam_policy.test_policy.arn # tfsec:ignore:aws-iam-enforce-mfa + group = data.aws_iam_group.externally_defined_group.group_name # tfsec:ignore:aws-iam-enforce-mfa + policy_arn = aws_iam_policy.test_policy.arn # tfsec:ignore:aws-iam-enforce-mfa } # tfsec:ignore:aws-iam-enforce-mfa data "aws_iam_policy_document" "test_policy" { -statement { -sid = "PublishToCloudWatch" # tfsec:ignore:aws-iam-enforce-mfa -actions = [ -"cloudwatch:PutMetricData", # tfsec:ignore:aws-iam-enforce-mfa -] -resources = ["*"] # tfsec:ignore:aws-iam-enforce-mfa -} -} -`, assertLength: 0}, - {name: "TrivyIgnoreAll", inputOptions: ` + statement { + sid = "PublishToCloudWatch" # tfsec:ignore:aws-iam-enforce-mfa + actions = [ + "cloudwatch:PutMetricData", # tfsec:ignore:aws-iam-enforce-mfa + ] + resources = ["*"] # tfsec:ignore:aws-iam-enforce-mfa + } +} +`, + assertLength: 0, + }, + { + name: "TrivyIgnoreAll", + inputOptions: ` resource "bad" "my-rule" { secure = false // trivy:ignore:* } -`, assertLength: 0}, - {name: "TrivyIgnoreLineAboveTheBlock", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "TrivyIgnoreLineAboveTheBlock", + inputOptions: ` // trivy:ignore:* resource "bad" "my-rule" { secure = false } -`, assertLength: 0}, - {name: "TrivyIgnoreLineAboveTheBlockMatchingParamBool", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "TrivyIgnoreLineAboveTheBlockMatchingParamBool", + 
inputOptions: ` // trivy:ignore:*[secure=false] resource "bad" "my-rule" { secure = false } -`, assertLength: 0}, - {name: "TrivyIgnoreLineAboveTheBlockNotMatchingParamBool", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "TrivyIgnoreLineAboveTheBlockNotMatchingParamBool", + inputOptions: ` // trivy:ignore:*[secure=true] resource "bad" "my-rule" { secure = false } -`, assertLength: 1}, - {name: "TrivyIgnoreLineAboveTheBlockMatchingParamString", inputOptions: ` +`, + assertLength: 1, + }, + { + name: "TrivyIgnoreLineAboveTheBlockMatchingParamString", + inputOptions: ` // trivy:ignore:*[name=myrule] resource "bad" "my-rule" { name = "myrule" secure = false } -`, assertLength: 0}, - {name: "TrivyIgnoreLineAboveTheBlockNotMatchingParamString", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "TrivyIgnoreLineAboveTheBlockNotMatchingParamString", + inputOptions: ` // trivy:ignore:*[name=myrule2] resource "bad" "my-rule" { name = "myrule" secure = false } -`, assertLength: 1}, - {name: "TrivyIgnoreLineAboveTheBlockMatchingParamInt", inputOptions: ` +`, + assertLength: 1, + }, + { + name: "TrivyIgnoreLineAboveTheBlockMatchingParamInt", + inputOptions: ` // trivy:ignore:*[port=123] resource "bad" "my-rule" { secure = false port = 123 } -`, assertLength: 0}, - {name: "TrivyIgnoreLineAboveTheBlockNotMatchingParamInt", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "TrivyIgnoreLineAboveTheBlockNotMatchingParamInt", + inputOptions: ` // trivy:ignore:*[port=456] resource "bad" "my-rule" { secure = false port = 123 } -`, assertLength: 1}, - {name: "TrivyIgnoreLineStackedAboveTheBlock", inputOptions: ` +`, + assertLength: 1, + }, + { + name: "ignore by nested attribute", + inputOptions: ` +// trivy:ignore:*[secure_settings.enabled=false] +resource "bad" "my-rule" { + secure_settings { + enabled = false + } +} +`, + assertLength: 0, + }, + { + name: "ignore by nested attribute of another type", + inputOptions: ` +// trivy:ignore:*[secure_settings.enabled=1] 
+resource "bad" "my-rule" { + secure_settings { + enabled = false + } +} +`, + assertLength: 1, + }, + { + name: "ignore by non-existent nested attribute", + inputOptions: ` +// trivy:ignore:*[secure_settings.rule=myrule] +resource "bad" "my-rule" { + secure_settings { + enabled = false + } +} +`, + assertLength: 1, + }, + { + name: "ignore resource with `for_each` meta-argument", + inputOptions: ` +// trivy:ignore:*[secure=false] +resource "bad" "my-rule" { + for_each = toset(["false", "true", "false"]) + secure = each.key +} +`, + assertLength: 0, + }, + { + name: "ignore by dynamic block value", + inputOptions: ` +// trivy:ignore:*[secure_settings.enabled=false] +resource "bad" "my-rule" { + dynamic "secure_settings" { + for_each = ["false", "true"] + content { + enabled = secure_settings.value + } + } +} +`, + assertLength: 0, + }, + { + name: "TrivyIgnoreLineStackedAboveTheBlock", + inputOptions: ` // trivy:ignore:* // trivy:ignore:a // trivy:ignore:b @@ -273,8 +451,12 @@ resource "bad" "my-rule" { resource "bad" "my-rule" { secure = false } -`, assertLength: 0}, - {name: "TrivyIgnoreLineStackedAboveTheBlockWithoutMatch", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "TrivyIgnoreLineStackedAboveTheBlockWithoutMatch", + inputOptions: ` #trivy:ignore:* #trivy:ignore:x @@ -285,8 +467,12 @@ resource "bad" "my-rule" { resource "bad" "my-rule" { secure = false } -`, assertLength: 1}, - {name: "TrivyIgnoreLineStackedAboveTheBlockWithHashesWithoutSpaces", inputOptions: ` +`, + assertLength: 1, + }, + { + name: "TrivyIgnoreLineStackedAboveTheBlockWithHashesWithoutSpaces", + inputOptions: ` #trivy:ignore:* #trivy:ignore:a #trivy:ignore:b @@ -295,8 +481,12 @@ resource "bad" "my-rule" { resource "bad" "my-rule" { secure = false } -`, assertLength: 0}, - {name: "TrivyIgnoreLineStackedAboveTheBlockWithoutSpaces", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "TrivyIgnoreLineStackedAboveTheBlockWithoutSpaces", + inputOptions: ` //trivy:ignore:* 
//trivy:ignore:a //trivy:ignore:b @@ -305,81 +495,109 @@ resource "bad" "my-rule" { resource "bad" "my-rule" { secure = false } -`, assertLength: 0}, - {name: "TrivyIgnoreLineAboveTheLine", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "TrivyIgnoreLineAboveTheLine", + inputOptions: ` resource "bad" "my-rule" { # trivy:ignore:aws-service-abc123 secure = false } -`, assertLength: 0}, - {name: "TrivyIgnoreWithExpDateIfDateBreachedThenDontIgnore", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "TrivyIgnoreWithExpDateIfDateBreachedThenDontIgnore", + inputOptions: ` resource "bad" "my-rule" { secure = false # trivy:ignore:aws-service-abc123:exp:2000-01-02 } -`, assertLength: 1}, - {name: "TrivyIgnoreWithExpDateIfDateNotBreachedThenIgnoreIgnore", inputOptions: ` +`, + assertLength: 1, + }, + { + name: "TrivyIgnoreWithExpDateIfDateNotBreachedThenIgnoreIgnore", + inputOptions: ` resource "bad" "my-rule" { secure = false # trivy:ignore:aws-service-abc123:exp:2221-01-02 } -`, assertLength: 0}, - {name: "TrivyIgnoreWithExpDateIfDateInvalidThenDropTheIgnore", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "TrivyIgnoreWithExpDateIfDateInvalidThenDropTheIgnore", + inputOptions: ` resource "bad" "my-rule" { secure = false # trivy:ignore:aws-service-abc123:exp:2221-13-02 } -`, assertLength: 1}, - {name: "TrivyIgnoreAboveResourceBlockWithExpDateIfDateNotBreachedThenIgnoreIgnore", inputOptions: ` +`, + assertLength: 1, + }, + { + name: "TrivyIgnoreAboveResourceBlockWithExpDateIfDateNotBreachedThenIgnoreIgnore", + inputOptions: ` #trivy:ignore:aws-service-abc123:exp:2221-01-02 resource "bad" "my-rule" { } -`, assertLength: 0}, - {name: "TrivyIgnoreAboveResourceBlockWithExpDateAndMultipleIgnoresIfDateNotBreachedThenIgnoreIgnore", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "TrivyIgnoreAboveResourceBlockWithExpDateAndMultipleIgnoresIfDateNotBreachedThenIgnoreIgnore", + inputOptions: ` # trivy:ignore:aws-service-abc123:exp:2221-01-02 resource "bad" 
"my-rule" { } -`, assertLength: 0}, - {name: "TrivyIgnoreForImpliedIAMResource", inputOptions: ` +`, + assertLength: 0, + }, + { + name: "TrivyIgnoreForImpliedIAMResource", + inputOptions: ` terraform { -required_version = "~> 1.1.6" + required_version = "~> 1.1.6" -required_providers { -aws = { -source = "hashicorp/aws" -version = "~> 3.48" -} -} + required_providers { + aws = { + source = "hashicorp/aws" + version = "~> 3.48" + } + } } # Retrieve an IAM group defined outside of this Terraform config. # trivy:ignore:aws-iam-enforce-mfa data "aws_iam_group" "externally_defined_group" { -group_name = "group-name" # trivy:ignore:aws-iam-enforce-mfa + group_name = "group-name" # trivy:ignore:aws-iam-enforce-mfa } # Create an IAM policy and attach it to the group. # trivy:ignore:aws-iam-enforce-mfa resource "aws_iam_policy" "test_policy" { -name = "test-policy" # trivy:ignore:aws-iam-enforce-mfa -policy = data.aws_iam_policy_document.test_policy.json # trivy:ignore:aws-iam-enforce-mfa + name = "test-policy" # trivy:ignore:aws-iam-enforce-mfa + policy = data.aws_iam_policy_document.test_policy.json # trivy:ignore:aws-iam-enforce-mfa } # trivy:ignore:aws-iam-enforce-mfa resource "aws_iam_group_policy_attachment" "test_policy_attachment" { -group = data.aws_iam_group.externally_defined_group.group_name # trivy:ignore:aws-iam-enforce-mfa -policy_arn = aws_iam_policy.test_policy.arn # trivy:ignore:aws-iam-enforce-mfa + group = data.aws_iam_group.externally_defined_group.group_name # trivy:ignore:aws-iam-enforce-mfa + policy_arn = aws_iam_policy.test_policy.arn # trivy:ignore:aws-iam-enforce-mfa } # trivy:ignore:aws-iam-enforce-mfa data "aws_iam_policy_document" "test_policy" { -statement { -sid = "PublishToCloudWatch" # trivy:ignore:aws-iam-enforce-mfa -actions = [ -"cloudwatch:PutMetricData", # trivy:ignore:aws-iam-enforce-mfa -] -resources = ["*"] # trivy:ignore:aws-iam-enforce-mfa -} + statement { + sid = "PublishToCloudWatch" # trivy:ignore:aws-iam-enforce-mfa + actions 
= [ + "cloudwatch:PutMetricData", # trivy:ignore:aws-iam-enforce-mfa + ] + resources = ["*"] # trivy:ignore:aws-iam-enforce-mfa + } } `, assertLength: 0}} diff --git a/pkg/iac/terraform/ignore.go b/pkg/iac/terraform/ignore.go index e52fbf202be5..69e0341ed4be 100644 --- a/pkg/iac/terraform/ignore.go +++ b/pkg/iac/terraform/ignore.go @@ -71,7 +71,7 @@ func (ignore Ignore) MatchParams(modules Modules, blockMetadata *iacTypes.Metada return true } for key, val := range ignore.Params { - attr := block.GetAttribute(key) + attr, _ := block.GetNestedAttribute(key) if attr.IsNil() || !attr.Value().IsKnown() { return false } From df024e88ddccc0bd9158e7a4a553983438399826 Mon Sep 17 00:00:00 2001 From: Nikita Pivkin Date: Fri, 29 Mar 2024 08:23:01 +0300 Subject: [PATCH 29/57] feat(cloudformation): inline ignore support for YAML templates (#6358) --- docs/docs/scanner/misconfiguration/index.md | 13 +- pkg/iac/ignore/parse.go | 168 ++++++++++++ pkg/iac/ignore/rule.go | 98 +++++++ pkg/iac/ignore/rule_test.go | 253 ++++++++++++++++++ pkg/iac/scan/code_test.go | 6 +- pkg/iac/scan/result.go | 17 ++ .../cloudformation/parser/file_context.go | 2 + .../scanners/cloudformation/parser/parser.go | 11 +- .../cloudformation/parser/property.go | 15 +- .../cloudformation/parser/reference.go | 37 --- pkg/iac/scanners/cloudformation/scanner.go | 24 +- .../scanners/cloudformation/scanner_test.go | 128 +++++++++ .../scanners/terraform/executor/executor.go | 172 ++++++------ pkg/iac/scanners/terraform/executor/option.go | 12 - pkg/iac/scanners/terraform/options.go | 16 -- .../scanners/terraform/parser/evaluator.go | 5 +- .../scanners/terraform/parser/load_blocks.go | 131 --------- .../terraform/parser/load_blocks_test.go | 13 - pkg/iac/scanners/terraform/parser/parser.go | 59 +++- pkg/iac/scanners/terraform/scanner_test.go | 36 --- pkg/iac/terraform/ignore.go | 100 ------- pkg/iac/terraform/module.go | 8 +- 22 files changed, 856 insertions(+), 468 deletions(-) create mode 100644 
pkg/iac/ignore/parse.go create mode 100644 pkg/iac/ignore/rule.go create mode 100644 pkg/iac/ignore/rule_test.go delete mode 100644 pkg/iac/scanners/terraform/parser/load_blocks.go delete mode 100644 pkg/iac/scanners/terraform/parser/load_blocks_test.go delete mode 100644 pkg/iac/terraform/ignore.go diff --git a/docs/docs/scanner/misconfiguration/index.md b/docs/docs/scanner/misconfiguration/index.md index 6cf4473d6652..f76dc9392363 100644 --- a/docs/docs/scanner/misconfiguration/index.md +++ b/docs/docs/scanner/misconfiguration/index.md @@ -381,7 +381,7 @@ If multiple variables evaluate to the same hostname, Trivy will choose the envir ### Skipping resources by inline comments -Trivy supports ignoring misconfigured resources by inline comments for Terraform configuration files only. +Trivy supports ignoring misconfigured resources by inline comments for Terraform and CloudFormation configuration files only. In cases where Trivy can detect comments of a specific format immediately adjacent to resource definitions, it is possible to ignore findings from a single source of resource definition (in contrast to `.trivyignore`, which has a directory-wide scope on all of the files scanned). The format for these comments is `trivy:ignore:` immediately following the format-specific line-comment [token](https://developer.hashicorp.com/terraform/language/syntax/configuration#comments). @@ -422,6 +422,17 @@ As an example, consider the following check metadata: Long ID would look like the following: `aws-s3-enable-logging`. +Example for CloudFormation: +```yaml +AWSTemplateFormatVersion: "2010-09-09" +Resources: +#trivy:ignore:* + S3Bucket: + Type: 'AWS::S3::Bucket' + Properties: + BucketName: test-bucket +``` + #### Expiration Date You can specify the expiration date of the ignore rule in `yyyy-mm-dd` format. This is a useful feature when you want to make sure that an ignored issue is not forgotten and worth revisiting in the future.
For example: diff --git a/pkg/iac/ignore/parse.go b/pkg/iac/ignore/parse.go new file mode 100644 index 000000000000..075f1f621203 --- /dev/null +++ b/pkg/iac/ignore/parse.go @@ -0,0 +1,168 @@ +package ignore + +import ( + "errors" + "strings" + "time" + + "github.com/aquasecurity/trivy/pkg/iac/types" + "github.com/aquasecurity/trivy/pkg/log" +) + +// RuleSectionParser defines the interface for parsing ignore rules. +type RuleSectionParser interface { + Key() string + Parse(string) bool + Param() any +} + +// Parse parses the configuration file and returns the Rules +func Parse(src, path string, parsers ...RuleSectionParser) Rules { + var rules Rules + for i, line := range strings.Split(src, "\n") { + line = strings.TrimSpace(line) + rng := types.NewRange(path, i+1, i+1, "", nil) + lineIgnores := parseLine(line, rng, parsers) + for _, lineIgnore := range lineIgnores { + rules = append(rules, lineIgnore) + } + } + + rules.shift() + + return rules +} + +func parseLine(line string, rng types.Range, parsers []RuleSectionParser) []Rule { + var rules []Rule + + sections := strings.Split(strings.TrimSpace(line), " ") + for _, section := range sections { + section := strings.TrimSpace(section) + section = strings.TrimLeftFunc(section, func(r rune) bool { + return r == '#' || r == '/' || r == '*' + }) + + section, exists := hasIgnoreRulePrefix(section) + if !exists { + continue + } + + rule, err := parseComment(section, rng, parsers) + if err != nil { + log.Logger.Debugf("Failed to parse rule at %s: %s", rng.String(), err.Error()) + continue + } + rules = append(rules, rule) + } + + return rules +} + +func hasIgnoreRulePrefix(s string) (string, bool) { + for _, prefix := range []string{"tfsec:", "trivy:"} { + if after, found := strings.CutPrefix(s, prefix); found { + return after, true + } + } + + return "", false +} + +func parseComment(input string, rng types.Range, parsers []RuleSectionParser) (Rule, error) { + rule := Rule{ + rng: rng, + sections: make(map[string]any), + 
} + + parsers = append(parsers, &expiryDateParser{ + rng: rng, + }) + + segments := strings.Split(input, ":") + + for i := 0; i < len(segments)-1; i += 2 { + key := segments[i] + val := segments[i+1] + if key == "ignore" { + // special case, because id and parameters are in the same section + idParser := &checkIDParser{ + StringMatchParser{SectionKey: "id"}, + } + if idParser.Parse(val) { + rule.sections[idParser.Key()] = idParser.Param() + } + } + + for _, parser := range parsers { + if parser.Key() != key { + continue + } + + if parser.Parse(val) { + rule.sections[parser.Key()] = parser.Param() + } + } + } + + if _, exists := rule.sections["id"]; !exists { + return Rule{}, errors.New("rule section with the `ignore` key is required") + } + + return rule, nil +} + +type StringMatchParser struct { + SectionKey string + param string +} + +func (s *StringMatchParser) Key() string { + return s.SectionKey +} + +func (s *StringMatchParser) Parse(str string) bool { + s.param = str + return str != "" +} + +func (s *StringMatchParser) Param() any { + return s.param +} + +type checkIDParser struct { + StringMatchParser +} + +func (s *checkIDParser) Parse(str string) bool { + if idx := strings.Index(str, "["); idx != -1 { + str = str[:idx] + } + return s.StringMatchParser.Parse(str) +} + +type expiryDateParser struct { + rng types.Range + expiry time.Time +} + +func (s *expiryDateParser) Key() string { + return "exp" +} + +func (s *expiryDateParser) Parse(str string) bool { + parsed, err := time.Parse("2006-01-02", str) + if err != nil { + log.Logger.Debugf("Incorrect time to ignore is specified: %s", str) + parsed = time.Time{} + } else if time.Now().After(parsed) { + log.Logger.Debug("Ignore rule time has expired for location: %s", s.rng.String()) + } + + s.expiry = parsed + return true +} + +func (s *expiryDateParser) Param() any { + return s.expiry +} diff --git a/pkg/iac/ignore/rule.go b/pkg/iac/ignore/rule.go new file mode 100644 index 000000000000..d81f17576915 --- 
/dev/null +++ b/pkg/iac/ignore/rule.go @@ -0,0 +1,98 @@ +package ignore + +import ( + "slices" + "time" + + "github.com/samber/lo" + + "github.com/aquasecurity/trivy/pkg/iac/types" +) + +// Ignorer represents a function that checks if the rule should be ignored. +type Ignorer func(resultMeta types.Metadata, param any) bool + +type Rules []Rule + +// Ignore checks if the rule should be ignored based on provided metadata, IDs, and ignorer functions. +func (r Rules) Ignore(m types.Metadata, ids []string, ignorers map[string]Ignorer) bool { + return slices.ContainsFunc(r, func(r Rule) bool { + return r.ignore(m, ids, ignorers) + }) +} + +func (r Rules) shift() { + var ( + currentRange *types.Range + offset int + ) + + for i := len(r) - 1; i > 0; i-- { + currentIgnore, nextIgnore := r[i], r[i-1] + if currentRange == nil { + currentRange = ¤tIgnore.rng + } + if nextIgnore.rng.GetStartLine()+1+offset == currentIgnore.rng.GetStartLine() { + r[i-1].rng = *currentRange + offset++ + } else { + currentRange = nil + offset = 0 + } + } +} + +// Rule represents a rule for ignoring vulnerabilities. 
+type Rule struct { + rng types.Range + sections map[string]any +} + +func (r Rule) ignore(m types.Metadata, ids []string, ignorers map[string]Ignorer) bool { + matchMeta, ok := r.matchRange(&m) + if !ok { + return false + } + + ignorers = lo.Assign(defaultIgnorers(ids), ignorers) + + for ignoreID, ignore := range ignorers { + if param, exists := r.sections[ignoreID]; exists { + if !ignore(*matchMeta, param) { + return false + } + } + } + + return true +} + +func (r Rule) matchRange(m *types.Metadata) (*types.Metadata, bool) { + metaHierarchy := m + for metaHierarchy != nil { + if r.rng.GetFilename() != metaHierarchy.Range().GetFilename() { + metaHierarchy = metaHierarchy.Parent() + continue + } + if metaHierarchy.Range().GetStartLine() == r.rng.GetStartLine()+1 || + metaHierarchy.Range().GetStartLine() == r.rng.GetStartLine() { + return metaHierarchy, true + } + metaHierarchy = metaHierarchy.Parent() + } + + return nil, false +} + +func defaultIgnorers(ids []string) map[string]Ignorer { + return map[string]Ignorer{ + "id": func(_ types.Metadata, param any) bool { + id, ok := param.(string) + return ok && (id == "*" || len(ids) == 0 || slices.Contains(ids, id)) + }, + "exp": func(_ types.Metadata, param any) bool { + expiry, ok := param.(time.Time) + return ok && time.Now().Before(expiry) + }, + } +} diff --git a/pkg/iac/ignore/rule_test.go b/pkg/iac/ignore/rule_test.go new file mode 100644 index 000000000000..6b35e52efe43 --- /dev/null +++ b/pkg/iac/ignore/rule_test.go @@ -0,0 +1,253 @@ +package ignore_test + +import ( + "testing" + + "github.com/aquasecurity/trivy/pkg/iac/ignore" + "github.com/aquasecurity/trivy/pkg/iac/types" + "github.com/stretchr/testify/assert" +) + +func metadataWithLine(path string, line int) types.Metadata { + return types.NewMetadata(types.NewRange(path, line, line, "", nil), "") +} + +func TestRules_Ignore(t *testing.T) { + + const filename = "test" + + type args struct { + metadata types.Metadata + ids []string + } + + tests := []struct 
{ + name string + src string + args args + shouldIgnore bool + }{ + { + name: "no ignore", + src: `#test`, + shouldIgnore: false, + }, + { + name: "one ignore rule", + src: `#trivy:ignore:rule-1`, + args: args{ + metadata: metadataWithLine(filename, 2), + ids: []string{"rule-1"}, + }, + shouldIgnore: true, + }, + { + name: "blank line between rule and finding", + src: `#trivy:ignore:rule-1`, + args: args{ + metadata: metadataWithLine(filename, 3), + ids: []string{"rule-1"}, + }, + shouldIgnore: false, + }, + { + name: "blank line between rules", + src: `#trivy:ignore:rule-1 + +#trivy:ignore:rule-2 +`, + args: args{ + metadata: metadataWithLine(filename, 4), + ids: []string{"rule-1"}, + }, + shouldIgnore: false, + }, + { + name: "rule and a finding on the same line", + src: `#trivy:ignore:rule-1`, + args: args{ + metadata: metadataWithLine(filename, 1), + ids: []string{"rule-1"}, + }, + shouldIgnore: true, + }, + { + name: "rule and a finding on the same line", + src: `test #trivy:ignore:rule-1`, + args: args{ + metadata: metadataWithLine(filename, 1), + ids: []string{"rule-1"}, + }, + shouldIgnore: true, + }, + { + name: "multiple rules on one line", + src: `test #trivy:ignore:rule-1 #trivy:ignore:rule-2`, + args: args{ + metadata: metadataWithLine(filename, 1), + ids: []string{"rule-2"}, + }, + shouldIgnore: true, + }, + { + name: "rule and find from different files", + src: `test #trivy:ignore:rule-1`, + args: args{ + metadata: metadataWithLine("another-file", 1), + ids: []string{"rule-2"}, + }, + shouldIgnore: false, + }, + { + name: "multiple ignore rule", + src: `#trivy:ignore:rule-1 +#trivy:ignore:rule-2 +`, + args: args{ + metadata: metadataWithLine(filename, 3), + ids: []string{"rule-1"}, + }, + shouldIgnore: true, + }, + { + name: "ignore section with params", + src: `#trivy:ignore:rule-1[param1=1]`, + args: args{ + metadata: metadataWithLine(filename, 2), + ids: []string{"rule-1"}, + }, + shouldIgnore: true, + }, + { + name: "id's don't match", + src: 
`#trivy:ignore:rule-1`, + args: args{ + metadata: metadataWithLine(filename, 2), + ids: []string{"rule-2"}, + }, + shouldIgnore: false, + }, + { + name: "without ignore section", + src: `#trivy:exp:2022-01-01`, + args: args{ + metadata: metadataWithLine(filename, 2), + ids: []string{"rule-2"}, + }, + shouldIgnore: false, + }, + { + name: "non valid ignore section", + src: `#trivy:ignore`, + args: args{ + metadata: metadataWithLine(filename, 2), + ids: []string{"rule-2"}, + }, + shouldIgnore: false, + }, + { + name: "ignore rule with expiry date passed", + src: `#trivy:ignore:rule-1:exp:2022-01-01`, + args: args{ + metadata: metadataWithLine(filename, 2), + ids: []string{"rule-1"}, + }, + shouldIgnore: false, + }, + { + name: "ignore rule with expiry date not passed", + src: `#trivy:ignore:rule-1:exp:2026-01-01`, + args: args{ + metadata: metadataWithLine(filename, 2), + ids: []string{"rule-1"}, + }, + shouldIgnore: true, + }, + { + name: "ignore rule with invalid expiry date", + src: `#trivy:ignore:rule-1:exp:2026-99-01`, + args: args{ + metadata: metadataWithLine(filename, 2), + ids: []string{"rule-1"}, + }, + shouldIgnore: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + rules := ignore.Parse(tt.src, filename) + got := rules.Ignore(tt.args.metadata, tt.args.ids, nil) + assert.Equal(t, tt.shouldIgnore, got) + }) + } +} + +func TestRules_IgnoreWithCustomIgnorer(t *testing.T) { + const filename = "test" + + type args struct { + metadata types.Metadata + ids []string + ignorers map[string]ignore.Ignorer + } + + tests := []struct { + name string + src string + parser ignore.RuleSectionParser + args args + shouldIgnore bool + }{ + { + name: "happy", + src: `#trivy:ignore:rule-1:ws:dev`, + parser: &ignore.StringMatchParser{ + SectionKey: "ws", + }, + args: args{ + metadata: metadataWithLine(filename, 2), + ids: []string{"rule-1"}, + ignorers: map[string]ignore.Ignorer{ + "ws": func(_ types.Metadata, param any) bool { + ws, ok := 
param.(string) + if !ok { + return false + } + return ws == "dev" + }, + }, + }, + shouldIgnore: true, + }, + { + name: "bad", + src: `#trivy:ignore:rule-1:ws:prod`, + parser: &ignore.StringMatchParser{ + SectionKey: "ws", + }, + args: args{ + metadata: metadataWithLine(filename, 2), + ids: []string{"rule-1"}, + ignorers: map[string]ignore.Ignorer{ + "ws": func(_ types.Metadata, param any) bool { + ws, ok := param.(string) + if !ok { + return false + } + return ws == "dev" + }, + }, + }, + shouldIgnore: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + rules := ignore.Parse(tt.src, filename, tt.parser) + got := rules.Ignore(tt.args.metadata, tt.args.ids, tt.args.ignorers) + assert.Equal(t, tt.shouldIgnore, got) + }) + } +} diff --git a/pkg/iac/scan/code_test.go b/pkg/iac/scan/code_test.go index e0591ed23c85..c3ffe3725ef1 100644 --- a/pkg/iac/scan/code_test.go +++ b/pkg/iac/scan/code_test.go @@ -5,13 +5,11 @@ import ( "strings" "testing" - iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" - + "github.com/liamg/memoryfs" "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "github.com/liamg/memoryfs" + iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" ) func TestResult_GetCode(t *testing.T) { diff --git a/pkg/iac/scan/result.go b/pkg/iac/scan/result.go index 861171e2dcc0..d9924c6aaeef 100644 --- a/pkg/iac/scan/result.go +++ b/pkg/iac/scan/result.go @@ -7,6 +7,7 @@ import ( "reflect" "strings" + "github.com/aquasecurity/trivy/pkg/iac/ignore" "github.com/aquasecurity/trivy/pkg/iac/severity" iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" ) @@ -261,6 +262,22 @@ func (r *Results) AddIgnored(source interface{}, descriptions ...string) { *r = append(*r, res) } +func (r *Results) Ignore(ignoreRules ignore.Rules, ignores map[string]ignore.Ignorer) { + for i, result := range *r { + allIDs := []string{ + result.Rule().LongID(), + result.Rule().AVDID, + strings.ToLower(result.Rule().AVDID), + 
result.Rule().ShortCode, + } + allIDs = append(allIDs, result.Rule().Aliases...) + + if ignoreRules.Ignore(result.Metadata(), allIDs, ignores) { + (*r)[i].OverrideStatus(StatusIgnored) + } + } +} + func (r *Results) SetRule(rule Rule) { for i := range *r { (*r)[i].rule = rule diff --git a/pkg/iac/scanners/cloudformation/parser/file_context.go b/pkg/iac/scanners/cloudformation/parser/file_context.go index 4904d13f29d0..746dae7e024b 100644 --- a/pkg/iac/scanners/cloudformation/parser/file_context.go +++ b/pkg/iac/scanners/cloudformation/parser/file_context.go @@ -1,6 +1,7 @@ package parser import ( + "github.com/aquasecurity/trivy/pkg/iac/ignore" iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" ) @@ -17,6 +18,7 @@ type FileContext struct { filepath string lines []string SourceFormat SourceFormat + Ignores ignore.Rules Parameters map[string]*Parameter `json:"Parameters" yaml:"Parameters"` Resources map[string]*Resource `json:"Resources" yaml:"Resources"` Globals map[string]*Resource `json:"Globals" yaml:"Globals"` diff --git a/pkg/iac/scanners/cloudformation/parser/parser.go b/pkg/iac/scanners/cloudformation/parser/parser.go index 43e4099289c5..65bf1440432d 100644 --- a/pkg/iac/scanners/cloudformation/parser/parser.go +++ b/pkg/iac/scanners/cloudformation/parser/parser.go @@ -16,6 +16,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/debug" "github.com/aquasecurity/trivy/pkg/iac/detection" + "github.com/aquasecurity/trivy/pkg/iac/ignore" "github.com/aquasecurity/trivy/pkg/iac/scanners/options" ) @@ -165,12 +166,14 @@ func (p *Parser) ParseFile(ctx context.Context, fsys fs.FS, path string) (fctx * SourceFormat: sourceFmt, } - if strings.HasSuffix(strings.ToLower(path), ".json") { - if err := jfather.Unmarshal(content, fctx); err != nil { + switch sourceFmt { + case YamlSourceFormat: + if err := yaml.Unmarshal(content, fctx); err != nil { return nil, NewErrInvalidContent(path, err) } - } else { - if err := yaml.Unmarshal(content, fctx); err != nil { + 
fctx.Ignores = ignore.Parse(string(content), path) + case JsonSourceFormat: + if err := jfather.Unmarshal(content, fctx); err != nil { return nil, NewErrInvalidContent(path, err) } } diff --git a/pkg/iac/scanners/cloudformation/parser/property.go b/pkg/iac/scanners/cloudformation/parser/property.go index e667c7844f44..ae0c57050a23 100644 --- a/pkg/iac/scanners/cloudformation/parser/property.go +++ b/pkg/iac/scanners/cloudformation/parser/property.go @@ -113,19 +113,8 @@ func (p *Property) Range() iacTypes.Range { } func (p *Property) Metadata() iacTypes.Metadata { - base := p - if p.isFunction() { - if resolved, ok := p.resolveValue(); ok { - base = resolved - } - } - ref := NewCFReferenceWithValue(p.parentRange, *base, p.logicalId) - return iacTypes.NewMetadata(p.Range(), ref.String()) -} - -func (p *Property) MetadataWithValue(resolvedValue *Property) iacTypes.Metadata { - ref := NewCFReferenceWithValue(p.parentRange, *resolvedValue, p.logicalId) - return iacTypes.NewMetadata(p.Range(), ref.String()) + return iacTypes.NewMetadata(p.Range(), p.name). 
+ WithParent(iacTypes.NewMetadata(p.parentRange, p.logicalId)) } func (p *Property) isFunction() bool { diff --git a/pkg/iac/scanners/cloudformation/parser/reference.go b/pkg/iac/scanners/cloudformation/parser/reference.go index 59cbf583c8cf..705eef2747af 100644 --- a/pkg/iac/scanners/cloudformation/parser/reference.go +++ b/pkg/iac/scanners/cloudformation/parser/reference.go @@ -1,15 +1,12 @@ package parser import ( - "fmt" - iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" ) type CFReference struct { logicalId string resourceRange iacTypes.Range - resolvedValue Property } func NewCFReference(id string, resourceRange iacTypes.Range) CFReference { @@ -19,40 +16,6 @@ func NewCFReference(id string, resourceRange iacTypes.Range) CFReference { } } -func NewCFReferenceWithValue(resourceRange iacTypes.Range, resolvedValue Property, logicalId string) CFReference { - return CFReference{ - resourceRange: resourceRange, - resolvedValue: resolvedValue, - logicalId: logicalId, - } -} - func (cf CFReference) String() string { return cf.resourceRange.String() } - -func (cf CFReference) LogicalID() string { - return cf.logicalId -} - -func (cf CFReference) ResourceRange() iacTypes.Range { - return cf.resourceRange -} - -func (cf CFReference) PropertyRange() iacTypes.Range { - if cf.resolvedValue.IsNotNil() { - return cf.resolvedValue.Range() - } - return iacTypes.Range{} -} - -func (cf CFReference) DisplayValue() string { - if cf.resolvedValue.IsNotNil() { - return fmt.Sprintf("%v", cf.resolvedValue.RawValue()) - } - return "" -} - -func (cf *CFReference) Comment() string { - return cf.resolvedValue.Comment() -} diff --git a/pkg/iac/scanners/cloudformation/scanner.go b/pkg/iac/scanners/cloudformation/scanner.go index 4c0cbbc4216d..0920f4425fdb 100644 --- a/pkg/iac/scanners/cloudformation/scanner.go +++ b/pkg/iac/scanners/cloudformation/scanner.go @@ -15,7 +15,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/rules" "github.com/aquasecurity/trivy/pkg/iac/scan" 
"github.com/aquasecurity/trivy/pkg/iac/scanners" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser" "github.com/aquasecurity/trivy/pkg/iac/scanners/options" "github.com/aquasecurity/trivy/pkg/iac/types" ) @@ -23,7 +23,7 @@ import ( func WithParameters(params map[string]any) options.ScannerOption { return func(cs options.ConfigurableScanner) { if s, ok := cs.(*Scanner); ok { - s.addParserOptions(parser2.WithParameters(params)) + s.addParserOptions(parser.WithParameters(params)) } } } @@ -31,7 +31,7 @@ func WithParameters(params map[string]any) options.ScannerOption { func WithParameterFiles(files ...string) options.ScannerOption { return func(cs options.ConfigurableScanner) { if s, ok := cs.(*Scanner); ok { - s.addParserOptions(parser2.WithParameterFiles(files...)) + s.addParserOptions(parser.WithParameterFiles(files...)) } } } @@ -39,7 +39,7 @@ func WithParameterFiles(files ...string) options.ScannerOption { func WithConfigsFS(fsys fs.FS) options.ScannerOption { return func(cs options.ConfigurableScanner) { if s, ok := cs.(*Scanner); ok { - s.addParserOptions(parser2.WithConfigsFS(fsys)) + s.addParserOptions(parser.WithConfigsFS(fsys)) } } } @@ -51,7 +51,7 @@ type Scanner struct { // nolint: gocritic debug debug.Logger policyDirs []string policyReaders []io.Reader - parser *parser2.Parser + parser *parser.Parser regoScanner *rego.Scanner skipRequired bool regoOnly bool @@ -131,7 +131,7 @@ func New(opts ...options.ScannerOption) *Scanner { opt(s) } s.addParserOptions(options.ParserWithSkipRequiredCheck(s.skipRequired)) - s.parser = parser2.New(s.parserOptions...) + s.parser = parser.New(s.parserOptions...) 
return s } @@ -206,7 +206,7 @@ func (s *Scanner) ScanFile(ctx context.Context, fsys fs.FS, path string) (scan.R return results, nil } -func (s *Scanner) scanFileContext(ctx context.Context, regoScanner *rego.Scanner, cfCtx *parser2.FileContext, fsys fs.FS) (results scan.Results, err error) { +func (s *Scanner) scanFileContext(ctx context.Context, regoScanner *rego.Scanner, cfCtx *parser.FileContext, fsys fs.FS) (results scan.Results, err error) { state := adapter.Adapt(*cfCtx) if state == nil { return nil, nil @@ -247,7 +247,15 @@ func (s *Scanner) scanFileContext(ctx context.Context, regoScanner *rego.Scanner if err != nil { return nil, fmt.Errorf("rego scan error: %w", err) } - return append(results, regoResults...), nil + results = append(results, regoResults...) + + results.Ignore(cfCtx.Ignores, nil) + + for _, ignored := range results.GetIgnored() { + s.debug.Log("Ignored '%s' at '%s'.", ignored.Rule().LongID(), ignored.Range()) + } + + return results, nil } func getDescription(scanResult scan.Result, ref string) string { diff --git a/pkg/iac/scanners/cloudformation/scanner_test.go b/pkg/iac/scanners/cloudformation/scanner_test.go index 6aea88abc1af..3264609557ac 100644 --- a/pkg/iac/scanners/cloudformation/scanner_test.go +++ b/pkg/iac/scanners/cloudformation/scanner_test.go @@ -2,6 +2,7 @@ package cloudformation import ( "context" + "strings" "testing" "github.com/aquasecurity/trivy/internal/testutil" @@ -101,3 +102,130 @@ deny[res] { }, }, actualCode.Lines) } + +const bucketNameCheck = `# METADATA +# title: "test rego" +# scope: package +# schemas: +# - input: schema["cloud"] +# custom: +# id: AVD-AWS-001 +# avd_id: AVD-AWS-001 +# provider: aws +# service: s3 +# severity: LOW +# input: +# selector: +# - type: cloud +# subtypes: +# - service: s3 +# provider: aws +package user.aws.aws001 + +deny[res] { + bucket := input.aws.s3.buckets[_] + bucket.name.value == "test-bucket" + res := result.new("Denied", bucket.name) +} + +deny[res] { + bucket := 
input.aws.s3.buckets[_] + algo := bucket.encryption.algorithm + algo.value == "AES256" + res := result.new("Denied", algo) +} +` + +func TestIgnore(t *testing.T) { + tests := []struct { + name string + src string + ignored int + }{ + { + name: "without ignore", + src: `--- +Resources: + S3Bucket: + Type: 'AWS::S3::Bucket' + Properties: + BucketName: test-bucket +`, + ignored: 0, + }, + { + name: "rule before resource", + src: `--- +Resources: +#trivy:ignore:AVD-AWS-001 + S3Bucket: + Type: 'AWS::S3::Bucket' + Properties: + BucketName: test-bucket +`, + ignored: 1, + }, + { + name: "rule before property", + src: `--- +Resources: + S3Bucket: + Type: 'AWS::S3::Bucket' + Properties: +#trivy:ignore:AVD-AWS-001 + BucketName: test-bucket +`, + ignored: 1, + }, + { + name: "rule on the same line with the property", + src: `--- +Resources: + S3Bucket: + Type: 'AWS::S3::Bucket' + Properties: + BucketName: test-bucket #trivy:ignore:AVD-AWS-001 +`, + ignored: 1, + }, + { + name: "rule on the same line with the nested property", + src: `--- +Resources: + S3Bucket: + Type: 'AWS::S3::Bucket' + Properties: + BucketName: test-bucket + BucketEncryption: + ServerSideEncryptionConfiguration: + - ServerSideEncryptionByDefault: + SSEAlgorithm: AES256 #trivy:ignore:AVD-AWS-001 +`, + ignored: 1, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + fsys := testutil.CreateFS(t, map[string]string{ + "/code/main.yaml": tt.src, + }) + + scanner := New( + options.ScannerWithRegoOnly(true), + options.ScannerWithEmbeddedPolicies(false), + options.ScannerWithPolicyReader(strings.NewReader(bucketNameCheck)), + options.ScannerWithPolicyNamespaces("user"), + ) + + results, err := scanner.ScanFS(context.TODO(), fsys, "code") + require.NoError(t, err) + + if tt.ignored == 0 { + require.Len(t, results.GetFailed(), 1) + } else { + assert.Len(t, results.GetIgnored(), tt.ignored) + } + }) + } +} diff --git a/pkg/iac/scanners/terraform/executor/executor.go 
b/pkg/iac/scanners/terraform/executor/executor.go index 003b5b7f4db2..a4d15f45a9bb 100644 --- a/pkg/iac/scanners/terraform/executor/executor.go +++ b/pkg/iac/scanners/terraform/executor/executor.go @@ -1,39 +1,41 @@ package executor import ( + "fmt" "runtime" "sort" - "strings" "time" + "github.com/zclconf/go-cty/cty" + adapter "github.com/aquasecurity/trivy/pkg/iac/adapters/terraform" "github.com/aquasecurity/trivy/pkg/iac/debug" "github.com/aquasecurity/trivy/pkg/iac/framework" + "github.com/aquasecurity/trivy/pkg/iac/ignore" "github.com/aquasecurity/trivy/pkg/iac/rego" "github.com/aquasecurity/trivy/pkg/iac/rules" "github.com/aquasecurity/trivy/pkg/iac/scan" "github.com/aquasecurity/trivy/pkg/iac/severity" "github.com/aquasecurity/trivy/pkg/iac/state" "github.com/aquasecurity/trivy/pkg/iac/terraform" + "github.com/aquasecurity/trivy/pkg/iac/types" ) // Executor scans HCL blocks by running all registered rules against them type Executor struct { - enableIgnores bool - excludedRuleIDs []string - excludeIgnoresIDs []string - includedRuleIDs []string - ignoreCheckErrors bool - workspaceName string - useSingleThread bool - debug debug.Logger - resultsFilters []func(scan.Results) scan.Results - alternativeIDProviderFunc func(string) []string - severityOverrides map[string]string - regoScanner *rego.Scanner - regoOnly bool - stateFuncs []func(*state.State) - frameworks []framework.Framework + enableIgnores bool + excludedRuleIDs []string + includedRuleIDs []string + ignoreCheckErrors bool + workspaceName string + useSingleThread bool + debug debug.Logger + resultsFilters []func(scan.Results) scan.Results + severityOverrides map[string]string + regoScanner *rego.Scanner + regoOnly bool + stateFuncs []func(*state.State) + frameworks []framework.Framework } type Metrics struct { @@ -66,16 +68,11 @@ func New(options ...Option) *Executor { } // Find element in list -func checkInList(id string, altIDs, list []string) bool { +func checkInList(id string, list []string) bool { 
for _, codeIgnored := range list { if codeIgnored == id { return true } - for _, alt := range altIDs { - if alt == codeIgnored { - return true - } - } } return false } @@ -119,35 +116,36 @@ func (e *Executor) Execute(modules terraform.Modules) (scan.Results, Metrics, er if e.enableIgnores { e.debug.Log("Applying ignores...") - var ignores terraform.Ignores + var ignores ignore.Rules for _, module := range modules { ignores = append(ignores, module.Ignores()...) } - ignores = e.removeExcludedIgnores(ignores) + ignorers := map[string]ignore.Ignorer{ + "ws": func(_ types.Metadata, param any) bool { + ws, ok := param.(string) + if !ok { + return false + } - for i, result := range results { - allIDs := []string{ - result.Rule().LongID(), - result.Rule().AVDID, - strings.ToLower(result.Rule().AVDID), - result.Rule().ShortCode, - } - allIDs = append(allIDs, result.Rule().Aliases...) + return ws == e.workspaceName + }, + "ignore": func(resultMeta types.Metadata, param any) bool { + params, ok := param.(map[string]string) + if !ok { + return false + } - if e.alternativeIDProviderFunc != nil { - allIDs = append(allIDs, e.alternativeIDProviderFunc(result.Rule().LongID())...) 
- } - if ignores.Covering( - modules, - result.Metadata(), - e.workspaceName, - allIDs..., - ) != nil { - e.debug.Log("Ignored '%s' at '%s'.", result.Rule().LongID(), result.Range()) - results[i].OverrideStatus(scan.StatusIgnored) - } + return ignoreByParams(params, modules, &resultMeta) + }, + } + + results.Ignore(ignores, ignorers) + + for _, ignored := range results.GetIgnored() { + e.debug.Log("Ignored '%s' at '%s'.", ignored.Rule().LongID(), ignored.Range()) } + } else { e.debug.Log("Ignores are disabled.") } @@ -175,25 +173,6 @@ func (e *Executor) Execute(modules terraform.Modules) (scan.Results, Metrics, er return results, metrics, nil } -func (e *Executor) removeExcludedIgnores(ignores terraform.Ignores) terraform.Ignores { - var filteredIgnores terraform.Ignores - for _, ignore := range ignores { - if !contains(e.excludeIgnoresIDs, ignore.RuleID) { - filteredIgnores = append(filteredIgnores, ignore) - } - } - return filteredIgnores -} - -func contains(arr []string, s string) bool { - for _, elem := range arr { - if elem == s { - return true - } - } - return false -} - func (e *Executor) updateSeverity(results []scan.Result) scan.Results { if len(e.severityOverrides) == 0 { return results @@ -202,25 +181,15 @@ func (e *Executor) updateSeverity(results []scan.Result) scan.Results { var overriddenResults scan.Results for _, res := range results { for code, sev := range e.severityOverrides { - - var altMatch bool - if e.alternativeIDProviderFunc != nil { - alts := e.alternativeIDProviderFunc(res.Rule().LongID()) - for _, alt := range alts { - if alt == code { - altMatch = true - break - } - } + if res.Rule().LongID() != code { + continue } - if altMatch || res.Rule().LongID() == code { - overrides := scan.Results([]scan.Result{res}) - override := res.Rule() - override.Severity = severity.Severity(sev) - overrides.SetRule(override) - res = overrides[0] - } + overrides := scan.Results([]scan.Result{res}) + override := res.Rule() + override.Severity = 
severity.Severity(sev) + overrides.SetRule(override) + res = overrides[0] } overriddenResults = append(overriddenResults, res) } @@ -232,11 +201,7 @@ func (e *Executor) filterResults(results scan.Results) scan.Results { includedOnly := len(e.includedRuleIDs) > 0 for i, result := range results { id := result.Rule().LongID() - var altIDs []string - if e.alternativeIDProviderFunc != nil { - altIDs = e.alternativeIDProviderFunc(id) - } - if (includedOnly && !checkInList(id, altIDs, e.includedRuleIDs)) || checkInList(id, altIDs, e.excludedRuleIDs) { + if (includedOnly && !checkInList(id, e.includedRuleIDs)) || checkInList(id, e.excludedRuleIDs) { e.debug.Log("Excluding '%s' at '%s'.", result.Rule().LongID(), result.Range()) results[i].OverrideStatus(scan.StatusIgnored) } @@ -266,3 +231,40 @@ func (e *Executor) sortResults(results []scan.Result) { } }) } + +func ignoreByParams(params map[string]string, modules terraform.Modules, m *types.Metadata) bool { + if len(params) == 0 { + return true + } + block := modules.GetBlockByIgnoreRange(m) + if block == nil { + return true + } + for key, val := range params { + attr, _ := block.GetNestedAttribute(key) + if attr.IsNil() || !attr.Value().IsKnown() { + return false + } + switch attr.Type() { + case cty.String: + if !attr.Equals(val) { + return false + } + case cty.Number: + bf := attr.Value().AsBigFloat() + f64, _ := bf.Float64() + comparableInt := fmt.Sprintf("%d", int(f64)) + comparableFloat := fmt.Sprintf("%f", f64) + if val != comparableInt && val != comparableFloat { + return false + } + case cty.Bool: + if fmt.Sprintf("%t", attr.IsTrue()) != val { + return false + } + default: + return false + } + } + return true +} diff --git a/pkg/iac/scanners/terraform/executor/option.go b/pkg/iac/scanners/terraform/executor/option.go index d32abb7afdcb..1e9ab5b9d998 100644 --- a/pkg/iac/scanners/terraform/executor/option.go +++ b/pkg/iac/scanners/terraform/executor/option.go @@ -18,12 +18,6 @@ func OptionWithFrameworks(frameworks 
...framework.Framework) Option { } } -func OptionWithAlternativeIDProvider(f func(string) []string) Option { - return func(s *Executor) { - s.alternativeIDProviderFunc = f - } -} - func OptionWithResultsFilter(f func(scan.Results) scan.Results) Option { return func(s *Executor) { s.resultsFilters = append(s.resultsFilters, f) @@ -54,12 +48,6 @@ func OptionExcludeRules(ruleIDs []string) Option { } } -func OptionExcludeIgnores(ruleIDs []string) Option { - return func(s *Executor) { - s.excludeIgnoresIDs = ruleIDs - } -} - func OptionIncludeRules(ruleIDs []string) Option { return func(s *Executor) { s.includedRuleIDs = ruleIDs diff --git a/pkg/iac/scanners/terraform/options.go b/pkg/iac/scanners/terraform/options.go index 2dddb856c049..d78c1f0cf897 100644 --- a/pkg/iac/scanners/terraform/options.go +++ b/pkg/iac/scanners/terraform/options.go @@ -27,14 +27,6 @@ func ScannerWithTFVarsPaths(paths ...string) options.ScannerOption { } } -func ScannerWithAlternativeIDProvider(f func(string) []string) options.ScannerOption { - return func(s options.ConfigurableScanner) { - if tf, ok := s.(ConfigurableTerraformScanner); ok { - tf.AddExecutorOptions(executor.OptionWithAlternativeIDProvider(f)) - } - } -} - func ScannerWithSeverityOverrides(overrides map[string]string) options.ScannerOption { return func(s options.ConfigurableScanner) { if tf, ok := s.(ConfigurableTerraformScanner); ok { @@ -59,14 +51,6 @@ func ScannerWithExcludedRules(ruleIDs []string) options.ScannerOption { } } -func ScannerWithExcludeIgnores(ruleIDs []string) options.ScannerOption { - return func(s options.ConfigurableScanner) { - if tf, ok := s.(ConfigurableTerraformScanner); ok { - tf.AddExecutorOptions(executor.OptionExcludeIgnores(ruleIDs)) - } - } -} - func ScannerWithIncludedRules(ruleIDs []string) options.ScannerOption { return func(s options.ConfigurableScanner) { if tf, ok := s.(ConfigurableTerraformScanner); ok { diff --git a/pkg/iac/scanners/terraform/parser/evaluator.go 
b/pkg/iac/scanners/terraform/parser/evaluator.go index 1fe9a72fdcac..e7e3415e1b52 100644 --- a/pkg/iac/scanners/terraform/parser/evaluator.go +++ b/pkg/iac/scanners/terraform/parser/evaluator.go @@ -14,6 +14,7 @@ import ( "golang.org/x/exp/slices" "github.com/aquasecurity/trivy/pkg/iac/debug" + "github.com/aquasecurity/trivy/pkg/iac/ignore" "github.com/aquasecurity/trivy/pkg/iac/terraform" tfcontext "github.com/aquasecurity/trivy/pkg/iac/terraform/context" "github.com/aquasecurity/trivy/pkg/iac/types" @@ -32,7 +33,7 @@ type evaluator struct { projectRootPath string // root of the current scan modulePath string moduleName string - ignores terraform.Ignores + ignores ignore.Rules parentParser *Parser debug debug.Logger allowDownloads bool @@ -50,7 +51,7 @@ func newEvaluator( inputVars map[string]cty.Value, moduleMetadata *modulesMetadata, workspace string, - ignores []terraform.Ignore, + ignores ignore.Rules, logger debug.Logger, allowDownloads bool, skipCachedModules bool, diff --git a/pkg/iac/scanners/terraform/parser/load_blocks.go b/pkg/iac/scanners/terraform/parser/load_blocks.go deleted file mode 100644 index c5409d42f27b..000000000000 --- a/pkg/iac/scanners/terraform/parser/load_blocks.go +++ /dev/null @@ -1,131 +0,0 @@ -package parser - -import ( - "fmt" - "regexp" - "strings" - "time" - - "github.com/hashicorp/hcl/v2" - - "github.com/aquasecurity/trivy/pkg/iac/terraform" - "github.com/aquasecurity/trivy/pkg/iac/types" -) - -func loadBlocksFromFile(file sourceFile, moduleSource string) (hcl.Blocks, []terraform.Ignore, error) { - ignores := parseIgnores(file.file.Bytes, file.path, moduleSource) - contents, diagnostics := file.file.Body.Content(terraform.Schema) - if diagnostics != nil && diagnostics.HasErrors() { - return nil, nil, diagnostics - } - if contents == nil { - return nil, nil, nil - } - return contents.Blocks, ignores, nil -} - -func parseIgnores(data []byte, path, moduleSource string) []terraform.Ignore { - var ignores []terraform.Ignore - for i, 
line := range strings.Split(string(data), "\n") { - line = strings.TrimSpace(line) - lineIgnores := parseIgnoresFromLine(line) - for _, lineIgnore := range lineIgnores { - lineIgnore.Range = types.NewRange(path, i+1, i+1, moduleSource, nil) - ignores = append(ignores, lineIgnore) - } - } - for a, ignoreA := range ignores { - if !ignoreA.Block { - continue - } - for _, ignoreB := range ignores { - if !ignoreB.Block { - continue - } - if ignoreA.Range.GetStartLine()+1 == ignoreB.Range.GetStartLine() { - ignoreA.Range = ignoreB.Range - ignores[a] = ignoreA - } - } - } - return ignores - -} - -var commentPattern = regexp.MustCompile(`^\s*([/]+|/\*|#)+\s*tfsec:`) -var trivyCommentPattern = regexp.MustCompile(`^\s*([/]+|/\*|#)+\s*trivy:`) - -func parseIgnoresFromLine(input string) []terraform.Ignore { - - var ignores []terraform.Ignore - - input = commentPattern.ReplaceAllString(input, "tfsec:") - input = trivyCommentPattern.ReplaceAllString(input, "trivy:") - - bits := strings.Split(strings.TrimSpace(input), " ") - for i, bit := range bits { - bit := strings.TrimSpace(bit) - bit = strings.TrimPrefix(bit, "#") - bit = strings.TrimPrefix(bit, "//") - bit = strings.TrimPrefix(bit, "/*") - - if strings.HasPrefix(bit, "tfsec:") || strings.HasPrefix(bit, "trivy:") { - ignore, err := parseIgnoreFromComment(bit) - if err != nil { - continue - } - ignore.Block = i == 0 - ignores = append(ignores, *ignore) - } - } - - return ignores -} - -func parseIgnoreFromComment(input string) (*terraform.Ignore, error) { - var ignore terraform.Ignore - if !strings.HasPrefix(input, "tfsec:") && !strings.HasPrefix(input, "trivy:") { - return nil, fmt.Errorf("invalid ignore") - } - - input = input[6:] - - segments := strings.Split(input, ":") - - for i := 0; i < len(segments)-1; i += 2 { - key := segments[i] - val := segments[i+1] - switch key { - case "ignore": - ignore.RuleID, ignore.Params = parseIDWithParams(val) - case "exp": - parsed, err := time.Parse("2006-01-02", val) - if err != nil { 
- return &ignore, err - } - ignore.Expiry = &parsed - case "ws": - ignore.Workspace = val - } - } - - return &ignore, nil -} - -func parseIDWithParams(input string) (string, map[string]string) { - params := make(map[string]string) - if !strings.Contains(input, "[") { - return input, params - } - parts := strings.Split(input, "[") - id := parts[0] - paramStr := strings.TrimSuffix(parts[1], "]") - for _, pair := range strings.Split(paramStr, ",") { - parts := strings.Split(pair, "=") - if len(parts) != 2 { - continue - } - params[parts[0]] = parts[1] - } - return id, params -} diff --git a/pkg/iac/scanners/terraform/parser/load_blocks_test.go b/pkg/iac/scanners/terraform/parser/load_blocks_test.go deleted file mode 100644 index e32d19a75044..000000000000 --- a/pkg/iac/scanners/terraform/parser/load_blocks_test.go +++ /dev/null @@ -1,13 +0,0 @@ -package parser - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestParsingDoubleComment(t *testing.T) { - ignores := parseIgnoresFromLine("## tfsec:ignore:abc") - assert.Equal(t, 1, len(ignores)) - assert.Truef(t, ignores[0].Block, "Expected ignore to be a block") -} diff --git a/pkg/iac/scanners/terraform/parser/parser.go b/pkg/iac/scanners/terraform/parser/parser.go index e09e9e621ef4..35a8b454f00f 100644 --- a/pkg/iac/scanners/terraform/parser/parser.go +++ b/pkg/iac/scanners/terraform/parser/parser.go @@ -17,6 +17,7 @@ import ( "github.com/aquasecurity/trivy/pkg/extrafs" "github.com/aquasecurity/trivy/pkg/iac/debug" + "github.com/aquasecurity/trivy/pkg/iac/ignore" "github.com/aquasecurity/trivy/pkg/iac/scanners/options" "github.com/aquasecurity/trivy/pkg/iac/terraform" tfcontext "github.com/aquasecurity/trivy/pkg/iac/terraform/context" @@ -326,12 +327,12 @@ func (p *Parser) GetFilesystemMap() map[string]fs.FS { return p.fsMap } -func (p *Parser) readBlocks(files []sourceFile) (terraform.Blocks, terraform.Ignores, error) { +func (p *Parser) readBlocks(files []sourceFile) (terraform.Blocks, 
ignore.Rules, error) { var blocks terraform.Blocks - var ignores terraform.Ignores + var ignores ignore.Rules moduleCtx := tfcontext.NewContext(&hcl.EvalContext{}, nil) for _, file := range files { - fileBlocks, fileIgnores, err := loadBlocksFromFile(file, p.moduleSource) + fileBlocks, err := loadBlocksFromFile(file) if err != nil { if p.stopOnHCLError { return nil, nil, err @@ -342,9 +343,61 @@ func (p *Parser) readBlocks(files []sourceFile) (terraform.Blocks, terraform.Ign for _, fileBlock := range fileBlocks { blocks = append(blocks, terraform.NewBlock(fileBlock, moduleCtx, p.moduleBlock, nil, p.moduleSource, p.moduleFS)) } + fileIgnores := ignore.Parse( + string(file.file.Bytes), + file.path, + &ignore.StringMatchParser{ + SectionKey: "ws", + }, + ¶mParser{}, + ) ignores = append(ignores, fileIgnores...) } sortBlocksByHierarchy(blocks) return blocks, ignores, nil } + +func loadBlocksFromFile(file sourceFile) (hcl.Blocks, error) { + contents, diagnostics := file.file.Body.Content(terraform.Schema) + if diagnostics != nil && diagnostics.HasErrors() { + return nil, diagnostics + } + if contents == nil { + return nil, nil + } + return contents.Blocks, nil +} + +type paramParser struct { + params map[string]string +} + +func (s *paramParser) Key() string { + return "ignore" +} + +func (s *paramParser) Parse(str string) bool { + s.params = make(map[string]string) + + idx := strings.Index(str, "[") + if idx == -1 { + return false + } + + str = str[idx+1:] + + paramStr := strings.TrimSuffix(str, "]") + for _, pair := range strings.Split(paramStr, ",") { + parts := strings.Split(pair, "=") + if len(parts) != 2 { + continue + } + s.params[parts[0]] = parts[1] + } + return true +} + +func (s *paramParser) Param() any { + return s.params +} diff --git a/pkg/iac/scanners/terraform/scanner_test.go b/pkg/iac/scanners/terraform/scanner_test.go index 9e44893e0ff7..dbc2d67c3c64 100644 --- a/pkg/iac/scanners/terraform/scanner_test.go +++ 
b/pkg/iac/scanners/terraform/scanner_test.go @@ -68,42 +68,6 @@ func scanWithOptions(t *testing.T, code string, opt ...options.ScannerOption) sc return results } -func Test_OptionWithAlternativeIDProvider(t *testing.T) { - reg := rules.Register(alwaysFailRule) - defer rules.Deregister(reg) - - options := []options.ScannerOption{ - ScannerWithAlternativeIDProvider(func(s string) []string { - return []string{"something", "altid", "blah"} - }), - } - results := scanWithOptions(t, ` -//tfsec:ignore:altid -resource "something" "else" {} -`, options...) - require.Len(t, results.GetFailed(), 0) - require.Len(t, results.GetIgnored(), 1) - -} - -func Test_TrivyOptionWithAlternativeIDProvider(t *testing.T) { - reg := rules.Register(alwaysFailRule) - defer rules.Deregister(reg) - - options := []options.ScannerOption{ - ScannerWithAlternativeIDProvider(func(s string) []string { - return []string{"something", "altid", "blah"} - }), - } - results := scanWithOptions(t, ` -//trivy:ignore:altid -resource "something" "else" {} -`, options...) - require.Len(t, results.GetFailed(), 0) - require.Len(t, results.GetIgnored(), 1) - -} - func Test_OptionWithSeverityOverrides(t *testing.T) { reg := rules.Register(alwaysFailRule) defer rules.Deregister(reg) diff --git a/pkg/iac/terraform/ignore.go b/pkg/iac/terraform/ignore.go deleted file mode 100644 index 69e0341ed4be..000000000000 --- a/pkg/iac/terraform/ignore.go +++ /dev/null @@ -1,100 +0,0 @@ -package terraform - -import ( - "fmt" - "time" - - "github.com/zclconf/go-cty/cty" - - iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" -) - -type Ignore struct { - Range iacTypes.Range - RuleID string - Expiry *time.Time - Workspace string - Block bool - Params map[string]string -} - -type Ignores []Ignore - -func (ignores Ignores) Covering(modules Modules, m iacTypes.Metadata, workspace string, ids ...string) *Ignore { - for _, ignore := range ignores { - if ignore.Covering(modules, m, workspace, ids...) 
{ - return &ignore - } - } - return nil -} - -func (ignore Ignore) Covering(modules Modules, m iacTypes.Metadata, workspace string, ids ...string) bool { - if ignore.Expiry != nil && time.Now().After(*ignore.Expiry) { - return false - } - if ignore.Workspace != "" && ignore.Workspace != workspace { - return false - } - idMatch := ignore.RuleID == "*" || len(ids) == 0 - for _, id := range ids { - if id == ignore.RuleID { - idMatch = true - break - } - } - if !idMatch { - return false - } - - metaHierarchy := &m - for metaHierarchy != nil { - if ignore.Range.GetFilename() != metaHierarchy.Range().GetFilename() { - metaHierarchy = metaHierarchy.Parent() - continue - } - if metaHierarchy.Range().GetStartLine() == ignore.Range.GetStartLine()+1 || metaHierarchy.Range().GetStartLine() == ignore.Range.GetStartLine() { - return ignore.MatchParams(modules, metaHierarchy) - } - metaHierarchy = metaHierarchy.Parent() - } - return false - -} - -func (ignore Ignore) MatchParams(modules Modules, blockMetadata *iacTypes.Metadata) bool { - if len(ignore.Params) == 0 { - return true - } - block := modules.GetBlockByIgnoreRange(blockMetadata) - if block == nil { - return true - } - for key, val := range ignore.Params { - attr, _ := block.GetNestedAttribute(key) - if attr.IsNil() || !attr.Value().IsKnown() { - return false - } - switch attr.Type() { - case cty.String: - if !attr.Equals(val) { - return false - } - case cty.Number: - bf := attr.Value().AsBigFloat() - f64, _ := bf.Float64() - comparableInt := fmt.Sprintf("%d", int(f64)) - comparableFloat := fmt.Sprintf("%f", f64) - if val != comparableInt && val != comparableFloat { - return false - } - case cty.Bool: - if fmt.Sprintf("%t", attr.IsTrue()) != val { - return false - } - default: - return false - } - } - return true -} diff --git a/pkg/iac/terraform/module.go b/pkg/iac/terraform/module.go index dd89fa2bd40d..fec6ad7c8d0e 100644 --- a/pkg/iac/terraform/module.go +++ b/pkg/iac/terraform/module.go @@ -3,6 +3,8 @@ package 
terraform import ( "fmt" "strings" + + "github.com/aquasecurity/trivy/pkg/iac/ignore" ) type Module struct { @@ -10,11 +12,11 @@ type Module struct { blockMap map[string]Blocks rootPath string modulePath string - ignores Ignores + ignores ignore.Rules parent *Module } -func NewModule(rootPath, modulePath string, blocks Blocks, ignores Ignores) *Module { +func NewModule(rootPath, modulePath string, blocks Blocks, ignores ignore.Rules) *Module { blockMap := make(map[string]Blocks) @@ -41,7 +43,7 @@ func (c *Module) RootPath() string { return c.rootPath } -func (c *Module) Ignores() Ignores { +func (c *Module) Ignores() ignore.Rules { return c.ignores } From f23ed7759802391b33d957e21334e661f3bb92ae Mon Sep 17 00:00:00 2001 From: Jeff Rescignano Date: Mon, 1 Apr 2024 01:45:58 -0400 Subject: [PATCH 30/57] feat(misconf): Support private registries for misconf check bundle (#6327) --- pkg/cloud/aws/scanner/scanner.go | 2 +- pkg/commands/artifact/run.go | 2 +- pkg/commands/operation/operation.go | 6 +++--- pkg/policy/policy.go | 12 ++++++------ pkg/policy/policy_test.go | 4 ++-- 5 files changed, 13 insertions(+), 13 deletions(-) diff --git a/pkg/cloud/aws/scanner/scanner.go b/pkg/cloud/aws/scanner/scanner.go index 97f544492747..84b5cf6c640e 100644 --- a/pkg/cloud/aws/scanner/scanner.go +++ b/pkg/cloud/aws/scanner/scanner.go @@ -69,7 +69,7 @@ func (s *AWSScanner) Scan(ctx context.Context, option flag.Options) (scan.Result var policyPaths []string var downloadedPolicyPaths []string var err error - downloadedPolicyPaths, err = operation.InitBuiltinPolicies(context.Background(), option.CacheDir, option.Quiet, option.SkipPolicyUpdate, option.MisconfOptions.PolicyBundleRepository) + downloadedPolicyPaths, err = operation.InitBuiltinPolicies(context.Background(), option.CacheDir, option.Quiet, option.SkipPolicyUpdate, option.MisconfOptions.PolicyBundleRepository, option.RegistryOpts()) if err != nil { if !option.SkipPolicyUpdate { log.Logger.Errorf("Falling back to embedded 
policies: %s", err) diff --git a/pkg/commands/artifact/run.go b/pkg/commands/artifact/run.go index 4f8be6e1d911..ca1b80749c46 100644 --- a/pkg/commands/artifact/run.go +++ b/pkg/commands/artifact/run.go @@ -584,7 +584,7 @@ func initScannerConfig(opts flag.Options, cacheClient cache.Cache) (ScannerConfi var downloadedPolicyPaths []string var disableEmbedded bool - downloadedPolicyPaths, err := operation.InitBuiltinPolicies(context.Background(), opts.CacheDir, opts.Quiet, opts.SkipPolicyUpdate, opts.MisconfOptions.PolicyBundleRepository) + downloadedPolicyPaths, err := operation.InitBuiltinPolicies(context.Background(), opts.CacheDir, opts.Quiet, opts.SkipPolicyUpdate, opts.MisconfOptions.PolicyBundleRepository, opts.RegistryOpts()) if err != nil { if !opts.SkipPolicyUpdate { log.Logger.Errorf("Falling back to embedded policies: %s", err) diff --git a/pkg/commands/operation/operation.go b/pkg/commands/operation/operation.go index 5ca8301b422d..7eab510f539b 100644 --- a/pkg/commands/operation/operation.go +++ b/pkg/commands/operation/operation.go @@ -148,7 +148,7 @@ func showDBInfo(cacheDir string) error { } // InitBuiltinPolicies downloads the built-in policies and loads them -func InitBuiltinPolicies(ctx context.Context, cacheDir string, quiet, skipUpdate bool, policyBundleRepository string) ([]string, error) { +func InitBuiltinPolicies(ctx context.Context, cacheDir string, quiet, skipUpdate bool, policyBundleRepository string, registryOpts ftypes.RegistryOptions) ([]string, error) { mu.Lock() defer mu.Unlock() @@ -159,7 +159,7 @@ func InitBuiltinPolicies(ctx context.Context, cacheDir string, quiet, skipUpdate needsUpdate := false if !skipUpdate { - needsUpdate, err = client.NeedsUpdate(ctx) + needsUpdate, err = client.NeedsUpdate(ctx, registryOpts) if err != nil { return nil, xerrors.Errorf("unable to check if built-in policies need to be updated: %w", err) } @@ -168,7 +168,7 @@ func InitBuiltinPolicies(ctx context.Context, cacheDir string, quiet, skipUpdate if 
needsUpdate { log.Logger.Info("Need to update the built-in policies") log.Logger.Info("Downloading the built-in policies...") - if err = client.DownloadBuiltinPolicies(ctx); err != nil { + if err = client.DownloadBuiltinPolicies(ctx, registryOpts); err != nil { return nil, xerrors.Errorf("failed to download built-in policies: %w", err) } } diff --git a/pkg/policy/policy.go b/pkg/policy/policy.go index b5b6792953ca..9dc802c8207e 100644 --- a/pkg/policy/policy.go +++ b/pkg/policy/policy.go @@ -89,10 +89,10 @@ func NewClient(cacheDir string, quiet bool, policyBundleRepo string, opts ...Opt }, nil } -func (c *Client) populateOCIArtifact() error { +func (c *Client) populateOCIArtifact(registryOpts types.RegistryOptions) error { if c.artifact == nil { log.Logger.Debugf("Using URL: %s to load policy bundle", c.policyBundleRepo) - art, err := oci.NewArtifact(c.policyBundleRepo, c.quiet, types.RegistryOptions{}) + art, err := oci.NewArtifact(c.policyBundleRepo, c.quiet, registryOpts) if err != nil { return xerrors.Errorf("OCI artifact error: %w", err) } @@ -102,8 +102,8 @@ func (c *Client) populateOCIArtifact() error { } // DownloadBuiltinPolicies download default policies from GitHub Pages -func (c *Client) DownloadBuiltinPolicies(ctx context.Context) error { - if err := c.populateOCIArtifact(); err != nil { +func (c *Client) DownloadBuiltinPolicies(ctx context.Context, registryOpts types.RegistryOptions) error { + if err := c.populateOCIArtifact(registryOpts); err != nil { return xerrors.Errorf("OPA bundle error: %w", err) } @@ -154,7 +154,7 @@ func (c *Client) LoadBuiltinPolicies() ([]string, error) { } // NeedsUpdate returns if the default policy should be updated -func (c *Client) NeedsUpdate(ctx context.Context) (bool, error) { +func (c *Client) NeedsUpdate(ctx context.Context, registryOpts types.RegistryOptions) (bool, error) { meta, err := c.GetMetadata() if err != nil { return true, nil @@ -165,7 +165,7 @@ func (c *Client) NeedsUpdate(ctx context.Context) (bool, 
error) { return false, nil } - if err = c.populateOCIArtifact(); err != nil { + if err = c.populateOCIArtifact(registryOpts); err != nil { return false, xerrors.Errorf("OPA bundle error: %w", err) } diff --git a/pkg/policy/policy_test.go b/pkg/policy/policy_test.go index a72dca8ac89d..0eb3190bf31d 100644 --- a/pkg/policy/policy_test.go +++ b/pkg/policy/policy_test.go @@ -264,7 +264,7 @@ func TestClient_NeedsUpdate(t *testing.T) { require.NoError(t, err) // Assert results - got, err := c.NeedsUpdate(context.Background()) + got, err := c.NeedsUpdate(context.Background(), ftypes.RegistryOptions{}) assert.Equal(t, tt.wantErr, err != nil) assert.Equal(t, tt.want, got) }) @@ -367,7 +367,7 @@ func TestClient_DownloadBuiltinPolicies(t *testing.T) { c, err := policy.NewClient(tempDir, true, "", policy.WithClock(tt.clock), policy.WithOCIArtifact(art)) require.NoError(t, err) - err = c.DownloadBuiltinPolicies(context.Background()) + err = c.DownloadBuiltinPolicies(context.Background(), ftypes.RegistryOptions{}) if tt.wantErr != "" { require.NotNil(t, err) assert.Contains(t, err.Error(), tt.wantErr) From 826fe60732b3dcf3ee9553b8fdcb98aaeeb0b786 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 Apr 2024 15:17:12 +0400 Subject: [PATCH 31/57] chore(deps): bump actions/cache from 4.0.0 to 4.0.2 (#6436) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/canary.yaml | 2 +- .github/workflows/release.yaml | 2 +- .github/workflows/reusable-release.yaml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/canary.yaml b/.github/workflows/canary.yaml index 516fa54fd12e..d65878b8d84b 100644 --- a/.github/workflows/canary.yaml +++ b/.github/workflows/canary.yaml @@ -25,7 +25,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Restore Trivy binaries from cache - uses: actions/cache@v4.0.0 + uses: 
actions/cache@v4.0.2 with: path: dist/ key: ${{ runner.os }}-bins-${{github.workflow}}-${{github.sha}} diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 40baeb83c01a..acf840769eea 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -24,7 +24,7 @@ jobs: fetch-depth: 0 - name: Restore Trivy binaries from cache - uses: actions/cache@v4.0.0 + uses: actions/cache@v4.0.2 with: path: dist/ key: ${{ runner.os }}-bins-${{github.workflow}}-${{github.sha}} diff --git a/.github/workflows/reusable-release.yaml b/.github/workflows/reusable-release.yaml index f518c5080aad..53869517256d 100644 --- a/.github/workflows/reusable-release.yaml +++ b/.github/workflows/reusable-release.yaml @@ -121,7 +121,7 @@ jobs: public.ecr.aws/aquasecurity/trivy:canary - name: Cache Trivy binaries - uses: actions/cache@v4.0.0 + uses: actions/cache@v4.0.2 with: path: dist/ # use 'github.sha' to create a unique cache folder for each run. From 6625bd32e0e8b469ab06096e6f17b1b76a8ab565 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 Apr 2024 15:18:50 +0400 Subject: [PATCH 32/57] chore(deps): bump github.com/aws/aws-sdk-go-v2/service/ec2 from 1.149.1 to 1.155.1 (#6433) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 14 +++++++------- go.sum | 28 ++++++++++++++-------------- 2 files changed, 21 insertions(+), 21 deletions(-) diff --git a/go.mod b/go.mod index ea85b6e33f29..9549706cd3ab 100644 --- a/go.mod +++ b/go.mod @@ -26,11 +26,11 @@ require ( github.com/aquasecurity/trivy-java-db v0.0.0-20240109071736-184bd7481d48 github.com/aquasecurity/trivy-kubernetes v0.6.3 github.com/aquasecurity/trivy-policies v0.10.0 - github.com/aws/aws-sdk-go-v2 v1.25.2 + github.com/aws/aws-sdk-go-v2 v1.26.1 github.com/aws/aws-sdk-go-v2/config v1.27.4 github.com/aws/aws-sdk-go-v2/credentials v1.17.4 
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.15.15 - github.com/aws/aws-sdk-go-v2/service/ec2 v1.149.1 + github.com/aws/aws-sdk-go-v2/service/ec2 v1.155.1 github.com/aws/aws-sdk-go-v2/service/ecr v1.24.6 github.com/aws/aws-sdk-go-v2/service/s3 v1.51.1 github.com/aws/aws-sdk-go-v2/service/sts v1.28.1 @@ -123,7 +123,7 @@ require ( github.com/alecthomas/chroma v0.10.0 github.com/antchfx/htmlquery v1.3.0 github.com/apparentlymart/go-cidr v1.1.0 - github.com/aws/smithy-go v1.20.1 + github.com/aws/smithy-go v1.20.2 github.com/hashicorp/go-uuid v1.0.3 github.com/hashicorp/go-version v1.6.0 github.com/hashicorp/hc-install v0.6.3 @@ -179,8 +179,8 @@ require ( github.com/aws/aws-sdk-go v1.49.21 // indirect github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.1 // indirect github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.15.2 // indirect - github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.2 // indirect - github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.2 // indirect + github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.5 // indirect + github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.5 // indirect github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 // indirect github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.2 // indirect github.com/aws/aws-sdk-go-v2/service/accessanalyzer v1.26.7 // indirect @@ -203,10 +203,10 @@ require ( github.com/aws/aws-sdk-go-v2/service/elasticsearchservice v1.25.0 // indirect github.com/aws/aws-sdk-go-v2/service/emr v1.36.0 // indirect github.com/aws/aws-sdk-go-v2/service/iam v1.28.7 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.1 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2 // indirect github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.2 // indirect github.com/aws/aws-sdk-go-v2/service/internal/endpoint-discovery v1.8.11 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.2 // indirect + 
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.7 // indirect github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.2 // indirect github.com/aws/aws-sdk-go-v2/service/kafka v1.28.5 // indirect github.com/aws/aws-sdk-go-v2/service/kinesis v1.24.6 // indirect diff --git a/go.sum b/go.sum index a1af99060dc5..cbf52b07b648 100644 --- a/go.sum +++ b/go.sum @@ -368,8 +368,8 @@ github.com/aws/aws-sdk-go v1.15.11/go.mod h1:mFuSZ37Z9YOHbQEwBWztmVzqXrEkub65tZo github.com/aws/aws-sdk-go v1.44.122/go.mod h1:y4AeaBuwd2Lk+GepC1E9v0qOiTws0MIWAX4oIKwKHZo= github.com/aws/aws-sdk-go v1.49.21 h1:Rl8KW6HqkwzhATwvXhyr7vD4JFUMi7oXGAw9SrxxIFY= github.com/aws/aws-sdk-go v1.49.21/go.mod h1:LF8svs817+Nz+DmiMQKTO3ubZ/6IaTpq3TjupRn3Eqk= -github.com/aws/aws-sdk-go-v2 v1.25.2 h1:/uiG1avJRgLGiQM9X3qJM8+Qa6KRGK5rRPuXE0HUM+w= -github.com/aws/aws-sdk-go-v2 v1.25.2/go.mod h1:Evoc5AsmtveRt1komDwIsjHFyrP5tDuF1D1U+6z6pNo= +github.com/aws/aws-sdk-go-v2 v1.26.1 h1:5554eUqIYVWpU0YmeeYZ0wU64H2VLBs8TlhRB2L+EkA= +github.com/aws/aws-sdk-go-v2 v1.26.1/go.mod h1:ffIFB97e2yNsv4aTSGkqtHnppsIJzw7G7BReUZ3jCXM= github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.1 h1:gTK2uhtAPtFcdRRJilZPx8uJLL2J85xK11nKtWL0wfU= github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.1/go.mod h1:sxpLb+nZk7tIfCWChfd+h4QwHNUR57d8hA1cleTkjJo= github.com/aws/aws-sdk-go-v2/config v1.27.4 h1:AhfWb5ZwimdsYTgP7Od8E9L1u4sKmDW2ZVeLcf2O42M= @@ -380,10 +380,10 @@ github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.15.2 h1:AK0J8iYBFeUk2Ax7O8YpLtF github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.15.2/go.mod h1:iRlGzMix0SExQEviAyptRWRGdYNo3+ufW/lCzvKVTUc= github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.15.15 h1:2MUXyGW6dVaQz6aqycpbdLIH1NMcUI6kW6vQ0RabGYg= github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.15.15/go.mod h1:aHbhbR6WEQgHAiRj41EQ2W47yOYwNtIkWTXmcAtYqj8= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.2 h1:bNo4LagzUKbjdxE0tIcR9pMzLR2U/Tgie1Hq1HQ3iH8= 
-github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.2/go.mod h1:wRQv0nN6v9wDXuWThpovGQjqF1HFdcgWjporw14lS8k= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.2 h1:EtOU5jsPdIQNP+6Q2C5e3d65NKT1PeCiQk+9OdzO12Q= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.2/go.mod h1:tyF5sKccmDz0Bv4NrstEr+/9YkSPJHrcO7UsUKf7pWM= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.5 h1:aw39xVGeRWlWx9EzGVnhOR4yOjQDHPQ6o6NmBlscyQg= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.5/go.mod h1:FSaRudD0dXiMPK2UjknVwwTYyZMRsHv3TtkabsZih5I= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.5 h1:PG1F3OD1szkuQPzDw3CIQsRIrtTlUC3lP84taWzHlq0= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.5/go.mod h1:jU1li6RFryMz+so64PpKtudI+QzbKoIEivqdf6LNpOc= github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 h1:hT8rVHwugYE2lEfdFE0QWVo81lF7jMrYJVDWI+f+VxU= github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0/go.mod h1:8tu/lYfQfFe6IGnaOdrpVgEL2IrrDOf6/m9RQum4NkY= github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.2 h1:en92G0Z7xlksoOylkUhuBSfJgijC7rHVLRdnIlHEs0E= @@ -412,8 +412,8 @@ github.com/aws/aws-sdk-go-v2/service/dynamodb v1.26.8 h1:XKO0BswTDeZMLDBd/b5pCEZ github.com/aws/aws-sdk-go-v2/service/dynamodb v1.26.8/go.mod h1:N5tqZcYMM0N1PN7UQYJNWuGyO886OfnMhf/3MAbqMcI= github.com/aws/aws-sdk-go-v2/service/ebs v1.21.7 h1:CRzzXjmgx9p362yO39D6hbZULdMI23gaKqSxijJCXHM= github.com/aws/aws-sdk-go-v2/service/ebs v1.21.7/go.mod h1:wnsHqpi3RgDwklS5SPHUgjcUUpontGPKJ+GJYOdV7pY= -github.com/aws/aws-sdk-go-v2/service/ec2 v1.149.1 h1:OGZUMBYZnz+R5nkW6FS1J8UlfLeM/pKojck+74+ZQGY= -github.com/aws/aws-sdk-go-v2/service/ec2 v1.149.1/go.mod h1:XxJNg7fIkR8cbm89i0zVZSxKpcPYsC8BWRwMIJOWbnk= +github.com/aws/aws-sdk-go-v2/service/ec2 v1.155.1 h1:JBwnHlQvL39eeT03+vmBZuziutTKljmOKboKxQuIBck= +github.com/aws/aws-sdk-go-v2/service/ec2 v1.155.1/go.mod h1:xejKuuRDjz6z5OqyeLsz01MlOqqW7CqpAB4PabNvpu8= github.com/aws/aws-sdk-go-v2/service/ecr v1.24.6 h1:cT7h+GWP2k0hJSsPmppKgxl4C9R6gCC5/oF4oHnmpK4= 
github.com/aws/aws-sdk-go-v2/service/ecr v1.24.6/go.mod h1:AOHmGMoPtSY9Zm2zBuwUJQBisIvYAZeA1n7b6f4e880= github.com/aws/aws-sdk-go-v2/service/ecs v1.35.6 h1:Sc2mLjyA1R8z2l705AN7Wr7QOlnUxVnGPJeDIVyUSrs= @@ -432,14 +432,14 @@ github.com/aws/aws-sdk-go-v2/service/emr v1.36.0 h1:FdeZ7AYOvyL09KH250Ncz4LF4SB1 github.com/aws/aws-sdk-go-v2/service/emr v1.36.0/go.mod h1:Drh6y2qLaw/wnDKTIcdqM2m358MIRXsZ2Bj2tjhVLq0= github.com/aws/aws-sdk-go-v2/service/iam v1.28.7 h1:FKPRDYZOO0Eur19vWUL1B40Op0j89KQj3kARjrszMK8= github.com/aws/aws-sdk-go-v2/service/iam v1.28.7/go.mod h1:YzMYyQ7S4twfYzLjwP24G1RAxypozVZeNaG1r2jxRms= -github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.1 h1:EyBZibRTVAs6ECHZOw5/wlylS9OcTzwyjeQMudmREjE= -github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.1/go.mod h1:JKpmtYhhPs7D97NL/ltqz7yCkERFW5dOlHyVl66ZYF8= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2 h1:Ji0DY1xUsUr3I8cHps0G+XM3WWU16lP6yG8qu1GAZAs= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2/go.mod h1:5CsjAbs3NlGQyZNFACh+zztPDI7fU6eW9QsxjfnuBKg= github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.2 h1:zSdTXYLwuXDNPUS+V41i1SFDXG7V0ITp0D9UT9Cvl18= github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.2/go.mod h1:v8m8k+qVy95nYi7d56uP1QImleIIY25BPiNJYzPBdFE= github.com/aws/aws-sdk-go-v2/service/internal/endpoint-discovery v1.8.11 h1:e9AVb17H4x5FTE5KWIP5M1Du+9M86pS+Hw0lBUdN8EY= github.com/aws/aws-sdk-go-v2/service/internal/endpoint-discovery v1.8.11/go.mod h1:B90ZQJa36xo0ph9HsoteI1+r8owgQH/U1QNfqZQkj1Q= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.2 h1:5ffmXjPtwRExp1zc7gENLgCPyHFbhEPwVTkTiH9niSk= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.2/go.mod h1:Ru7vg1iQ7cR4i7SZ/JTLYN9kaXtbL69UdgG0OQWQxW0= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.7 h1:ogRAwT1/gxJBcSWDMZlgyFUM962F51A5CRhDLbxLdmo= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url 
v1.11.7/go.mod h1:YCsIZhXfRPLFFCl5xxY+1T9RKzOKjCut+28JSX2DnAk= github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.2 h1:1oY1AVEisRI4HNuFoLdRUB0hC63ylDAN6Me3MrfclEg= github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.2/go.mod h1:KZ03VgvZwSjkT7fOetQ/wF3MZUvYFirlI1H5NklUNsY= github.com/aws/aws-sdk-go-v2/service/kafka v1.28.5 h1:yCkyZDGahaCaAkdpVx8Te05t6eW2FarBLunVC8S23nU= @@ -474,8 +474,8 @@ github.com/aws/aws-sdk-go-v2/service/sts v1.28.1 h1:3I2cBEYgKhrWlwyZgfpSO2BpaMY1 github.com/aws/aws-sdk-go-v2/service/sts v1.28.1/go.mod h1:uQ7YYKZt3adCRrdCBREm1CD3efFLOUNH77MrUCvx5oA= github.com/aws/aws-sdk-go-v2/service/workspaces v1.38.1 h1:pqxn3fcZDgWmo8GMUjlxVBdakcGo0AeUb7mjX33pJIQ= github.com/aws/aws-sdk-go-v2/service/workspaces v1.38.1/go.mod h1:kP5rUlnqfno/obflnKX4KMBWkoVHLDI8oCka9U0opRo= -github.com/aws/smithy-go v1.20.1 h1:4SZlSlMr36UEqC7XOyRVb27XMeZubNcBNN+9IgEPIQw= -github.com/aws/smithy-go v1.20.1/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E= +github.com/aws/smithy-go v1.20.2 h1:tbp628ireGtzcHDDmLT/6ADHidqnwgF57XOXZe6tp4Q= +github.com/aws/smithy-go v1.20.2/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E= github.com/beorn7/perks v0.0.0-20160804104726-4c0e84591b9a/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= From e75a90f2e56ab9700f912c72ceccd56ddc773000 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 Apr 2024 15:20:08 +0400 Subject: [PATCH 33/57] chore(deps): bump github.com/google/wire from 0.5.0 to 0.6.0 (#6425) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 4 ++-- go.sum | 31 +++++++++++++++++++++++++------ 2 files changed, 27 insertions(+), 8 deletions(-) diff --git a/go.mod 
b/go.mod index 9549706cd3ab..336519c4c53d 100644 --- a/go.mod +++ b/go.mod @@ -52,7 +52,7 @@ require ( github.com/google/go-containerregistry v0.19.0 github.com/google/licenseclassifier/v2 v2.0.0 github.com/google/uuid v1.6.0 - github.com/google/wire v0.5.0 + github.com/google/wire v0.6.0 github.com/hashicorp/go-getter v1.7.3 github.com/hashicorp/go-multierror v1.1.1 github.com/hashicorp/go-retryablehttp v0.7.5 @@ -390,7 +390,7 @@ require ( golang.org/x/oauth2 v0.16.0 // indirect golang.org/x/sys v0.17.0 // indirect golang.org/x/time v0.5.0 // indirect - golang.org/x/tools v0.16.1 // indirect + golang.org/x/tools v0.17.0 // indirect google.golang.org/api v0.155.0 // indirect google.golang.org/appengine v1.6.8 // indirect google.golang.org/genproto v0.0.0-20240123012728-ef4313101c80 // indirect diff --git a/go.sum b/go.sum index cbf52b07b648..f5288504ffc4 100644 --- a/go.sum +++ b/go.sum @@ -1016,7 +1016,7 @@ github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o= github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw= github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4= github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ= -github.com/google/subcommands v1.0.1/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk= +github.com/google/subcommands v1.2.0/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk= github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= @@ -1024,8 +1024,8 @@ github.com/google/uuid v1.2.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+ github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.6.0 
h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/wire v0.5.0 h1:I7ELFeVBr3yfPIcc8+MWvrjk+3VjbcSzoXm3JVa+jD8= -github.com/google/wire v0.5.0/go.mod h1:ngWDr9Qvq3yZA10YrxfyGELY/AFWGVpy9c1LTRi1EoU= +github.com/google/wire v0.6.0 h1:HBkoIh4BdSxoyo9PveV8giw7ZsaBOvzWKfcg/6MrVwI= +github.com/google/wire v0.6.0/go.mod h1:F4QhpQ9EDIdJ1Mbop/NZBRB+5yrR6qg3BnctaoUk6NA= github.com/googleapis/enterprise-certificate-proxy v0.0.0-20220520183353-fd19c99a87aa/go.mod h1:17drOmN3MwGY7t0e+Ei9b45FFGA3fBs3x36SsCg1hq8= github.com/googleapis/enterprise-certificate-proxy v0.1.0/go.mod h1:17drOmN3MwGY7t0e+Ei9b45FFGA3fBs3x36SsCg1hq8= github.com/googleapis/enterprise-certificate-proxy v0.2.0/go.mod h1:8C0jb7/mgJe/9KK8Lm7X9ctZC2t60YyIpYEI16jx0Qg= @@ -1774,6 +1774,8 @@ golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0 golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4= golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58= +golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= +golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= golang.org/x/crypto v0.19.0 h1:ENy+Az/9Y1vSrlrvBSyna3PITt4tiZLf7sgCjZBX7Wo= golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= @@ -1814,6 +1816,9 @@ golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod 
h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.14.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/mod v0.15.0 h1:SernR4v+D55NyBH2QiEQrlBAnj1ECL6AGrA5+dPaMY8= golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -1882,6 +1887,9 @@ golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco= golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= +golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY= golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4= golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= @@ -1926,6 +1934,8 @@ golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220929204114-8fcdb60fdcc0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= 
golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ= golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -2046,6 +2056,9 @@ golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= @@ -2055,6 +2068,9 @@ golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= +golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= +golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY= golang.org/x/term v0.17.0 h1:mkTF7LCd6WGJNL3K1Ad7kwxNfYAW6a8a8QqtMblp/4U= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -2070,6 +2086,8 @@ golang.org/x/text 
v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -2088,7 +2106,6 @@ golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3 golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190422233926-fe54fb35175b/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= @@ -2140,8 +2157,10 @@ golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= -golang.org/x/tools v0.16.1 
h1:TLyB3WofjdOEepBHAU20JdNC1Zbg87elYofWYAY5oZA= -golang.org/x/tools v0.16.1/go.mod h1:kYVVN6I1mBNoB1OX+noeBjbRk4IUEPa7JJ+TJMEooJ0= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= +golang.org/x/tools v0.17.0 h1:FvmRgNOcs3kOa+T20R1uhfP9F6HgG2mfxDv1vrx1Htc= +golang.org/x/tools v0.17.0/go.mod h1:xsh6VxdV005rRVaS6SSAf9oiAqljS7UZUacMZ8Bnsps= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= From 8baccd7909a4b91970f2a8effcfce2628a42c206 Mon Sep 17 00:00:00 2001 From: DmitriyLewen <91113035+DmitriyLewen@users.noreply.github.com> Date: Tue, 2 Apr 2024 17:22:43 +0600 Subject: [PATCH 34/57] fix(db): check schema version for image name only (#6410) Signed-off-by: knqyf263 Co-authored-by: knqyf263 --- pkg/commands/operation/operation.go | 4 +- pkg/db/db.go | 25 +++++---- pkg/fanal/analyzer/analyzer_test.go | 28 ++++++---- .../analyzer/language/java/jar/jar_test.go | 8 ++- pkg/flag/db_flags.go | 47 +++++++++++++---- pkg/flag/db_flags_test.go | 52 ++++++++++++++----- pkg/javadb/client.go | 20 ++++--- pkg/rpc/server/listen.go | 5 +- 8 files changed, 127 insertions(+), 62 deletions(-) diff --git a/pkg/commands/operation/operation.go b/pkg/commands/operation/operation.go index 7eab510f539b..8f8561a7c290 100644 --- a/pkg/commands/operation/operation.go +++ b/pkg/commands/operation/operation.go @@ -9,6 +9,7 @@ import ( "sync" "github.com/go-redis/redis/v8" + "github.com/google/go-containerregistry/pkg/name" "github.com/google/wire" "github.com/samber/lo" "golang.org/x/xerrors" @@ -110,7 +111,8 @@ func (c Cache) ClearArtifacts() error { } // DownloadDB downloads the DB -func DownloadDB(ctx 
context.Context, appVersion, cacheDir, dbRepository string, quiet, skipUpdate bool, opt ftypes.RegistryOptions) error { +func DownloadDB(ctx context.Context, appVersion, cacheDir string, dbRepository name.Reference, quiet, skipUpdate bool, + opt ftypes.RegistryOptions) error { mu.Lock() defer mu.Unlock() diff --git a/pkg/db/db.go b/pkg/db/db.go index fddd3393ea70..9ecb281b064e 100644 --- a/pkg/db/db.go +++ b/pkg/db/db.go @@ -4,9 +4,9 @@ import ( "context" "errors" "fmt" - "strings" "time" + "github.com/google/go-containerregistry/pkg/name" "github.com/google/go-containerregistry/pkg/v1/remote/transport" "golang.org/x/xerrors" "k8s.io/utils/clock" @@ -19,8 +19,13 @@ import ( ) const ( - dbMediaType = "application/vnd.aquasec.trivy.db.layer.v1.tar+gzip" - defaultDBRepository = "ghcr.io/aquasecurity/trivy-db" + SchemaVersion = db.SchemaVersion + dbMediaType = "application/vnd.aquasec.trivy.db.layer.v1.tar+gzip" +) + +var ( + DefaultRepository = fmt.Sprintf("%s:%d", "ghcr.io/aquasecurity/trivy-db", db.SchemaVersion) + defaultRepository, _ = name.NewTag(DefaultRepository) ) // Operation defines the DB operations @@ -32,7 +37,7 @@ type Operation interface { type options struct { artifact *oci.Artifact clock clock.Clock - dbRepository string + dbRepository name.Reference } // Option is a functional option @@ -46,7 +51,7 @@ func WithOCIArtifact(art *oci.Artifact) Option { } // WithDBRepository takes a dbRepository -func WithDBRepository(dbRepository string) Option { +func WithDBRepository(dbRepository name.Reference) Option { return func(opts *options) { opts.dbRepository = dbRepository } @@ -72,19 +77,13 @@ type Client struct { func NewClient(cacheDir string, quiet bool, opts ...Option) *Client { o := &options{ clock: clock.RealClock{}, - dbRepository: defaultDBRepository, + dbRepository: defaultRepository, } for _, opt := range opts { opt(o) } - // Add the schema version as a tag if the tag doesn't exist. - // This is required for backward compatibility. 
- if !strings.Contains(o.dbRepository, ":") { - o.dbRepository = fmt.Sprintf("%s:%d", o.dbRepository, db.SchemaVersion) - } - return &Client{ options: o, cacheDir: cacheDir, @@ -195,7 +194,7 @@ func (c *Client) initOCIArtifact(opt types.RegistryOptions) (*oci.Artifact, erro return c.artifact, nil } - art, err := oci.NewArtifact(c.dbRepository, c.quiet, opt) + art, err := oci.NewArtifact(c.dbRepository.String(), c.quiet, opt) if err != nil { var terr *transport.Error if errors.As(err, &terr) { diff --git a/pkg/fanal/analyzer/analyzer_test.go b/pkg/fanal/analyzer/analyzer_test.go index 2c7284a1ae83..8fee82acf600 100644 --- a/pkg/fanal/analyzer/analyzer_test.go +++ b/pkg/fanal/analyzer/analyzer_test.go @@ -3,6 +3,7 @@ package analyzer_test import ( "context" "fmt" + "github.com/google/go-containerregistry/pkg/name" "os" "sync" "testing" @@ -12,11 +13,11 @@ import ( "golang.org/x/sync/semaphore" "golang.org/x/xerrors" - xio "github.com/aquasecurity/trivy/pkg/x/io" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/javadb" "github.com/aquasecurity/trivy/pkg/mapfs" + xio "github.com/aquasecurity/trivy/pkg/x/io" _ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/imgconf/apk" _ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/java/jar" @@ -335,15 +336,18 @@ func TestAnalyzerGroup_AnalyzeFile(t *testing.T) { FilePath: "/lib/apk/db/installed", Packages: types.Packages{ { - ID: "musl@1.1.24-r2", - Name: "musl", - Version: "1.1.24-r2", - SrcName: "musl", - SrcVersion: "1.1.24-r2", - Licenses: []string{"MIT"}, - Arch: "x86_64", - Digest: "sha1:cb2316a189ebee5282c4a9bd98794cc2477a74c6", - InstalledFiles: []string{"lib/libc.musl-x86_64.so.1", "lib/ld-musl-x86_64.so.1"}, + ID: "musl@1.1.24-r2", + Name: "musl", + Version: "1.1.24-r2", + SrcName: "musl", + SrcVersion: "1.1.24-r2", + Licenses: []string{"MIT"}, + Arch: "x86_64", + Digest: "sha1:cb2316a189ebee5282c4a9bd98794cc2477a74c6", + 
InstalledFiles: []string{ + "lib/libc.musl-x86_64.so.1", + "lib/ld-musl-x86_64.so.1", + }, }, }, }, @@ -615,7 +619,9 @@ func TestAnalyzerGroup_PostAnalyze(t *testing.T) { if tt.analyzerType == analyzer.TypeJar { // init java-trivy-db with skip update - javadb.Init("./language/java/jar/testdata", "ghcr.io/aquasecurity/trivy-java-db", true, false, types.RegistryOptions{Insecure: false}) + repo, err := name.NewTag(javadb.DefaultRepository) + require.NoError(t, err) + javadb.Init("./language/java/jar/testdata", repo, true, false, types.RegistryOptions{Insecure: false}) } ctx := context.Background() diff --git a/pkg/fanal/analyzer/language/java/jar/jar_test.go b/pkg/fanal/analyzer/language/java/jar/jar_test.go index 133ead426d7a..3988dc27daf5 100644 --- a/pkg/fanal/analyzer/language/java/jar/jar_test.go +++ b/pkg/fanal/analyzer/language/java/jar/jar_test.go @@ -2,6 +2,8 @@ package jar import ( "context" + "github.com/google/go-containerregistry/pkg/name" + "github.com/stretchr/testify/require" "os" "path/filepath" "testing" @@ -130,13 +132,15 @@ func Test_javaLibraryAnalyzer_Analyze(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { // init java-trivy-db with skip update - javadb.Init("testdata", defaultJavaDBRepository, true, false, types.RegistryOptions{Insecure: false}) + repo, err := name.NewTag(javadb.DefaultRepository) + require.NoError(t, err) + javadb.Init("testdata", repo, true, false, types.RegistryOptions{Insecure: false}) a := javaLibraryAnalyzer{} ctx := context.Background() mfs := mapfs.New() - err := mfs.MkdirAll(filepath.Dir(tt.inputFile), os.ModePerm) + err = mfs.MkdirAll(filepath.Dir(tt.inputFile), os.ModePerm) assert.NoError(t, err) err = mfs.WriteFile(tt.inputFile, tt.inputFile) assert.NoError(t, err) diff --git a/pkg/flag/db_flags.go b/pkg/flag/db_flags.go index 7e018e865a77..58e7809a2152 100644 --- a/pkg/flag/db_flags.go +++ b/pkg/flag/db_flags.go @@ -1,14 +1,17 @@ package flag import ( + "fmt" + + 
"github.com/google/go-containerregistry/pkg/name" + "go.uber.org/zap" "golang.org/x/xerrors" + "github.com/aquasecurity/trivy/pkg/db" + "github.com/aquasecurity/trivy/pkg/javadb" "github.com/aquasecurity/trivy/pkg/log" ) -const defaultDBRepository = "ghcr.io/aquasecurity/trivy-db:2" -const defaultJavaDBRepository = "ghcr.io/aquasecurity/trivy-java-db:1" - var ( ResetFlag = Flag[bool]{ Name: "reset", @@ -49,13 +52,13 @@ var ( DBRepositoryFlag = Flag[string]{ Name: "db-repository", ConfigName: "db.repository", - Default: defaultDBRepository, + Default: db.DefaultRepository, Usage: "OCI repository to retrieve trivy-db from", } JavaDBRepositoryFlag = Flag[string]{ Name: "java-db-repository", ConfigName: "db.java-repository", - Default: defaultJavaDBRepository, + Default: javadb.DefaultRepository, Usage: "OCI repository to retrieve trivy-java-db from", } LightFlag = Flag[bool]{ @@ -86,8 +89,8 @@ type DBOptions struct { DownloadJavaDBOnly bool SkipJavaDBUpdate bool NoProgress bool - DBRepository string - JavaDBRepository string + DBRepository name.Reference + JavaDBRepository name.Reference Light bool // deprecated } @@ -145,6 +148,32 @@ func (f *DBFlagGroup) ToOptions() (DBOptions, error) { log.Logger.Warn("'--light' option is deprecated and will be removed. See also: https://github.com/aquasecurity/trivy/discussions/1649") } + var dbRepository, javaDBRepository name.Reference + var err error + if f.DBRepository != nil { + if dbRepository, err = name.ParseReference(f.DBRepository.Value(), name.WithDefaultTag("")); err != nil { + return DBOptions{}, xerrors.Errorf("invalid db repository: %w", err) + } + // Add the schema version if the tag is not specified for backward compatibility. 
+ if t, ok := dbRepository.(name.Tag); ok && t.TagStr() == "" { + dbRepository = t.Tag(fmt.Sprint(db.SchemaVersion)) + log.Logger.Infow("Adding schema version to the DB repository for backward compatibility", + zap.String("repository", dbRepository.String())) + } + } + + if f.JavaDBRepository != nil { + if javaDBRepository, err = name.ParseReference(f.JavaDBRepository.Value(), name.WithDefaultTag("")); err != nil { + return DBOptions{}, xerrors.Errorf("invalid javadb repository: %w", err) + } + // Add the schema version if the tag is not specified for backward compatibility. + if t, ok := javaDBRepository.(name.Tag); ok && t.TagStr() == "" { + javaDBRepository = t.Tag(fmt.Sprint(javadb.SchemaVersion)) + log.Logger.Infow("Adding schema version to the Java DB repository for backward compatibility", + zap.String("repository", javaDBRepository.String())) + } + } + return DBOptions{ Reset: f.Reset.Value(), DownloadDBOnly: downloadDBOnly, @@ -153,7 +182,7 @@ func (f *DBFlagGroup) ToOptions() (DBOptions, error) { SkipJavaDBUpdate: skipJavaDBUpdate, Light: light, NoProgress: f.NoProgress.Value(), - DBRepository: f.DBRepository.Value(), - JavaDBRepository: f.JavaDBRepository.Value(), + DBRepository: dbRepository, + JavaDBRepository: javaDBRepository, }, nil } diff --git a/pkg/flag/db_flags_test.go b/pkg/flag/db_flags_test.go index c590ed49f7a3..b53f29135d74 100644 --- a/pkg/flag/db_flags_test.go +++ b/pkg/flag/db_flags_test.go @@ -1,6 +1,7 @@ package flag_test import ( + "github.com/google/go-containerregistry/pkg/name" "testing" "github.com/spf13/viper" @@ -15,9 +16,11 @@ import ( func TestDBFlagGroup_ToOptions(t *testing.T) { type fields struct { - SkipDBUpdate bool - DownloadDBOnly bool - Light bool + SkipDBUpdate bool + DownloadDBOnly bool + Light bool + DBRepository string + JavaDBRepository string } tests := []struct { name string @@ -29,22 +32,30 @@ func TestDBFlagGroup_ToOptions(t *testing.T) { { name: "happy", fields: fields{ - SkipDBUpdate: true, - DownloadDBOnly: 
false, + SkipDBUpdate: true, + DownloadDBOnly: false, + DBRepository: "ghcr.io/aquasecurity/trivy-db", + JavaDBRepository: "ghcr.io/aquasecurity/trivy-java-db", }, want: flag.DBOptions{ - SkipDBUpdate: true, - DownloadDBOnly: false, + SkipDBUpdate: true, + DownloadDBOnly: false, + DBRepository: name.Tag{}, // All fields are unexported + JavaDBRepository: name.Tag{}, // All fields are unexported }, assertion: require.NoError, }, { name: "light", fields: fields{ - Light: true, + Light: true, + DBRepository: "ghcr.io/aquasecurity/trivy-db", + JavaDBRepository: "ghcr.io/aquasecurity/trivy-java-db", }, want: flag.DBOptions{ - Light: true, + Light: true, + DBRepository: name.Tag{}, // All fields are unexported + JavaDBRepository: name.Tag{}, // All fields are unexported }, wantLogs: []string{ "'--light' option is deprecated and will be removed. See also: https://github.com/aquasecurity/trivy/discussions/1649", @@ -61,6 +72,17 @@ func TestDBFlagGroup_ToOptions(t *testing.T) { require.ErrorContains(t, err, "--skip-db-update and --download-db-only options can not be specified both") }, }, + { + name: "invalid repo", + fields: fields{ + SkipDBUpdate: true, + DownloadDBOnly: false, + DBRepository: "foo:bar:baz", + }, + assertion: func(t require.TestingT, err error, msgs ...interface{}) { + require.ErrorContains(t, err, "invalid db repository") + }, + }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { @@ -71,16 +93,20 @@ func TestDBFlagGroup_ToOptions(t *testing.T) { viper.Set(flag.SkipDBUpdateFlag.ConfigName, tt.fields.SkipDBUpdate) viper.Set(flag.DownloadDBOnlyFlag.ConfigName, tt.fields.DownloadDBOnly) viper.Set(flag.LightFlag.ConfigName, tt.fields.Light) + viper.Set(flag.DBRepositoryFlag.ConfigName, tt.fields.DBRepository) + viper.Set(flag.JavaDBRepositoryFlag.ConfigName, tt.fields.JavaDBRepository) // Assert options f := &flag.DBFlagGroup{ - DownloadDBOnly: flag.DownloadDBOnlyFlag.Clone(), - SkipDBUpdate: flag.SkipDBUpdateFlag.Clone(), - Light: 
flag.LightFlag.Clone(), + DownloadDBOnly: flag.DownloadDBOnlyFlag.Clone(), + SkipDBUpdate: flag.SkipDBUpdateFlag.Clone(), + Light: flag.LightFlag.Clone(), + DBRepository: flag.DBRepositoryFlag.Clone(), + JavaDBRepository: flag.JavaDBRepositoryFlag.Clone(), } got, err := f.ToOptions() tt.assertion(t, err) - assert.Equalf(t, tt.want, got, "ToOptions()") + assert.EqualExportedValues(t, tt.want, got) // Assert log messages var gotMessages []string diff --git a/pkg/javadb/client.go b/pkg/javadb/client.go index faa110a8460a..86194b263569 100644 --- a/pkg/javadb/client.go +++ b/pkg/javadb/client.go @@ -7,10 +7,10 @@ import ( "os" "path/filepath" "sort" - "strings" "sync" "time" + "github.com/google/go-containerregistry/pkg/name" "golang.org/x/xerrors" "github.com/aquasecurity/trivy-java-db/pkg/db" @@ -22,13 +22,16 @@ import ( ) const ( - mediaType = "application/vnd.aquasec.trivy.javadb.layer.v1.tar+gzip" + SchemaVersion = db.SchemaVersion + mediaType = "application/vnd.aquasec.trivy.javadb.layer.v1.tar+gzip" ) +var DefaultRepository = fmt.Sprintf("%s:%d", "ghcr.io/aquasecurity/trivy-java-db", SchemaVersion) + var updater *Updater type Updater struct { - repo string + repo name.Reference dbDir string skip bool quiet bool @@ -50,14 +53,14 @@ func (u *Updater) Update() error { } } - if (meta.Version != db.SchemaVersion || meta.NextUpdate.Before(time.Now().UTC())) && !u.skip { + if (meta.Version != SchemaVersion || meta.NextUpdate.Before(time.Now().UTC())) && !u.skip { // Download DB log.Logger.Infof("Java DB Repository: %s", u.repo) log.Logger.Info("Downloading the Java DB...") // TODO: support remote options var a *oci.Artifact - if a, err = oci.NewArtifact(u.repo, u.quiet, u.registryOption); err != nil { + if a, err = oci.NewArtifact(u.repo.String(), u.quiet, u.registryOption); err != nil { return xerrors.Errorf("oci error: %w", err) } if err = a.Download(context.Background(), dbDir, oci.DownloadOption{MediaType: mediaType}); err != nil { @@ -82,12 +85,7 @@ func (u 
*Updater) Update() error { return nil } -func Init(cacheDir, javaDBRepository string, skip, quiet bool, registryOption ftypes.RegistryOptions) { - // Add the schema version as a tag if the tag doesn't exist. - // This is required for backward compatibility. - if !strings.Contains(javaDBRepository, ":") { - javaDBRepository = fmt.Sprintf("%s:%d", javaDBRepository, db.SchemaVersion) - } +func Init(cacheDir string, javaDBRepository name.Reference, skip, quiet bool, registryOption ftypes.RegistryOptions) { updater = &Updater{ repo: javaDBRepository, dbDir: filepath.Join(cacheDir, "java-db"), diff --git a/pkg/rpc/server/listen.go b/pkg/rpc/server/listen.go index 33a3a8ee8a81..7433bf20a560 100644 --- a/pkg/rpc/server/listen.go +++ b/pkg/rpc/server/listen.go @@ -9,6 +9,7 @@ import ( "time" "github.com/NYTimes/gziphandler" + "github.com/google/go-containerregistry/pkg/name" "github.com/twitchtv/twirp" "golang.org/x/xerrors" @@ -33,14 +34,14 @@ type Server struct { cacheDir string token string tokenHeader string - dbRepository string + dbRepository name.Reference // For OCI registries types.RegistryOptions } // NewServer returns an instance of Server -func NewServer(appVersion, addr, cacheDir, token, tokenHeader, dbRepository string, opt types.RegistryOptions) Server { +func NewServer(appVersion, addr, cacheDir, token, tokenHeader string, dbRepository name.Reference, opt types.RegistryOptions) Server { return Server{ appVersion: appVersion, addr: addr, From 3ad2b3e2553224cf3d3d4405b1c246b399976f9d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 Apr 2024 15:23:47 +0400 Subject: [PATCH 35/57] chore(deps): bump go.etcd.io/bbolt from 1.3.8 to 1.3.9 (#6429) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 336519c4c53d..dc5da15d9458 
100644 --- a/go.mod +++ b/go.mod @@ -103,7 +103,7 @@ require ( github.com/twitchtv/twirp v8.1.2+incompatible github.com/xeipuuv/gojsonschema v1.2.0 github.com/xlab/treeprint v1.2.0 - go.etcd.io/bbolt v1.3.8 + go.etcd.io/bbolt v1.3.9 go.uber.org/zap v1.27.0 golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa golang.org/x/mod v0.15.0 diff --git a/go.sum b/go.sum index f5288504ffc4..bbd19517374d 100644 --- a/go.sum +++ b/go.sum @@ -1699,8 +1699,8 @@ github.com/zclconf/go-cty-yaml v1.0.3/go.mod h1:9YLUH4g7lOhVWqUbctnVlZ5KLpg7JApr go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= go.etcd.io/bbolt v1.3.5/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ= -go.etcd.io/bbolt v1.3.8 h1:xs88BrvEv273UsB79e0hcVrlUWmS0a8upikMFhSyAtA= -go.etcd.io/bbolt v1.3.8/go.mod h1:N9Mkw9X8x5fupy0IKsmuqVtoGDyxsaDlbk4Rd05IAQw= +go.etcd.io/bbolt v1.3.9 h1:8x7aARPEXiXbHmtUwAIv7eV2fQFHrLLavdiJ3uzJXoI= +go.etcd.io/bbolt v1.3.9/go.mod h1:zaO32+Ti0PK1ivdPtgMESzuzL2VPoIG1PCQNvOdo/dE= go.etcd.io/etcd v0.5.0-alpha.5.0.20200910180754-dd1b699fc489/go.mod h1:yVHk9ub3CSBatqGNg7GRmsnfLWtoW60w4eDYfh7vHDg= go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= From 4d00d8b52ab2be20d129e329a741d526884c3aa3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 Apr 2024 18:32:29 +0400 Subject: [PATCH 36/57] chore(deps): bump github.com/aws/aws-sdk-go-v2/config from 1.27.4 to 1.27.10 (#6428) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 12 ++++++------ go.sum | 24 ++++++++++++------------ 2 files changed, 18 insertions(+), 18 deletions(-) diff --git a/go.mod b/go.mod index dc5da15d9458..217498193474 100644 --- a/go.mod +++ b/go.mod @@ -27,13 
+27,13 @@ require ( github.com/aquasecurity/trivy-kubernetes v0.6.3 github.com/aquasecurity/trivy-policies v0.10.0 github.com/aws/aws-sdk-go-v2 v1.26.1 - github.com/aws/aws-sdk-go-v2/config v1.27.4 - github.com/aws/aws-sdk-go-v2/credentials v1.17.4 + github.com/aws/aws-sdk-go-v2/config v1.27.10 + github.com/aws/aws-sdk-go-v2/credentials v1.17.10 github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.15.15 github.com/aws/aws-sdk-go-v2/service/ec2 v1.155.1 github.com/aws/aws-sdk-go-v2/service/ecr v1.24.6 github.com/aws/aws-sdk-go-v2/service/s3 v1.51.1 - github.com/aws/aws-sdk-go-v2/service/sts v1.28.1 + github.com/aws/aws-sdk-go-v2/service/sts v1.28.6 github.com/bitnami/go-version v0.0.0-20231130084017-bb00604d650c github.com/bmatcuk/doublestar/v4 v4.6.1 github.com/cenkalti/backoff v2.2.1+incompatible @@ -178,7 +178,7 @@ require ( github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect github.com/aws/aws-sdk-go v1.49.21 // indirect github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.1 // indirect - github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.15.2 // indirect + github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.1 // indirect github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.5 // indirect github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.5 // indirect github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 // indirect @@ -219,8 +219,8 @@ require ( github.com/aws/aws-sdk-go-v2/service/secretsmanager v1.26.0 // indirect github.com/aws/aws-sdk-go-v2/service/sns v1.26.6 // indirect github.com/aws/aws-sdk-go-v2/service/sqs v1.29.6 // indirect - github.com/aws/aws-sdk-go-v2/service/sso v1.20.1 // indirect - github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.1 // indirect + github.com/aws/aws-sdk-go-v2/service/sso v1.20.4 // indirect + github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.4 // indirect github.com/aws/aws-sdk-go-v2/service/workspaces v1.38.1 // indirect github.com/beorn7/perks v1.0.1 // indirect github.com/bgentry/go-netrc 
v0.0.0-20140422174119-9fd32a8b3d3d // indirect diff --git a/go.sum b/go.sum index bbd19517374d..13d1f1f695b9 100644 --- a/go.sum +++ b/go.sum @@ -372,12 +372,12 @@ github.com/aws/aws-sdk-go-v2 v1.26.1 h1:5554eUqIYVWpU0YmeeYZ0wU64H2VLBs8TlhRB2L+ github.com/aws/aws-sdk-go-v2 v1.26.1/go.mod h1:ffIFB97e2yNsv4aTSGkqtHnppsIJzw7G7BReUZ3jCXM= github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.1 h1:gTK2uhtAPtFcdRRJilZPx8uJLL2J85xK11nKtWL0wfU= github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.1/go.mod h1:sxpLb+nZk7tIfCWChfd+h4QwHNUR57d8hA1cleTkjJo= -github.com/aws/aws-sdk-go-v2/config v1.27.4 h1:AhfWb5ZwimdsYTgP7Od8E9L1u4sKmDW2ZVeLcf2O42M= -github.com/aws/aws-sdk-go-v2/config v1.27.4/go.mod h1:zq2FFXK3A416kiukwpsd+rD4ny6JC7QSkp4QdN1Mp2g= -github.com/aws/aws-sdk-go-v2/credentials v1.17.4 h1:h5Vztbd8qLppiPwX+y0Q6WiwMZgpd9keKe2EAENgAuI= -github.com/aws/aws-sdk-go-v2/credentials v1.17.4/go.mod h1:+30tpwrkOgvkJL1rUZuRLoxcJwtI/OkeBLYnHxJtVe0= -github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.15.2 h1:AK0J8iYBFeUk2Ax7O8YpLtFsfhdOByh2QIkHmigpRYk= -github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.15.2/go.mod h1:iRlGzMix0SExQEviAyptRWRGdYNo3+ufW/lCzvKVTUc= +github.com/aws/aws-sdk-go-v2/config v1.27.10 h1:PS+65jThT0T/snC5WjyfHHyUgG+eBoupSDV+f838cro= +github.com/aws/aws-sdk-go-v2/config v1.27.10/go.mod h1:BePM7Vo4OBpHreKRUMuDXX+/+JWP38FLkzl5m27/Jjs= +github.com/aws/aws-sdk-go-v2/credentials v1.17.10 h1:qDZ3EA2lv1KangvQB6y258OssCHD0xvaGiEDkG4X/10= +github.com/aws/aws-sdk-go-v2/credentials v1.17.10/go.mod h1:6t3sucOaYDwDssHQa0ojH1RpmVmF5/jArkye1b2FKMI= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.1 h1:FVJ0r5XTHSmIHJV6KuDmdYhEpvlHpiSd38RQWhut5J4= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.1/go.mod h1:zusuAeqezXzAB24LGuzuekqMAEgWkVYukBec3kr3jUg= github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.15.15 h1:2MUXyGW6dVaQz6aqycpbdLIH1NMcUI6kW6vQ0RabGYg= github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.15.15/go.mod 
h1:aHbhbR6WEQgHAiRj41EQ2W47yOYwNtIkWTXmcAtYqj8= github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.5 h1:aw39xVGeRWlWx9EzGVnhOR4yOjQDHPQ6o6NmBlscyQg= @@ -466,12 +466,12 @@ github.com/aws/aws-sdk-go-v2/service/sns v1.26.6 h1:w2YwF8889ardGU3Y0qZbJ4Zzh+Q/ github.com/aws/aws-sdk-go-v2/service/sns v1.26.6/go.mod h1:IrcbquqMupzndZ20BXxDxjM7XenTRhbwBOetk4+Z5oc= github.com/aws/aws-sdk-go-v2/service/sqs v1.29.6 h1:UdbDTllc7cmusTTMy1dcTrYKRl4utDEsmKh9ZjvhJCc= github.com/aws/aws-sdk-go-v2/service/sqs v1.29.6/go.mod h1:mCUv04gd/7g+/HNzDB4X6dzJuygji0ckvB3Lg/TdG5Y= -github.com/aws/aws-sdk-go-v2/service/sso v1.20.1 h1:utEGkfdQ4L6YW/ietH7111ZYglLJvS+sLriHJ1NBJEQ= -github.com/aws/aws-sdk-go-v2/service/sso v1.20.1/go.mod h1:RsYqzYr2F2oPDdpy+PdhephuZxTfjHQe7SOBcZGoAU8= -github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.1 h1:9/GylMS45hGGFCcMrUZDVayQE1jYSIN6da9jo7RAYIw= -github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.1/go.mod h1:YjAPFn4kGFqKC54VsHs5fn5B6d+PCY2tziEa3U/GB5Y= -github.com/aws/aws-sdk-go-v2/service/sts v1.28.1 h1:3I2cBEYgKhrWlwyZgfpSO2BpaMY1LHPqXYk/QGlu2ew= -github.com/aws/aws-sdk-go-v2/service/sts v1.28.1/go.mod h1:uQ7YYKZt3adCRrdCBREm1CD3efFLOUNH77MrUCvx5oA= +github.com/aws/aws-sdk-go-v2/service/sso v1.20.4 h1:WzFol5Cd+yDxPAdnzTA5LmpHYSWinhmSj4rQChV0ee8= +github.com/aws/aws-sdk-go-v2/service/sso v1.20.4/go.mod h1:qGzynb/msuZIE8I75DVRCUXw3o3ZyBmUvMwQ2t/BrGM= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.4 h1:Jux+gDDyi1Lruk+KHF91tK2KCuY61kzoCpvtvJJBtOE= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.4/go.mod h1:mUYPBhaF2lGiukDEjJX2BLRRKTmoUSitGDUgM4tRxak= +github.com/aws/aws-sdk-go-v2/service/sts v1.28.6 h1:cwIxeBttqPN3qkaAjcEcsh8NYr8n2HZPkcKgPAi1phU= +github.com/aws/aws-sdk-go-v2/service/sts v1.28.6/go.mod h1:FZf1/nKNEkHdGGJP/cI2MoIMquumuRK6ol3QQJNDxmw= github.com/aws/aws-sdk-go-v2/service/workspaces v1.38.1 h1:pqxn3fcZDgWmo8GMUjlxVBdakcGo0AeUb7mjX33pJIQ= github.com/aws/aws-sdk-go-v2/service/workspaces v1.38.1/go.mod 
h1:kP5rUlnqfno/obflnKX4KMBWkoVHLDI8oCka9U0opRo= github.com/aws/smithy-go v1.20.2 h1:tbp628ireGtzcHDDmLT/6ADHidqnwgF57XOXZe6tp4Q= From a75839212cdee3cceee18669fbff26437bd4fd8b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 Apr 2024 20:04:23 +0400 Subject: [PATCH 37/57] chore(deps): bump github.com/aws/aws-sdk-go-v2/service/s3 from 1.51.1 to 1.53.1 (#6424) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 10 +++++----- go.sum | 20 ++++++++++---------- 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/go.mod b/go.mod index 217498193474..64d64beeea65 100644 --- a/go.mod +++ b/go.mod @@ -32,7 +32,7 @@ require ( github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.15.15 github.com/aws/aws-sdk-go-v2/service/ec2 v1.155.1 github.com/aws/aws-sdk-go-v2/service/ecr v1.24.6 - github.com/aws/aws-sdk-go-v2/service/s3 v1.51.1 + github.com/aws/aws-sdk-go-v2/service/s3 v1.53.1 github.com/aws/aws-sdk-go-v2/service/sts v1.28.6 github.com/bitnami/go-version v0.0.0-20231130084017-bb00604d650c github.com/bmatcuk/doublestar/v4 v4.6.1 @@ -177,12 +177,12 @@ require ( github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect github.com/aws/aws-sdk-go v1.49.21 // indirect - github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.1 // indirect + github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.2 // indirect github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.1 // indirect github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.5 // indirect github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.5 // indirect github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 // indirect - github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.2 // indirect + github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.5 // indirect github.com/aws/aws-sdk-go-v2/service/accessanalyzer 
v1.26.7 // indirect github.com/aws/aws-sdk-go-v2/service/apigateway v1.21.6 // indirect github.com/aws/aws-sdk-go-v2/service/apigatewayv2 v1.18.6 // indirect @@ -204,10 +204,10 @@ require ( github.com/aws/aws-sdk-go-v2/service/emr v1.36.0 // indirect github.com/aws/aws-sdk-go-v2/service/iam v1.28.7 // indirect github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.2 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.7 // indirect github.com/aws/aws-sdk-go-v2/service/internal/endpoint-discovery v1.8.11 // indirect github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.7 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.2 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.5 // indirect github.com/aws/aws-sdk-go-v2/service/kafka v1.28.5 // indirect github.com/aws/aws-sdk-go-v2/service/kinesis v1.24.6 // indirect github.com/aws/aws-sdk-go-v2/service/kms v1.27.7 // indirect diff --git a/go.sum b/go.sum index 13d1f1f695b9..b6eb0f6da7bc 100644 --- a/go.sum +++ b/go.sum @@ -370,8 +370,8 @@ github.com/aws/aws-sdk-go v1.49.21 h1:Rl8KW6HqkwzhATwvXhyr7vD4JFUMi7oXGAw9SrxxIF github.com/aws/aws-sdk-go v1.49.21/go.mod h1:LF8svs817+Nz+DmiMQKTO3ubZ/6IaTpq3TjupRn3Eqk= github.com/aws/aws-sdk-go-v2 v1.26.1 h1:5554eUqIYVWpU0YmeeYZ0wU64H2VLBs8TlhRB2L+EkA= github.com/aws/aws-sdk-go-v2 v1.26.1/go.mod h1:ffIFB97e2yNsv4aTSGkqtHnppsIJzw7G7BReUZ3jCXM= -github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.1 h1:gTK2uhtAPtFcdRRJilZPx8uJLL2J85xK11nKtWL0wfU= -github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.1/go.mod h1:sxpLb+nZk7tIfCWChfd+h4QwHNUR57d8hA1cleTkjJo= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.2 h1:x6xsQXGSmW6frevwDA+vi/wqhp1ct18mVXYN08/93to= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.2/go.mod h1:lPprDr1e6cJdyYeGXnRaJoP4Md+cDBvi2eOj00BlGmg= 
github.com/aws/aws-sdk-go-v2/config v1.27.10 h1:PS+65jThT0T/snC5WjyfHHyUgG+eBoupSDV+f838cro= github.com/aws/aws-sdk-go-v2/config v1.27.10/go.mod h1:BePM7Vo4OBpHreKRUMuDXX+/+JWP38FLkzl5m27/Jjs= github.com/aws/aws-sdk-go-v2/credentials v1.17.10 h1:qDZ3EA2lv1KangvQB6y258OssCHD0xvaGiEDkG4X/10= @@ -386,8 +386,8 @@ github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.5 h1:PG1F3OD1szkuQPzDw3C github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.5/go.mod h1:jU1li6RFryMz+so64PpKtudI+QzbKoIEivqdf6LNpOc= github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 h1:hT8rVHwugYE2lEfdFE0QWVo81lF7jMrYJVDWI+f+VxU= github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0/go.mod h1:8tu/lYfQfFe6IGnaOdrpVgEL2IrrDOf6/m9RQum4NkY= -github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.2 h1:en92G0Z7xlksoOylkUhuBSfJgijC7rHVLRdnIlHEs0E= -github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.2/go.mod h1:HgtQ/wN5G+8QSlK62lbOtNwQ3wTSByJ4wH2rCkPt+AE= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.5 h1:81KE7vaZzrl7yHBYHVEzYB8sypz11NMOZ40YlWvPxsU= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.5/go.mod h1:LIt2rg7Mcgn09Ygbdh/RdIm0rQ+3BNkbP1gyVMFtRK0= github.com/aws/aws-sdk-go-v2/service/accessanalyzer v1.26.7 h1:rLdKcienXrk+JFX1+DZg160ebG8lIF2nFvnEZL7dnII= github.com/aws/aws-sdk-go-v2/service/accessanalyzer v1.26.7/go.mod h1:cwqaWBOZXu8pqEE1ZC4Sw2ycZLjwKrRP5tOAJFgCbYc= github.com/aws/aws-sdk-go-v2/service/apigateway v1.21.6 h1:ePPaOVn92r5n8Neecdpy93hDmR0PBH6H6b7VQCE5vKE= @@ -434,14 +434,14 @@ github.com/aws/aws-sdk-go-v2/service/iam v1.28.7 h1:FKPRDYZOO0Eur19vWUL1B40Op0j8 github.com/aws/aws-sdk-go-v2/service/iam v1.28.7/go.mod h1:YzMYyQ7S4twfYzLjwP24G1RAxypozVZeNaG1r2jxRms= github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2 h1:Ji0DY1xUsUr3I8cHps0G+XM3WWU16lP6yG8qu1GAZAs= github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2/go.mod h1:5CsjAbs3NlGQyZNFACh+zztPDI7fU6eW9QsxjfnuBKg= -github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.2 h1:zSdTXYLwuXDNPUS+V41i1SFDXG7V0ITp0D9UT9Cvl18= 
-github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.2/go.mod h1:v8m8k+qVy95nYi7d56uP1QImleIIY25BPiNJYzPBdFE= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.7 h1:ZMeFZ5yk+Ek+jNr1+uwCd2tG89t6oTS5yVWpa6yy2es= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.7/go.mod h1:mxV05U+4JiHqIpGqqYXOHLPKUC6bDXC44bsUhNjOEwY= github.com/aws/aws-sdk-go-v2/service/internal/endpoint-discovery v1.8.11 h1:e9AVb17H4x5FTE5KWIP5M1Du+9M86pS+Hw0lBUdN8EY= github.com/aws/aws-sdk-go-v2/service/internal/endpoint-discovery v1.8.11/go.mod h1:B90ZQJa36xo0ph9HsoteI1+r8owgQH/U1QNfqZQkj1Q= github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.7 h1:ogRAwT1/gxJBcSWDMZlgyFUM962F51A5CRhDLbxLdmo= github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.7/go.mod h1:YCsIZhXfRPLFFCl5xxY+1T9RKzOKjCut+28JSX2DnAk= -github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.2 h1:1oY1AVEisRI4HNuFoLdRUB0hC63ylDAN6Me3MrfclEg= -github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.2/go.mod h1:KZ03VgvZwSjkT7fOetQ/wF3MZUvYFirlI1H5NklUNsY= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.5 h1:f9RyWNtS8oH7cZlbn+/JNPpjUk5+5fLd5lM9M0i49Ys= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.5/go.mod h1:h5CoMZV2VF297/VLhRhO1WF+XYWOzXo+4HsObA4HjBQ= github.com/aws/aws-sdk-go-v2/service/kafka v1.28.5 h1:yCkyZDGahaCaAkdpVx8Te05t6eW2FarBLunVC8S23nU= github.com/aws/aws-sdk-go-v2/service/kafka v1.28.5/go.mod h1:/KmX+vXMPJGAB56reo95tnsXa6QPNx6qli4L1AmYb7E= github.com/aws/aws-sdk-go-v2/service/kinesis v1.24.6 h1:FO/aIHk86VePDUh/3Q/A5pnvu45miO1GZB8rIq2BUlA= @@ -458,8 +458,8 @@ github.com/aws/aws-sdk-go-v2/service/rds v1.66.1 h1:TafjIpDW/+l7s+f3EIONaFsNvNfw github.com/aws/aws-sdk-go-v2/service/rds v1.66.1/go.mod h1:MYzRMSdY70kcS8AFg0aHmk/xj6VAe0UfaCCoLrBWPow= github.com/aws/aws-sdk-go-v2/service/redshift v1.39.7 h1:k4WaqQ7LHSGrSftCRXTRLv7WaozXu+fZ1jdisQSR2eU= github.com/aws/aws-sdk-go-v2/service/redshift v1.39.7/go.mod 
h1:8hU0Ax6q6QA+jrMcWTE0A4YH594MQoWP3EzGO3GH5Dw= -github.com/aws/aws-sdk-go-v2/service/s3 v1.51.1 h1:juZ+uGargZOrQGNxkVHr9HHR/0N+Yu8uekQnV7EAVRs= -github.com/aws/aws-sdk-go-v2/service/s3 v1.51.1/go.mod h1:SoR0c7Jnq8Tpmt0KSLXIavhjmaagRqQpe9r70W3POJg= +github.com/aws/aws-sdk-go-v2/service/s3 v1.53.1 h1:6cnno47Me9bRykw9AEv9zkXE+5or7jz8TsskTTccbgc= +github.com/aws/aws-sdk-go-v2/service/s3 v1.53.1/go.mod h1:qmdkIIAC+GCLASF7R2whgNrJADz0QZPX+Seiw/i4S3o= github.com/aws/aws-sdk-go-v2/service/secretsmanager v1.26.0 h1:dPCRgAL4WD9tSMaDglRNGOiAtSTjkwNiUW5GDpWFfHA= github.com/aws/aws-sdk-go-v2/service/secretsmanager v1.26.0/go.mod h1:4Ae1NCLK6ghmjzd45Tc33GgCKhUWD2ORAlULtMO1Cbs= github.com/aws/aws-sdk-go-v2/service/sns v1.26.6 h1:w2YwF8889ardGU3Y0qZbJ4Zzh+Q/QqKZ4kwkK7JFvnI= From 86714bf6bf40ea3e3c0cbc6d1c9d0a11bb5834bf Mon Sep 17 00:00:00 2001 From: Nikita Pivkin Date: Wed, 3 Apr 2024 01:41:30 +0300 Subject: [PATCH 38/57] feat(cloudformation): add support for logging and endpoint access for EKS (#6440) --- .../cloudformation/aws/eks/cluster.go | 98 +++++++++++++------ .../cloudformation/aws/eks/eks_test.go | 38 ++++++- 2 files changed, 103 insertions(+), 33 deletions(-) diff --git a/pkg/iac/adapters/cloudformation/aws/eks/cluster.go b/pkg/iac/adapters/cloudformation/aws/eks/cluster.go index d4c80e72dbd4..c960924e33d4 100644 --- a/pkg/iac/adapters/cloudformation/aws/eks/cluster.go +++ b/pkg/iac/adapters/cloudformation/aws/eks/cluster.go @@ -12,22 +12,11 @@ func getClusters(ctx parser.FileContext) (clusters []eks.Cluster) { for _, r := range clusterResources { cluster := eks.Cluster{ - Metadata: r.Metadata(), - // Logging not supported for cloudformation https://github.com/aws/containers-roadmap/issues/242 - // TODO: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eks-cluster.html#cfn-eks-cluster-logging - Logging: eks.Logging{ - Metadata: r.Metadata(), - API: iacTypes.BoolUnresolvable(r.Metadata()), - Audit: iacTypes.BoolUnresolvable(r.Metadata()), - 
Authenticator: iacTypes.BoolUnresolvable(r.Metadata()), - ControllerManager: iacTypes.BoolUnresolvable(r.Metadata()), - Scheduler: iacTypes.BoolUnresolvable(r.Metadata()), - }, - Encryption: getEncryptionConfig(r), - // endpoint protection not supported - https://github.com/aws/containers-roadmap/issues/242 - // TODO: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eks-cluster.html#cfn-eks-cluster-resourcesvpcconfig - PublicAccessEnabled: iacTypes.BoolUnresolvable(r.Metadata()), - PublicAccessCIDRs: nil, + Metadata: r.Metadata(), + Logging: getLogging(r), + Encryption: getEncryptionConfig(r), + PublicAccessEnabled: r.GetBoolProperty("ResourcesVpcConfig.EndpointPublicAccess"), + PublicAccessCIDRs: getPublicCIDRs(r), } clusters = append(clusters, cluster) @@ -35,26 +24,71 @@ func getClusters(ctx parser.FileContext) (clusters []eks.Cluster) { return clusters } +func getPublicCIDRs(r *parser.Resource) []iacTypes.StringValue { + publicAccessCidrs := r.GetProperty("ResourcesVpcConfig.PublicAccessCidrs") + if publicAccessCidrs.IsNotList() { + return nil + } + + var cidrs []iacTypes.StringValue + for _, el := range publicAccessCidrs.AsList() { + cidrs = append(cidrs, el.AsStringValue()) + } + + return cidrs +} + func getEncryptionConfig(r *parser.Resource) eks.Encryption { - encryption := eks.Encryption{ + encryptionConfigs := r.GetProperty("EncryptionConfig") + if encryptionConfigs.IsNotList() { + return eks.Encryption{ + Metadata: r.Metadata(), + } + } + + for _, encryptionConfig := range encryptionConfigs.AsList() { + resources := encryptionConfig.GetProperty("Resources") + hasSecrets := resources.IsList() && resources.Contains("secrets") + return eks.Encryption{ + Metadata: encryptionConfig.Metadata(), + KMSKeyID: encryptionConfig.GetStringProperty("Provider.KeyArn"), + Secrets: iacTypes.Bool(hasSecrets, resources.Metadata()), + } + } + + return eks.Encryption{ Metadata: r.Metadata(), - Secrets: iacTypes.BoolDefault(false, r.Metadata()), - 
KMSKeyID: iacTypes.StringDefault("", r.Metadata()), - } - - // TODO: EncryptionConfig is a list - // https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eks-cluster.html#cfn-eks-cluster-encryptionconfig - if encProp := r.GetProperty("EncryptionConfig"); encProp.IsNotNil() { - encryption.Metadata = encProp.Metadata() - encryption.KMSKeyID = encProp.GetStringProperty("Provider.KeyArn") - resourcesProp := encProp.GetProperty("Resources") - if resourcesProp.IsList() { - if resourcesProp.Contains("secrets") { - encryption.Secrets = iacTypes.Bool(true, resourcesProp.Metadata()) - } + } +} + +func getLogging(r *parser.Resource) eks.Logging { + enabledTypes := r.GetProperty("Logging.ClusterLogging.EnabledTypes") + if enabledTypes.IsNotList() { + return eks.Logging{ + Metadata: r.Metadata(), } } - return encryption + logging := eks.Logging{ + Metadata: enabledTypes.Metadata(), + } + + for _, typeConf := range enabledTypes.AsList() { + switch typ := typeConf.GetProperty("Type"); typ.AsString() { + case "api": + logging.API = iacTypes.Bool(true, typ.Metadata()) + case "audit": + logging.Audit = iacTypes.Bool(true, typ.Metadata()) + case "authenticator": + logging.Authenticator = iacTypes.Bool(true, typ.Metadata()) + case "controllerManager": + logging.ControllerManager = iacTypes.Bool(true, typ.Metadata()) + case "scheduler": + logging.Scheduler = iacTypes.Bool(true, typ.Metadata()) + } + + } + + return logging } diff --git a/pkg/iac/adapters/cloudformation/aws/eks/eks_test.go b/pkg/iac/adapters/cloudformation/aws/eks/eks_test.go index 84095c3b6592..36981f6bf544 100644 --- a/pkg/iac/adapters/cloudformation/aws/eks/eks_test.go +++ b/pkg/iac/adapters/cloudformation/aws/eks/eks_test.go @@ -5,6 +5,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation/testutil" "github.com/aquasecurity/trivy/pkg/iac/providers/aws/eks" + "github.com/aquasecurity/trivy/pkg/iac/types" ) func TestAdapt(t *testing.T) { @@ -19,9 +20,44 @@ func TestAdapt(t 
*testing.T) { Resources: EKSCluster: Type: AWS::EKS::Cluster + Properties: + Logging: + ClusterLogging: + EnabledTypes: + - Type: api + - Type: audit + - Type: authenticator + - Type: controllerManager + - Type: scheduler + EncryptionConfig: + - Provider: + KeyArn: alias/mykey + Resources: [secrets] + ResourcesVpcConfig: + EndpointPublicAccess: True + PublicAccessCidrs: + - 0.0.0.0/0 `, expected: eks.EKS{ - Clusters: []eks.Cluster{{}}, + Clusters: []eks.Cluster{ + { + Logging: eks.Logging{ + API: types.BoolTest(true), + Audit: types.BoolTest(true), + Authenticator: types.BoolTest(true), + ControllerManager: types.BoolTest(true), + Scheduler: types.BoolTest(true), + }, + Encryption: eks.Encryption{ + KMSKeyID: types.StringTest("alias/mykey"), + Secrets: types.BoolTest(true), + }, + PublicAccessEnabled: types.BoolTest(true), + PublicAccessCIDRs: []types.StringValue{ + types.StringTest("0.0.0.0/0"), + }, + }, + }, }, }, { From 245c12053245fb75c31ac67272a1de7e5ac35f8a Mon Sep 17 00:00:00 2001 From: Nikita Pivkin Date: Wed, 3 Apr 2024 01:41:57 +0300 Subject: [PATCH 39/57] refactor(terraform): remove metrics collection (#6444) --- .../adapters/terraform/tftestutil/testutil.go | 4 +- pkg/iac/scanners/helm/test/option_test.go | 16 +-- .../scanners/terraform/deterministic_test.go | 6 +- .../scanners/terraform/executor/executor.go | 45 +-------- .../terraform/executor/executor_test.go | 26 +++-- pkg/iac/scanners/terraform/module_test.go | 99 +++++++++++++------ .../scanners/terraform/parser/evaluator.go | 11 +-- pkg/iac/scanners/terraform/parser/parser.go | 41 +------- .../scanners/terraform/performance_test.go | 6 +- pkg/iac/scanners/terraform/scanner.go | 58 ++--------- pkg/iac/scanners/terraform/scanner_test.go | 6 +- pkg/iac/scanners/terraform/setup_test.go | 6 +- 12 files changed, 126 insertions(+), 198 deletions(-) diff --git a/pkg/iac/adapters/terraform/tftestutil/testutil.go b/pkg/iac/adapters/terraform/tftestutil/testutil.go index 5503bfac5b99..57535cf151c5 100644 
--- a/pkg/iac/adapters/terraform/tftestutil/testutil.go +++ b/pkg/iac/adapters/terraform/tftestutil/testutil.go @@ -5,7 +5,7 @@ import ( "testing" "github.com/aquasecurity/trivy/internal/testutil" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" "github.com/aquasecurity/trivy/pkg/iac/terraform" ) @@ -13,7 +13,7 @@ func CreateModulesFromSource(t *testing.T, source, ext string) terraform.Modules fs := testutil.CreateFS(t, map[string]string{ "source" + ext: source, }) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) if err := p.ParseFS(context.TODO(), "."); err != nil { t.Fatal(err) } diff --git a/pkg/iac/scanners/helm/test/option_test.go b/pkg/iac/scanners/helm/test/option_test.go index 2ad7efc64008..d16d29039a15 100644 --- a/pkg/iac/scanners/helm/test/option_test.go +++ b/pkg/iac/scanners/helm/test/option_test.go @@ -7,10 +7,10 @@ import ( "strings" "testing" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/helm/parser" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/aquasecurity/trivy/pkg/iac/scanners/helm/parser" "github.com/aquasecurity/trivy/pkg/iac/scanners/options" ) @@ -37,10 +37,10 @@ func Test_helm_parser_with_options_with_values_file(t *testing.T) { var opts []options.ParserOption if test.valuesFile != "" { - opts = append(opts, parser2.OptionWithValuesFile(test.valuesFile)) + opts = append(opts, parser.OptionWithValuesFile(test.valuesFile)) } - helmParser := parser2.New(chartName, opts...) + helmParser := parser.New(chartName, opts...) 
err := helmParser.ParseFS(context.TODO(), os.DirFS(filepath.Join("testdata", chartName)), ".") require.NoError(t, err) manifests, err := helmParser.RenderedChartFiles() @@ -87,14 +87,14 @@ func Test_helm_parser_with_options_with_set_value(t *testing.T) { var opts []options.ParserOption if test.valuesFile != "" { - opts = append(opts, parser2.OptionWithValuesFile(test.valuesFile)) + opts = append(opts, parser.OptionWithValuesFile(test.valuesFile)) } if test.values != "" { - opts = append(opts, parser2.OptionWithValues(test.values)) + opts = append(opts, parser.OptionWithValues(test.values)) } - helmParser := parser2.New(chartName, opts...) + helmParser := parser.New(chartName, opts...) err := helmParser.ParseFS(context.TODO(), os.DirFS(filepath.Join("testdata", chartName)), ".") require.NoError(t, err) manifests, err := helmParser.RenderedChartFiles() @@ -140,10 +140,10 @@ func Test_helm_parser_with_options_with_api_versions(t *testing.T) { var opts []options.ParserOption if len(test.apiVersions) > 0 { - opts = append(opts, parser2.OptionWithAPIVersions(test.apiVersions...)) + opts = append(opts, parser.OptionWithAPIVersions(test.apiVersions...)) } - helmParser := parser2.New(chartName, opts...) + helmParser := parser.New(chartName, opts...) 
err := helmParser.ParseFS(context.TODO(), os.DirFS(filepath.Join("testdata", chartName)), ".") require.NoError(t, err) manifests, err := helmParser.RenderedChartFiles() diff --git a/pkg/iac/scanners/terraform/deterministic_test.go b/pkg/iac/scanners/terraform/deterministic_test.go index d47161ec0059..258fe5bbbd16 100644 --- a/pkg/iac/scanners/terraform/deterministic_test.go +++ b/pkg/iac/scanners/terraform/deterministic_test.go @@ -7,7 +7,7 @@ import ( "github.com/aquasecurity/trivy/internal/testutil" "github.com/aquasecurity/trivy/pkg/iac/rules" "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/executor" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" "github.com/stretchr/testify/require" ) @@ -39,12 +39,12 @@ locals { }) for i := 0; i < 100; i++ { - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), ".") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - results, _, _ := executor.New().Execute(modules) + results, _ := executor.New().Execute(modules) require.Len(t, results.GetFailed(), 2) } } diff --git a/pkg/iac/scanners/terraform/executor/executor.go b/pkg/iac/scanners/terraform/executor/executor.go index a4d15f45a9bb..efc140b89b46 100644 --- a/pkg/iac/scanners/terraform/executor/executor.go +++ b/pkg/iac/scanners/terraform/executor/executor.go @@ -4,7 +4,6 @@ import ( "fmt" "runtime" "sort" - "time" "github.com/zclconf/go-cty/cty" @@ -38,22 +37,6 @@ type Executor struct { frameworks []framework.Framework } -type Metrics struct { - Timings struct { - Adaptation time.Duration - RunningChecks time.Duration - } - Counts struct { - Ignored int - Failed int - Passed int - Critical int - High int - Medium int - Low int - } -} - // New creates a new Executor func New(options ...Option) *Executor { s := &Executor{ @@ 
-77,14 +60,10 @@ func checkInList(id string, list []string) bool { return false } -func (e *Executor) Execute(modules terraform.Modules) (scan.Results, Metrics, error) { - - var metrics Metrics +func (e *Executor) Execute(modules terraform.Modules) (scan.Results, error) { e.debug.Log("Adapting modules...") - adaptationTime := time.Now() infra := adapter.Adapt(modules) - metrics.Timings.Adaptation = time.Since(adaptationTime) e.debug.Log("Adapted %d module(s) into defsec state data.", len(modules)) threads := runtime.NumCPU() @@ -101,7 +80,6 @@ func (e *Executor) Execute(modules terraform.Modules) (scan.Results, Metrics, er f(infra) } - checksTime := time.Now() registeredRules := rules.GetRegistered(e.frameworks...) e.debug.Log("Initialized %d rule(s).", len(registeredRules)) @@ -109,9 +87,8 @@ func (e *Executor) Execute(modules terraform.Modules) (scan.Results, Metrics, er e.debug.Log("Created pool with %d worker(s) to apply rules.", threads) results, err := pool.Run() if err != nil { - return nil, metrics, err + return nil, err } - metrics.Timings.RunningChecks = time.Since(checksTime) e.debug.Log("Finished applying rules.") if e.enableIgnores { @@ -152,25 +129,9 @@ func (e *Executor) Execute(modules terraform.Modules) (scan.Results, Metrics, er results = e.updateSeverity(results) results = e.filterResults(results) - metrics.Counts.Ignored = len(results.GetIgnored()) - metrics.Counts.Passed = len(results.GetPassed()) - metrics.Counts.Failed = len(results.GetFailed()) - - for _, res := range results.GetFailed() { - switch res.Severity() { - case severity.Critical: - metrics.Counts.Critical++ - case severity.High: - metrics.Counts.High++ - case severity.Medium: - metrics.Counts.Medium++ - case severity.Low: - metrics.Counts.Low++ - } - } e.sortResults(results) - return results, metrics, nil + return results, nil } func (e *Executor) updateSeverity(results []scan.Result) scan.Results { diff --git a/pkg/iac/scanners/terraform/executor/executor_test.go 
b/pkg/iac/scanners/terraform/executor/executor_test.go index 952803a507f5..c8c3e2af60c5 100644 --- a/pkg/iac/scanners/terraform/executor/executor_test.go +++ b/pkg/iac/scanners/terraform/executor/executor_test.go @@ -8,7 +8,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/providers" "github.com/aquasecurity/trivy/pkg/iac/rules" "github.com/aquasecurity/trivy/pkg/iac/scan" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" "github.com/aquasecurity/trivy/pkg/iac/severity" "github.com/aquasecurity/trivy/pkg/iac/terraform" "github.com/stretchr/testify/assert" @@ -47,12 +47,15 @@ resource "problem" "this" { `, }) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - results, _, _ := New().Execute(modules) + + results, err := New().Execute(modules) + assert.Error(t, err) + assert.Equal(t, len(results.GetFailed()), 0) } @@ -69,12 +72,14 @@ resource "problem" "this" { `, }) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) + modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - _, _, err = New(OptionStopOnErrors(false)).Execute(modules) + + _, err = New(OptionStopOnErrors(false)).Execute(modules) assert.Error(t, err) } @@ -91,12 +96,15 @@ resource "problem" "this" { `, }) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - results, _, _ := New().Execute(modules) + + results, _ := 
New().Execute(modules) + require.NoError(t, err) + assert.Equal(t, len(results.GetFailed()), 0) } @@ -113,12 +121,12 @@ resource "problem" "this" { `, }) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - _, _, err = New(OptionStopOnErrors(false)).Execute(modules) + _, err = New(OptionStopOnErrors(false)).Execute(modules) assert.Error(t, err) } diff --git a/pkg/iac/scanners/terraform/module_test.go b/pkg/iac/scanners/terraform/module_test.go index ffed34718156..61b1a0e359f6 100644 --- a/pkg/iac/scanners/terraform/module_test.go +++ b/pkg/iac/scanners/terraform/module_test.go @@ -13,7 +13,7 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scan" "github.com/aquasecurity/trivy/pkg/iac/scanners/options" "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/executor" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" "github.com/aquasecurity/trivy/pkg/iac/severity" "github.com/aquasecurity/trivy/pkg/iac/terraform" "github.com/stretchr/testify/require" @@ -88,13 +88,15 @@ resource "problem" "uhoh" { debug := bytes.NewBuffer([]byte{}) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true), options.ParserWithDebug(debug)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true), options.ParserWithDebug(debug)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - results, _, err := executor.New().Execute(modules) + + results, err := executor.New().Execute(modules) require.NoError(t, err) + testutil.AssertRuleFound(t, badRule.LongID(), results, "") if t.Failed() { fmt.Println(debug.String()) @@ -119,12 +121,15 @@ resource "problem" "uhoh" { `}, ) - p := 
parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - results, _, _ := executor.New().Execute(modules) + + results, err := executor.New().Execute(modules) + require.NoError(t, err) + testutil.AssertRuleFound(t, badRule.LongID(), results, "") } @@ -148,12 +153,15 @@ resource "problem" "uhoh" { `}, ) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - results, _, _ := executor.New().Execute(modules) + + results, err := executor.New().Execute(modules) + require.NoError(t, err) + testutil.AssertRuleNotFound(t, badRule.LongID(), results, "") } @@ -175,12 +183,15 @@ resource "problem" "uhoh" { } `}) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - results, _, _ := executor.New().Execute(modules) + + results, err := executor.New().Execute(modules) + require.NoError(t, err) + testutil.AssertRuleFound(t, badRule.LongID(), results, "") } @@ -202,12 +213,15 @@ resource "problem" "uhoh" { } `}) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - results, _, _ := executor.New().Execute(modules) + + results, err := executor.New().Execute(modules) + require.NoError(t, err) + testutil.AssertRuleFound(t, badRule.LongID(), results, "") } @@ 
-238,12 +252,15 @@ resource "problem" "uhoh" { } `}) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - results, _, _ := executor.New().Execute(modules) + + results, err := executor.New().Execute(modules) + require.NoError(t, err) + testutil.AssertRuleFound(t, badRule.LongID(), results, "") } @@ -276,12 +293,15 @@ resource "problem" "uhoh" { `, }) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true), options.ParserWithDebug(os.Stderr)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true), options.ParserWithDebug(os.Stderr)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - results, _, _ := executor.New().Execute(modules) + + results, err := executor.New().Execute(modules) + require.NoError(t, err) + testutil.AssertRuleFound(t, badRule.LongID(), results, "") } @@ -331,12 +351,15 @@ resource "problem" "uhoh" { `, }) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - results, _, _ := executor.New().Execute(modules) + + results, err := executor.New().Execute(modules) + require.NoError(t, err) + testutil.AssertRuleFound(t, badRule.LongID(), results, "") } @@ -380,12 +403,15 @@ resource "problem" "uhoh" { `, }) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - results, _, _ := executor.New().Execute(modules) + + results, err := 
executor.New().Execute(modules) + require.NoError(t, err) + testutil.AssertRuleFound(t, badRule.LongID(), results, "") } @@ -418,12 +444,15 @@ resource "problem" "uhoh" { `, }) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - results, _, _ := executor.New().Execute(modules) + + results, err := executor.New().Execute(modules) + require.NoError(t, err) + testutil.AssertRuleFound(t, badRule.LongID(), results, "") } @@ -473,12 +502,15 @@ resource "problem" "uhoh" { `, }) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - results, _, _ := executor.New().Execute(modules) + + results, err := executor.New().Execute(modules) + require.NoError(t, err) + testutil.AssertRuleFound(t, badRule.LongID(), results, "") } @@ -523,12 +555,15 @@ resource "bad" "thing" { reg := rules.Register(r1) defer rules.Deregister(reg) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - results, _, _ := executor.New().Execute(modules) + + results, err := executor.New().Execute(modules) + require.NoError(t, err) + testutil.AssertRuleFound(t, r1.LongID(), results, "") } @@ -572,12 +607,15 @@ resource "bad" "thing" { reg := rules.Register(r1) defer rules.Deregister(reg) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) 
modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - results, _, _ := executor.New().Execute(modules) + + results, err := executor.New().Execute(modules) + require.NoError(t, err) + testutil.AssertRuleNotFound(t, r1.LongID(), results, "") } @@ -621,12 +659,15 @@ data "aws_iam_policy_document" "policy" { } `}) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) err := p.ParseFS(context.TODO(), "project") require.NoError(t, err) modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - results, _, _ := executor.New().Execute(modules) + + results, err := executor.New().Execute(modules) + require.NoError(t, err) + testutil.AssertRuleNotFound(t, iam.CheckEnforceGroupMFA.LongID(), results, "") } diff --git a/pkg/iac/scanners/terraform/parser/evaluator.go b/pkg/iac/scanners/terraform/parser/evaluator.go index e7e3415e1b52..3633d22386a1 100644 --- a/pkg/iac/scanners/terraform/parser/evaluator.go +++ b/pkg/iac/scanners/terraform/parser/evaluator.go @@ -5,7 +5,6 @@ import ( "errors" "io/fs" "reflect" - "time" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/ext/typeexpr" @@ -119,7 +118,7 @@ func (e *evaluator) exportOutputs() cty.Value { return cty.ObjectVal(data) } -func (e *evaluator) EvaluateAll(ctx context.Context) (terraform.Modules, map[string]fs.FS, time.Duration) { +func (e *evaluator) EvaluateAll(ctx context.Context) (terraform.Modules, map[string]fs.FS) { fsKey := types.CreateFSKey(e.filesystem) e.debug.Log("Filesystem key is '%s'", fsKey) @@ -127,10 +126,7 @@ func (e *evaluator) EvaluateAll(ctx context.Context) (terraform.Modules, map[str fsMap := make(map[string]fs.FS) fsMap[fsKey] = e.filesystem - var parseDuration time.Duration - var lastContext hcl.EvalContext - start := time.Now() e.debug.Log("Starting module evaluation...") for i := 0; i < maxContextIterations; i++ { @@ -154,8 +150,6 @@ func (e *evaluator) EvaluateAll(ctx context.Context) 
(terraform.Modules, map[str e.blocks = e.expandBlocks(e.blocks) e.blocks = e.expandBlocks(e.blocks) - parseDuration += time.Since(start) - e.debug.Log("Starting submodule evaluation...") var modules terraform.Modules for _, definition := range e.loadModules(ctx) { @@ -192,9 +186,8 @@ func (e *evaluator) EvaluateAll(ctx context.Context) (terraform.Modules, map[str } e.debug.Log("Module evaluation complete.") - parseDuration += time.Since(start) rootModule := terraform.NewModule(e.projectRootPath, e.modulePath, e.blocks, e.ignores) - return append(terraform.Modules{rootModule}, modules...), fsMap, parseDuration + return append(terraform.Modules{rootModule}, modules...), fsMap } func (e *evaluator) expandBlocks(blocks terraform.Blocks) terraform.Blocks { diff --git a/pkg/iac/scanners/terraform/parser/parser.go b/pkg/iac/scanners/terraform/parser/parser.go index 35a8b454f00f..b80c4a6babf2 100644 --- a/pkg/iac/scanners/terraform/parser/parser.go +++ b/pkg/iac/scanners/terraform/parser/parser.go @@ -9,7 +9,6 @@ import ( "path/filepath" "sort" "strings" - "time" "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/hclparse" @@ -28,19 +27,6 @@ type sourceFile struct { path string } -type Metrics struct { - Timings struct { - DiskIODuration time.Duration - ParseDuration time.Duration - } - Counts struct { - Blocks int - Modules int - ModuleDownloads int - Files int - } -} - var _ ConfigurableTerraformParser = (*Parser)(nil) // Parser is a tool for parsing terraform templates at a given file system location @@ -57,7 +43,6 @@ type Parser struct { workspaceName string underlying *hclparse.Parser children []*Parser - metrics Metrics options []options.ParserOption debug debug.Logger allowDownloads bool @@ -132,21 +117,7 @@ func (p *Parser) newModuleParser(moduleFS fs.FS, moduleSource, modulePath, modul return mp } -func (p *Parser) Metrics() Metrics { - total := p.metrics - for _, child := range p.children { - metrics := child.Metrics() - total.Counts.Files += 
metrics.Counts.Files - total.Counts.Blocks += metrics.Counts.Blocks - total.Timings.ParseDuration += metrics.Timings.ParseDuration - total.Timings.DiskIODuration += metrics.Timings.DiskIODuration - // NOTE: we don't add module count - this has already propagated to the top level - } - return total -} - func (p *Parser) ParseFile(_ context.Context, fullPath string) error { - diskStart := time.Now() isJSON := strings.HasSuffix(fullPath, ".tf.json") isHCL := strings.HasSuffix(fullPath, ".tf") @@ -165,14 +136,13 @@ func (p *Parser) ParseFile(_ context.Context, fullPath string) error { if err != nil { return err } - p.metrics.Timings.DiskIODuration += time.Since(diskStart) + if dir := path.Dir(fullPath); p.projectRoot == "" { p.debug.Log("Setting project/module root to '%s'", dir) p.projectRoot = dir p.modulePath = dir } - start := time.Now() var file *hcl.File var diag hcl.Diagnostics @@ -188,8 +158,7 @@ func (p *Parser) ParseFile(_ context.Context, fullPath string) error { file: file, path: fullPath, }) - p.metrics.Counts.Files++ - p.metrics.Timings.ParseDuration += time.Since(start) + p.debug.Log("Added file %s.", fullPath) return nil } @@ -270,8 +239,6 @@ func (p *Parser) EvaluateAll(ctx context.Context) (terraform.Modules, cty.Value, } p.debug.Log("Read %d block(s) and %d ignore(s) for module '%s' (%d file[s])...", len(blocks), len(ignores), p.moduleName, len(p.files)) - p.metrics.Counts.Blocks = len(blocks) - var inputVars map[string]cty.Value if p.moduleBlock != nil { inputVars = p.moduleBlock.Values().AsValueMap() @@ -312,9 +279,7 @@ func (p *Parser) EvaluateAll(ctx context.Context) (terraform.Modules, cty.Value, p.allowDownloads, p.skipCachedModules, ) - modules, fsMap, parseDuration := evaluator.EvaluateAll(ctx) - p.metrics.Counts.Modules = len(modules) - p.metrics.Timings.ParseDuration = parseDuration + modules, fsMap := evaluator.EvaluateAll(ctx) p.debug.Log("Finished parsing module '%s'.", p.moduleName) p.fsMap = fsMap return modules, 
evaluator.exportOutputs(), nil diff --git a/pkg/iac/scanners/terraform/performance_test.go b/pkg/iac/scanners/terraform/performance_test.go index f4a390a3b2cc..9015aa25b076 100644 --- a/pkg/iac/scanners/terraform/performance_test.go +++ b/pkg/iac/scanners/terraform/performance_test.go @@ -9,7 +9,7 @@ import ( "github.com/aquasecurity/trivy/internal/testutil" "github.com/aquasecurity/trivy/pkg/iac/rules" "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/executor" - parser2 "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" ) func BenchmarkCalculate(b *testing.B) { @@ -21,7 +21,7 @@ func BenchmarkCalculate(b *testing.B) { b.ResetTimer() for i := 0; i < b.N; i++ { - p := parser2.New(f, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(f, "", parser.OptionStopOnHCLError(true)) if err := p.ParseFS(context.TODO(), "project"); err != nil { b.Fatal(err) } @@ -29,7 +29,7 @@ func BenchmarkCalculate(b *testing.B) { if err != nil { b.Fatal(err) } - _, _, _ = executor.New().Execute(modules) + executor.New().Execute(modules) } } diff --git a/pkg/iac/scanners/terraform/scanner.go b/pkg/iac/scanners/terraform/scanner.go index 5176b6471355..f5a3554d002d 100644 --- a/pkg/iac/scanners/terraform/scanner.go +++ b/pkg/iac/scanners/terraform/scanner.go @@ -10,7 +10,6 @@ import ( "sort" "strings" "sync" - "time" "github.com/aquasecurity/trivy/pkg/extrafs" "github.com/aquasecurity/trivy/pkg/iac/debug" @@ -21,7 +20,6 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/options" "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/executor" "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" - "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser/resolvers" "github.com/aquasecurity/trivy/pkg/iac/terraform" "github.com/aquasecurity/trivy/pkg/iac/types" ) @@ -120,14 +118,6 @@ func (s *Scanner) SetDataFilesystem(_ fs.FS) { } func (s *Scanner) 
SetRegoErrorLimit(_ int) {} -type Metrics struct { - Parser parser.Metrics - Executor executor.Metrics - Timings struct { - Total time.Duration - } -} - func New(opts ...options.ScannerOption) *Scanner { s := &Scanner{ dirs: make(map[string]struct{}), @@ -139,11 +129,6 @@ func New(opts ...options.ScannerOption) *Scanner { return s } -func (s *Scanner) ScanFS(ctx context.Context, target fs.FS, dir string) (scan.Results, error) { - results, _, err := s.ScanFSWithMetrics(ctx, target, dir) - return results, err -} - func (s *Scanner) initRegoScanner(srcFS fs.FS) (*rego.Scanner, error) { s.Lock() defer s.Unlock() @@ -167,8 +152,7 @@ type terraformRootModule struct { fsMap map[string]fs.FS } -func (s *Scanner) ScanFSWithMetrics(ctx context.Context, target fs.FS, dir string) (scan.Results, Metrics, error) { - var metrics Metrics +func (s *Scanner) ScanFS(ctx context.Context, target fs.FS, dir string) (scan.Results, error) { s.debug.Log("Scanning [%s] at '%s'...", target, dir) @@ -178,12 +162,12 @@ func (s *Scanner) ScanFSWithMetrics(ctx context.Context, target fs.FS, dir strin if len(modulePaths) == 0 { s.debug.Log("no modules found") - return nil, metrics, nil + return nil, nil } regoScanner, err := s.initRegoScanner(target) if err != nil { - return nil, metrics, err + return nil, err } s.execLock.Lock() @@ -195,7 +179,7 @@ func (s *Scanner) ScanFSWithMetrics(ctx context.Context, target fs.FS, dir strin p := parser.New(target, "", s.parserOpt...) rootDirs, err := p.FindRootModules(ctx, modulePaths) if err != nil { - return nil, metrics, fmt.Errorf("failed to find root modules: %w", err) + return nil, fmt.Errorf("failed to find root modules: %w", err) } rootModules := make([]terraformRootModule, 0, len(rootDirs)) @@ -208,21 +192,14 @@ func (s *Scanner) ScanFSWithMetrics(ctx context.Context, target fs.FS, dir strin p := parser.New(target, "", s.parserOpt...) 
if err := p.ParseFS(ctx, dir); err != nil { - return nil, metrics, err + return nil, err } modules, _, err := p.EvaluateAll(ctx) if err != nil { - return nil, metrics, err + return nil, err } - parserMetrics := p.Metrics() - metrics.Parser.Counts.Blocks += parserMetrics.Counts.Blocks - metrics.Parser.Counts.Modules += parserMetrics.Counts.Modules - metrics.Parser.Counts.Files += parserMetrics.Counts.Files - metrics.Parser.Timings.DiskIODuration += parserMetrics.Timings.DiskIODuration - metrics.Parser.Timings.ParseDuration += parserMetrics.Timings.ParseDuration - rootModules = append(rootModules, terraformRootModule{ rootPath: dir, childs: modules, @@ -234,9 +211,9 @@ func (s *Scanner) ScanFSWithMetrics(ctx context.Context, target fs.FS, dir strin s.execLock.RLock() e := executor.New(s.executorOpt...) s.execLock.RUnlock() - results, execMetrics, err := e.Execute(module.childs) + results, err := e.Execute(module.childs) if err != nil { - return nil, metrics, err + return nil, err } for i, result := range results { @@ -256,27 +233,10 @@ func (s *Scanner) ScanFSWithMetrics(ctx context.Context, target fs.FS, dir strin } } - metrics.Executor.Counts.Passed += execMetrics.Counts.Passed - metrics.Executor.Counts.Failed += execMetrics.Counts.Failed - metrics.Executor.Counts.Ignored += execMetrics.Counts.Ignored - metrics.Executor.Counts.Critical += execMetrics.Counts.Critical - metrics.Executor.Counts.High += execMetrics.Counts.High - metrics.Executor.Counts.Medium += execMetrics.Counts.Medium - metrics.Executor.Counts.Low += execMetrics.Counts.Low - metrics.Executor.Timings.Adaptation += execMetrics.Timings.Adaptation - metrics.Executor.Timings.RunningChecks += execMetrics.Timings.RunningChecks - allResults = append(allResults, results...) 
} - metrics.Parser.Counts.ModuleDownloads = resolvers.Remote.GetDownloadCount() - - metrics.Timings.Total += metrics.Parser.Timings.DiskIODuration - metrics.Timings.Total += metrics.Parser.Timings.ParseDuration - metrics.Timings.Total += metrics.Executor.Timings.Adaptation - metrics.Timings.Total += metrics.Executor.Timings.RunningChecks - - return allResults, metrics, nil + return allResults, nil } func (s *Scanner) removeNestedDirs(dirs []string) []string { diff --git a/pkg/iac/scanners/terraform/scanner_test.go b/pkg/iac/scanners/terraform/scanner_test.go index dbc2d67c3c64..020954d811db 100644 --- a/pkg/iac/scanners/terraform/scanner_test.go +++ b/pkg/iac/scanners/terraform/scanner_test.go @@ -63,7 +63,7 @@ func scanWithOptions(t *testing.T, code string, opt ...options.ScannerOption) sc }) scanner := New(opt...) - results, _, err := scanner.ScanFSWithMetrics(context.TODO(), fs, "project") + results, err := scanner.ScanFS(context.TODO(), fs, "project") require.NoError(t, err) return results } @@ -338,7 +338,7 @@ cause := bucket.name options.ScannerWithPolicyNamespaces(test.includedNamespaces...), ) - results, _, err := scanner.ScanFSWithMetrics(context.TODO(), fs, "code") + results, err := scanner.ScanFS(context.TODO(), fs, "code") require.NoError(t, err) var found bool @@ -376,7 +376,7 @@ resource "aws_s3_bucket" "my-bucket" { }), ) - _, _, err := scanner.ScanFSWithMetrics(context.TODO(), fs, "code") + _, err := scanner.ScanFS(context.TODO(), fs, "code") require.NoError(t, err) assert.Equal(t, 1, len(actual.AWS.S3.Buckets)) diff --git a/pkg/iac/scanners/terraform/setup_test.go b/pkg/iac/scanners/terraform/setup_test.go index c1f1aeb2e8ca..84bf3fdcc338 100644 --- a/pkg/iac/scanners/terraform/setup_test.go +++ b/pkg/iac/scanners/terraform/setup_test.go @@ -7,7 +7,7 @@ import ( "github.com/aquasecurity/trivy/internal/testutil" "github.com/aquasecurity/trivy/pkg/iac/scan" "github.com/aquasecurity/trivy/pkg/iac/scanners/options" - parser2 
"github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" "github.com/aquasecurity/trivy/pkg/iac/terraform" "github.com/stretchr/testify/require" ) @@ -17,7 +17,7 @@ func createModulesFromSource(t *testing.T, source string, ext string) terraform. "source" + ext: source, }) - p := parser2.New(fs, "", parser2.OptionStopOnHCLError(true)) + p := parser.New(fs, "", parser.OptionStopOnHCLError(true)) if err := p.ParseFS(context.TODO(), "."); err != nil { t.Fatal(err) } @@ -51,7 +51,7 @@ func scanJSON(t *testing.T, source string) scan.Results { }) s := New(options.ScannerWithEmbeddedPolicies(true), options.ScannerWithEmbeddedLibraries(true)) - results, _, err := s.ScanFSWithMetrics(context.TODO(), fs, ".") + results, err := s.ScanFS(context.TODO(), fs, ".") require.NoError(t, err) return results } From 74e4c6e0127c5594516ed54c1202213d4f670c8e Mon Sep 17 00:00:00 2001 From: Nikita Pivkin Date: Wed, 3 Apr 2024 01:42:46 +0300 Subject: [PATCH 40/57] fix(cloudformation): resolve `DedicatedMasterEnabled` parsing issue (#6439) --- .../aws/elasticsearch/domain.go | 10 +++++--- .../aws/elasticsearch/elasticsearch_test.go | 23 ++++++++++++++++++- 2 files changed, 29 insertions(+), 4 deletions(-) diff --git a/pkg/iac/adapters/cloudformation/aws/elasticsearch/domain.go b/pkg/iac/adapters/cloudformation/aws/elasticsearch/domain.go index 26747e44999b..2ca77a5d7448 100644 --- a/pkg/iac/adapters/cloudformation/aws/elasticsearch/domain.go +++ b/pkg/iac/adapters/cloudformation/aws/elasticsearch/domain.go @@ -16,9 +16,7 @@ func getDomains(ctx parser.FileContext) (domains []elasticsearch.Domain) { Metadata: r.Metadata(), DomainName: r.GetStringProperty("DomainName"), AccessPolicies: r.GetStringProperty("AccessPolicies"), - // TODO: ElasticsearchClusterConfig changed to ClusterConfig - DedicatedMasterEnabled: r.GetBoolProperty("ElasticsearchClusterConfig.DedicatedMasterEnabled"), - VpcId: iacTypes.String("", 
r.Metadata()), + VpcId: iacTypes.String("", r.Metadata()), LogPublishing: elasticsearch.LogPublishing{ Metadata: r.Metadata(), AuditEnabled: iacTypes.BoolDefault(false, r.Metadata()), @@ -46,6 +44,12 @@ func getDomains(ctx parser.FileContext) (domains []elasticsearch.Domain) { }, } + if r.Type() == "AWS::OpenSearchService::Domain" { + domain.DedicatedMasterEnabled = r.GetBoolProperty("ClusterConfig.DedicatedMasterEnabled") + } else { + domain.DedicatedMasterEnabled = r.GetBoolProperty("ElasticsearchClusterConfig.DedicatedMasterEnabled") + } + if prop := r.GetProperty("LogPublishingOptions"); prop.IsNotNil() { domain.LogPublishing = elasticsearch.LogPublishing{ Metadata: prop.Metadata(), diff --git a/pkg/iac/adapters/cloudformation/aws/elasticsearch/elasticsearch_test.go b/pkg/iac/adapters/cloudformation/aws/elasticsearch/elasticsearch_test.go index 514c689b8d28..afb9c3a81e22 100644 --- a/pkg/iac/adapters/cloudformation/aws/elasticsearch/elasticsearch_test.go +++ b/pkg/iac/adapters/cloudformation/aws/elasticsearch/elasticsearch_test.go @@ -22,6 +22,8 @@ Resources: Type: AWS::OpenSearchService::Domain Properties: DomainName: 'test' + ClusterConfig: + DedicatedMasterEnabled: true NodeToNodeEncryptionOptions: Enabled: true EncryptionAtRestOptions: @@ -47,7 +49,8 @@ Resources: expected: elasticsearch.Elasticsearch{ Domains: []elasticsearch.Domain{ { - DomainName: types.StringTest("test"), + DomainName: types.StringTest("test"), + DedicatedMasterEnabled: types.BoolTest(true), LogPublishing: elasticsearch.LogPublishing{ AuditEnabled: types.BoolTest(true), CloudWatchLogGroupArn: types.StringTest("arn:aws:logs:us-east-1:123456789012:log-group:/aws/opensearch/domains/opensearch-application-logs"), @@ -78,6 +81,24 @@ Resources: Domains: []elasticsearch.Domain{{}}, }, }, + { + name: "Elasticsearch", + source: `AWSTemplateFormatVersion: 2010-09-09 +Resources: + ElasticsearchDomain: + Type: AWS::Elasticsearch::Domain + Properties: + ElasticsearchClusterConfig: + 
DedicatedMasterEnabled: true + `, + expected: elasticsearch.Elasticsearch{ + Domains: []elasticsearch.Domain{ + { + DedicatedMasterEnabled: types.BoolTest(true), + }, + }, + }, + }, } for _, tt := range tests { From 8dd0fcd61b37690f800f9aac6b5c95aec2bb6a65 Mon Sep 17 00:00:00 2001 From: Nikita Pivkin Date: Wed, 3 Apr 2024 03:43:29 +0300 Subject: [PATCH 41/57] feat(misconf): add support for wildcard ignores (#6414) --- docs/docs/scanner/misconfiguration/index.md | 13 ++- pkg/iac/ignore/rule.go | 37 +++++++- pkg/iac/ignore/rule_test.go | 61 ++++++++++++ .../scanners/terraform/executor/executor.go | 38 ++++---- pkg/iac/scanners/terraform/ignore_test.go | 94 ++++++++++--------- 5 files changed, 178 insertions(+), 65 deletions(-) diff --git a/docs/docs/scanner/misconfiguration/index.md b/docs/docs/scanner/misconfiguration/index.md index f76dc9392363..b243d3e8dc17 100644 --- a/docs/docs/scanner/misconfiguration/index.md +++ b/docs/docs/scanner/misconfiguration/index.md @@ -547,4 +547,15 @@ module "s3_bucket" { bucket = each.value } ``` -[custom]: custom/index.md \ No newline at end of file + +#### Support for Wildcards + +You can use wildcards in the `ws` (workspace) and `ignore` sections of the ignore rules. + +```tf +# trivy:ignore:aws-s3-*:ws:dev-* +``` + +This example ignores all checks starting with `aws-s3-` for workspaces matching the pattern `dev-*`. + +[custom]: custom/index.md diff --git a/pkg/iac/ignore/rule.go b/pkg/iac/ignore/rule.go index d81f17576915..61057ce75f87 100644 --- a/pkg/iac/ignore/rule.go +++ b/pkg/iac/ignore/rule.go @@ -1,7 +1,9 @@ package ignore import ( + "regexp" "slices" + "strings" "time" "github.com/samber/lo" @@ -10,7 +12,7 @@ import ( ) // Ignorer represents a function that checks if the rule should be ignored. 
-type Ignorer func(resultMeta types.Metadata, param any) bool +type Ignorer func(resultMeta types.Metadata, ignoredParam any) bool type Rules []Rule @@ -88,7 +90,16 @@ func defaultIgnorers(ids []string) map[string]Ignorer { return map[string]Ignorer{ "id": func(_ types.Metadata, param any) bool { id, ok := param.(string) - return ok && (id == "*" || len(ids) == 0 || slices.Contains(ids, id)) + if !ok { + return false + } + if id == "*" || len(ids) == 0 { + return true + } + + return slices.ContainsFunc(ids, func(s string) bool { + return MatchPattern(s, id) + }) }, "exp": func(_ types.Metadata, param any) bool { expiry, ok := param.(time.Time) @@ -96,3 +107,25 @@ func defaultIgnorers(ids []string) map[string]Ignorer { }, } } + +// MatchPattern checks if the pattern string matches the input pattern. +// The wildcard '*' in the pattern matches any sequence of characters. +func MatchPattern(input, pattern string) bool { + matched, err := regexp.MatchString(regexpFromPattern(pattern), input) + return err == nil && matched +} + +func regexpFromPattern(pattern string) string { + parts := strings.Split(pattern, "*") + if len(parts) == 1 { + return "^" + pattern + "$" + } + var sb strings.Builder + for i, literal := range parts { + if i > 0 { + sb.WriteString(".*") + } + sb.WriteString(regexp.QuoteMeta(literal)) + } + return "^" + sb.String() + "$" +} diff --git a/pkg/iac/ignore/rule_test.go b/pkg/iac/ignore/rule_test.go index 6b35e52efe43..da89ca2a3595 100644 --- a/pkg/iac/ignore/rule_test.go +++ b/pkg/iac/ignore/rule_test.go @@ -172,6 +172,24 @@ func TestRules_Ignore(t *testing.T) { }, shouldIgnore: false, }, + { + name: "with valid wildcard", + src: `#trivy:ignore:rule-*`, + args: args{ + metadata: metadataWithLine(filename, 2), + ids: []string{"rule-1"}, + }, + shouldIgnore: true, + }, + { + name: "with non-valid wildcard", + src: `#trivy:ignore:rule-1-*d`, + args: args{ + metadata: metadataWithLine(filename, 2), + ids: []string{"rule-1-abc"}, + }, + shouldIgnore: 
false, + }, } for _, tt := range tests { @@ -220,6 +238,27 @@ func TestRules_IgnoreWithCustomIgnorer(t *testing.T) { }, shouldIgnore: true, }, + { + name: "with wildcard", + src: `#trivy:ignore:rule-1:ws:dev-*`, + parser: &ignore.StringMatchParser{ + SectionKey: "ws", + }, + args: args{ + metadata: metadataWithLine(filename, 2), + ids: []string{"rule-1"}, + ignorers: map[string]ignore.Ignorer{ + "ws": func(_ types.Metadata, param any) bool { + ws, ok := param.(string) + if !ok { + return false + } + return ignore.MatchPattern("dev-stage1", ws) + }, + }, + }, + shouldIgnore: true, + }, { name: "bad", src: `#trivy:ignore:rule-1:ws:prod`, @@ -251,3 +290,25 @@ func TestRules_IgnoreWithCustomIgnorer(t *testing.T) { }) } } + +func TestMatchPattern(t *testing.T) { + tests := []struct { + input string + pattern string + expected bool + }{ + {"foo-test-bar", "*-test-*", true}, + {"foo-test-bar", "*-example-*", false}, + {"test", "*test", true}, + {"example", "test", false}, + {"example-test", "*-test*", true}, + {"example-test", "*example-*", true}, + } + + for _, tc := range tests { + t.Run(tc.input+":"+tc.pattern, func(t *testing.T) { + got := ignore.MatchPattern(tc.input, tc.pattern) + assert.Equal(t, tc.expected, got) + }) + } +} diff --git a/pkg/iac/scanners/terraform/executor/executor.go b/pkg/iac/scanners/terraform/executor/executor.go index efc140b89b46..96c4939d756b 100644 --- a/pkg/iac/scanners/terraform/executor/executor.go +++ b/pkg/iac/scanners/terraform/executor/executor.go @@ -99,22 +99,8 @@ func (e *Executor) Execute(modules terraform.Modules) (scan.Results, error) { } ignorers := map[string]ignore.Ignorer{ - "ws": func(_ types.Metadata, param any) bool { - ws, ok := param.(string) - if !ok { - return false - } - - return ws == e.workspaceName - }, - "ignore": func(resultMeta types.Metadata, param any) bool { - params, ok := param.(map[string]string) - if !ok { - return false - } - - return ignoreByParams(params, modules, &resultMeta) - }, + "ws": 
workspaceIgnorer(e.workspaceName), + "ignore": attributeIgnorer(modules), } results.Ignore(ignores, ignorers) @@ -229,3 +215,23 @@ func ignoreByParams(params map[string]string, modules terraform.Modules, m *type } return true } + +func workspaceIgnorer(ws string) ignore.Ignorer { + return func(_ types.Metadata, param any) bool { + ignoredWorkspace, ok := param.(string) + if !ok { + return false + } + return ignore.MatchPattern(ws, ignoredWorkspace) + } +} + +func attributeIgnorer(modules terraform.Modules) ignore.Ignorer { + return func(resultMeta types.Metadata, param any) bool { + params, ok := param.(map[string]string) + if !ok { + return false + } + return ignoreByParams(params, modules, &resultMeta) + } +} diff --git a/pkg/iac/scanners/terraform/ignore_test.go b/pkg/iac/scanners/terraform/ignore_test.go index 0e8c0c8bfdd5..ddddd7a6e04e 100644 --- a/pkg/iac/scanners/terraform/ignore_test.go +++ b/pkg/iac/scanners/terraform/ignore_test.go @@ -599,7 +599,10 @@ data "aws_iam_policy_document" "test_policy" { resources = ["*"] # trivy:ignore:aws-iam-enforce-mfa } } -`, assertLength: 0}} +`, + assertLength: 0, + }, + } reg := rules.Register(exampleRule) defer rules.Deregister(reg) @@ -612,16 +615,53 @@ data "aws_iam_policy_document" "test_policy" { } } -func Test_IgnoreIgnoreWithExpiryAndWorkspaceAndWorkspaceSupplied(t *testing.T) { +func Test_IgnoreByWorkspace(t *testing.T) { reg := rules.Register(exampleRule) defer rules.Deregister(reg) - results := scanHCLWithWorkspace(t, ` -# tfsec:ignore:aws-service-abc123:exp:2221-01-02:ws:testworkspace -resource "bad" "my-rule" { -} -`, "testworkspace") - assert.Len(t, results.GetFailed(), 0) + tests := []struct { + name string + src string + expectedFailed int + }{ + { + name: "with expiry and workspace", + src: `# tfsec:ignore:aws-service-abc123:exp:2221-01-02:ws:testworkspace +resource "bad" "my-rule" {}`, + expectedFailed: 0, + }, + { + name: "bad workspace", + src: `# 
tfsec:ignore:aws-service-abc123:exp:2221-01-02:ws:otherworkspace +resource "bad" "my-rule" {}`, + expectedFailed: 1, + }, + { + name: "with expiry and workspace, trivy prefix", + src: `# trivy:ignore:aws-service-abc123:exp:2221-01-02:ws:testworkspace +resource "bad" "my-rule" {}`, + expectedFailed: 0, + }, + { + name: "bad workspace, trivy prefix", + src: `# trivy:ignore:aws-service-abc123:exp:2221-01-02:ws:otherworkspace +resource "bad" "my-rule" {}`, + expectedFailed: 1, + }, + { + name: "workspace with wildcard", + src: `# tfsec:ignore:*:ws:test* +resource "bad" "my-rule" {}`, + expectedFailed: 0, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + results := scanHCLWithWorkspace(t, tt.src, "testworkspace") + assert.Len(t, results.GetFailed(), tt.expectedFailed) + }) + } } func Test_IgnoreInline(t *testing.T) { @@ -636,19 +676,6 @@ func Test_IgnoreInline(t *testing.T) { assert.Len(t, results.GetFailed(), 0) } -func Test_IgnoreIgnoreWithExpiryAndWorkspaceButWrongWorkspaceSupplied(t *testing.T) { - reg := rules.Register(exampleRule) - defer rules.Deregister(reg) - - results := scanHCLWithWorkspace(t, ` -# tfsec:ignore:aws-service-abc123:exp:2221-01-02:ws:otherworkspace -resource "bad" "my-rule" { - -} -`, "testworkspace") - assert.Len(t, results.GetFailed(), 1) -} - func Test_IgnoreWithAliasCodeStillIgnored(t *testing.T) { reg := rules.Register(exampleRule) defer rules.Deregister(reg) @@ -662,31 +689,6 @@ resource "bad" "my-rule" { assert.Len(t, results.GetFailed(), 0) } -func Test_TrivyIgnoreIgnoreWithExpiryAndWorkspaceAndWorkspaceSupplied(t *testing.T) { - reg := rules.Register(exampleRule) - defer rules.Deregister(reg) - - results := scanHCLWithWorkspace(t, ` -# trivy:ignore:aws-service-abc123:exp:2221-01-02:ws:testworkspace -resource "bad" "my-rule" { -} -`, "testworkspace") - assert.Len(t, results.GetFailed(), 0) -} - -func Test_TrivyIgnoreIgnoreWithExpiryAndWorkspaceButWrongWorkspaceSupplied(t *testing.T) { - reg := 
rules.Register(exampleRule) - defer rules.Deregister(reg) - - results := scanHCLWithWorkspace(t, ` -# trivy:ignore:aws-service-abc123:exp:2221-01-02:ws:otherworkspace -resource "bad" "my-rule" { - -} -`, "testworkspace") - assert.Len(t, results.GetFailed(), 1) -} - func Test_TrivyIgnoreWithAliasCodeStillIgnored(t *testing.T) { reg := rules.Register(exampleRule) defer rules.Deregister(reg) From 1c49a16c65ecc63a24d9957174ca91d088855a2a Mon Sep 17 00:00:00 2001 From: simar7 <1254783+simar7@users.noreply.github.com> Date: Tue, 2 Apr 2024 22:30:18 -0600 Subject: [PATCH 42/57] fix(misconf): Escape template value correctly (#6292) Signed-off-by: Simar --- .../terraformplan/tfjson/scanner_test.go | 131 +++-- .../terraformplan/tfjson/test/parser_test.go | 1 - .../test/testdata/plan_with_template.json | 480 ++++++++++++++++++ pkg/iac/terraform/resource_block.go | 27 +- 4 files changed, 580 insertions(+), 59 deletions(-) create mode 100644 pkg/iac/scanners/terraformplan/tfjson/test/testdata/plan_with_template.json diff --git a/pkg/iac/scanners/terraformplan/tfjson/scanner_test.go b/pkg/iac/scanners/terraformplan/tfjson/scanner_test.go index fe37184ebb20..664799f74036 100644 --- a/pkg/iac/scanners/terraformplan/tfjson/scanner_test.go +++ b/pkg/iac/scanners/terraformplan/tfjson/scanner_test.go @@ -13,11 +13,19 @@ import ( "github.com/stretchr/testify/require" ) -func Test_OptionWithPolicyDirs_OldRegoMetadata(t *testing.T) { - b, _ := os.ReadFile("test/testdata/plan.json") - fs := testutil.CreateFS(t, map[string]string{ - "/code/main.tfplan.json": string(b), - "/rules/test.rego": ` +func Test_TerraformScanner(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + inputFile string + inputRego string + options []options.ScannerOption + }{ + { + name: "old rego metadata", + inputFile: "test/testdata/plan.json", + inputRego: ` package defsec.abcdefg __rego_metadata__ := { @@ -43,36 +51,46 @@ deny[cause] { cause := bucket.name } `, - }) - - debugLog := 
bytes.NewBuffer([]byte{}) - scanner := New( - options.ScannerWithDebug(debugLog), - options.ScannerWithPolicyFilesystem(fs), - options.ScannerWithPolicyDirs("rules"), - options.ScannerWithRegoOnly(true), - options.ScannerWithEmbeddedPolicies(false), - ) - - results, err := scanner.ScanFS(context.TODO(), fs, "code") - require.NoError(t, err) - - require.Len(t, results.GetFailed(), 1) - - failure := results.GetFailed()[0] + options: []options.ScannerOption{ + options.ScannerWithPolicyDirs("rules"), + options.ScannerWithRegoOnly(true), + options.ScannerWithEmbeddedPolicies(false)}, + }, + { + name: "with user namespace", + inputFile: "test/testdata/plan.json", + inputRego: ` +# METADATA +# title: Bad buckets are bad +# description: Bad buckets are bad because they are not good. +# scope: package +# schemas: +# - input: schema["input"] +# custom: +# avd_id: AVD-TEST-0123 +# severity: CRITICAL +# short_code: very-bad-misconfig +# recommended_action: "Fix the s3 bucket" - assert.Equal(t, "AVD-TEST-0123", failure.Rule().AVDID) - if t.Failed() { - fmt.Printf("Debug logs:\n%s\n", debugLog.String()) - } +package user.foobar.ABC001 +deny[cause] { + bucket := input.aws.s3.buckets[_] + bucket.name.value == "tfsec-plan-testing" + cause := bucket.name } - -func Test_OptionWithPolicyDirs_WithUserNamespace(t *testing.T) { - b, _ := os.ReadFile("test/testdata/plan.json") - fs := testutil.CreateFS(t, map[string]string{ - "/code/main.tfplan.json": string(b), - "/rules/test.rego": ` +`, + options: []options.ScannerOption{ + options.ScannerWithPolicyDirs("rules"), + options.ScannerWithRegoOnly(true), + options.ScannerWithEmbeddedPolicies(false), + options.ScannerWithPolicyNamespaces("user"), + }, + }, + { + name: "with templated plan json", + inputFile: "test/testdata/plan_with_template.json", + inputRego: ` # METADATA # title: Bad buckets are bad # description: Bad buckets are bad because they are not good. 
@@ -89,32 +107,43 @@ package user.foobar.ABC001 deny[cause] { bucket := input.aws.s3.buckets[_] - bucket.name.value == "tfsec-plan-testing" + bucket.name.value == "${template-name-is-$evil}" cause := bucket.name } `, - }) + options: []options.ScannerOption{ + options.ScannerWithPolicyDirs("rules"), + options.ScannerWithRegoOnly(true), + options.ScannerWithEmbeddedPolicies(false), + options.ScannerWithPolicyNamespaces("user"), + }, + }, + } - debugLog := bytes.NewBuffer([]byte{}) - scanner := New( - options.ScannerWithDebug(debugLog), - options.ScannerWithPolicyFilesystem(fs), - options.ScannerWithPolicyDirs("rules"), - options.ScannerWithRegoOnly(true), - options.ScannerWithPolicyNamespaces("user"), - options.ScannerWithEmbeddedPolicies(false), - ) + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(t *testing.T) { + b, _ := os.ReadFile(tc.inputFile) + fs := testutil.CreateFS(t, map[string]string{ + "/code/main.tfplan.json": string(b), + "/rules/test.rego": tc.inputRego, + }) - results, err := scanner.ScanFS(context.TODO(), fs, "code") - require.NoError(t, err) + debugLog := bytes.NewBuffer([]byte{}) + so := append(tc.options, options.ScannerWithDebug(debugLog), options.ScannerWithPolicyFilesystem(fs)) + scanner := New(so...) 
- require.Len(t, results.GetFailed(), 1) + results, err := scanner.ScanFS(context.TODO(), fs, "code") + require.NoError(t, err) - failure := results.GetFailed()[0] + require.Len(t, results.GetFailed(), 1) - assert.Equal(t, "AVD-TEST-0123", failure.Rule().AVDID) - if t.Failed() { - fmt.Printf("Debug logs:\n%s\n", debugLog.String()) - } + failure := results.GetFailed()[0] + assert.Equal(t, "AVD-TEST-0123", failure.Rule().AVDID) + if t.Failed() { + fmt.Printf("Debug logs:\n%s\n", debugLog.String()) + } + }) + } } diff --git a/pkg/iac/scanners/terraformplan/tfjson/test/parser_test.go b/pkg/iac/scanners/terraformplan/tfjson/test/parser_test.go index 4f81dec89751..97b9ba4fcf7b 100644 --- a/pkg/iac/scanners/terraformplan/tfjson/test/parser_test.go +++ b/pkg/iac/scanners/terraformplan/tfjson/test/parser_test.go @@ -9,7 +9,6 @@ import ( ) func Test_Parse_Plan_File(t *testing.T) { - planFile, err := parser.New().ParseFile("testdata/plan.json") require.NoError(t, err) diff --git a/pkg/iac/scanners/terraformplan/tfjson/test/testdata/plan_with_template.json b/pkg/iac/scanners/terraformplan/tfjson/test/testdata/plan_with_template.json new file mode 100644 index 000000000000..2ae6e5c8d7ed --- /dev/null +++ b/pkg/iac/scanners/terraformplan/tfjson/test/testdata/plan_with_template.json @@ -0,0 +1,480 @@ +{ + "format_version": "0.2", + "terraform_version": "1.0.3", + "variables": { + "bucket_name": { + "value": "${template-name-is-$evil}" + } + }, + "planned_values": { + "root_module": { + "resources": [ + { + "address": "aws_s3_bucket.planbucket", + "mode": "managed", + "type": "aws_s3_bucket", + "name": "planbucket", + "provider_name": "registry.terraform.io/hashicorp/aws", + "schema_version": 0, + "values": { + "bucket": "${template-name-is-$evil}", + "bucket_prefix": null, + "force_destroy": false, + "logging": [ + { + "target_bucket": "arn:aws:s3:::iac-tfsec-dev", + "target_prefix": null + } + ], + "tags": null, + "versioning": [ + { + "enabled": true, + "mfa_delete": false + } 
+ ] + }, + "sensitive_values": { + "cors_rule": [], + "grant": [], + "lifecycle_rule": [], + "logging": [ + {} + ], + "object_lock_configuration": [], + "replication_configuration": [], + "server_side_encryption_configuration": [], + "tags_all": {}, + "versioning": [ + {} + ], + "website": [] + } + }, + { + "address": "aws_s3_bucket_server_side_encryption_configuration.example", + "mode": "managed", + "type": "aws_s3_bucket_server_side_encryption_configuration", + "name": "example", + "provider_name": "registry.terraform.io/hashicorp/aws", + "schema_version": 0, + "values": { + "expected_bucket_owner": null, + "rule": [ + { + "apply_server_side_encryption_by_default": [ + { + "kms_master_key_id": "", + "sse_algorithm": "AES256" + } + ], + "bucket_key_enabled": true + } + ] + }, + "sensitive_values": { + "rule": [ + { + "apply_server_side_encryption_by_default": [ + {} + ] + } + ] + } + }, + { + "address": "aws_security_group.sg", + "mode": "managed", + "type": "aws_security_group", + "name": "sg", + "provider_name": "registry.terraform.io/hashicorp/aws", + "schema_version": 1, + "values": { + "description": "Managed by Terraform", + "ingress": [ + { + "cidr_blocks": [ + "0.0.0.0/0" + ], + "description": "", + "from_port": 80, + "ipv6_cidr_blocks": [], + "prefix_list_ids": [], + "protocol": "tcp", + "security_groups": [], + "self": false, + "to_port": 80 + } + ], + "name": "sg", + "revoke_rules_on_delete": false, + "tags": { + "Name": "blah" + }, + "tags_all": { + "Name": "blah" + }, + "timeouts": null + }, + "sensitive_values": { + "egress": [], + "ingress": [ + { + "cidr_blocks": [ + false + ], + "ipv6_cidr_blocks": [], + "prefix_list_ids": [], + "security_groups": [] + } + ], + "tags": {}, + "tags_all": {} + } + } + ] + } + }, + "resource_changes": [ + { + "address": "aws_s3_bucket.planbucket", + "mode": "managed", + "type": "aws_s3_bucket", + "name": "planbucket", + "provider_name": "registry.terraform.io/hashicorp/aws", + "change": { + "actions": [ + "create" + 
], + "before": null, + "after": { + "bucket": "${template-name-is-$evil}", + "bucket_prefix": null, + "force_destroy": false, + "logging": [ + { + "target_bucket": "arn:aws:s3:::iac-tfsec-dev", + "target_prefix": null + } + ], + "tags": null, + "versioning": [ + { + "enabled": true, + "mfa_delete": false + } + ] + }, + "after_unknown": { + "acceleration_status": true, + "acl": true, + "arn": true, + "bucket_domain_name": true, + "bucket_regional_domain_name": true, + "cors_rule": true, + "grant": true, + "hosted_zone_id": true, + "id": true, + "lifecycle_rule": true, + "logging": [ + {} + ], + "object_lock_configuration": true, + "object_lock_enabled": true, + "policy": true, + "region": true, + "replication_configuration": true, + "request_payer": true, + "server_side_encryption_configuration": true, + "tags_all": true, + "versioning": [ + {} + ], + "website": true, + "website_domain": true, + "website_endpoint": true + }, + "before_sensitive": false, + "after_sensitive": { + "cors_rule": [], + "grant": [], + "lifecycle_rule": [], + "logging": [ + {} + ], + "object_lock_configuration": [], + "replication_configuration": [], + "server_side_encryption_configuration": [], + "tags_all": {}, + "versioning": [ + {} + ], + "website": [] + } + } + }, + { + "address": "aws_s3_bucket_server_side_encryption_configuration.example", + "mode": "managed", + "type": "aws_s3_bucket_server_side_encryption_configuration", + "name": "example", + "provider_name": "registry.terraform.io/hashicorp/aws", + "change": { + "actions": [ + "create" + ], + "before": null, + "after": { + "expected_bucket_owner": null, + "rule": [ + { + "apply_server_side_encryption_by_default": [ + { + "kms_master_key_id": "", + "sse_algorithm": "AES256" + } + ], + "bucket_key_enabled": true + } + ] + }, + "after_unknown": { + "bucket": true, + "id": true, + "rule": [ + { + "apply_server_side_encryption_by_default": [ + {} + ] + } + ] + }, + "before_sensitive": false, + "after_sensitive": { + "rule": [ + { + 
"apply_server_side_encryption_by_default": [ + {} + ] + } + ] + } + } + }, + { + "address": "aws_security_group.sg", + "mode": "managed", + "type": "aws_security_group", + "name": "sg", + "provider_name": "registry.terraform.io/hashicorp/aws", + "change": { + "actions": [ + "create" + ], + "before": null, + "after": { + "description": "Managed by Terraform", + "ingress": [ + { + "cidr_blocks": [ + "0.0.0.0/0" + ], + "description": "", + "from_port": 80, + "ipv6_cidr_blocks": [], + "prefix_list_ids": [], + "protocol": "tcp", + "security_groups": [], + "self": false, + "to_port": 80 + } + ], + "name": "sg", + "revoke_rules_on_delete": false, + "tags": { + "Name": "blah" + }, + "tags_all": { + "Name": "blah" + }, + "timeouts": null + }, + "after_unknown": { + "arn": true, + "egress": true, + "id": true, + "ingress": [ + { + "cidr_blocks": [ + false + ], + "ipv6_cidr_blocks": [], + "prefix_list_ids": [], + "security_groups": [] + } + ], + "name_prefix": true, + "owner_id": true, + "tags": {}, + "tags_all": {}, + "vpc_id": true + }, + "before_sensitive": false, + "after_sensitive": { + "egress": [], + "ingress": [ + { + "cidr_blocks": [ + false + ], + "ipv6_cidr_blocks": [], + "prefix_list_ids": [], + "security_groups": [] + } + ], + "tags": {}, + "tags_all": {} + } + } + } + ], + "prior_state": { + "format_version": "0.2", + "terraform_version": "1.0.3", + "values": { + "root_module": { + "resources": [ + { + "address": "data.aws_s3_bucket.logging_bucket", + "mode": "data", + "type": "aws_s3_bucket", + "name": "logging_bucket", + "provider_name": "registry.terraform.io/hashicorp/aws", + "schema_version": 0, + "values": { + "arn": "arn:aws:s3:::iac-tfsec-dev", + "bucket": "iac-tfsec-dev", + "bucket_domain_name": "iac-tfsec-dev.s3.amazonaws.com", + "bucket_regional_domain_name": "iac-tfsec-dev.s3.amazonaws.com", + "hosted_zone_id": "Z3AQBSTGFYJSTF", + "id": "iac-tfsec-dev", + "region": "us-east-1", + "website_domain": null, + "website_endpoint": null + }, + 
"sensitive_values": {} + } + ] + } + } + }, + "configuration": { + "provider_config": { + "aws": { + "name": "aws" + } + }, + "root_module": { + "resources": [ + { + "address": "aws_s3_bucket.planbucket", + "mode": "managed", + "type": "aws_s3_bucket", + "name": "planbucket", + "provider_config_key": "aws", + "expressions": { + "bucket": { + "references": [ + "var.bucket_name" + ] + }, + "logging": [ + { + "target_bucket": { + "references": [ + "data.aws_s3_bucket.logging_bucket.arn", + "data.aws_s3_bucket.logging_bucket" + ] + } + } + ], + "versioning": [ + { + "enabled": { + "constant_value": true + } + } + ] + }, + "schema_version": 0 + }, + { + "address": "aws_s3_bucket_server_side_encryption_configuration.example", + "mode": "managed", + "type": "aws_s3_bucket_server_side_encryption_configuration", + "name": "example", + "provider_config_key": "aws", + "expressions": { + "bucket": { + "references": [ + "aws_s3_bucket.planbucket.id", + "aws_s3_bucket.planbucket" + ] + }, + "rule": [ + { + "apply_server_side_encryption_by_default": [ + { + "sse_algorithm": { + "constant_value": "AES256" + } + } + ], + "bucket_key_enabled": { + "constant_value": true + } + } + ] + }, + "schema_version": 0 + }, + { + "address": "aws_security_group.sg", + "mode": "managed", + "type": "aws_security_group", + "name": "sg", + "provider_config_key": "aws", + "expressions": { + "name": { + "constant_value": "sg" + }, + "tags": { + "constant_value": { + "Name": "blah" + } + } + }, + "schema_version": 1 + }, + { + "address": "data.aws_s3_bucket.logging_bucket", + "mode": "data", + "type": "aws_s3_bucket", + "name": "logging_bucket", + "provider_config_key": "aws", + "expressions": { + "bucket": { + "constant_value": "iac-tfsec-dev" + } + }, + "schema_version": 0 + } + ], + "variables": { + "bucket_name": { + "default": "${template-name-is-$evil}" + } + } + } + } +} \ No newline at end of file diff --git a/pkg/iac/terraform/resource_block.go b/pkg/iac/terraform/resource_block.go index 
cc50c8d9b872..3339675ee304 100644 --- a/pkg/iac/terraform/resource_block.go +++ b/pkg/iac/terraform/resource_block.go @@ -3,6 +3,7 @@ package terraform import ( "bytes" "fmt" + "regexp" "strings" "text/template" ) @@ -91,13 +92,7 @@ func renderPrimitive(val interface{}) string { case PlanReference: return fmt.Sprintf("%v", t.Value) case string: - if strings.Contains(t, "\n") { - return fmt.Sprintf(`< Date: Wed, 3 Apr 2024 20:36:44 +0400 Subject: [PATCH 43/57] chore(deps): bump github.com/Azure/azure-sdk-for-go/sdk/azcore from 1.9.0 to 1.10.0 (#6427) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 4 ++-- go.sum | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/go.mod b/go.mod index 64d64beeea65..dc49b23e5b9d 100644 --- a/go.mod +++ b/go.mod @@ -4,7 +4,7 @@ go 1.21 require ( github.com/Azure/azure-sdk-for-go v68.0.0+incompatible - github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.0 + github.com/Azure/azure-sdk-for-go/sdk/azcore v1.10.0 github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.4.0 github.com/BurntSushi/toml v1.3.2 github.com/CycloneDX/cyclonedx-go v0.8.0 @@ -149,7 +149,7 @@ require ( dario.cat/mergo v1.0.0 // indirect github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24 // indirect github.com/AdamKorcz/go-118-fuzz-build v0.0.0-20230306123547-8075edf89bb0 // indirect - github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.0 // indirect + github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.2 // indirect github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 // indirect github.com/Azure/go-autorest v14.2.0+incompatible // indirect github.com/Azure/go-autorest/autorest v0.11.29 // indirect diff --git a/go.sum b/go.sum index b6eb0f6da7bc..8ae7f6a1404a 100644 --- a/go.sum +++ b/go.sum @@ -196,12 +196,12 @@ github.com/AdamKorcz/go-118-fuzz-build v0.0.0-20230306123547-8075edf89bb0/go.mod github.com/Azure/azure-sdk-for-go 
v16.2.1+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= github.com/Azure/azure-sdk-for-go v68.0.0+incompatible h1:fcYLmCpyNYRnvJbPerq7U0hS+6+I79yEDJBqVNcqUzU= github.com/Azure/azure-sdk-for-go v68.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= -github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.0 h1:fb8kj/Dh4CSwgsOzHeZY4Xh68cFVbzXx+ONXGMY//4w= -github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.0/go.mod h1:uReU2sSxZExRPBAg3qKzmAucSi51+SP1OhohieR821Q= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.10.0 h1:n1DH8TPV4qqPTje2RcUBYwtrTWlabVp4n46+74X2pn4= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.10.0/go.mod h1:HDcZnuGbiyppErN6lB+idp4CKhjbc8gwjto6OPpyggM= github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.4.0 h1:BMAjVKJM0U/CYF27gA0ZMmXGkOcvfFtD0oHVZ1TIPRI= github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.4.0/go.mod h1:1fXstnBMas5kzG+S3q8UoJcmyU6nUeunJcMDHcRYHhs= -github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.0 h1:d81/ng9rET2YqdVkVwkb6EXeRrLJIwyGnJcAlAWKwhs= -github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.0/go.mod h1:s4kgfzA0covAXNicZHDMN58jExvcng2mC/DepXiF1EI= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.2 h1:LqbJ/WzJUwBf8UiaSzgX7aMclParm9/5Vgp+TY51uBQ= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.2/go.mod h1:yInRyqWXAuaPrgI7p70+lDDgh3mlBohis29jGMISnmc= github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78/go.mod h1:LmzpDX56iTiv29bbRTIsUNlaFfuhWRQBWjQdVyAevI8= github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8= github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= From e98c873ed0f710f4569a06d2b38220d9e2c47630 Mon Sep 17 00:00:00 2001 From: Prajyot Parab Date: Wed, 3 Apr 2024 22:25:03 +0530 Subject: [PATCH 44/57] chore(deps): bump github.com/testcontainers/testcontainers-go to v0.28.0 (#6387) Signed-off-by: Prajyot-Parab --- go.mod | 14 +++++++++----- go.sum 
| 29 +++++++++++++++++++++++++---- 2 files changed, 34 insertions(+), 9 deletions(-) diff --git a/go.mod b/go.mod index dc49b23e5b9d..7cc33d88b91c 100644 --- a/go.mod +++ b/go.mod @@ -98,7 +98,7 @@ require ( github.com/spf13/viper v1.18.2 github.com/stretchr/testify v1.8.4 github.com/testcontainers/testcontainers-go v0.28.0 - github.com/testcontainers/testcontainers-go/modules/localstack v0.26.0 + github.com/testcontainers/testcontainers-go/modules/localstack v0.28.0 github.com/tetratelabs/wazero v1.7.0 github.com/twitchtv/twirp v8.1.2+incompatible github.com/xeipuuv/gojsonschema v1.2.0 @@ -264,6 +264,7 @@ require ( github.com/go-ini/ini v1.67.0 // indirect github.com/go-logr/logr v1.4.1 // indirect github.com/go-logr/stdr v1.2.2 // indirect + github.com/go-ole/go-ole v1.2.6 // indirect github.com/go-openapi/analysis v0.21.5 // indirect github.com/go-openapi/errors v0.21.0 // indirect github.com/go-openapi/jsonpointer v0.20.1 // indirect @@ -315,6 +316,7 @@ require ( github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 // indirect github.com/lib/pq v1.10.9 // indirect github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de // indirect + github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect github.com/lunixbochs/struc v0.0.0-20200707160740-784aaebc1d40 // indirect github.com/magiconair/properties v1.8.7 // indirect github.com/mattn/go-colorable v0.1.13 // indirect @@ -348,6 +350,7 @@ require ( github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 // indirect github.com/pkg/errors v0.9.1 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect + github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect github.com/prometheus/client_golang v1.19.0 // indirect github.com/prometheus/client_model v0.5.0 // indirect github.com/prometheus/common v0.48.0 // indirect @@ -362,6 +365,8 @@ require ( github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 // indirect github.com/sergi/go-diff v1.3.1 
// indirect github.com/shibumi/go-pathspec v1.3.0 // indirect + github.com/shirou/gopsutil/v3 v3.23.12 // indirect + github.com/shoenig/go-m1cpu v0.1.6 // indirect github.com/shopspring/decimal v1.3.1 // indirect github.com/skeema/knownhosts v1.2.1 // indirect github.com/sourcegraph/conc v0.3.0 // indirect @@ -369,6 +374,8 @@ require ( github.com/stretchr/objx v0.5.0 // indirect github.com/subosito/gotenv v1.6.0 // indirect github.com/tchap/go-patricia/v2 v2.3.1 // indirect + github.com/tklauser/go-sysconf v0.3.12 // indirect + github.com/tklauser/numcpus v0.6.1 // indirect github.com/ulikunitz/xz v0.5.11 // indirect github.com/vbatts/tar-split v0.11.3 // indirect github.com/xanzy/ssh-agent v0.3.3 // indirect @@ -376,6 +383,7 @@ require ( github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect github.com/yashtewari/glob-intersection v0.2.0 // indirect github.com/yuin/gopher-lua v1.1.0 // indirect + github.com/yusufpapurcu/wmi v1.2.3 // indirect go.mongodb.org/mongo-driver v1.13.1 // indirect go.opencensus.io v0.24.0 // indirect go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.46.1 // indirect @@ -427,7 +435,3 @@ require ( sigs.k8s.io/structured-merge-diff/v4 v4.4.1 // indirect sigs.k8s.io/yaml v1.4.0 // indirect ) - -// testcontainers-go has a bug with versions v0.25.0 and v0.26.0 -// ref: https://github.com/testcontainers/testcontainers-go/issues/1782 -replace github.com/testcontainers/testcontainers-go => github.com/testcontainers/testcontainers-go v0.23.0 diff --git a/go.sum b/go.sum index 8ae7f6a1404a..d1b2fc815df0 100644 --- a/go.sum +++ b/go.sum @@ -827,6 +827,8 @@ github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ= github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= 
+github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= +github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= github.com/go-openapi/analysis v0.21.5 h1:3tHfEBh6Ia8eKc4M7khOGjPOAlWKJ10d877Cr9teujI= github.com/go-openapi/analysis v0.21.5/go.mod h1:25YcZosX9Lwz2wBsrFrrsL8bmjjXdlyP6zsr2AMy29M= github.com/go-openapi/errors v0.21.0 h1:FhChC/duCnfoLj1gZ0BgaBmzhJC2SL/sJr8a2vAobSY= @@ -1230,6 +1232,8 @@ github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de h1:9TO3cAIGXtEhnIaL+V+BEER86oLrvS+kWobKpbJuye0= github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de/go.mod h1:zAbeS9B/r2mtpb6U+EI2rYA5OAXxsYw6wTamcNW+zcE= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= github.com/lunixbochs/struc v0.0.0-20200707160740-784aaebc1d40 h1:EnfXoSqDfSNJv0VBNqY/88RNnhSGYkrHaO0mmFGbVsc= github.com/lunixbochs/struc v0.0.0-20200707160740-784aaebc1d40/go.mod h1:vy1vK6wD6j7xX6O6hXe621WabdtNkou2h7uRtTfRMyg= github.com/magefile/mage v1.15.0 h1:BvGheCMAsG3bWUDbZ8AyXXpCNwU9u5CB6sM+HNb9HYg= @@ -1455,6 +1459,8 @@ github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZN github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF4JjgDlrVEn3C11VoGHZN7m8qihwgMEtzYw= +github.com/power-devops/perfstat 
v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= github.com/poy/onpar v1.1.2 h1:QaNrNiZx0+Nar5dLgTVp5mXkyoVFIbepjyEoGSnhbAY= github.com/poy/onpar v1.1.2/go.mod h1:6X8FLNoxyr9kkmnlqpK6LSoiOtrO6MICtWwEuWkLjzg= github.com/pquerna/cachecontrol v0.0.0-20171018203845-0dec1b30a021/go.mod h1:prYjPmNq4d1NPVmpShWobRqXY3q7Vp+80DqgxxUrUIA= @@ -1535,6 +1541,12 @@ github.com/sergi/go-diff v1.3.1 h1:xkr+Oxo4BOQKmkn/B9eMK0g5Kg/983T9DqqPHwYqD+8= github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NFbPK1I= github.com/shibumi/go-pathspec v1.3.0 h1:QUyMZhFo0Md5B8zV8x2tesohbb5kfbpTi9rBnKh5dkI= github.com/shibumi/go-pathspec v1.3.0/go.mod h1:Xutfslp817l2I1cZvgcfeMQJG5QnU2lh5tVaaMCl3jE= +github.com/shirou/gopsutil/v3 v3.23.12 h1:z90NtUkp3bMtmICZKpC4+WaknU1eXtp5vtbQ11DgpE4= +github.com/shirou/gopsutil/v3 v3.23.12/go.mod h1:1FrWgea594Jp7qmjHUUPlJDTPgcsb9mGnXDxavtikzM= +github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFtM= +github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ= +github.com/shoenig/test v0.6.4 h1:kVTaSd7WLz5WZ2IaoM0RSzRsUD+m8wRR+5qvntpn4LU= +github.com/shoenig/test v0.6.4/go.mod h1:byHiCGXqrVaflBLAMq/srcZIHynQPQgeyvkvXnjqq0k= github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8= github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= @@ -1625,12 +1637,16 @@ github.com/tchap/go-patricia/v2 v2.3.1 h1:6rQp39lgIYZ+MHmdEq4xzuk1t7OdC35z/xm0BG github.com/tchap/go-patricia/v2 v2.3.1/go.mod h1:VZRHKAb53DLaG+nA9EaYYiaEx6YztwDlLElMsnSHD4k= github.com/terminalstatic/go-xsd-validate v0.1.5 h1:RqpJnf6HGE2CB/lZB1A8BYguk8uRtcvYAPLCF15qguo= github.com/terminalstatic/go-xsd-validate v0.1.5/go.mod h1:18lsvYFofBflqCrvo1umpABZ99+GneNTw2kEEc8UPJw= -github.com/testcontainers/testcontainers-go v0.23.0 
h1:ERYTSikX01QczBLPZpqsETTBO7lInqEP349phDOVJVs= -github.com/testcontainers/testcontainers-go v0.23.0/go.mod h1:3gzuZfb7T9qfcH2pHpV4RLlWrPjeWNQah6XlYQ32c4I= -github.com/testcontainers/testcontainers-go/modules/localstack v0.26.0 h1:lpL04dHA9mGFBQLFcV+aEEh1Tf4ohXdIGgoj3J0bacM= -github.com/testcontainers/testcontainers-go/modules/localstack v0.26.0/go.mod h1:1xkZPpkBu6coI7CyVn3DXUBnsVrZ+fd/Cc8lx6zk2mk= +github.com/testcontainers/testcontainers-go v0.28.0 h1:1HLm9qm+J5VikzFDYhOd+Zw12NtOl+8drH2E8nTY1r8= +github.com/testcontainers/testcontainers-go v0.28.0/go.mod h1:COlDpUXbwW3owtpMkEB1zo9gwb1CoKVKlyrVPejF4AU= +github.com/testcontainers/testcontainers-go/modules/localstack v0.28.0 h1:NOtK4tz2J1KbdAV6Lk9AQPUXB6Op8jGzKNfwVCThRxU= +github.com/testcontainers/testcontainers-go/modules/localstack v0.28.0/go.mod h1:nLimAfgHTQfaDZ2cO8/B4Z1qr8e020sM3ybpSsOVAUY= github.com/tetratelabs/wazero v1.7.0 h1:jg5qPydno59wqjpGrHph81lbtHzTrWzwwtD4cD88+hQ= github.com/tetratelabs/wazero v1.7.0/go.mod h1:ytl6Zuh20R/eROuyDaGPkp82O9C/DJfXAwJfQ3X6/7Y= +github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU= +github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI= +github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk= +github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY= github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= github.com/twitchtv/twirp v8.1.2+incompatible h1:0O6TfzZW09ZP5r+ORA90XQEE3PTgA6C7MBbl2KxvVgE= @@ -1685,6 +1701,8 @@ github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1 github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= github.com/yuin/gopher-lua v1.1.0 
h1:BojcDhfyDWgU2f2TOzYK/g5p2gxMrku8oupLDqlnSqE= github.com/yuin/gopher-lua v1.1.0/go.mod h1:GBR0iDaNXjAgGg9zfCvksxSRnQx76gclCIb7kdAd1Pw= +github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFiw= +github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= github.com/yvasiyarov/go-metrics v0.0.0-20140926110328-57bccd1ccd43 h1:+lm10QQTNSBd8DVTNGHx7o/IKu9HYDvLMffDhbyLccI= github.com/yvasiyarov/go-metrics v0.0.0-20140926110328-57bccd1ccd43/go.mod h1:aX5oPXxHm3bOH+xeAttToC8pqch2ScQN/JoXYupl6xs= github.com/yvasiyarov/gorelic v0.0.0-20141212073537-a9bba5b9ab50 h1:hlE8//ciYMztlGpl/VA+Zm1AcTPHYkHJPbHqE6WJUXE= @@ -2010,6 +2028,7 @@ golang.org/x/sys v0.0.0-20201117170446-d9b008d0a637/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201202213521-69691e467435/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -2057,7 +2076,9 @@ golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.12.0/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= From 8e4279b863bcef675f6dbf97fffc9884242b5709 Mon Sep 17 00:00:00 2001 From: Nikita Pivkin Date: Thu, 4 Apr 2024 03:13:25 +0300 Subject: [PATCH 45/57] refactor(terraform): remove unused file (#6445) --- go.mod | 1 - go.sum | 3 - .../scanners/terraform/executor/statistics.go | 92 ------------------- 3 files changed, 96 deletions(-) delete mode 100644 pkg/iac/scanners/terraform/executor/statistics.go diff --git a/go.mod b/go.mod index 7cc33d88b91c..f384e970590a 100644 --- a/go.mod +++ b/go.mod @@ -132,7 +132,6 @@ require ( github.com/liamg/iamgo v0.0.9 github.com/liamg/memoryfs v1.6.0 github.com/mitchellh/go-homedir v1.1.0 - github.com/olekukonko/tablewriter v0.0.5 github.com/owenrumney/squealer v1.2.2 github.com/zclconf/go-cty v1.14.1 github.com/zclconf/go-cty-yaml v1.0.3 diff --git a/go.sum b/go.sum index d1b2fc815df0..5af42e617a5d 100644 --- a/go.sum +++ b/go.sum @@ -1283,7 +1283,6 @@ github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWE github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= -github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= github.com/mattn/go-runewidth v0.0.14 h1:+xnbZSEeDbOIg5/mE6JF0w6n9duR1l3/WmbinWVwUuU= github.com/mattn/go-runewidth v0.0.14/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= github.com/mattn/go-shellwords v1.0.3/go.mod h1:3xCvwCdWdlDJUrvuMn7Wuy9eWs4pE8vqg+NOMyg4B2o= @@ -1375,8 +1374,6 
@@ github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+ github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4= github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= github.com/olekukonko/tablewriter v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= -github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec= -github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY= github.com/onsi/ginkgo v0.0.0-20151202141238-7f8ab55aaf3b/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v0.0.0-20170829012221-11459a886d9c/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= diff --git a/pkg/iac/scanners/terraform/executor/statistics.go b/pkg/iac/scanners/terraform/executor/statistics.go deleted file mode 100644 index fc42985747a5..000000000000 --- a/pkg/iac/scanners/terraform/executor/statistics.go +++ /dev/null @@ -1,92 +0,0 @@ -package executor - -import ( - "encoding/json" - "fmt" - "io" - "sort" - "strconv" - "strings" - - "github.com/olekukonko/tablewriter" - - "github.com/aquasecurity/trivy/pkg/iac/scan" -) - -type StatisticsItem struct { - RuleID string `json:"rule_id"` - RuleDescription string `json:"rule_description"` - Links []string `json:"links"` - Count int `json:"count"` -} - -type Statistics []StatisticsItem - -type StatisticsResult struct { - Result Statistics `json:"results"` -} - -func SortStatistics(statistics Statistics) Statistics { - sort.Slice(statistics, func(i, j int) bool { - return statistics[i].Count > statistics[j].Count - }) - return statistics -} - -func (statistics Statistics) PrintStatisticsTable(format string, w io.Writer) error { - // lovely is the default so we keep it like that - if format != "lovely" && format != "markdown" && format != "json" { - 
return fmt.Errorf("you must specify only lovely, markdown or json format with --run-statistics") - } - - sorted := SortStatistics(statistics) - - if format == "json" { - result := StatisticsResult{Result: sorted} - val, err := json.MarshalIndent(result, "", " ") - if err != nil { - return err - } - - _, _ = fmt.Fprintln(w, string(val)) - - return nil - } - - table := tablewriter.NewWriter(w) - table.SetHeader([]string{"Rule ID", "Description", "Link", "Count"}) - table.SetRowLine(true) - - if format == "markdown" { - table.SetBorders(tablewriter.Border{Left: true, Top: false, Right: true, Bottom: false}) - table.SetCenterSeparator("|") - } - - for _, item := range sorted { - table.Append([]string{item.RuleID, - item.RuleDescription, - strings.Join(item.Links, "\n"), - strconv.Itoa(item.Count)}) - } - - table.Render() - - return nil -} - -func AddStatisticsCount(statistics Statistics, result scan.Result) Statistics { - for i, statistic := range statistics { - if statistic.RuleID == result.Rule().LongID() { - statistics[i].Count += 1 - return statistics - } - } - statistics = append(statistics, StatisticsItem{ - RuleID: result.Rule().LongID(), - RuleDescription: result.Rule().Summary, - Links: result.Rule().Links, - Count: 1, - }) - - return statistics -} From 6bca7c3c79ec43b7cc1bf3cee3ea027e8b8524f5 Mon Sep 17 00:00:00 2001 From: Nikita Pivkin Date: Thu, 4 Apr 2024 03:29:31 +0300 Subject: [PATCH 46/57] refactor(terraform): remove unused options (#6446) --- .../scanners/terraform/executor/executor.go | 111 +++--------- .../terraform/executor/executor_test.go | 5 +- pkg/iac/scanners/terraform/executor/option.go | 43 ----- pkg/iac/scanners/terraform/executor/pool.go | 70 ++++---- pkg/iac/scanners/terraform/options.go | 110 +----------- pkg/iac/scanners/terraform/scanner_test.go | 164 ------------------ 6 files changed, 56 insertions(+), 447 deletions(-) diff --git a/pkg/iac/scanners/terraform/executor/executor.go b/pkg/iac/scanners/terraform/executor/executor.go index 
96c4939d756b..88dc1fa9801c 100644 --- a/pkg/iac/scanners/terraform/executor/executor.go +++ b/pkg/iac/scanners/terraform/executor/executor.go @@ -14,35 +14,24 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/rego" "github.com/aquasecurity/trivy/pkg/iac/rules" "github.com/aquasecurity/trivy/pkg/iac/scan" - "github.com/aquasecurity/trivy/pkg/iac/severity" - "github.com/aquasecurity/trivy/pkg/iac/state" "github.com/aquasecurity/trivy/pkg/iac/terraform" "github.com/aquasecurity/trivy/pkg/iac/types" ) // Executor scans HCL blocks by running all registered rules against them type Executor struct { - enableIgnores bool - excludedRuleIDs []string - includedRuleIDs []string - ignoreCheckErrors bool - workspaceName string - useSingleThread bool - debug debug.Logger - resultsFilters []func(scan.Results) scan.Results - severityOverrides map[string]string - regoScanner *rego.Scanner - regoOnly bool - stateFuncs []func(*state.State) - frameworks []framework.Framework + workspaceName string + debug debug.Logger + resultsFilters []func(scan.Results) scan.Results + regoScanner *rego.Scanner + regoOnly bool + frameworks []framework.Framework } // New creates a new Executor func New(options ...Option) *Executor { s := &Executor{ - ignoreCheckErrors: true, - enableIgnores: true, - regoOnly: false, + regoOnly: false, } for _, option := range options { option(s) @@ -50,16 +39,6 @@ func New(options ...Option) *Executor { return s } -// Find element in list -func checkInList(id string, list []string) bool { - for _, codeIgnored := range list { - if codeIgnored == id { - return true - } - } - return false -} - func (e *Executor) Execute(modules terraform.Modules) (scan.Results, error) { e.debug.Log("Adapting modules...") @@ -70,90 +49,46 @@ func (e *Executor) Execute(modules terraform.Modules) (scan.Results, error) { if threads > 1 { threads-- } - if e.useSingleThread { - threads = 1 - } - e.debug.Log("Using max routines of %d", threads) - e.debug.Log("Applying state modifier 
functions...") - for _, f := range e.stateFuncs { - f(infra) - } + e.debug.Log("Using max routines of %d", threads) registeredRules := rules.GetRegistered(e.frameworks...) e.debug.Log("Initialized %d rule(s).", len(registeredRules)) - pool := NewPool(threads, registeredRules, modules, infra, e.ignoreCheckErrors, e.regoScanner, e.regoOnly) + pool := NewPool(threads, registeredRules, modules, infra, e.regoScanner, e.regoOnly) e.debug.Log("Created pool with %d worker(s) to apply rules.", threads) + results, err := pool.Run() if err != nil { return nil, err } - e.debug.Log("Finished applying rules.") - if e.enableIgnores { - e.debug.Log("Applying ignores...") - var ignores ignore.Rules - for _, module := range modules { - ignores = append(ignores, module.Ignores()...) - } + e.debug.Log("Finished applying rules.") - ignorers := map[string]ignore.Ignorer{ - "ws": workspaceIgnorer(e.workspaceName), - "ignore": attributeIgnorer(modules), - } + e.debug.Log("Applying ignores...") + var ignores ignore.Rules + for _, module := range modules { + ignores = append(ignores, module.Ignores()...) 
+ } - results.Ignore(ignores, ignorers) + ignorers := map[string]ignore.Ignorer{ + "ws": workspaceIgnorer(e.workspaceName), + "ignore": attributeIgnorer(modules), + } - for _, ignored := range results.GetIgnored() { - e.debug.Log("Ignored '%s' at '%s'.", ignored.Rule().LongID(), ignored.Range()) - } + results.Ignore(ignores, ignorers) - } else { - e.debug.Log("Ignores are disabled.") + for _, ignored := range results.GetIgnored() { + e.debug.Log("Ignored '%s' at '%s'.", ignored.Rule().LongID(), ignored.Range()) } - results = e.updateSeverity(results) results = e.filterResults(results) e.sortResults(results) return results, nil } -func (e *Executor) updateSeverity(results []scan.Result) scan.Results { - if len(e.severityOverrides) == 0 { - return results - } - - var overriddenResults scan.Results - for _, res := range results { - for code, sev := range e.severityOverrides { - if res.Rule().LongID() != code { - continue - } - - overrides := scan.Results([]scan.Result{res}) - override := res.Rule() - override.Severity = severity.Severity(sev) - overrides.SetRule(override) - res = overrides[0] - } - overriddenResults = append(overriddenResults, res) - } - - return overriddenResults -} - func (e *Executor) filterResults(results scan.Results) scan.Results { - includedOnly := len(e.includedRuleIDs) > 0 - for i, result := range results { - id := result.Rule().LongID() - if (includedOnly && !checkInList(id, e.includedRuleIDs)) || checkInList(id, e.excludedRuleIDs) { - e.debug.Log("Excluding '%s' at '%s'.", result.Rule().LongID(), result.Range()) - results[i].OverrideStatus(scan.StatusIgnored) - } - } - if len(e.resultsFilters) > 0 && len(results) > 0 { before := len(results.GetIgnored()) e.debug.Log("Applying %d results filters to %d results...", len(results), before) diff --git a/pkg/iac/scanners/terraform/executor/executor_test.go b/pkg/iac/scanners/terraform/executor/executor_test.go index c8c3e2af60c5..ac663c313c17 100644 --- 
a/pkg/iac/scanners/terraform/executor/executor_test.go +++ b/pkg/iac/scanners/terraform/executor/executor_test.go @@ -78,8 +78,7 @@ resource "problem" "this" { modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - - _, err = New(OptionStopOnErrors(false)).Execute(modules) + _, err = New().Execute(modules) assert.Error(t, err) } @@ -127,6 +126,6 @@ resource "problem" "this" { modules, _, err := p.EvaluateAll(context.TODO()) require.NoError(t, err) - _, err = New(OptionStopOnErrors(false)).Execute(modules) + _, err = New().Execute(modules) assert.Error(t, err) } diff --git a/pkg/iac/scanners/terraform/executor/option.go b/pkg/iac/scanners/terraform/executor/option.go index 1e9ab5b9d998..a58d72867b54 100644 --- a/pkg/iac/scanners/terraform/executor/option.go +++ b/pkg/iac/scanners/terraform/executor/option.go @@ -7,7 +7,6 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/framework" "github.com/aquasecurity/trivy/pkg/iac/rego" "github.com/aquasecurity/trivy/pkg/iac/scan" - "github.com/aquasecurity/trivy/pkg/iac/state" ) type Option func(s *Executor) @@ -24,66 +23,24 @@ func OptionWithResultsFilter(f func(scan.Results) scan.Results) Option { } } -func OptionWithSeverityOverrides(overrides map[string]string) Option { - return func(s *Executor) { - s.severityOverrides = overrides - } -} - func OptionWithDebugWriter(w io.Writer) Option { return func(s *Executor) { s.debug = debug.New(w, "terraform", "executor") } } -func OptionNoIgnores() Option { - return func(s *Executor) { - s.enableIgnores = false - } -} - -func OptionExcludeRules(ruleIDs []string) Option { - return func(s *Executor) { - s.excludedRuleIDs = ruleIDs - } -} - -func OptionIncludeRules(ruleIDs []string) Option { - return func(s *Executor) { - s.includedRuleIDs = ruleIDs - } -} - -func OptionStopOnErrors(stop bool) Option { - return func(s *Executor) { - s.ignoreCheckErrors = !stop - } -} - func OptionWithWorkspaceName(workspaceName string) Option { return func(s *Executor) { 
s.workspaceName = workspaceName } } -func OptionWithSingleThread(single bool) Option { - return func(s *Executor) { - s.useSingleThread = single - } -} - func OptionWithRegoScanner(s *rego.Scanner) Option { return func(e *Executor) { e.regoScanner = s } } -func OptionWithStateFunc(f ...func(*state.State)) Option { - return func(e *Executor) { - e.stateFuncs = f - } -} - func OptionWithRegoOnly(regoOnly bool) Option { return func(e *Executor) { e.regoOnly = regoOnly diff --git a/pkg/iac/scanners/terraform/executor/pool.go b/pkg/iac/scanners/terraform/executor/pool.go index a62fbe510de0..69b8405ee3a7 100644 --- a/pkg/iac/scanners/terraform/executor/pool.go +++ b/pkg/iac/scanners/terraform/executor/pool.go @@ -17,24 +17,22 @@ import ( ) type Pool struct { - size int - modules terraform.Modules - state *state.State - rules []types.RegisteredRule - ignoreErrors bool - rs *rego.Scanner - regoOnly bool + size int + modules terraform.Modules + state *state.State + rules []types.RegisteredRule + rs *rego.Scanner + regoOnly bool } -func NewPool(size int, rules []types.RegisteredRule, modules terraform.Modules, st *state.State, ignoreErrors bool, regoScanner *rego.Scanner, regoOnly bool) *Pool { +func NewPool(size int, rules []types.RegisteredRule, modules terraform.Modules, st *state.State, regoScanner *rego.Scanner, regoOnly bool) *Pool { return &Pool{ - size: size, - rules: rules, - state: st, - modules: modules, - ignoreErrors: ignoreErrors, - rs: regoScanner, - regoOnly: regoOnly, + size: size, + rules: rules, + state: st, + modules: modules, + rs: regoScanner, + regoOnly: regoOnly, } } @@ -69,17 +67,15 @@ func (p *Pool) Run() (scan.Results, error) { for _, module := range p.modules { mod := *module outgoing <- &hclModuleRuleJob{ - module: &mod, - rule: r, - ignoreErrors: p.ignoreErrors, + module: &mod, + rule: r, } } } else { // run defsec rule outgoing <- &infraRuleJob{ - state: p.state, - rule: r, - ignoreErrors: p.ignoreErrors, + state: p.state, + rule: r, } } } @@ 
-105,14 +101,11 @@ type Job interface { type infraRuleJob struct { state *state.State rule types.RegisteredRule - - ignoreErrors bool } type hclModuleRuleJob struct { - module *terraform.Module - rule types.RegisteredRule - ignoreErrors bool + module *terraform.Module + rule types.RegisteredRule } type regoJob struct { @@ -122,24 +115,21 @@ type regoJob struct { } func (h *infraRuleJob) Run() (_ scan.Results, err error) { - if h.ignoreErrors { - defer func() { - if panicErr := recover(); panicErr != nil { - err = fmt.Errorf("%s\n%s", panicErr, string(runtimeDebug.Stack())) - } - }() - } + defer func() { + if panicErr := recover(); panicErr != nil { + err = fmt.Errorf("%s\n%s", panicErr, string(runtimeDebug.Stack())) + } + }() + return h.rule.Evaluate(h.state), err } func (h *hclModuleRuleJob) Run() (results scan.Results, err error) { - if h.ignoreErrors { - defer func() { - if panicErr := recover(); panicErr != nil { - err = fmt.Errorf("%s\n%s", panicErr, string(runtimeDebug.Stack())) - } - }() - } + defer func() { + if panicErr := recover(); panicErr != nil { + err = fmt.Errorf("%s\n%s", panicErr, string(runtimeDebug.Stack())) + } + }() customCheck := h.rule.GetRule().CustomChecks.Terraform for _, block := range h.module.GetBlocks() { if !isCustomCheckRequiredForBlock(customCheck, block) { diff --git a/pkg/iac/scanners/terraform/options.go b/pkg/iac/scanners/terraform/options.go index d78c1f0cf897..f5a0d2223534 100644 --- a/pkg/iac/scanners/terraform/options.go +++ b/pkg/iac/scanners/terraform/options.go @@ -8,8 +8,6 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scanners/options" "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/executor" "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser" - "github.com/aquasecurity/trivy/pkg/iac/severity" - "github.com/aquasecurity/trivy/pkg/iac/state" ) type ConfigurableTerraformScanner interface { @@ -27,46 +25,6 @@ func ScannerWithTFVarsPaths(paths ...string) options.ScannerOption { } } -func 
ScannerWithSeverityOverrides(overrides map[string]string) options.ScannerOption { - return func(s options.ConfigurableScanner) { - if tf, ok := s.(ConfigurableTerraformScanner); ok { - tf.AddExecutorOptions(executor.OptionWithSeverityOverrides(overrides)) - } - } -} - -func ScannerWithNoIgnores() options.ScannerOption { - return func(s options.ConfigurableScanner) { - if tf, ok := s.(ConfigurableTerraformScanner); ok { - tf.AddExecutorOptions(executor.OptionNoIgnores()) - } - } -} - -func ScannerWithExcludedRules(ruleIDs []string) options.ScannerOption { - return func(s options.ConfigurableScanner) { - if tf, ok := s.(ConfigurableTerraformScanner); ok { - tf.AddExecutorOptions(executor.OptionExcludeRules(ruleIDs)) - } - } -} - -func ScannerWithIncludedRules(ruleIDs []string) options.ScannerOption { - return func(s options.ConfigurableScanner) { - if tf, ok := s.(ConfigurableTerraformScanner); ok { - tf.AddExecutorOptions(executor.OptionIncludeRules(ruleIDs)) - } - } -} - -func ScannerWithStopOnRuleErrors(stop bool) options.ScannerOption { - return func(s options.ConfigurableScanner) { - if tf, ok := s.(ConfigurableTerraformScanner); ok { - tf.AddExecutorOptions(executor.OptionStopOnErrors(stop)) - } - } -} - func ScannerWithWorkspaceName(name string) options.ScannerOption { return func(s options.ConfigurableScanner) { if tf, ok := s.(ConfigurableTerraformScanner); ok { @@ -76,14 +34,6 @@ func ScannerWithWorkspaceName(name string) options.ScannerOption { } } -func ScannerWithSingleThread(single bool) options.ScannerOption { - return func(s options.ConfigurableScanner) { - if tf, ok := s.(ConfigurableTerraformScanner); ok { - tf.AddExecutorOptions(executor.OptionWithSingleThread(single)) - } - } -} - func ScannerWithAllDirectories(all bool) options.ScannerOption { return func(s options.ConfigurableScanner) { if tf, ok := s.(ConfigurableTerraformScanner); ok { @@ -92,14 +42,6 @@ func ScannerWithAllDirectories(all bool) options.ScannerOption { } } -func 
ScannerWithStopOnHCLError(stop bool) options.ScannerOption { - return func(s options.ConfigurableScanner) { - if tf, ok := s.(ConfigurableTerraformScanner); ok { - tf.AddParserOptions(parser.OptionStopOnHCLError(stop)) - } - } -} - func ScannerWithSkipDownloaded(skip bool) options.ScannerOption { return func(s options.ConfigurableScanner) { if !skip { @@ -109,10 +51,7 @@ func ScannerWithSkipDownloaded(skip bool) options.ScannerOption { tf.AddExecutorOptions(executor.OptionWithResultsFilter(func(results scan.Results) scan.Results { for i, result := range results { prefix := result.Range().GetSourcePrefix() - switch { - case prefix == "": - case strings.HasPrefix(prefix, "."): - default: + if prefix != "" && !strings.HasPrefix(prefix, ".") { results[i].OverrideStatus(scan.StatusIgnored) } } @@ -122,53 +61,6 @@ func ScannerWithSkipDownloaded(skip bool) options.ScannerOption { } } -func ScannerWithResultsFilter(f func(scan.Results) scan.Results) options.ScannerOption { - return func(s options.ConfigurableScanner) { - if tf, ok := s.(ConfigurableTerraformScanner); ok { - tf.AddExecutorOptions(executor.OptionWithResultsFilter(f)) - } - } -} - -func ScannerWithMinimumSeverity(minimum severity.Severity) options.ScannerOption { - min := severityAsOrdinal(minimum) - return func(s options.ConfigurableScanner) { - if tf, ok := s.(ConfigurableTerraformScanner); ok { - tf.AddExecutorOptions(executor.OptionWithResultsFilter(func(results scan.Results) scan.Results { - for i, result := range results { - if severityAsOrdinal(result.Severity()) < min { - results[i].OverrideStatus(scan.StatusIgnored) - } - } - return results - })) - } - } -} - -func severityAsOrdinal(sev severity.Severity) int { - switch sev { - case severity.Critical: - return 4 - case severity.High: - return 3 - case severity.Medium: - return 2 - case severity.Low: - return 1 - default: - return 0 - } -} - -func ScannerWithStateFunc(f ...func(*state.State)) options.ScannerOption { - return func(s 
options.ConfigurableScanner) { - if tf, ok := s.(ConfigurableTerraformScanner); ok { - tf.AddExecutorOptions(executor.OptionWithStateFunc(f...)) - } - } -} - func ScannerWithDownloadsAllowed(allowed bool) options.ScannerOption { return func(s options.ConfigurableScanner) { if tf, ok := s.(ConfigurableTerraformScanner); ok { diff --git a/pkg/iac/scanners/terraform/scanner_test.go b/pkg/iac/scanners/terraform/scanner_test.go index 020954d811db..047ceb972a2a 100644 --- a/pkg/iac/scanners/terraform/scanner_test.go +++ b/pkg/iac/scanners/terraform/scanner_test.go @@ -13,7 +13,6 @@ import ( "github.com/aquasecurity/trivy/pkg/iac/scan" "github.com/aquasecurity/trivy/pkg/iac/scanners/options" "github.com/aquasecurity/trivy/pkg/iac/severity" - "github.com/aquasecurity/trivy/pkg/iac/state" "github.com/aquasecurity/trivy/pkg/iac/terraform" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -68,20 +67,6 @@ func scanWithOptions(t *testing.T, code string, opt ...options.ScannerOption) sc return results } -func Test_OptionWithSeverityOverrides(t *testing.T) { - reg := rules.Register(alwaysFailRule) - defer rules.Deregister(reg) - - options := []options.ScannerOption{ - ScannerWithSeverityOverrides(map[string]string{"aws-service-abc": "LOW"}), - } - results := scanWithOptions(t, ` -resource "something" "else" {} -`, options...) - require.Len(t, results.GetFailed(), 1) - assert.Equal(t, severity.Low, results.GetFailed()[0].Severity()) -} - func Test_OptionWithDebugWriter(t *testing.T) { reg := rules.Register(alwaysFailRule) defer rules.Deregister(reg) @@ -97,67 +82,6 @@ resource "something" "else" {} require.Greater(t, buffer.Len(), 0) } -func Test_OptionNoIgnores(t *testing.T) { - reg := rules.Register(alwaysFailRule) - defer rules.Deregister(reg) - - scannerOpts := []options.ScannerOption{ - ScannerWithNoIgnores(), - } - results := scanWithOptions(t, ` -//tfsec:ignore:aws-service-abc -resource "something" "else" {} -`, scannerOpts...) 
- require.Len(t, results.GetFailed(), 1) - require.Len(t, results.GetIgnored(), 0) - -} - -func Test_OptionExcludeRules(t *testing.T) { - reg := rules.Register(alwaysFailRule) - defer rules.Deregister(reg) - - options := []options.ScannerOption{ - ScannerWithExcludedRules([]string{"aws-service-abc"}), - } - results := scanWithOptions(t, ` -resource "something" "else" {} -`, options...) - require.Len(t, results.GetFailed(), 0) - require.Len(t, results.GetIgnored(), 1) - -} - -func Test_OptionIncludeRules(t *testing.T) { - reg := rules.Register(alwaysFailRule) - defer rules.Deregister(reg) - - scannerOpts := []options.ScannerOption{ - ScannerWithIncludedRules([]string{"this-only"}), - } - results := scanWithOptions(t, ` -resource "something" "else" {} -`, scannerOpts...) - require.Len(t, results.GetFailed(), 0) - require.Len(t, results.GetIgnored(), 1) - -} - -func Test_OptionWithMinimumSeverity(t *testing.T) { - reg := rules.Register(alwaysFailRule) - defer rules.Deregister(reg) - - scannerOpts := []options.ScannerOption{ - ScannerWithMinimumSeverity(severity.Critical), - } - results := scanWithOptions(t, ` -resource "something" "else" {} -`, scannerOpts...) 
- require.Len(t, results.GetFailed(), 0) - require.Len(t, results.GetIgnored(), 1) - -} - func Test_OptionWithPolicyDirs(t *testing.T) { fs := testutil.CreateFS(t, map[string]string{ @@ -355,38 +279,6 @@ cause := bucket.name } -func Test_OptionWithStateFunc(t *testing.T) { - - fs := testutil.CreateFS(t, map[string]string{ - "code/main.tf": ` -resource "aws_s3_bucket" "my-bucket" { - bucket = "evil" -} -`, - }) - - var actual state.State - - debugLog := bytes.NewBuffer([]byte{}) - scanner := New( - options.ScannerWithDebug(debugLog), - ScannerWithStateFunc(func(s *state.State) { - require.NotNil(t, s) - actual = *s - }), - ) - - _, err := scanner.ScanFS(context.TODO(), fs, "code") - require.NoError(t, err) - - assert.Equal(t, 1, len(actual.AWS.S3.Buckets)) - - if t.Failed() { - fmt.Printf("Debug logs:\n%s\n", debugLog.String()) - } - -} - func Test_OptionWithRegoOnly(t *testing.T) { fs := testutil.CreateFS(t, map[string]string{ @@ -794,62 +686,6 @@ resource "aws_s3_bucket_public_access_block" "testB" { } } -func Test_RegoInput(t *testing.T) { - - var regoInput interface{} - - opts := []options.ScannerOption{ - ScannerWithStateFunc(func(s *state.State) { - regoInput = s.ToRego() - }), - } - _ = scanWithOptions(t, ` -resource "aws_security_group" "example_security_group" { - name = "example_security_group" - - description = "Example SG" - - ingress { - description = "Allow SSH" - from_port = 22 - to_port = 22 - protocol = "tcp" - cidr_blocks = ["1.2.3.4", "5.6.7.8"] - } - -} -`, opts...) 
- - outer, ok := regoInput.(map[string]interface{}) - require.True(t, ok) - aws, ok := outer["aws"].(map[string]interface{}) - require.True(t, ok) - ec2, ok := aws["ec2"].(map[string]interface{}) - require.True(t, ok) - sgs, ok := ec2["securitygroups"].([]interface{}) - require.True(t, ok) - require.Len(t, sgs, 1) - sg0, ok := sgs[0].(map[string]interface{}) - require.True(t, ok) - ingress, ok := sg0["ingressrules"].([]interface{}) - require.True(t, ok) - require.Len(t, ingress, 1) - ingress0, ok := ingress[0].(map[string]interface{}) - require.True(t, ok) - cidrs, ok := ingress0["cidrs"].([]interface{}) - require.True(t, ok) - require.Len(t, cidrs, 2) - - cidr0, ok := cidrs[0].(map[string]interface{}) - require.True(t, ok) - - cidr1, ok := cidrs[1].(map[string]interface{}) - require.True(t, ok) - - assert.Equal(t, "1.2.3.4", cidr0["value"]) - assert.Equal(t, "5.6.7.8", cidr1["value"]) -} - // PoC for replacing Go with Rego: AVD-AWS-0001 func Test_RegoRules(t *testing.T) { From 13190e92d9fea1277389fc09fba0418c05c5f44f Mon Sep 17 00:00:00 2001 From: Nikita Pivkin Date: Thu, 4 Apr 2024 06:40:40 +0300 Subject: [PATCH 47/57] fix(terraform): eval submodules (#6411) Co-authored-by: William Reade --- .../scanners/terraform/parser/evaluator.go | 119 +++++++++++++----- .../scanners/terraform/parser/load_module.go | 8 ++ pkg/iac/scanners/terraform/parser/parser.go | 28 +++-- .../scanners/terraform/parser/parser_test.go | 109 ++++++++++++++++ 4 files changed, 223 insertions(+), 41 deletions(-) diff --git a/pkg/iac/scanners/terraform/parser/evaluator.go b/pkg/iac/scanners/terraform/parser/evaluator.go index 3633d22386a1..b93104f442cc 100644 --- a/pkg/iac/scanners/terraform/parser/evaluator.go +++ b/pkg/iac/scanners/terraform/parser/evaluator.go @@ -8,6 +8,7 @@ import ( "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/ext/typeexpr" + "github.com/samber/lo" "github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty/convert" "golang.org/x/exp/slices" @@ -102,6 
+103,7 @@ func (e *evaluator) evaluateStep() { e.ctx.Set(e.getValuesByBlockType("data"), "data") e.ctx.Set(e.getValuesByBlockType("output"), "output") + e.ctx.Set(e.getValuesByBlockType("module"), "module") } // exportOutputs is used to export module outputs to the parent module @@ -126,48 +128,100 @@ func (e *evaluator) EvaluateAll(ctx context.Context) (terraform.Modules, map[str fsMap := make(map[string]fs.FS) fsMap[fsKey] = e.filesystem - var lastContext hcl.EvalContext e.debug.Log("Starting module evaluation...") - for i := 0; i < maxContextIterations; i++ { + e.evaluateSteps() - e.evaluateStep() + // expand out resources and modules via count, for-each and dynamic + // (not a typo, we do this twice so every order is processed) + e.blocks = e.expandBlocks(e.blocks) + e.blocks = e.expandBlocks(e.blocks) - // if ctx matches the last evaluation, we can bail, nothing left to resolve - if i > 0 && reflect.DeepEqual(lastContext.Variables, e.ctx.Inner().Variables) { - break - } + e.debug.Log("Starting submodule evaluation...") + submodules := e.loadSubmodules(ctx) - if len(e.ctx.Inner().Variables) != len(lastContext.Variables) { - lastContext.Variables = make(map[string]cty.Value, len(e.ctx.Inner().Variables)) + for i := 0; i < maxContextIterations; i++ { + changed := false + for _, sm := range submodules { + changed = changed || e.evaluateSubmodule(ctx, sm) } - for k, v := range e.ctx.Inner().Variables { - lastContext.Variables[k] = v + if !changed { + e.debug.Log("All submodules are evaluated at i=%d", i) + break } } - // expand out resources and modules via count, for-each and dynamic - // (not a typo, we do this twice so every order is processed) - e.blocks = e.expandBlocks(e.blocks) - e.blocks = e.expandBlocks(e.blocks) + e.debug.Log("Starting post-submodule evaluation...") + e.evaluateSteps() - e.debug.Log("Starting submodule evaluation...") var modules terraform.Modules + for _, sm := range submodules { + modules = append(modules, sm.modules...) 
+ fsMap = lo.Assign(fsMap, sm.fsMap) + } + + e.debug.Log("Finished processing %d submodule(s).", len(modules)) + + e.debug.Log("Module evaluation complete.") + rootModule := terraform.NewModule(e.projectRootPath, e.modulePath, e.blocks, e.ignores) + return append(terraform.Modules{rootModule}, modules...), fsMap +} + +type submodule struct { + definition *ModuleDefinition + eval *evaluator + modules terraform.Modules + lastState map[string]cty.Value + fsMap map[string]fs.FS +} + +func (e *evaluator) loadSubmodules(ctx context.Context) []*submodule { + var submodules []*submodule + for _, definition := range e.loadModules(ctx) { - submodules, outputs, err := definition.Parser.EvaluateAll(ctx) - if err != nil { - e.debug.Log("Failed to evaluate submodule '%s': %s.", definition.Name, err) + eval, err := definition.Parser.Load(ctx) + if errors.Is(err, ErrNoFiles) { + continue + } else if err != nil { + e.debug.Log("Failed to load submodule '%s': %s.", definition.Name, err) continue } - // export module outputs - e.ctx.Set(outputs, "module", definition.Name) - modules = append(modules, submodules...) 
- for key, val := range definition.Parser.GetFilesystemMap() { - fsMap[key] = val + + submodules = append(submodules, &submodule{ + definition: definition, + eval: eval, + fsMap: make(map[string]fs.FS), + }) + } + + return submodules +} + +func (e *evaluator) evaluateSubmodule(ctx context.Context, sm *submodule) bool { + inputVars := sm.definition.inputVars() + if len(sm.modules) > 0 { + if reflect.DeepEqual(inputVars, sm.lastState) { + e.debug.Log("Submodule %s inputs unchanged", sm.definition.Name) + return false } } - e.debug.Log("Finished processing %d submodule(s).", len(modules)) - e.debug.Log("Starting post-submodule evaluation...") + e.debug.Log("Evaluating submodule %s", sm.definition.Name) + sm.eval.inputVars = inputVars + sm.modules, sm.fsMap = sm.eval.EvaluateAll(ctx) + outputs := sm.eval.exportOutputs() + + // lastState needs to be captured after applying outputs – so that they + // don't get treated as changes – but before running post-submodule + // evaluation, so that changes from that can trigger re-evaluations of + // the submodule if/when they feed back into inputs. 
+ e.ctx.Set(outputs, "module", sm.definition.Name) + sm.lastState = sm.definition.inputVars() + e.evaluateSteps() + return true +} + +func (e *evaluator) evaluateSteps() { + var lastContext hcl.EvalContext for i := 0; i < maxContextIterations; i++ { e.evaluateStep() @@ -176,7 +230,6 @@ func (e *evaluator) EvaluateAll(ctx context.Context) (terraform.Modules, map[str if i > 0 && reflect.DeepEqual(lastContext.Variables, e.ctx.Inner().Variables) { break } - if len(e.ctx.Inner().Variables) != len(lastContext.Variables) { lastContext.Variables = make(map[string]cty.Value, len(e.ctx.Inner().Variables)) } @@ -184,10 +237,6 @@ func (e *evaluator) EvaluateAll(ctx context.Context) (terraform.Modules, map[str lastContext.Variables[k] = v } } - - e.debug.Log("Module evaluation complete.") - rootModule := terraform.NewModule(e.projectRootPath, e.modulePath, e.blocks, e.ignores) - return append(terraform.Modules{rootModule}, modules...), fsMap } func (e *evaluator) expandBlocks(blocks terraform.Blocks) terraform.Blocks { @@ -217,7 +266,9 @@ func (e *evaluator) expandDynamicBlock(b *terraform.Block) { b.InjectBlock(content, blockName) } } - sub.MarkExpanded() + if len(expanded) > 0 { + sub.MarkExpanded() + } } } @@ -246,6 +297,10 @@ func (e *evaluator) expandBlockForEaches(blocks terraform.Blocks, isDynamic bool clones := make(map[string]cty.Value) _ = forEachAttr.Each(func(key cty.Value, val cty.Value) { + if val.IsNull() { + return + } + // instances are identified by a map key (or set member) from the value provided to for_each idx, err := convert.Convert(key, cty.String) if err != nil { diff --git a/pkg/iac/scanners/terraform/parser/load_module.go b/pkg/iac/scanners/terraform/parser/load_module.go index 461d7a7a1a56..0bd6a6395936 100644 --- a/pkg/iac/scanners/terraform/parser/load_module.go +++ b/pkg/iac/scanners/terraform/parser/load_module.go @@ -22,6 +22,14 @@ type ModuleDefinition struct { External bool } +func (d *ModuleDefinition) inputVars() map[string]cty.Value { + 
inputs := d.Definition.Values().AsValueMap() + if inputs == nil { + return make(map[string]cty.Value) + } + return inputs +} + // loadModules reads all module blocks and loads them func (e *evaluator) loadModules(ctx context.Context) []*ModuleDefinition { var moduleDefinitions []*ModuleDefinition diff --git a/pkg/iac/scanners/terraform/parser/parser.go b/pkg/iac/scanners/terraform/parser/parser.go index b80c4a6babf2..b5b50dc913d7 100644 --- a/pkg/iac/scanners/terraform/parser/parser.go +++ b/pkg/iac/scanners/terraform/parser/parser.go @@ -2,6 +2,7 @@ package parser import ( "context" + "errors" "io" "io/fs" "os" @@ -224,18 +225,19 @@ func (p *Parser) ParseFS(ctx context.Context, dir string) error { return nil } -func (p *Parser) EvaluateAll(ctx context.Context) (terraform.Modules, cty.Value, error) { +var ErrNoFiles = errors.New("no files found") +func (p *Parser) Load(ctx context.Context) (*evaluator, error) { p.debug.Log("Evaluating module...") if len(p.files) == 0 { p.debug.Log("No files found, nothing to do.") - return nil, cty.NilVal, nil + return nil, ErrNoFiles } blocks, ignores, err := p.readBlocks(p.files) if err != nil { - return nil, cty.NilVal, err + return nil, err } p.debug.Log("Read %d block(s) and %d ignore(s) for module '%s' (%d file[s])...", len(blocks), len(ignores), p.moduleName, len(p.files)) @@ -246,7 +248,7 @@ func (p *Parser) EvaluateAll(ctx context.Context) (terraform.Modules, cty.Value, } else { inputVars, err = loadTFVars(p.configsFS, p.tfvarsPaths) if err != nil { - return nil, cty.NilVal, err + return nil, err } p.debug.Log("Added %d variables from tfvars.", len(inputVars)) } @@ -260,10 +262,10 @@ func (p *Parser) EvaluateAll(ctx context.Context) (terraform.Modules, cty.Value, workingDir, err := os.Getwd() if err != nil { - return nil, cty.NilVal, err + return nil, err } p.debug.Log("Working directory for module evaluation is '%s'", workingDir) - evaluator := newEvaluator( + return newEvaluator( p.moduleFS, p, p.projectRoot, @@ -278,11 
+280,19 @@ func (p *Parser) EvaluateAll(ctx context.Context) (terraform.Modules, cty.Value, p.debug.Extend("evaluator"), p.allowDownloads, p.skipCachedModules, - ) - modules, fsMap := evaluator.EvaluateAll(ctx) + ), nil +} + +func (p *Parser) EvaluateAll(ctx context.Context) (terraform.Modules, cty.Value, error) { + + e, err := p.Load(ctx) + if errors.Is(err, ErrNoFiles) { + return nil, cty.NilVal, nil + } + modules, fsMap := e.EvaluateAll(ctx) p.debug.Log("Finished parsing module '%s'.", p.moduleName) p.fsMap = fsMap - return modules, evaluator.exportOutputs(), nil + return modules, e.exportOutputs(), nil } func (p *Parser) GetFilesystemMap() map[string]fs.FS { diff --git a/pkg/iac/scanners/terraform/parser/parser_test.go b/pkg/iac/scanners/terraform/parser/parser_test.go index 12594841251b..a20bb2a84b58 100644 --- a/pkg/iac/scanners/terraform/parser/parser_test.go +++ b/pkg/iac/scanners/terraform/parser/parser_test.go @@ -1522,3 +1522,112 @@ func compareSets(a []int, b []int) bool { return true } + +func TestModuleRefersToOutputOfAnotherModule(t *testing.T) { + files := map[string]string{ + "main.tf": ` +module "module2" { + source = "./modules/foo" +} + +module "module1" { + source = "./modules/bar" + test_var = module.module2.test_out +} +`, + "modules/foo/main.tf": ` +output "test_out" { + value = "test_value" +} +`, + "modules/bar/main.tf": ` +variable "test_var" {} + +resource "test_resource" "this" { + dynamic "dynamic_block" { + for_each = [var.test_var] + content { + some_attr = dynamic_block.value + } + } +} +`, + } + + modules := parse(t, files) + require.Len(t, modules, 3) + + resources := modules.GetResourcesByType("test_resource") + require.Len(t, resources, 1) + + attr, _ := resources[0].GetNestedAttribute("dynamic_block.some_attr") + require.NotNil(t, attr) + + assert.Equal(t, "test_value", attr.GetRawValue()) +} + +func TestCyclicModules(t *testing.T) { + files := map[string]string{ + "main.tf": ` +module "module2" { + source = "./modules/foo" + 
test_var = passthru.handover.from_1 +} + +// Demonstrates need for evaluateSteps between submodule evaluations. +resource "passthru" "handover" { + from_1 = module.module1.test_out + from_2 = module.module2.test_out +} + +module "module1" { + source = "./modules/bar" + test_var = passthru.handover.from_2 +} +`, + "modules/foo/main.tf": ` +variable "test_var" {} + +resource "test_resource" "this" { + dynamic "dynamic_block" { + for_each = [var.test_var] + content { + some_attr = dynamic_block.value + } + } +} + +output "test_out" { + value = "test_value" +} +`, + "modules/bar/main.tf": ` +variable "test_var" {} + +resource "test_resource" "this" { + dynamic "dynamic_block" { + for_each = [var.test_var] + content { + some_attr = dynamic_block.value + } + } +} + +output "test_out" { + value = test_resource.this.dynamic_block.some_attr +} +`, + } + + modules := parse(t, files) + require.Len(t, modules, 3) + + resources := modules.GetResourcesByType("test_resource") + require.Len(t, resources, 2) + + for _, res := range resources { + attr, _ := res.GetNestedAttribute("dynamic_block.some_attr") + require.NotNil(t, attr, res.FullName()) + assert.Equal(t, "test_value", attr.GetRawValue()) + } +} From 637da2b1783d622c3a33068a7b5323ad7d81fbbb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 4 Apr 2024 10:33:36 +0400 Subject: [PATCH 48/57] chore(deps): bump aquaproj/aqua-installer from 2.2.0 to 3.0.0 (#6437) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/auto-update-labels.yaml | 2 +- .github/workflows/test.yaml | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/auto-update-labels.yaml b/.github/workflows/auto-update-labels.yaml index 6dab8481873b..2c52775d7803 100644 --- a/.github/workflows/auto-update-labels.yaml +++ b/.github/workflows/auto-update-labels.yaml @@ -20,7 +20,7 @@ 
jobs: go-version-file: go.mod - name: Install aqua tools - uses: aquaproj/aqua-installer@v2.2.0 + uses: aquaproj/aqua-installer@v3.0.0 with: aqua_version: v1.25.0 diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index fb63908b3676..8b7477733b9b 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -57,7 +57,7 @@ jobs: if: ${{ failure() && steps.lint.conclusion == 'failure' }} - name: Install tools - uses: aquaproj/aqua-installer@v2.2.0 + uses: aquaproj/aqua-installer@v3.0.0 with: aqua_version: v1.25.0 aqua_opts: "" @@ -87,7 +87,7 @@ jobs: go-version-file: go.mod - name: Install tools - uses: aquaproj/aqua-installer@v2.2.0 + uses: aquaproj/aqua-installer@v3.0.0 with: aqua_version: v1.25.0 @@ -116,7 +116,7 @@ jobs: go-version-file: go.mod - name: Install tools - uses: aquaproj/aqua-installer@v2.2.0 + uses: aquaproj/aqua-installer@v3.0.0 with: aqua_version: v1.25.0 @@ -136,7 +136,7 @@ jobs: go-version-file: go.mod - name: Install tools - uses: aquaproj/aqua-installer@v2.2.0 + uses: aquaproj/aqua-installer@v3.0.0 with: aqua_version: v1.25.0 @@ -166,7 +166,7 @@ jobs: with: go-version-file: go.mod - name: Install tools - uses: aquaproj/aqua-installer@v2.2.0 + uses: aquaproj/aqua-installer@v3.0.0 with: aqua_version: v1.25.0 - name: Run vm integration tests From 116356500eb8e5cbfd98995dc8a97e30b85303e5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 4 Apr 2024 10:34:27 +0400 Subject: [PATCH 49/57] chore(deps): bump github.com/hashicorp/golang-lru/v2 from 2.0.6 to 2.0.7 (#6430) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index f384e970590a..2d9555a44bec 100644 --- a/go.mod +++ b/go.mod @@ -56,7 +56,7 @@ require ( github.com/hashicorp/go-getter v1.7.3 github.com/hashicorp/go-multierror 
v1.1.1 github.com/hashicorp/go-retryablehttp v0.7.5 - github.com/hashicorp/golang-lru/v2 v2.0.6 + github.com/hashicorp/golang-lru/v2 v2.0.7 github.com/in-toto/in-toto-golang v0.9.0 github.com/knqyf263/go-apk-version v0.0.0-20200609155635-041fdbb8563f github.com/knqyf263/go-deb-version v0.0.0-20230223133812-3ed183d23422 diff --git a/go.sum b/go.sum index 5af42e617a5d..523364d92b52 100644 --- a/go.sum +++ b/go.sum @@ -1112,8 +1112,8 @@ github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.6.0 h1:uL2shRDx7RTrOrTCUZEGP/wJUFiUI8QT6E7z5o8jga4= github.com/hashicorp/golang-lru v0.6.0/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= -github.com/hashicorp/golang-lru/v2 v2.0.6 h1:3xi/Cafd1NaoEnS/yDssIiuVeDVywU0QdFGl3aQaQHM= -github.com/hashicorp/golang-lru/v2 v2.0.6/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= +github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k= +github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= github.com/hashicorp/hc-install v0.6.3 h1:yE/r1yJvWbtrJ0STwScgEnCanb0U9v7zp0Gbkmcoxqs= github.com/hashicorp/hc-install v0.6.3/go.mod h1:KamGdbodYzlufbWh4r9NRo8y6GLHWZP2GBtdnms1Ln0= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= From 089368d968fab3445a06a2fcd48c8bb29db0ef7a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 4 Apr 2024 06:37:32 +0000 Subject: [PATCH 50/57] chore(deps): bump github.com/go-openapi/strfmt from 0.22.0 to 0.23.0 (#6452) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 10 +++++----- go.sum | 25 ++++++++++--------------- 2 files changed, 15 insertions(+), 20 deletions(-) diff --git a/go.mod b/go.mod index 
2d9555a44bec..5118b48a45b7 100644 --- a/go.mod +++ b/go.mod @@ -45,7 +45,7 @@ require ( github.com/fatih/color v1.16.0 github.com/go-git/go-git/v5 v5.11.0 github.com/go-openapi/runtime v0.27.1 - github.com/go-openapi/strfmt v0.22.0 + github.com/go-openapi/strfmt v0.23.0 github.com/go-redis/redis/v8 v8.11.5 github.com/golang-jwt/jwt v3.2.2+incompatible github.com/golang/protobuf v1.5.3 @@ -96,7 +96,7 @@ require ( github.com/spf13/cobra v1.8.0 github.com/spf13/pflag v1.0.5 github.com/spf13/viper v1.18.2 - github.com/stretchr/testify v1.8.4 + github.com/stretchr/testify v1.9.0 github.com/testcontainers/testcontainers-go v0.28.0 github.com/testcontainers/testcontainers-go/modules/localstack v0.28.0 github.com/tetratelabs/wazero v1.7.0 @@ -265,7 +265,7 @@ require ( github.com/go-logr/stdr v1.2.2 // indirect github.com/go-ole/go-ole v1.2.6 // indirect github.com/go-openapi/analysis v0.21.5 // indirect - github.com/go-openapi/errors v0.21.0 // indirect + github.com/go-openapi/errors v0.22.0 // indirect github.com/go-openapi/jsonpointer v0.20.1 // indirect github.com/go-openapi/jsonreference v0.20.3 // indirect github.com/go-openapi/loads v0.21.3 // indirect @@ -370,7 +370,7 @@ require ( github.com/skeema/knownhosts v1.2.1 // indirect github.com/sourcegraph/conc v0.3.0 // indirect github.com/spf13/afero v1.11.0 // indirect - github.com/stretchr/objx v0.5.0 // indirect + github.com/stretchr/objx v0.5.2 // indirect github.com/subosito/gotenv v1.6.0 // indirect github.com/tchap/go-patricia/v2 v2.3.1 // indirect github.com/tklauser/go-sysconf v0.3.12 // indirect @@ -383,7 +383,7 @@ require ( github.com/yashtewari/glob-intersection v0.2.0 // indirect github.com/yuin/gopher-lua v1.1.0 // indirect github.com/yusufpapurcu/wmi v1.2.3 // indirect - go.mongodb.org/mongo-driver v1.13.1 // indirect + go.mongodb.org/mongo-driver v1.14.0 // indirect go.opencensus.io v0.24.0 // indirect go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.46.1 // indirect 
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.46.1 // indirect diff --git a/go.sum b/go.sum index 523364d92b52..47ed15b30083 100644 --- a/go.sum +++ b/go.sum @@ -831,8 +831,8 @@ github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= github.com/go-openapi/analysis v0.21.5 h1:3tHfEBh6Ia8eKc4M7khOGjPOAlWKJ10d877Cr9teujI= github.com/go-openapi/analysis v0.21.5/go.mod h1:25YcZosX9Lwz2wBsrFrrsL8bmjjXdlyP6zsr2AMy29M= -github.com/go-openapi/errors v0.21.0 h1:FhChC/duCnfoLj1gZ0BgaBmzhJC2SL/sJr8a2vAobSY= -github.com/go-openapi/errors v0.21.0/go.mod h1:jxNTMUxRCKj65yb/okJGEtahVd7uvWnuWfj53bse4ho= +github.com/go-openapi/errors v0.22.0 h1:c4xY/OLxUBSTiepAg3j/MHuAv5mJhnf53LLMWFB+u/w= +github.com/go-openapi/errors v0.22.0/go.mod h1:J3DmZScxCDufmIMsdOuDHxJbdOGC0xtUynjIx092vXE= github.com/go-openapi/jsonpointer v0.19.2/go.mod h1:3akKfEdA7DF1sugOqz1dVQHBcuDBPKZGEoHC/NkiQRg= github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= github.com/go-openapi/jsonpointer v0.20.1 h1:MkK4VEIEZMj4wT9PmjaUmGflVBr9nvud4Q4UVFbDoBE= @@ -848,8 +848,8 @@ github.com/go-openapi/runtime v0.27.1/go.mod h1:fijeJEiEclyS8BRurYE1DE5TLb9/KZl6 github.com/go-openapi/spec v0.19.3/go.mod h1:FpwSN1ksY1eteniUU7X0N/BgJ7a4WvBFVA8Lj9mJglo= github.com/go-openapi/spec v0.20.12 h1:cgSLbrsmziAP2iais+Vz7kSazwZ8rsUZd6TUzdDgkVI= github.com/go-openapi/spec v0.20.12/go.mod h1:iSCgnBcwbMW9SfzJb8iYynXvcY6C/QFrI7otzF7xGM4= -github.com/go-openapi/strfmt v0.22.0 h1:Ew9PnEYc246TwrEspvBdDHS4BVKXy/AOVsfqGDgAcaI= -github.com/go-openapi/strfmt v0.22.0/go.mod h1:HzJ9kokGIju3/K6ap8jL+OlGAbjpSv27135Yr9OivU4= +github.com/go-openapi/strfmt v0.23.0 h1:nlUS6BCqcnAk0pyhi9Y+kdDVZdZMHfEKQiS4HaMgO/c= +github.com/go-openapi/strfmt v0.23.0/go.mod h1:NrtIpfKtWIygRkKVsxh7XQMDQW5HKQl6S5ik2elW+K4= github.com/go-openapi/swag v0.19.2/go.mod 
h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= github.com/go-openapi/swag v0.22.5 h1:fVS63IE3M0lsuWRzuom3RLwUMVI2peDH01s6M70ugys= @@ -950,7 +950,6 @@ github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= -github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= @@ -1183,7 +1182,6 @@ github.com/klauspost/compress v1.11.3/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYs github.com/klauspost/compress v1.11.13/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/compress v1.12.3/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg= github.com/klauspost/compress v1.13.0/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg= -github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= github.com/klauspost/compress v1.15.11/go.mod h1:QPwzmACJjUTFsnSHH934V6woptycfrDDJnH7hvFVbGM= github.com/klauspost/compress v1.17.2 h1:RlWWUY/Dr4fL8qk9YG7DTZ7PDgME2V4csBXA8L/ixi4= github.com/klauspost/compress v1.17.2/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= @@ -1356,7 +1354,6 @@ github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9G github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/monochromegane/go-gitignore v0.0.0-20200626010858-205db1a8cc00 h1:n6/2gBQ3RWajuToeY6ZtZTIKv2v7ThUy5KKusIT0yc0= 
github.com/monochromegane/go-gitignore v0.0.0-20200626010858-205db1a8cc00/go.mod h1:Pm3mSP3c5uWn86xMLZ5Sa7JB9GsEZySvHYXCTK4E9q4= -github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= github.com/mrunalp/fileutils v0.5.0/go.mod h1:M1WthSahJixYnrXQl/DFQuteStB1weuxD2QJNHXfbSQ= @@ -1608,8 +1605,9 @@ github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+ github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= -github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v0.0.0-20180303142811-b89eecf5ca5d/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= @@ -1621,8 +1619,9 @@ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= github.com/stretchr/testify 
v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8= github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= @@ -1672,9 +1671,6 @@ github.com/willf/bitset v1.1.11-0.20200630133818-d5bec3311243/go.mod h1:RjeCKbqT github.com/willf/bitset v1.1.11/go.mod h1:83CECat5yLh5zVOf4P1ErAgKA5UDvKtgyUABdr3+MjI= github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM= github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw= -github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= -github.com/xdg-go/scram v1.1.2/go.mod h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3kKLN4= -github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM= github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo= github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= @@ -1689,7 +1685,6 @@ github.com/xlab/treeprint v1.2.0/go.mod h1:gj5Gd3gPdKtR1ikdDK6fnFLdmIS0X30kTTuNd github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= github.com/yashtewari/glob-intersection v0.2.0 h1:8iuHdN88yYuCzCdjt0gDe+6bAhUwBeEWqThExu54RFg= github.com/yashtewari/glob-intersection v0.2.0/go.mod h1:LK7pIC3piUjovexikBbJ26Yml7g8xa5bsjfx2v1fwok= -github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod 
h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= @@ -1720,8 +1715,8 @@ go.etcd.io/etcd v0.5.0-alpha.5.0.20200910180754-dd1b699fc489/go.mod h1:yVHk9ub3C go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= go.etcd.io/etcd/client/v2 v2.305.0/go.mod h1:h9puh54ZTgAKtEbut2oe9P4L/oqKCVB6xsXlzd7alYQ= -go.mongodb.org/mongo-driver v1.13.1 h1:YIc7HTYsKndGK4RFzJ3covLz1byri52x0IoMB0Pt/vk= -go.mongodb.org/mongo-driver v1.13.1/go.mod h1:wcDf1JBCXy2mOW0bWHwO/IOYqdca1MPCwDtFu/Z9+eo= +go.mongodb.org/mongo-driver v1.14.0 h1:P98w8egYRjYe3XDjxhYJagTokP/H6HzlsnojRgZRd80= +go.mongodb.org/mongo-driver v1.14.0/go.mod h1:Vzb0Mk/pa7e6cWw85R4F/endUC3u0U9jGcNU603k65c= go.mozilla.org/pkcs7 v0.0.0-20200128120323-432b2356ecb1/go.mod h1:SNgMg+EgDFwmvSmLRTNKC5fegJjB7v23qTQ0XLGUNHk= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= From ad544e97ccc1cc5b2288b9d69e700b7578dd3ad0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 4 Apr 2024 06:49:05 +0000 Subject: [PATCH 51/57] chore(deps): bump github.com/Azure/azure-sdk-for-go/sdk/azidentity from 1.4.0 to 1.5.1 (#6426) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 8 ++++---- go.sum | 17 ++++++++--------- 2 files changed, 12 insertions(+), 13 deletions(-) diff --git a/go.mod b/go.mod index 5118b48a45b7..0e585260a019 100644 --- a/go.mod +++ b/go.mod @@ -5,7 +5,7 @@ go 1.21 require ( github.com/Azure/azure-sdk-for-go 
v68.0.0+incompatible github.com/Azure/azure-sdk-for-go/sdk/azcore v1.10.0 - github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.4.0 + github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 github.com/BurntSushi/toml v1.3.2 github.com/CycloneDX/cyclonedx-go v0.8.0 github.com/GoogleCloudPlatform/docker-credential-gcr v2.0.5+incompatible @@ -156,7 +156,7 @@ require ( github.com/Azure/go-autorest/autorest/date v0.3.0 // indirect github.com/Azure/go-autorest/logger v0.2.1 // indirect github.com/Azure/go-autorest/tracing v0.6.0 // indirect - github.com/AzureAD/microsoft-authentication-library-for-go v1.1.1 // indirect + github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 // indirect github.com/Intevation/gval v1.3.0 // indirect github.com/Intevation/jsonpath v0.2.1 // indirect github.com/MakeNowJust/heredoc v1.0.0 // indirect @@ -279,7 +279,7 @@ require ( github.com/gofrs/uuid v4.3.1+incompatible // indirect github.com/gogo/protobuf v1.3.2 // indirect github.com/golang-jwt/jwt/v4 v4.5.0 // indirect - github.com/golang-jwt/jwt/v5 v5.0.0 // indirect + github.com/golang-jwt/jwt/v5 v5.2.0 // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/google/btree v1.1.2 // indirect github.com/google/gnostic-models v0.6.8 // indirect @@ -346,7 +346,7 @@ require ( github.com/pelletier/go-toml/v2 v2.1.0 // indirect github.com/peterbourgon/diskv v2.0.1+incompatible // indirect github.com/pjbgf/sha1cd v0.3.0 // indirect - github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 // indirect + github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect github.com/pkg/errors v0.9.1 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect diff --git a/go.sum b/go.sum index 47ed15b30083..1f149fc1b692 100644 --- a/go.sum +++ b/go.sum @@ -198,8 +198,8 @@ github.com/Azure/azure-sdk-for-go v68.0.0+incompatible 
h1:fcYLmCpyNYRnvJbPerq7U0 github.com/Azure/azure-sdk-for-go v68.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= github.com/Azure/azure-sdk-for-go/sdk/azcore v1.10.0 h1:n1DH8TPV4qqPTje2RcUBYwtrTWlabVp4n46+74X2pn4= github.com/Azure/azure-sdk-for-go/sdk/azcore v1.10.0/go.mod h1:HDcZnuGbiyppErN6lB+idp4CKhjbc8gwjto6OPpyggM= -github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.4.0 h1:BMAjVKJM0U/CYF27gA0ZMmXGkOcvfFtD0oHVZ1TIPRI= -github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.4.0/go.mod h1:1fXstnBMas5kzG+S3q8UoJcmyU6nUeunJcMDHcRYHhs= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 h1:sO0/P7g68FrryJzljemN+6GTssUXdANk6aJ7T1ZxnsQ= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1/go.mod h1:h8hyGFDsU5HMivxiS2iYFZsgDbU9OnnJ163x5UGVKYo= github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.2 h1:LqbJ/WzJUwBf8UiaSzgX7aMclParm9/5Vgp+TY51uBQ= github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.2/go.mod h1:yInRyqWXAuaPrgI7p70+lDDgh3mlBohis29jGMISnmc= github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78/go.mod h1:LmzpDX56iTiv29bbRTIsUNlaFfuhWRQBWjQdVyAevI8= @@ -227,8 +227,8 @@ github.com/Azure/go-autorest/logger v0.2.1 h1:IG7i4p/mDa2Ce4TRyAO8IHnVhAVF3RFU+Z github.com/Azure/go-autorest/logger v0.2.1/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZmbF5NWuPV8+WeEW8= github.com/Azure/go-autorest/tracing v0.6.0 h1:TYi4+3m5t6K48TGI9AUdb+IzbnSxvnvUMfuitfgcfuo= github.com/Azure/go-autorest/tracing v0.6.0/go.mod h1:+vhtPC754Xsa23ID7GlGsrdKBpUA79WCAKPPZVC2DeU= -github.com/AzureAD/microsoft-authentication-library-for-go v1.1.1 h1:WpB/QDNLpMw72xHJc34BNNykqSOeEJDAWkhf0u12/Jk= -github.com/AzureAD/microsoft-authentication-library-for-go v1.1.1/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI= +github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 h1:DzHpqpoJVaCgOUdVHxE8QB52S6NiVdDQvGlny1qvPqA= +github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI= github.com/BurntSushi/toml 
v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/toml v1.2.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= github.com/BurntSushi/toml v1.3.2 h1:o7IhLm0Msx3BaB+n3Ag7L8EVlByGnpq14C4YWiu/gL8= @@ -909,8 +909,8 @@ github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzq github.com/golang-jwt/jwt/v4 v4.0.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg= github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg= github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= -github.com/golang-jwt/jwt/v5 v5.0.0 h1:1n1XNM9hk7O9mnQoNBGolZvzebBQ7p93ULHRc28XJUE= -github.com/golang-jwt/jwt/v5 v5.0.0/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= +github.com/golang-jwt/jwt/v5 v5.2.0 h1:d/ix8ftRUorsN+5eMIlF4T6J8CAt9rch3My2winC1Jw= +github.com/golang-jwt/jwt/v5 v5.2.0/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/glog v1.2.0 h1:uCdmnmatrKCgMBlM4rMuJZWOkPDqdbZPnrMXDY4gI68= github.com/golang/glog v1.2.0/go.mod h1:6AhwSGph0fcJtXVM/PEHPqZlFeoLxhs7/t5UDAwmO+w= @@ -1441,8 +1441,8 @@ github.com/phayes/freeport v0.0.0-20220201140144-74d24b5ae9f5 h1:Ii+DKncOVM8Cu1H github.com/phayes/freeport v0.0.0-20220201140144-74d24b5ae9f5/go.mod h1:iIss55rKnNBTvrwdmkUpLnDpZoAHvWaiq5+iMmen4AE= github.com/pjbgf/sha1cd v0.3.0 h1:4D5XXmUUBUl/xQ6IjCkEAbqXskkq/4O7LmGn0AqMDs4= github.com/pjbgf/sha1cd v0.3.0/go.mod h1:nZ1rrWOcGJ5uZgEEVL1VUM9iRQiZvWdbZjkKyFzPPsI= -github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 h1:KoWmjvw+nsYOo29YJK9vDA65RGE3NrOnUtO7a+RF9HU= -github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzLZPlr7++PzdhaXEj94dEiJgZDTsxEqUI= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ= +github.com/pkg/browser 
v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1-0.20171018195549-f15c970de5b7/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -2036,7 +2036,6 @@ golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210616045830-e2b7044e8c71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= From 53517d622b94f5ef2be467fdfa97b73438027362 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan-Otto=20Kr=C3=B6pke?= Date: Sat, 6 Apr 2024 07:07:56 +0200 Subject: [PATCH 52/57] feat(misconf): add helm-api-version and helm-kube-version flag (#6332) Co-authored-by: Simar --- .../references/configuration/cli/trivy_aws.md | 2 + .../configuration/cli/trivy_config.md | 2 + .../configuration/cli/trivy_filesystem.md | 2 + .../configuration/cli/trivy_image.md | 2 + .../configuration/cli/trivy_kubernetes.md | 2 + .../configuration/cli/trivy_repository.md | 2 + .../configuration/cli/trivy_rootfs.md | 2 + .../references/configuration/cli/trivy_vm.md | 2 + .../references/configuration/config-file.md | 24 ++++--- pkg/commands/artifact/run.go | 2 + pkg/flag/misconf_flags.go | 20 
++++++ pkg/iac/scanners/helm/options.go | 8 +++ pkg/iac/scanners/helm/parser/option.go | 9 +++ pkg/iac/scanners/helm/parser/parser.go | 19 ++++- pkg/iac/scanners/helm/parser/parser_test.go | 3 +- pkg/iac/scanners/helm/scanner.go | 5 +- pkg/iac/scanners/helm/test/option_test.go | 72 +++++++++++++++++-- pkg/iac/scanners/helm/test/parser_test.go | 12 ++-- .../with-kube-version/templates/pdb.yaml | 17 +++++ .../testdata/with-kube-version/.helmignore | 23 ++++++ .../testdata/with-kube-version/Chart.yaml | 26 +++++++ .../with-kube-version/templates/_helpers.tpl | 62 ++++++++++++++++ .../with-kube-version/templates/pdb.yaml | 11 +++ .../testdata/with-kube-version/values.yaml | 0 pkg/misconf/scanner.go | 10 +++ 25 files changed, 313 insertions(+), 26 deletions(-) create mode 100644 pkg/iac/scanners/helm/test/testdata/expected/options/with-kube-version/templates/pdb.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/with-kube-version/.helmignore create mode 100644 pkg/iac/scanners/helm/test/testdata/with-kube-version/Chart.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/with-kube-version/templates/_helpers.tpl create mode 100644 pkg/iac/scanners/helm/test/testdata/with-kube-version/templates/pdb.yaml create mode 100644 pkg/iac/scanners/helm/test/testdata/with-kube-version/values.yaml diff --git a/docs/docs/references/configuration/cli/trivy_aws.md b/docs/docs/references/configuration/cli/trivy_aws.md index af1ebc44a834..b87bfce2bc30 100644 --- a/docs/docs/references/configuration/cli/trivy_aws.md +++ b/docs/docs/references/configuration/cli/trivy_aws.md @@ -76,6 +76,8 @@ trivy aws [flags] --endpoint string AWS Endpoint override --exit-code int specify exit code when any security issues are found -f, --format string format (table,json,template,sarif,cyclonedx,spdx,spdx-json,github,cosign-vuln) (default "table") + --helm-api-versions strings Available API versions used for Capabilities.APIVersions. 
This flag is the same as the api-versions flag of the helm template command. (can specify multiple or separate values with commas: policy/v1/PodDisruptionBudget,apps/v1/Deployment) + --helm-kube-version string Kubernetes version used for Capabilities.KubeVersion. This flag is the same as the kube-version flag of the helm template command. --helm-set strings specify Helm values on the command line (can specify multiple or separate values with commas: key1=val1,key2=val2) --helm-set-file strings specify Helm values from respective files specified via the command line (can specify multiple or separate values with commas: key1=path1,key2=path2) --helm-set-string strings specify Helm string values on the command line (can specify multiple or separate values with commas: key1=val1,key2=val2) diff --git a/docs/docs/references/configuration/cli/trivy_config.md b/docs/docs/references/configuration/cli/trivy_config.md index 865ecb6ba605..070257b4a896 100644 --- a/docs/docs/references/configuration/cli/trivy_config.md +++ b/docs/docs/references/configuration/cli/trivy_config.md @@ -20,6 +20,8 @@ trivy config [flags] DIR --exit-code int specify exit code when any security issues are found --file-patterns strings specify config file patterns -f, --format string format (table,json,template,sarif,cyclonedx,spdx,spdx-json,github,cosign-vuln) (default "table") + --helm-api-versions strings Available API versions used for Capabilities.APIVersions. This flag is the same as the api-versions flag of the helm template command. (can specify multiple or separate values with commas: policy/v1/PodDisruptionBudget,apps/v1/Deployment) + --helm-kube-version string Kubernetes version used for Capabilities.KubeVersion. This flag is the same as the kube-version flag of the helm template command. 
--helm-set strings specify Helm values on the command line (can specify multiple or separate values with commas: key1=val1,key2=val2) --helm-set-file strings specify Helm values from respective files specified via the command line (can specify multiple or separate values with commas: key1=path1,key2=path2) --helm-set-string strings specify Helm string values on the command line (can specify multiple or separate values with commas: key1=val1,key2=val2) diff --git a/docs/docs/references/configuration/cli/trivy_filesystem.md b/docs/docs/references/configuration/cli/trivy_filesystem.md index e26b26df7bfd..7aeb72ca5970 100644 --- a/docs/docs/references/configuration/cli/trivy_filesystem.md +++ b/docs/docs/references/configuration/cli/trivy_filesystem.md @@ -35,6 +35,8 @@ trivy filesystem [flags] PATH --exit-code int specify exit code when any security issues are found --file-patterns strings specify config file patterns -f, --format string format (table,json,template,sarif,cyclonedx,spdx,spdx-json,github,cosign-vuln) (default "table") + --helm-api-versions strings Available API versions used for Capabilities.APIVersions. This flag is the same as the api-versions flag of the helm template command. (can specify multiple or separate values with commas: policy/v1/PodDisruptionBudget,apps/v1/Deployment) + --helm-kube-version string Kubernetes version used for Capabilities.KubeVersion. This flag is the same as the kube-version flag of the helm template command. 
--helm-set strings specify Helm values on the command line (can specify multiple or separate values with commas: key1=val1,key2=val2) --helm-set-file strings specify Helm values from respective files specified via the command line (can specify multiple or separate values with commas: key1=path1,key2=path2) --helm-set-string strings specify Helm string values on the command line (can specify multiple or separate values with commas: key1=val1,key2=val2) diff --git a/docs/docs/references/configuration/cli/trivy_image.md b/docs/docs/references/configuration/cli/trivy_image.md index 20be9b459413..e5d91f31eb7c 100644 --- a/docs/docs/references/configuration/cli/trivy_image.md +++ b/docs/docs/references/configuration/cli/trivy_image.md @@ -51,6 +51,8 @@ trivy image [flags] IMAGE_NAME --exit-on-eol int exit with the specified code when the OS reaches end of service/life --file-patterns strings specify config file patterns -f, --format string format (table,json,template,sarif,cyclonedx,spdx,spdx-json,github,cosign-vuln) (default "table") + --helm-api-versions strings Available API versions used for Capabilities.APIVersions. This flag is the same as the api-versions flag of the helm template command. (can specify multiple or separate values with commas: policy/v1/PodDisruptionBudget,apps/v1/Deployment) + --helm-kube-version string Kubernetes version used for Capabilities.KubeVersion. This flag is the same as the kube-version flag of the helm template command. 
--helm-set strings specify Helm values on the command line (can specify multiple or separate values with commas: key1=val1,key2=val2) --helm-set-file strings specify Helm values from respective files specified via the command line (can specify multiple or separate values with commas: key1=path1,key2=path2) --helm-set-string strings specify Helm string values on the command line (can specify multiple or separate values with commas: key1=val1,key2=val2) diff --git a/docs/docs/references/configuration/cli/trivy_kubernetes.md b/docs/docs/references/configuration/cli/trivy_kubernetes.md index ed22d8299f5b..a1befafeb504 100644 --- a/docs/docs/references/configuration/cli/trivy_kubernetes.md +++ b/docs/docs/references/configuration/cli/trivy_kubernetes.md @@ -46,6 +46,8 @@ trivy kubernetes [flags] { cluster | all | specific resources like kubectl. eg: --exit-code int specify exit code when any security issues are found --file-patterns strings specify config file patterns -f, --format string format (table,json,cyclonedx) (default "table") + --helm-api-versions strings Available API versions used for Capabilities.APIVersions. This flag is the same as the api-versions flag of the helm template command. (can specify multiple or separate values with commas: policy/v1/PodDisruptionBudget,apps/v1/Deployment) + --helm-kube-version string Kubernetes version used for Capabilities.KubeVersion. This flag is the same as the kube-version flag of the helm template command. 
--helm-set strings specify Helm values on the command line (can specify multiple or separate values with commas: key1=val1,key2=val2) --helm-set-file strings specify Helm values from respective files specified via the command line (can specify multiple or separate values with commas: key1=path1,key2=path2) --helm-set-string strings specify Helm string values on the command line (can specify multiple or separate values with commas: key1=val1,key2=val2) diff --git a/docs/docs/references/configuration/cli/trivy_repository.md b/docs/docs/references/configuration/cli/trivy_repository.md index 0a2a614e4a5b..fa4d13bbdeee 100644 --- a/docs/docs/references/configuration/cli/trivy_repository.md +++ b/docs/docs/references/configuration/cli/trivy_repository.md @@ -35,6 +35,8 @@ trivy repository [flags] (REPO_PATH | REPO_URL) --exit-code int specify exit code when any security issues are found --file-patterns strings specify config file patterns -f, --format string format (table,json,template,sarif,cyclonedx,spdx,spdx-json,github,cosign-vuln) (default "table") + --helm-api-versions strings Available API versions used for Capabilities.APIVersions. This flag is the same as the api-versions flag of the helm template command. (can specify multiple or separate values with commas: policy/v1/PodDisruptionBudget,apps/v1/Deployment) + --helm-kube-version string Kubernetes version used for Capabilities.KubeVersion. This flag is the same as the kube-version flag of the helm template command. 
--helm-set strings specify Helm values on the command line (can specify multiple or separate values with commas: key1=val1,key2=val2) --helm-set-file strings specify Helm values from respective files specified via the command line (can specify multiple or separate values with commas: key1=path1,key2=path2) --helm-set-string strings specify Helm string values on the command line (can specify multiple or separate values with commas: key1=val1,key2=val2) diff --git a/docs/docs/references/configuration/cli/trivy_rootfs.md b/docs/docs/references/configuration/cli/trivy_rootfs.md index 571fb009f4f0..088b5deb6d88 100644 --- a/docs/docs/references/configuration/cli/trivy_rootfs.md +++ b/docs/docs/references/configuration/cli/trivy_rootfs.md @@ -38,6 +38,8 @@ trivy rootfs [flags] ROOTDIR --exit-on-eol int exit with the specified code when the OS reaches end of service/life --file-patterns strings specify config file patterns -f, --format string format (table,json,template,sarif,cyclonedx,spdx,spdx-json,github,cosign-vuln) (default "table") + --helm-api-versions strings Available API versions used for Capabilities.APIVersions. This flag is the same as the api-versions flag of the helm template command. (can specify multiple or separate values with commas: policy/v1/PodDisruptionBudget,apps/v1/Deployment) + --helm-kube-version string Kubernetes version used for Capabilities.KubeVersion. This flag is the same as the kube-version flag of the helm template command. 
--helm-set strings specify Helm values on the command line (can specify multiple or separate values with commas: key1=val1,key2=val2) --helm-set-file strings specify Helm values from respective files specified via the command line (can specify multiple or separate values with commas: key1=path1,key2=path2) --helm-set-string strings specify Helm string values on the command line (can specify multiple or separate values with commas: key1=val1,key2=val2) diff --git a/docs/docs/references/configuration/cli/trivy_vm.md b/docs/docs/references/configuration/cli/trivy_vm.md index 6acf6606284b..7b186a505794 100644 --- a/docs/docs/references/configuration/cli/trivy_vm.md +++ b/docs/docs/references/configuration/cli/trivy_vm.md @@ -35,6 +35,8 @@ trivy vm [flags] VM_IMAGE --exit-on-eol int exit with the specified code when the OS reaches end of service/life --file-patterns strings specify config file patterns -f, --format string format (table,json,template,sarif,cyclonedx,spdx,spdx-json,github,cosign-vuln) (default "table") + --helm-api-versions strings Available API versions used for Capabilities.APIVersions. This flag is the same as the api-versions flag of the helm template command. (can specify multiple or separate values with commas: policy/v1/PodDisruptionBudget,apps/v1/Deployment) + --helm-kube-version string Kubernetes version used for Capabilities.KubeVersion. This flag is the same as the kube-version flag of the helm template command. 
--helm-set strings specify Helm values on the command line (can specify multiple or separate values with commas: key1=val1,key2=val2) --helm-set-file strings specify Helm values from respective files specified via the command line (can specify multiple or separate values with commas: key1=path1,key2=path2) --helm-set-string strings specify Helm string values on the command line (can specify multiple or separate values with commas: key1=val1,key2=val2) diff --git a/docs/docs/references/configuration/config-file.md b/docs/docs/references/configuration/config-file.md index 1c94f2eb8db6..f649d2a213b6 100644 --- a/docs/docs/references/configuration/config-file.md +++ b/docs/docs/references/configuration/config-file.md @@ -279,35 +279,39 @@ misconfiguration: - terraform # helm value override configurations - # set individual values helm: + # set individual values set: - securityContext.runAsUser=10001 - # set values with file - helm: + # set values with file values: - overrides.yaml - # set specific values from specific files - helm: + # set specific values from specific files set-file: - image=dev-overrides.yaml - # set as string and preserve type - helm: + # set as string and preserve type set-string: - name=true + # Available API versions used for Capabilities.APIVersions. This flag is the same as the api-versions flag of the helm template command. + api-versions: + - policy/v1/PodDisruptionBudget + - apps/v1/Deployment + + # Kubernetes version used for Capabilities.KubeVersion. This flag is the same as the kube-version flag of the helm template command. 
+ kube-version: "v1.21.0" + # terraform tfvars overrrides terraform: vars: - dev-terraform.tfvars - common-terraform.tfvars - # Same as '--tf-exclude-downloaded-modules' - # Default is false - terraform: + # Same as '--tf-exclude-downloaded-modules' + # Default is false exclude-downloaded-modules: false ``` diff --git a/pkg/commands/artifact/run.go b/pkg/commands/artifact/run.go index ca1b80749c46..c54f0fe2fe75 100644 --- a/pkg/commands/artifact/run.go +++ b/pkg/commands/artifact/run.go @@ -603,6 +603,8 @@ func initScannerConfig(opts flag.Options, cacheClient cache.Cache) (ScannerConfi HelmValueFiles: opts.HelmValueFiles, HelmFileValues: opts.HelmFileValues, HelmStringValues: opts.HelmStringValues, + HelmAPIVersions: opts.HelmAPIVersions, + HelmKubeVersion: opts.HelmKubeVersion, TerraformTFVars: opts.TerraformTFVars, CloudFormationParamVars: opts.CloudFormationParamVars, K8sVersion: opts.K8sVersion, diff --git a/pkg/flag/misconf_flags.go b/pkg/flag/misconf_flags.go index 492960b60ff9..57a91820c60d 100644 --- a/pkg/flag/misconf_flags.go +++ b/pkg/flag/misconf_flags.go @@ -45,6 +45,16 @@ var ( ConfigName: "misconfiguration.helm.set-string", Usage: "specify Helm string values on the command line (can specify multiple or separate values with commas: key1=val1,key2=val2)", } + HelmAPIVersionsFlag = Flag[[]string]{ + Name: "helm-api-versions", + ConfigName: "misconfiguration.helm.api-versions", + Usage: "Available API versions used for Capabilities.APIVersions. This flag is the same as the api-versions flag of the helm template command. (can specify multiple or separate values with commas: policy/v1/PodDisruptionBudget,apps/v1/Deployment)", + } + HelmKubeVersionFlag = Flag[string]{ + Name: "helm-kube-version", + ConfigName: "misconfiguration.helm.kube-version", + Usage: "Kubernetes version used for Capabilities.KubeVersion. 
This flag is the same as the kube-version flag of the helm template command.", + } TfVarsFlag = Flag[[]string]{ Name: "tf-vars", ConfigName: "misconfiguration.terraform.vars", @@ -86,6 +96,8 @@ type MisconfFlagGroup struct { HelmValueFiles *Flag[[]string] HelmFileValues *Flag[[]string] HelmStringValues *Flag[[]string] + HelmAPIVersions *Flag[[]string] + HelmKubeVersion *Flag[string] TerraformTFVars *Flag[[]string] CloudformationParamVars *Flag[[]string] TerraformExcludeDownloaded *Flag[bool] @@ -102,6 +114,8 @@ type MisconfOptions struct { HelmValueFiles []string HelmFileValues []string HelmStringValues []string + HelmAPIVersions []string + HelmKubeVersion string TerraformTFVars []string CloudFormationParamVars []string TfExcludeDownloaded bool @@ -118,6 +132,8 @@ func NewMisconfFlagGroup() *MisconfFlagGroup { HelmFileValues: HelmSetFileFlag.Clone(), HelmStringValues: HelmSetStringFlag.Clone(), HelmValueFiles: HelmValuesFileFlag.Clone(), + HelmAPIVersions: HelmAPIVersionsFlag.Clone(), + HelmKubeVersion: HelmKubeVersionFlag.Clone(), TerraformTFVars: TfVarsFlag.Clone(), CloudformationParamVars: CfParamsFlag.Clone(), TerraformExcludeDownloaded: TerraformExcludeDownloaded.Clone(), @@ -138,6 +154,8 @@ func (f *MisconfFlagGroup) Flags() []Flagger { f.HelmValueFiles, f.HelmFileValues, f.HelmStringValues, + f.HelmAPIVersions, + f.HelmKubeVersion, f.TerraformTFVars, f.TerraformExcludeDownloaded, f.CloudformationParamVars, @@ -158,6 +176,8 @@ func (f *MisconfFlagGroup) ToOptions() (MisconfOptions, error) { HelmValueFiles: f.HelmValueFiles.Value(), HelmFileValues: f.HelmFileValues.Value(), HelmStringValues: f.HelmStringValues.Value(), + HelmAPIVersions: f.HelmAPIVersions.Value(), + HelmKubeVersion: f.HelmKubeVersion.Value(), TerraformTFVars: f.TerraformTFVars.Value(), CloudFormationParamVars: f.CloudformationParamVars.Value(), TfExcludeDownloaded: f.TerraformExcludeDownloaded.Value(), diff --git a/pkg/iac/scanners/helm/options.go b/pkg/iac/scanners/helm/options.go index 
009809e734e6..6ac412bf1e34 100644 --- a/pkg/iac/scanners/helm/options.go +++ b/pkg/iac/scanners/helm/options.go @@ -49,3 +49,11 @@ func ScannerWithAPIVersions(values ...string) options.ScannerOption { } } } + +func ScannerWithKubeVersion(values string) options.ScannerOption { + return func(s options.ConfigurableScanner) { + if helmScanner, ok := s.(ConfigurableHelmScanner); ok { + helmScanner.AddParserOptions(parser.OptionWithKubeVersion(values)) + } + } +} diff --git a/pkg/iac/scanners/helm/parser/option.go b/pkg/iac/scanners/helm/parser/option.go index 379cc9460979..6de98d765182 100644 --- a/pkg/iac/scanners/helm/parser/option.go +++ b/pkg/iac/scanners/helm/parser/option.go @@ -9,6 +9,7 @@ type ConfigurableHelmParser interface { SetFileValues(...string) SetStringValues(...string) SetAPIVersions(...string) + SetKubeVersion(string) } func OptionWithValuesFile(paths ...string) options.ParserOption { @@ -50,3 +51,11 @@ func OptionWithAPIVersions(values ...string) options.ParserOption { } } } + +func OptionWithKubeVersion(value string) options.ParserOption { + return func(p options.ConfigurableParser) { + if helmParser, ok := p.(ConfigurableHelmParser); ok { + helmParser.SetKubeVersion(value) + } + } +} diff --git a/pkg/iac/scanners/helm/parser/parser.go b/pkg/iac/scanners/helm/parser/parser.go index 3123b04e4b9c..c8bc8a73bedd 100644 --- a/pkg/iac/scanners/helm/parser/parser.go +++ b/pkg/iac/scanners/helm/parser/parser.go @@ -17,6 +17,7 @@ import ( "helm.sh/helm/v3/pkg/action" "helm.sh/helm/v3/pkg/chart" "helm.sh/helm/v3/pkg/chart/loader" + "helm.sh/helm/v3/pkg/chartutil" "helm.sh/helm/v3/pkg/release" "helm.sh/helm/v3/pkg/releaseutil" @@ -40,6 +41,7 @@ type Parser struct { fileValues []string stringValues []string apiVersions []string + kubeVersion string } type ChartFile struct { @@ -75,7 +77,11 @@ func (p *Parser) SetAPIVersions(values ...string) { p.apiVersions = values } -func New(path string, opts ...options.ParserOption) *Parser { +func (p *Parser) 
SetKubeVersion(value string) { + p.kubeVersion = value +} + +func New(path string, opts ...options.ParserOption) (*Parser, error) { client := action.NewInstall(&action.Configuration{}) client.DryRun = true // don't do anything @@ -95,7 +101,16 @@ func New(path string, opts ...options.ParserOption) *Parser { p.helmClient.APIVersions = p.apiVersions } - return p + if p.kubeVersion != "" { + kubeVersion, err := chartutil.ParseKubeVersion(p.kubeVersion) + if err != nil { + return nil, err + } + + p.helmClient.KubeVersion = kubeVersion + } + + return p, nil } func (p *Parser) ParseFS(ctx context.Context, target fs.FS, path string) error { diff --git a/pkg/iac/scanners/helm/parser/parser_test.go b/pkg/iac/scanners/helm/parser/parser_test.go index c146b8f9e18f..030b0efbb86f 100644 --- a/pkg/iac/scanners/helm/parser/parser_test.go +++ b/pkg/iac/scanners/helm/parser/parser_test.go @@ -12,7 +12,8 @@ import ( func TestParseFS(t *testing.T) { t.Run("source chart is located next to an same archived chart", func(t *testing.T) { - p := New(".") + p, err := New(".") + require.NoError(t, err) require.NoError(t, p.ParseFS(context.TODO(), os.DirFS(filepath.Join("testdata", "chart-and-archived-chart")), ".")) expectedFiles := []string{ diff --git a/pkg/iac/scanners/helm/scanner.go b/pkg/iac/scanners/helm/scanner.go index 3f1a0d2fbb65..e2b666082c97 100644 --- a/pkg/iac/scanners/helm/scanner.go +++ b/pkg/iac/scanners/helm/scanner.go @@ -170,7 +170,10 @@ func (s *Scanner) ScanFS(ctx context.Context, target fs.FS, path string) (scan.R } func (s *Scanner) getScanResults(path string, ctx context.Context, target fs.FS) (results []scan.Result, err error) { - helmParser := parser.New(path, s.parserOptions...) + helmParser, err := parser.New(path, s.parserOptions...) 
+ if err != nil { + return nil, err + } if err := helmParser.ParseFS(ctx, target, path); err != nil { return nil, err diff --git a/pkg/iac/scanners/helm/test/option_test.go b/pkg/iac/scanners/helm/test/option_test.go index d16d29039a15..8efb03f16116 100644 --- a/pkg/iac/scanners/helm/test/option_test.go +++ b/pkg/iac/scanners/helm/test/option_test.go @@ -40,9 +40,9 @@ func Test_helm_parser_with_options_with_values_file(t *testing.T) { opts = append(opts, parser.OptionWithValuesFile(test.valuesFile)) } - helmParser := parser.New(chartName, opts...) - err := helmParser.ParseFS(context.TODO(), os.DirFS(filepath.Join("testdata", chartName)), ".") + helmParser, err := parser.New(chartName, opts...) require.NoError(t, err) + require.NoError(t, helmParser.ParseFS(context.TODO(), os.DirFS(filepath.Join("testdata", chartName)), ".")) manifests, err := helmParser.RenderedChartFiles() require.NoError(t, err) @@ -94,8 +94,9 @@ func Test_helm_parser_with_options_with_set_value(t *testing.T) { opts = append(opts, parser.OptionWithValues(test.values)) } - helmParser := parser.New(chartName, opts...) - err := helmParser.ParseFS(context.TODO(), os.DirFS(filepath.Join("testdata", chartName)), ".") + helmParser, err := parser.New(chartName, opts...) + require.NoError(t, err) + err = helmParser.ParseFS(context.TODO(), os.DirFS(filepath.Join("testdata", chartName)), ".") require.NoError(t, err) manifests, err := helmParser.RenderedChartFiles() require.NoError(t, err) @@ -143,9 +144,68 @@ func Test_helm_parser_with_options_with_api_versions(t *testing.T) { opts = append(opts, parser.OptionWithAPIVersions(test.apiVersions...)) } - helmParser := parser.New(chartName, opts...) - err := helmParser.ParseFS(context.TODO(), os.DirFS(filepath.Join("testdata", chartName)), ".") + helmParser, err := parser.New(chartName, opts...) 
+ require.NoError(t, err) + err = helmParser.ParseFS(context.TODO(), os.DirFS(filepath.Join("testdata", chartName)), ".") + require.NoError(t, err) + manifests, err := helmParser.RenderedChartFiles() + require.NoError(t, err) + + assert.Len(t, manifests, 1) + + for _, manifest := range manifests { + expectedPath := filepath.Join("testdata", "expected", "options", chartName, manifest.TemplateFilePath) + + expectedContent, err := os.ReadFile(expectedPath) + require.NoError(t, err) + + cleanExpected := strings.TrimSpace(strings.ReplaceAll(string(expectedContent), "\r\n", "\n")) + cleanActual := strings.TrimSpace(strings.ReplaceAll(manifest.ManifestContent, "\r\n", "\n")) + + assert.Equal(t, cleanExpected, cleanActual) + } + }) + } +} + +func Test_helm_parser_with_options_with_kube_versions(t *testing.T) { + + tests := []struct { + testName string + chartName string + kubeVersion string + expectedError string + }{ + { + testName: "Parsing directory 'with-kube-version'", + chartName: "with-kube-version", + kubeVersion: "1.60", + }, + { + testName: "Parsing directory 'with-kube-version' with invalid kube version", + chartName: "with-kube-version", + kubeVersion: "a.b.c", + expectedError: "Invalid Semantic Version", + }, + } + + for _, test := range tests { + t.Run(test.testName, func(t *testing.T) { + chartName := test.chartName + + t.Logf("Running test: %s", test.testName) + + var opts []options.ParserOption + + opts = append(opts, parser.OptionWithKubeVersion(test.kubeVersion)) + + helmParser, err := parser.New(chartName, opts...) 
+ if test.expectedError != "" { + require.EqualError(t, err, test.expectedError) + return + } require.NoError(t, err) + require.NoError(t, helmParser.ParseFS(context.TODO(), os.DirFS(filepath.Join("testdata", chartName)), ".")) manifests, err := helmParser.RenderedChartFiles() require.NoError(t, err) diff --git a/pkg/iac/scanners/helm/test/parser_test.go b/pkg/iac/scanners/helm/test/parser_test.go index 0d12f33fe827..85a69469fb5d 100644 --- a/pkg/iac/scanners/helm/test/parser_test.go +++ b/pkg/iac/scanners/helm/test/parser_test.go @@ -32,9 +32,9 @@ func Test_helm_parser(t *testing.T) { for _, test := range tests { t.Run(test.testName, func(t *testing.T) { chartName := test.chartName - helmParser := parser.New(chartName) - err := helmParser.ParseFS(context.TODO(), os.DirFS("testdata"), chartName) + helmParser, err := parser.New(chartName) require.NoError(t, err) + require.NoError(t, helmParser.ParseFS(context.TODO(), os.DirFS("testdata"), chartName)) manifests, err := helmParser.RenderedChartFiles() require.NoError(t, err) @@ -70,9 +70,9 @@ func Test_helm_parser_where_name_non_string(t *testing.T) { t.Logf("Running test: %s", test.testName) - helmParser := parser.New(chartName) - err := helmParser.ParseFS(context.TODO(), os.DirFS(filepath.Join("testdata", chartName)), ".") + helmParser, err := parser.New(chartName) require.NoError(t, err) + require.NoError(t, helmParser.ParseFS(context.TODO(), os.DirFS(filepath.Join("testdata", chartName)), ".")) } } @@ -160,9 +160,9 @@ func Test_helm_tarball_parser(t *testing.T) { testFs := os.DirFS(testTemp) - helmParser := parser.New(test.archiveFile) - err := helmParser.ParseFS(context.TODO(), testFs, ".") + helmParser, err := parser.New(test.archiveFile) require.NoError(t, err) + require.NoError(t, helmParser.ParseFS(context.TODO(), testFs, ".")) manifests, err := helmParser.RenderedChartFiles() require.NoError(t, err) diff --git a/pkg/iac/scanners/helm/test/testdata/expected/options/with-kube-version/templates/pdb.yaml 
b/pkg/iac/scanners/helm/test/testdata/expected/options/with-kube-version/templates/pdb.yaml new file mode 100644 index 000000000000..7c7ef5fd74d7 --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/expected/options/with-kube-version/templates/pdb.yaml @@ -0,0 +1,17 @@ +# Source: with-api-version/templates/pdb.yaml +apiVersion: policy/v1 +kind: PodDisruptionBudget +metadata: + name: with-api-version + labels: + helm.sh/chart: with-api-version-0.1.0 + app.kubernetes.io/name: with-api-version + app.kubernetes.io/instance: with-api-version + app.kubernetes.io/version: "1.16.0" + app.kubernetes.io/managed-by: Helm +spec: + selector: + matchLabels: + app.kubernetes.io/name: with-api-version + app.kubernetes.io/instance: with-api-version + maxUnavailable: 0 diff --git a/pkg/iac/scanners/helm/test/testdata/with-kube-version/.helmignore b/pkg/iac/scanners/helm/test/testdata/with-kube-version/.helmignore new file mode 100644 index 000000000000..0e8a0eb36f4c --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/with-kube-version/.helmignore @@ -0,0 +1,23 @@ +# Patterns to ignore when building packages. +# This supports shell glob matching, relative path matching, and +# negation (prefixed with !). Only one pattern per line. +.DS_Store +# Common VCS dirs +.git/ +.gitignore +.bzr/ +.bzrignore +.hg/ +.hgignore +.svn/ +# Common backup files +*.swp +*.bak +*.tmp +*.orig +*~ +# Various IDEs +.project +.idea/ +*.tmproj +.vscode/ diff --git a/pkg/iac/scanners/helm/test/testdata/with-kube-version/Chart.yaml b/pkg/iac/scanners/helm/test/testdata/with-kube-version/Chart.yaml new file mode 100644 index 000000000000..99c44c125940 --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/with-kube-version/Chart.yaml @@ -0,0 +1,26 @@ +apiVersion: v2 +name: with-api-version +description: A Helm chart for Kubernetes + +# A chart can be either an 'application' or a 'library' chart. 
+# +# Application charts are a collection of templates that can be packaged into versioned archives +# to be deployed. +# +# Library charts provide useful utilities or functions for the chart developer. They're included as +# a dependency of application charts to inject those utilities and functions into the rendering +# pipeline. Library charts do not define any templates and therefore cannot be deployed. +type: application + +# This is the chart version. This version number should be incremented each time you make changes +# to the chart and its templates, including the app version. +# Versions are expected to follow Semantic Versioning (https://semver.org/) +version: 0.1.0 + +# This is the version number of the application being deployed. This version number should be +# incremented each time you make changes to the application. Versions are not expected to +# follow Semantic Versioning. They should reflect the version the application is using. +# It is recommended to use it with quotes. +appVersion: "1.16.0" + +kubeVersion: ">=1.60.0-0" diff --git a/pkg/iac/scanners/helm/test/testdata/with-kube-version/templates/_helpers.tpl b/pkg/iac/scanners/helm/test/testdata/with-kube-version/templates/_helpers.tpl new file mode 100644 index 000000000000..cab726131dc5 --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/with-kube-version/templates/_helpers.tpl @@ -0,0 +1,62 @@ +{{/* +Expand the name of the chart. +*/}} +{{- define "with-api-version.name" -}} +{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Create a default fully qualified app name. +We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). +If release name contains chart name it will be used as a full name. 
+*/}} +{{- define "with-api-version.fullname" -}} +{{- if .Values.fullnameOverride }} +{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- $name := default .Chart.Name .Values.nameOverride }} +{{- if contains $name .Release.Name }} +{{- .Release.Name | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }} +{{- end }} +{{- end }} +{{- end }} + +{{/* +Create chart name and version as used by the chart label. +*/}} +{{- define "with-api-version.chart" -}} +{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Common labels +*/}} +{{- define "with-api-version.labels" -}} +helm.sh/chart: {{ include "with-api-version.chart" . }} +{{ include "with-api-version.selectorLabels" . }} +{{- if .Chart.AppVersion }} +app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} +{{- end }} +app.kubernetes.io/managed-by: {{ .Release.Service }} +{{- end }} + +{{/* +Selector labels +*/}} +{{- define "with-api-version.selectorLabels" -}} +app.kubernetes.io/name: {{ include "with-api-version.name" . }} +app.kubernetes.io/instance: {{ .Release.Name }} +{{- end }} + +{{/* +Create the name of the service account to use +*/}} +{{- define "with-api-version.serviceAccountName" -}} +{{- if .Values.serviceAccount.create }} +{{- default (include "with-api-version.fullname" .) .Values.serviceAccount.name }} +{{- else }} +{{- default "default" .Values.serviceAccount.name }} +{{- end }} +{{- end }} diff --git a/pkg/iac/scanners/helm/test/testdata/with-kube-version/templates/pdb.yaml b/pkg/iac/scanners/helm/test/testdata/with-kube-version/templates/pdb.yaml new file mode 100644 index 000000000000..0c063e06df97 --- /dev/null +++ b/pkg/iac/scanners/helm/test/testdata/with-kube-version/templates/pdb.yaml @@ -0,0 +1,11 @@ +apiVersion: policy/v1 +kind: PodDisruptionBudget +metadata: + name: {{ include "with-api-version.fullname" . 
}} + labels: + {{- include "with-api-version.labels" . | nindent 4 }} +spec: + selector: + matchLabels: + {{- include "with-api-version.selectorLabels" . | nindent 6 }} + maxUnavailable: 0 diff --git a/pkg/iac/scanners/helm/test/testdata/with-kube-version/values.yaml b/pkg/iac/scanners/helm/test/testdata/with-kube-version/values.yaml new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/pkg/misconf/scanner.go b/pkg/misconf/scanner.go index 4c353f22c26e..6a30c9b69ec4 100644 --- a/pkg/misconf/scanner.go +++ b/pkg/misconf/scanner.go @@ -59,6 +59,8 @@ type ScannerOption struct { HelmValueFiles []string HelmFileValues []string HelmStringValues []string + HelmAPIVersions []string + HelmKubeVersion string TerraformTFVars []string CloudFormationParamVars []string TfExcludeDownloaded bool @@ -332,6 +334,14 @@ func addHelmOpts(opts []options.ScannerOption, scannerOption ScannerOption) []op opts = append(opts, helm2.ScannerWithStringValues(scannerOption.HelmStringValues...)) } + if len(scannerOption.HelmAPIVersions) > 0 { + opts = append(opts, helm2.ScannerWithAPIVersions(scannerOption.HelmAPIVersions...)) + } + + if scannerOption.HelmKubeVersion != "" { + opts = append(opts, helm2.ScannerWithKubeVersion(scannerOption.HelmKubeVersion)) + } + return opts } From a51ceddadaa1db45e9ec53ab2dbbd06a0b9e0ae5 Mon Sep 17 00:00:00 2001 From: Nikita Pivkin Date: Sat, 6 Apr 2024 08:10:53 +0300 Subject: [PATCH 53/57] refactor(terraform): sync funcs with Terraform (#6415) --- .golangci.yaml | 2 + .../scanners/terraform/parser/funcs/cidr.go | 51 ++-- .../terraform/parser/funcs/collection.go | 105 ++----- .../terraform/parser/funcs/conversion.go | 135 +------- .../scanners/terraform/parser/funcs/crypto.go | 93 +----- .../terraform/parser/funcs/datetime.go | 140 +++++++-- .../terraform/parser/funcs/defaults.go | 289 ------------------ .../terraform/parser/funcs/encoding.go | 112 ++----- .../terraform/parser/funcs/filesystem.go | 84 +---- 
pkg/iac/scanners/terraform/parser/funcs/ip.go | 261 ++++++++++++++++ .../scanners/terraform/parser/funcs/marks.go | 4 +- .../scanners/terraform/parser/funcs/number.go | 55 ++-- .../scanners/terraform/parser/funcs/redact.go | 20 ++ .../terraform/parser/funcs/refinements.go | 10 + .../terraform/parser/funcs/sensitive.go | 33 +- .../scanners/terraform/parser/funcs/string.go | 112 ++++++- .../scanners/terraform/parser/functions.go | 111 ++++--- 17 files changed, 711 insertions(+), 906 deletions(-) delete mode 100644 pkg/iac/scanners/terraform/parser/funcs/defaults.go create mode 100644 pkg/iac/scanners/terraform/parser/funcs/ip.go create mode 100644 pkg/iac/scanners/terraform/parser/funcs/redact.go create mode 100644 pkg/iac/scanners/terraform/parser/funcs/refinements.go diff --git a/.golangci.yaml b/.golangci.yaml index 1ab912bad2b1..7be028f1e1ce 100644 --- a/.golangci.yaml +++ b/.golangci.yaml @@ -94,6 +94,8 @@ run: - ".*_test.go$" - "integration/*" - "examples/*" + skip-dirs: + - "pkg/iac/scanners/terraform/parser/funcs" # copies of Terraform functions issues: exclude-rules: diff --git a/pkg/iac/scanners/terraform/parser/funcs/cidr.go b/pkg/iac/scanners/terraform/parser/funcs/cidr.go index 5f1504c0a8a1..23b1c7be0d45 100644 --- a/pkg/iac/scanners/terraform/parser/funcs/cidr.go +++ b/pkg/iac/scanners/terraform/parser/funcs/cidr.go @@ -4,7 +4,6 @@ package funcs import ( "fmt" "math/big" - "net" "github.com/apparentlymart/go-cidr/cidr" "github.com/zclconf/go-cty/cty" @@ -25,13 +24,14 @@ var CidrHostFunc = function.New(&function.Spec{ Type: cty.Number, }, }, - Type: function.StaticReturnType(cty.String), + Type: function.StaticReturnType(cty.String), + RefineResult: refineNotNull, Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { var hostNum *big.Int if err := gocty.FromCtyValue(args[1], &hostNum); err != nil { return cty.UnknownVal(cty.String), err } - _, network, err := net.ParseCIDR(args[0].AsString()) + _, network, err := 
ParseCIDR(args[0].AsString()) if err != nil { return cty.UnknownVal(cty.String), fmt.Errorf("invalid CIDR expression: %s", err) } @@ -54,14 +54,19 @@ var CidrNetmaskFunc = function.New(&function.Spec{ Type: cty.String, }, }, - Type: function.StaticReturnType(cty.String), + Type: function.StaticReturnType(cty.String), + RefineResult: refineNotNull, Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { - _, network, err := net.ParseCIDR(args[0].AsString()) + _, network, err := ParseCIDR(args[0].AsString()) if err != nil { return cty.UnknownVal(cty.String), fmt.Errorf("invalid CIDR expression: %s", err) } - return cty.StringVal(net.IP(network.Mask).String()), nil + if network.IP.To4() == nil { + return cty.UnknownVal(cty.String), fmt.Errorf("IPv6 addresses cannot have a netmask: %s", args[0].AsString()) + } + + return cty.StringVal(IP(network.Mask).String()), nil }, }) @@ -82,7 +87,8 @@ var CidrSubnetFunc = function.New(&function.Spec{ Type: cty.Number, }, }, - Type: function.StaticReturnType(cty.String), + Type: function.StaticReturnType(cty.String), + RefineResult: refineNotNull, Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { var newbits int if err := gocty.FromCtyValue(args[1], &newbits); err != nil { @@ -93,7 +99,7 @@ var CidrSubnetFunc = function.New(&function.Spec{ return cty.UnknownVal(cty.String), err } - _, network, err := net.ParseCIDR(args[0].AsString()) + _, network, err := ParseCIDR(args[0].AsString()) if err != nil { return cty.UnknownVal(cty.String), fmt.Errorf("invalid CIDR expression: %s", err) } @@ -120,9 +126,10 @@ var CidrSubnetsFunc = function.New(&function.Spec{ Name: "newbits", Type: cty.Number, }, - Type: function.StaticReturnType(cty.List(cty.String)), + Type: function.StaticReturnType(cty.List(cty.String)), + RefineResult: refineNotNull, Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { - _, network, err := net.ParseCIDR(args[0].AsString()) + _, network, err := 
ParseCIDR(args[0].AsString()) if err != nil { return cty.UnknownVal(cty.String), function.NewArgErrorf(0, "invalid CIDR expression: %s", err) } @@ -186,27 +193,3 @@ var CidrSubnetsFunc = function.New(&function.Spec{ return cty.ListVal(retVals), nil }, }) - -// CidrHost calculates a full host IP address within a given IP network address prefix. -func CidrHost(prefix, hostnum cty.Value) (cty.Value, error) { - return CidrHostFunc.Call([]cty.Value{prefix, hostnum}) -} - -// CidrNetmask converts an IPv4 address prefix given in CIDR notation into a subnet mask address. -func CidrNetmask(prefix cty.Value) (cty.Value, error) { - return CidrNetmaskFunc.Call([]cty.Value{prefix}) -} - -// CidrSubnet calculates a subnet address within a given IP network address prefix. -func CidrSubnet(prefix, newbits, netnum cty.Value) (cty.Value, error) { - return CidrSubnetFunc.Call([]cty.Value{prefix, newbits, netnum}) -} - -// CidrSubnets calculates a sequence of consecutive subnet prefixes that may -// be of different prefix lengths under a common base prefix. 
-func CidrSubnets(prefix cty.Value, newbits ...cty.Value) (cty.Value, error) { - args := make([]cty.Value, len(newbits)+1) - args[0] = prefix - copy(args[1:], newbits) - return CidrSubnetsFunc.Call(args) -} diff --git a/pkg/iac/scanners/terraform/parser/funcs/collection.go b/pkg/iac/scanners/terraform/parser/funcs/collection.go index f68af2ce36af..d5deb65a68e5 100644 --- a/pkg/iac/scanners/terraform/parser/funcs/collection.go +++ b/pkg/iac/scanners/terraform/parser/funcs/collection.go @@ -33,6 +33,7 @@ var LengthFunc = function.New(&function.Spec{ return cty.Number, errors.New("argument must be a string, a collection type, or a structural type") } }, + RefineResult: refineNotNull, Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { coll := args[0] collTy := args[0].Type() @@ -69,7 +70,8 @@ var AllTrueFunc = function.New(&function.Spec{ Type: cty.List(cty.Bool), }, }, - Type: function.StaticReturnType(cty.Bool), + Type: function.StaticReturnType(cty.Bool), + RefineResult: refineNotNull, Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { result := cty.True for it := args[0].ElementIterator(); it.Next(); { @@ -98,7 +100,8 @@ var AnyTrueFunc = function.New(&function.Spec{ Type: cty.List(cty.Bool), }, }, - Type: function.StaticReturnType(cty.Bool), + Type: function.StaticReturnType(cty.Bool), + RefineResult: refineNotNull, Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { result := cty.False var hasUnknown bool @@ -147,6 +150,7 @@ var CoalesceFunc = function.New(&function.Spec{ } return retType, nil }, + RefineResult: refineNotNull, Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { for _, argVal := range args { // We already know this will succeed because of the checks in our Type func above @@ -179,7 +183,8 @@ var IndexFunc = function.New(&function.Spec{ Type: cty.DynamicPseudoType, }, }, - Type: function.StaticReturnType(cty.Number), + Type: 
function.StaticReturnType(cty.Number), + RefineResult: refineNotNull, Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { if !(args[0].Type().IsListType() || args[0].Type().IsTupleType()) { return cty.NilVal, errors.New("argument must be a list or tuple") @@ -312,8 +317,8 @@ var LookupFunc = function.New(&function.Spec{ return defaultVal.WithMarks(markses...), nil } - return cty.UnknownVal(cty.DynamicPseudoType).WithMarks(markses...), fmt.Errorf( - "lookup failed to find '%s'", lookupKey) + return cty.UnknownVal(cty.DynamicPseudoType), fmt.Errorf( + "lookup failed to find key %s", redactIfSensitive(lookupKey, keyMarks)) }, }) @@ -344,6 +349,7 @@ var MatchkeysFunc = function.New(&function.Spec{ // the return type is based on args[0] (values) return args[0].Type(), nil }, + RefineResult: refineNotNull, Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { if !args[0].IsKnown() { return cty.UnknownVal(cty.List(retType.ElementType())), nil @@ -353,7 +359,7 @@ var MatchkeysFunc = function.New(&function.Spec{ return cty.ListValEmpty(retType.ElementType()), errors.New("length of keys and values should be equal") } - var output []cty.Value + output := make([]cty.Value, 0) values := args[0] // Keys and searchset must be the same type. 
@@ -487,7 +493,8 @@ var SumFunc = function.New(&function.Spec{ Type: cty.DynamicPseudoType, }, }, - Type: function.StaticReturnType(cty.Number), + Type: function.StaticReturnType(cty.Number), + RefineResult: refineNotNull, Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { if !args[0].CanIterateElements() { @@ -528,6 +535,10 @@ var SumFunc = function.New(&function.Spec{ if s.IsNull() { return cty.NilVal, function.NewArgErrorf(0, "argument must be list, set, or tuple of number values") } + s, err = convert.Convert(s, cty.Number) + if err != nil { + return cty.NilVal, function.NewArgErrorf(0, "argument must be list, set, or tuple of number values") + } for _, v := range arg[1:] { if v.IsNull() { return cty.NilVal, function.NewArgErrorf(0, "argument must be list, set, or tuple of number values") @@ -552,7 +563,8 @@ var TransposeFunc = function.New(&function.Spec{ Type: cty.Map(cty.List(cty.String)), }, }, - Type: function.StaticReturnType(cty.Map(cty.List(cty.String))), + Type: function.StaticReturnType(cty.Map(cty.List(cty.String))), + RefineResult: refineNotNull, Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { inputMap := args[0] if !inputMap.IsWhollyKnown() { @@ -582,7 +594,7 @@ var TransposeFunc = function.New(&function.Spec{ } for outKey, outVal := range tmpMap { - var values []cty.Value + values := make([]cty.Value, 0) for _, v := range outVal { values = append(values, cty.StringVal(v)) } @@ -600,7 +612,7 @@ var TransposeFunc = function.New(&function.Spec{ // ListFunc constructs a function that takes an arbitrary number of arguments // and returns a list containing those values in the same order. 
// -// Deprecated: This function is deprecated in Terraform v0.12 +// This function is deprecated in Terraform v0.12 var ListFunc = function.New(&function.Spec{ Params: []function.Parameter{}, VarParam: &function.Parameter{ @@ -621,7 +633,7 @@ var ListFunc = function.New(&function.Spec{ // MapFunc constructs a function that takes an even number of arguments and // returns a map whose elements are constructed from consecutive pairs of arguments. // -// Deprecated: This function is deprecated in Terraform v0.12 +// This function is deprecated in Terraform v0.12 var MapFunc = function.New(&function.Spec{ Params: []function.Parameter{}, VarParam: &function.Parameter{ @@ -638,74 +650,3 @@ var MapFunc = function.New(&function.Spec{ return cty.DynamicVal, fmt.Errorf("the \"map\" function was deprecated in Terraform v0.12 and is no longer available; use tomap({ ... }) syntax to write a literal map") }, }) - -// Length returns the number of elements in the given collection or number of -// Unicode characters in the given string. -func Length(collection cty.Value) (cty.Value, error) { - return LengthFunc.Call([]cty.Value{collection}) -} - -// AllTrue returns true if all elements of the list are true. If the list is empty, -// return true. -func AllTrue(collection cty.Value) (cty.Value, error) { - return AllTrueFunc.Call([]cty.Value{collection}) -} - -// AnyTrue returns true if any element of the list is true. If the list is empty, -// return false. -func AnyTrue(collection cty.Value) (cty.Value, error) { - return AnyTrueFunc.Call([]cty.Value{collection}) -} - -// Coalesce takes any number of arguments and returns the first one that isn't empty. -func Coalesce(args ...cty.Value) (cty.Value, error) { - return CoalesceFunc.Call(args) -} - -// Index finds the element index for a given value in a list. 
-func Index(list, value cty.Value) (cty.Value, error) { - return IndexFunc.Call([]cty.Value{list, value}) -} - -// List takes any number of list arguments and returns a list containing those -// -// values in the same order. -func List(args ...cty.Value) (cty.Value, error) { - return ListFunc.Call(args) -} - -// Lookup performs a dynamic lookup into a map. -// There are two required arguments, map and key, plus an optional default, -// which is a value to return if no key is found in map. -func Lookup(args ...cty.Value) (cty.Value, error) { - return LookupFunc.Call(args) -} - -// Map takes an even number of arguments and returns a map whose elements are constructed -// from consecutive pairs of arguments. -func Map(args ...cty.Value) (cty.Value, error) { - return MapFunc.Call(args) -} - -// Matchkeys constructs a new list by taking a subset of elements from one list -// whose indexes match the corresponding indexes of values in another list. -func Matchkeys(values, keys, searchset cty.Value) (cty.Value, error) { - return MatchkeysFunc.Call([]cty.Value{values, keys, searchset}) -} - -// One returns either the first element of a one-element list, or null -// if given a zero-element list.. -func One(list cty.Value) (cty.Value, error) { - return OneFunc.Call([]cty.Value{list}) -} - -// Sum adds numbers in a list, set, or tuple -func Sum(list cty.Value) (cty.Value, error) { - return SumFunc.Call([]cty.Value{list}) -} - -// Transpose takes a map of lists of strings and swaps the keys and values to -// produce a new map of lists of strings. 
-func Transpose(values cty.Value) (cty.Value, error) { - return TransposeFunc.Call([]cty.Value{values}) -} diff --git a/pkg/iac/scanners/terraform/parser/funcs/conversion.go b/pkg/iac/scanners/terraform/parser/funcs/conversion.go index 02fb3164a6f0..18a1310e69f7 100644 --- a/pkg/iac/scanners/terraform/parser/funcs/conversion.go +++ b/pkg/iac/scanners/terraform/parser/funcs/conversion.go @@ -2,10 +2,7 @@ package funcs import ( - "fmt" - "sort" "strconv" - "strings" "github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty/convert" @@ -32,9 +29,10 @@ func MakeToFunc(wantTy cty.Type) function.Function { // messages to be more appropriate for an explicit type // conversion, whereas the cty function system produces // messages aimed at _implicit_ type conversions. - Type: cty.DynamicPseudoType, - AllowNull: true, - AllowMarked: true, + Type: cty.DynamicPseudoType, + AllowNull: true, + AllowMarked: true, + AllowDynamicType: true, }, }, Type: func(args []cty.Value) (cty.Type, error) { @@ -96,128 +94,3 @@ func MakeToFunc(wantTy cty.Type) function.Function { }, }) } - -var TypeFunc = function.New(&function.Spec{ - Params: []function.Parameter{ - { - Name: "value", - Type: cty.DynamicPseudoType, - AllowDynamicType: true, - AllowUnknown: true, - AllowNull: true, - }, - }, - Type: function.StaticReturnType(cty.String), - Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { - return cty.StringVal(TypeString(args[0].Type())).Mark(MarkedRaw), nil - }, -}) - -// Modified copy of TypeString from go-cty: -// https://github.com/zclconf/go-cty-debug/blob/master/ctydebug/type_string.go -// -// TypeString returns a string representation of a given type that is -// reminiscent of Go syntax calling into the cty package but is mainly -// intended for easy human inspection of values in tests, debug output, etc. -// -// The resulting string will include newlines and indentation in order to -// increase the readability of complex structures. 
It always ends with a -// newline, so you can print this result directly to your output. -func TypeString(ty cty.Type) string { - var b strings.Builder - writeType(ty, &b, 0) - return b.String() -} - -func writeType(ty cty.Type, b *strings.Builder, indent int) { - switch { - case ty == cty.NilType: - b.WriteString("nil") - return - case ty.IsObjectType(): - atys := ty.AttributeTypes() - if len(atys) == 0 { - b.WriteString("object({})") - return - } - attrNames := make([]string, 0, len(atys)) - for name := range atys { - attrNames = append(attrNames, name) - } - sort.Strings(attrNames) - b.WriteString("object({\n") - indent++ - for _, name := range attrNames { - aty := atys[name] - b.WriteString(indentSpaces(indent)) - fmt.Fprintf(b, "%s: ", name) - writeType(aty, b, indent) - b.WriteString(",\n") - } - indent-- - b.WriteString(indentSpaces(indent)) - b.WriteString("})") - case ty.IsTupleType(): - etys := ty.TupleElementTypes() - if len(etys) == 0 { - b.WriteString("tuple([])") - return - } - b.WriteString("tuple([\n") - indent++ - for _, ety := range etys { - b.WriteString(indentSpaces(indent)) - writeType(ety, b, indent) - b.WriteString(",\n") - } - indent-- - b.WriteString(indentSpaces(indent)) - b.WriteString("])") - case ty.IsCollectionType(): - ety := ty.ElementType() - switch { - case ty.IsListType(): - b.WriteString("list(") - case ty.IsMapType(): - b.WriteString("map(") - case ty.IsSetType(): - b.WriteString("set(") - default: - // At the time of writing there are no other collection types, - // but we'll be robust here and just pass through the GoString - // of anything we don't recognize. - b.WriteString(ty.FriendlyName()) - return - } - // Because object and tuple types render split over multiple - // lines, a collection type container around them can end up - // being hard to see when scanning, so we'll generate some extra - // indentation to make a collection of structural type more visually - // distinct from the structural type alone. 
- complexElem := ety.IsObjectType() || ety.IsTupleType() - if complexElem { - indent++ - b.WriteString("\n") - b.WriteString(indentSpaces(indent)) - } - writeType(ty.ElementType(), b, indent) - if complexElem { - indent-- - b.WriteString(",\n") - b.WriteString(indentSpaces(indent)) - } - b.WriteString(")") - default: - // For any other type we'll just use its GoString and assume it'll - // follow the usual GoString conventions. - b.WriteString(ty.FriendlyName()) - } -} - -func indentSpaces(level int) string { - return strings.Repeat(" ", level) -} - -func Type(input []cty.Value) (cty.Value, error) { - return TypeFunc.Call(input) -} diff --git a/pkg/iac/scanners/terraform/parser/funcs/crypto.go b/pkg/iac/scanners/terraform/parser/funcs/crypto.go index 4bc3bab99744..894da1280c1a 100644 --- a/pkg/iac/scanners/terraform/parser/funcs/crypto.go +++ b/pkg/iac/scanners/terraform/parser/funcs/crypto.go @@ -26,8 +26,9 @@ import ( ) var UUIDFunc = function.New(&function.Spec{ - Params: []function.Parameter{}, - Type: function.StaticReturnType(cty.String), + Params: []function.Parameter{}, + Type: function.StaticReturnType(cty.String), + RefineResult: refineNotNull, Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { result, err := uuid.GenerateUUID() if err != nil { @@ -48,7 +49,8 @@ var UUIDV5Func = function.New(&function.Spec{ Type: cty.String, }, }, - Type: function.StaticReturnType(cty.String), + Type: function.StaticReturnType(cty.String), + RefineResult: refineNotNull, Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { var namespace uuidv5.UUID switch { @@ -102,7 +104,8 @@ var BcryptFunc = function.New(&function.Spec{ Name: "cost", Type: cty.Number, }, - Type: function.StaticReturnType(cty.String), + Type: function.StaticReturnType(cty.String), + RefineResult: refineNotNull, Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { defaultCost := 10 @@ -149,7 +152,8 @@ var RsaDecryptFunc = 
function.New(&function.Spec{ Type: cty.String, }, }, - Type: function.StaticReturnType(cty.String), + Type: function.StaticReturnType(cty.String), + RefineResult: refineNotNull, Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { s := args[0].AsString() key := args[1].AsString() @@ -224,7 +228,8 @@ func makeStringHashFunction(hf func() hash.Hash, enc func([]byte) string) functi Type: cty.String, }, }, - Type: function.StaticReturnType(cty.String), + Type: function.StaticReturnType(cty.String), + RefineResult: refineNotNull, Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { s := args[0].AsString() h := hf() @@ -243,13 +248,15 @@ func makeFileHashFunction(target fs.FS, baseDir string, hf func() hash.Hash, enc Type: cty.String, }, }, - Type: function.StaticReturnType(cty.String), + Type: function.StaticReturnType(cty.String), + RefineResult: refineNotNull, Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { path := args[0].AsString() f, err := openFile(target, baseDir, path) if err != nil { return cty.UnknownVal(cty.String), err } + defer f.Close() h := hf() _, err = io.Copy(h, f) @@ -261,75 +268,3 @@ func makeFileHashFunction(target fs.FS, baseDir string, hf func() hash.Hash, enc }, }) } - -// UUID generates and returns a Type-4 UUID in the standard hexadecimal string -// format. -// -// This is not a pure function: it will generate a different result for each -// call. It must therefore be registered as an impure function in the function -// table in the "lang" package. -func UUID() (cty.Value, error) { - return UUIDFunc.Call(nil) -} - -// UUIDV5 generates and returns a Type-5 UUID in the standard hexadecimal string -// format. -func UUIDV5(namespace, name cty.Value) (cty.Value, error) { - return UUIDV5Func.Call([]cty.Value{namespace, name}) -} - -// Base64Sha256 computes the SHA256 hash of a given string and encodes it with -// Base64. 
-// -// The given string is first encoded as UTF-8 and then the SHA256 algorithm is applied -// as defined in RFC 4634. The raw hash is then encoded with Base64 before returning. -// Terraform uses the "standard" Base64 alphabet as defined in RFC 4648 section 4. -func Base64Sha256(str cty.Value) (cty.Value, error) { - return Base64Sha256Func.Call([]cty.Value{str}) -} - -// Base64Sha512 computes the SHA512 hash of a given string and encodes it with -// Base64. -// -// The given string is first encoded as UTF-8 and then the SHA256 algorithm is applied -// as defined in RFC 4634. The raw hash is then encoded with Base64 before returning. -// Terraform uses the "standard" Base64 alphabet as defined in RFC 4648 section 4 -func Base64Sha512(str cty.Value) (cty.Value, error) { - return Base64Sha512Func.Call([]cty.Value{str}) -} - -// Bcrypt computes a hash of the given string using the Blowfish cipher, -// returning a string in the Modular Crypt Format -// usually expected in the shadow password file on many Unix systems. -func Bcrypt(str cty.Value, cost ...cty.Value) (cty.Value, error) { - args := make([]cty.Value, len(cost)+1) - args[0] = str - copy(args[1:], cost) - return BcryptFunc.Call(args) -} - -// Md5 computes the MD5 hash of a given string and encodes it with hexadecimal digits. -func Md5(str cty.Value) (cty.Value, error) { - return Md5Func.Call([]cty.Value{str}) -} - -// RsaDecrypt decrypts an RSA-encrypted ciphertext, returning the corresponding -// cleartext. -func RsaDecrypt(ciphertext, privatekey cty.Value) (cty.Value, error) { - return RsaDecryptFunc.Call([]cty.Value{ciphertext, privatekey}) -} - -// Sha1 computes the SHA1 hash of a given string and encodes it with hexadecimal digits. -func Sha1(str cty.Value) (cty.Value, error) { - return Sha1Func.Call([]cty.Value{str}) -} - -// Sha256 computes the SHA256 hash of a given string and encodes it with hexadecimal digits. 
-func Sha256(str cty.Value) (cty.Value, error) { - return Sha256Func.Call([]cty.Value{str}) -} - -// Sha512 computes the SHA512 hash of a given string and encodes it with hexadecimal digits. -func Sha512(str cty.Value) (cty.Value, error) { - return Sha512Func.Call([]cty.Value{str}) -} diff --git a/pkg/iac/scanners/terraform/parser/funcs/datetime.go b/pkg/iac/scanners/terraform/parser/funcs/datetime.go index b09da879da99..11ed3c8a2214 100644 --- a/pkg/iac/scanners/terraform/parser/funcs/datetime.go +++ b/pkg/iac/scanners/terraform/parser/funcs/datetime.go @@ -2,6 +2,7 @@ package funcs import ( + "fmt" "time" "github.com/zclconf/go-cty/cty" @@ -10,13 +11,26 @@ import ( // TimestampFunc constructs a function that returns a string representation of the current date and time. var TimestampFunc = function.New(&function.Spec{ - Params: []function.Parameter{}, - Type: function.StaticReturnType(cty.String), + Params: []function.Parameter{}, + Type: function.StaticReturnType(cty.String), + RefineResult: refineNotNull, Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { return cty.StringVal(time.Now().UTC().Format(time.RFC3339)), nil }, }) +// MakeStaticTimestampFunc constructs a function that returns a string +// representation of the date and time specified by the provided argument. +func MakeStaticTimestampFunc(static time.Time) function.Function { + return function.New(&function.Spec{ + Params: []function.Parameter{}, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + return cty.StringVal(static.Format(time.RFC3339)), nil + }, + }) +} + // TimeAddFunc constructs a function that adds a duration to a timestamp, returning a new timestamp. 
var TimeAddFunc = function.New(&function.Spec{ Params: []function.Parameter{ @@ -29,9 +43,10 @@ var TimeAddFunc = function.New(&function.Spec{ Type: cty.String, }, }, - Type: function.StaticReturnType(cty.String), + Type: function.StaticReturnType(cty.String), + RefineResult: refineNotNull, Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { - ts, err := time.Parse(time.RFC3339, args[0].AsString()) + ts, err := parseTimestamp(args[0].AsString()) if err != nil { return cty.UnknownVal(cty.String), err } @@ -44,28 +59,99 @@ var TimeAddFunc = function.New(&function.Spec{ }, }) -// Timestamp returns a string representation of the current date and time. -// -// In the Terraform language, timestamps are conventionally represented as -// strings using RFC 3339 "Date and Time format" syntax, and so timestamp -// returns a string in this format. -func Timestamp() (cty.Value, error) { - return TimestampFunc.Call([]cty.Value{}) -} +// TimeCmpFunc is a function that compares two timestamps. +var TimeCmpFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "timestamp_a", + Type: cty.String, + }, + { + Name: "timestamp_b", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.Number), + RefineResult: refineNotNull, + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + tsA, err := parseTimestamp(args[0].AsString()) + if err != nil { + return cty.UnknownVal(cty.String), function.NewArgError(0, err) + } + tsB, err := parseTimestamp(args[1].AsString()) + if err != nil { + return cty.UnknownVal(cty.String), function.NewArgError(1, err) + } -// TimeAdd adds a duration to a timestamp, returning a new timestamp. -// -// In the Terraform language, timestamps are conventionally represented as -// strings using RFC 3339 "Date and Time format" syntax. Timeadd requires -// the timestamp argument to be a string conforming to this syntax. 
-// -// `duration` is a string representation of a time difference, consisting of -// sequences of number and unit pairs, like `"1.5h"` or `1h30m`. The accepted -// units are `ns`, `us` (or `µs`), `"ms"`, `"s"`, `"m"`, and `"h"`. The first -// number may be negative to indicate a negative duration, like `"-2h5m"`. -// -// The result is a string, also in RFC 3339 format, representing the result -// of adding the given direction to the given timestamp. -func TimeAdd(timestamp, duration cty.Value) (cty.Value, error) { - return TimeAddFunc.Call([]cty.Value{timestamp, duration}) + switch { + case tsA.Equal(tsB): + return cty.NumberIntVal(0), nil + case tsA.Before(tsB): + return cty.NumberIntVal(-1), nil + default: + // By elimintation, tsA must be after tsB. + return cty.NumberIntVal(1), nil + } + }, +}) + +func parseTimestamp(ts string) (time.Time, error) { + t, err := time.Parse(time.RFC3339, ts) + if err != nil { + switch err := err.(type) { + case *time.ParseError: + // If err is a time.ParseError then its string representation is not + // appropriate since it relies on details of Go's strange date format + // representation, which a caller of our functions is not expected + // to be familiar with. + // + // Therefore we do some light transformation to get a more suitable + // error that should make more sense to our callers. These are + // still not awesome error messages, but at least they refer to + // the timestamp portions by name rather than by Go's example + // values. + if err.LayoutElem == "" && err.ValueElem == "" && err.Message != "" { + // For some reason err.Message is populated with a ": " prefix + // by the time package. 
+ return time.Time{}, fmt.Errorf("not a valid RFC3339 timestamp%s", err.Message) + } + var what string + switch err.LayoutElem { + case "2006": + what = "year" + case "01": + what = "month" + case "02": + what = "day of month" + case "15": + what = "hour" + case "04": + what = "minute" + case "05": + what = "second" + case "Z07:00": + what = "UTC offset" + case "T": + return time.Time{}, fmt.Errorf("not a valid RFC3339 timestamp: missing required time introducer 'T'") + case ":", "-": + if err.ValueElem == "" { + return time.Time{}, fmt.Errorf("not a valid RFC3339 timestamp: end of string where %q is expected", err.LayoutElem) + } else { + return time.Time{}, fmt.Errorf("not a valid RFC3339 timestamp: found %q where %q is expected", err.ValueElem, err.LayoutElem) + } + default: + // Should never get here, because time.RFC3339 includes only the + // above portions, but since that might change in future we'll + // be robust here. + what = "timestamp segment" + } + if err.ValueElem == "" { + return time.Time{}, fmt.Errorf("not a valid RFC3339 timestamp: end of string before %s", what) + } else { + return time.Time{}, fmt.Errorf("not a valid RFC3339 timestamp: cannot use %q as %s", err.ValueElem, what) + } + } + return time.Time{}, err + } + return t, nil } diff --git a/pkg/iac/scanners/terraform/parser/funcs/defaults.go b/pkg/iac/scanners/terraform/parser/funcs/defaults.go deleted file mode 100644 index 1e5c0913adbd..000000000000 --- a/pkg/iac/scanners/terraform/parser/funcs/defaults.go +++ /dev/null @@ -1,289 +0,0 @@ -// Copied from github.com/hashicorp/terraform/internal/lang/funcs -package funcs - -import ( - "fmt" - - "github.com/zclconf/go-cty/cty" - "github.com/zclconf/go-cty/cty/convert" - "github.com/zclconf/go-cty/cty/function" -) - -// DefaultsFunc is a helper function for substituting default values in -// place of null values in a given data structure. -// -// See the documentation for function Defaults for more information. 
-var DefaultsFunc = function.New(&function.Spec{ - Params: []function.Parameter{ - { - Name: "input", - Type: cty.DynamicPseudoType, - AllowNull: true, - AllowMarked: true, - }, - { - Name: "defaults", - Type: cty.DynamicPseudoType, - AllowMarked: true, - }, - }, - Type: func(args []cty.Value) (cty.Type, error) { - // The result type is guaranteed to be the same as the input type, - // since all we're doing is replacing null values with non-null - // values of the same type. - retType := args[0].Type() - defaultsType := args[1].Type() - - // This function is aimed at filling in object types or collections - // of object types where some of the attributes might be null, so - // it doesn't make sense to use a primitive type directly with it. - // (The "coalesce" function may be appropriate for such cases.) - if retType.IsPrimitiveType() { - // This error message is a bit of a fib because we can actually - // apply defaults to tuples too, but we expect that to be so - // unusual as to not be worth mentioning here, because mentioning - // it would require using some less-well-known Terraform language - // terminology in the message (tuple types, structural types). - return cty.DynamicPseudoType, function.NewArgErrorf(1, "only object types and collections of object types can have defaults applied") - } - - defaultsPath := make(cty.Path, 0, 4) // some capacity so that most structures won't reallocate - if err := defaultsAssertSuitableFallback(retType, defaultsType, defaultsPath); err != nil { - errMsg := err.Error() - return cty.DynamicPseudoType, function.NewArgErrorf(1, "%s", errMsg) - } - - return retType, nil - }, - Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { - if args[0].Type().HasDynamicTypes() { - // If the types our input object aren't known yet for some reason - // then we'll defer all of our work here, because our - // interpretation of the defaults depends on the types in - // the input. 
- return cty.UnknownVal(retType), nil - } - - v := defaultsApply(args[0], args[1]) - return v, nil - }, -}) - -// nolint: gocyclo -func defaultsApply(input, fallback cty.Value) cty.Value { - wantTy := input.Type() - - umInput, inputMarks := input.Unmark() - umFb, fallbackMarks := fallback.Unmark() - - // If neither are known, we very conservatively return an unknown value - // with the union of marks on both input and default. - if !(umInput.IsKnown() && umFb.IsKnown()) { - return cty.UnknownVal(wantTy).WithMarks(inputMarks).WithMarks(fallbackMarks) - } - - // For the rest of this function we're assuming that the given defaults - // will always be valid, because we expect to have caught any problems - // during the type checking phase. Any inconsistencies that reach here are - // therefore considered to be implementation bugs, and so will panic. - - // Our strategy depends on the kind of type we're working with. - switch { - case wantTy.IsPrimitiveType(): - // For leaf primitive values the rule is relatively simple: use the - // input if it's non-null, or fallback if input is null. - if !umInput.IsNull() { - return input - } - v, err := convert.Convert(umFb, wantTy) - if err != nil { - // Should not happen because we checked in defaultsAssertSuitableFallback - panic(err.Error()) - } - return v.WithMarks(fallbackMarks) - - case wantTy.IsObjectType(): - // For structural types, a null input value must be passed through. We - // do not apply default values for missing optional structural values, - // only their contents. - // - // We also pass through the input if the fallback value is null. This - // can happen if the given defaults do not include a value for this - // attribute. 
- if umInput.IsNull() || umFb.IsNull() { - return input - } - atys := wantTy.AttributeTypes() - ret := make(map[string]cty.Value) - for attr, aty := range atys { - inputSub := umInput.GetAttr(attr) - fallbackSub := cty.NullVal(aty) - if umFb.Type().HasAttribute(attr) { - fallbackSub = umFb.GetAttr(attr) - } - ret[attr] = defaultsApply(inputSub.WithMarks(inputMarks), fallbackSub.WithMarks(fallbackMarks)) - } - return cty.ObjectVal(ret) - - case wantTy.IsTupleType(): - // For structural types, a null input value must be passed through. We - // do not apply default values for missing optional structural values, - // only their contents. - // - // We also pass through the input if the fallback value is null. This - // can happen if the given defaults do not include a value for this - // attribute. - if umInput.IsNull() || umFb.IsNull() { - return input - } - - l := wantTy.Length() - ret := make([]cty.Value, l) - for i := 0; i < l; i++ { - inputSub := umInput.Index(cty.NumberIntVal(int64(i))) - fallbackSub := umFb.Index(cty.NumberIntVal(int64(i))) - ret[i] = defaultsApply(inputSub.WithMarks(inputMarks), fallbackSub.WithMarks(fallbackMarks)) - } - return cty.TupleVal(ret) - - case wantTy.IsCollectionType(): - // For collection types we apply a single fallback value to each - // element of the input collection, because in the situations this - // function is intended for we assume that the number of elements - // is the caller's decision, and so we'll just apply the same defaults - // to all of the elements. 
- ety := wantTy.ElementType() - switch { - case wantTy.IsMapType(): - newVals := make(map[string]cty.Value) - - if !umInput.IsNull() { - for it := umInput.ElementIterator(); it.Next(); { - k, v := it.Element() - newVals[k.AsString()] = defaultsApply(v.WithMarks(inputMarks), fallback.WithMarks(fallbackMarks)) - } - } - - if len(newVals) == 0 { - return cty.MapValEmpty(ety) - } - return cty.MapVal(newVals) - case wantTy.IsListType(), wantTy.IsSetType(): - var newVals []cty.Value - - if !umInput.IsNull() { - for it := umInput.ElementIterator(); it.Next(); { - _, v := it.Element() - newV := defaultsApply(v.WithMarks(inputMarks), fallback.WithMarks(fallbackMarks)) - newVals = append(newVals, newV) - } - } - - if len(newVals) == 0 { - if wantTy.IsSetType() { - return cty.SetValEmpty(ety) - } - return cty.ListValEmpty(ety) - } - if wantTy.IsSetType() { - return cty.SetVal(newVals) - } - return cty.ListVal(newVals) - default: - // There are no other collection types, so this should not happen - panic(fmt.Sprintf("invalid collection type %#v", wantTy)) - } - default: - // We should've caught anything else in defaultsAssertSuitableFallback, - // so this should not happen. - panic(fmt.Sprintf("invalid target type %#v", wantTy)) - } -} - -func defaultsAssertSuitableFallback(wantTy, fallbackTy cty.Type, fallbackPath cty.Path) error { - // If the type we want is a collection type then we need to keep peeling - // away collection type wrappers until we find the non-collection-type - // that's underneath, which is what the fallback will actually be applied - // to. - inCollection := false - for wantTy.IsCollectionType() { - wantTy = wantTy.ElementType() - inCollection = true - } - - switch { - case wantTy.IsPrimitiveType(): - // The fallback is valid if it's equal to or convertible to what we want. 
- if fallbackTy.Equals(wantTy) { - return nil - } - conversion := convert.GetConversion(fallbackTy, wantTy) - if conversion == nil { - msg := convert.MismatchMessage(fallbackTy, wantTy) - return fallbackPath.NewErrorf("invalid default value for %s: %s", wantTy.FriendlyName(), msg) - } - return nil - case wantTy.IsObjectType(): - if !fallbackTy.IsObjectType() { - if inCollection { - return fallbackPath.NewErrorf("the default value for a collection of an object type must itself be an object type, not %s", fallbackTy.FriendlyName()) - } - return fallbackPath.NewErrorf("the default value for an object type must itself be an object type, not %s", fallbackTy.FriendlyName()) - } - for attr, wantAty := range wantTy.AttributeTypes() { - if !fallbackTy.HasAttribute(attr) { - continue // it's always okay to not have a default value - } - fallbackSubpath := fallbackPath.GetAttr(attr) - fallbackSubTy := fallbackTy.AttributeType(attr) - err := defaultsAssertSuitableFallback(wantAty, fallbackSubTy, fallbackSubpath) - if err != nil { - return err - } - } - for attr := range fallbackTy.AttributeTypes() { - if !wantTy.HasAttribute(attr) { - fallbackSubpath := fallbackPath.GetAttr(attr) - return fallbackSubpath.NewErrorf("target type does not expect an attribute named %q", attr) - } - } - return nil - case wantTy.IsTupleType(): - if !fallbackTy.IsTupleType() { - if inCollection { - return fallbackPath.NewErrorf("the default value for a collection of a tuple type must itself be a tuple type, not %s", fallbackTy.FriendlyName()) - } - return fallbackPath.NewErrorf("the default value for a tuple type must itself be a tuple type, not %s", fallbackTy.FriendlyName()) - } - wantEtys := wantTy.TupleElementTypes() - fallbackEtys := fallbackTy.TupleElementTypes() - if got, want := len(wantEtys), len(fallbackEtys); got != want { - return fallbackPath.NewErrorf("the default value for a tuple type of length %d must also have length %d, not %d", want, want, got) - } - for i := 0; i < len(wantEtys); 
i++ { - fallbackSubpath := fallbackPath.IndexInt(i) - wantSubTy := wantEtys[i] - fallbackSubTy := fallbackEtys[i] - err := defaultsAssertSuitableFallback(wantSubTy, fallbackSubTy, fallbackSubpath) - if err != nil { - return err - } - } - return nil - default: - // No other types are supported right now. - return fallbackPath.NewErrorf("cannot apply defaults to %s", wantTy.FriendlyName()) - } -} - -// Defaults is a helper function for substituting default values in -// place of null values in a given data structure. -// -// This is primarily intended for use with a module input variable that -// has an object type constraint (or a collection thereof) that has optional -// attributes, so that the receiver of a value that omits those attributes -// can insert non-null default values in place of the null values caused by -// omitting the attributes. -func Defaults(input, defaults cty.Value) (cty.Value, error) { - return DefaultsFunc.Call([]cty.Value{input, defaults}) -} diff --git a/pkg/iac/scanners/terraform/parser/funcs/encoding.go b/pkg/iac/scanners/terraform/parser/funcs/encoding.go index 778367fb8fce..e5fb8490818f 100644 --- a/pkg/iac/scanners/terraform/parser/funcs/encoding.go +++ b/pkg/iac/scanners/terraform/parser/funcs/encoding.go @@ -19,22 +19,25 @@ import ( var Base64DecodeFunc = function.New(&function.Spec{ Params: []function.Parameter{ { - Name: "str", - Type: cty.String, + Name: "str", + Type: cty.String, + AllowMarked: true, }, }, - Type: function.StaticReturnType(cty.String), + Type: function.StaticReturnType(cty.String), + RefineResult: refineNotNull, Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { - s := args[0].AsString() + str, strMarks := args[0].Unmark() + s := str.AsString() sDec, err := base64.StdEncoding.DecodeString(s) if err != nil { - return cty.UnknownVal(cty.String), fmt.Errorf("failed to decode base64 data '%s'", s) + return cty.UnknownVal(cty.String), fmt.Errorf("failed to decode base64 data %s", redactIfSensitive(s, 
strMarks)) } - if !utf8.Valid(sDec) { - log.Printf("[DEBUG] the result of decoding the provided string is not valid UTF-8: %s", sDec) + if !utf8.Valid([]byte(sDec)) { + log.Printf("[DEBUG] the result of decoding the provided string is not valid UTF-8: %s", redactIfSensitive(sDec, strMarks)) return cty.UnknownVal(cty.String), fmt.Errorf("the result of decoding the provided string is not valid UTF-8") } - return cty.StringVal(string(sDec)), nil + return cty.StringVal(string(sDec)).WithMarks(strMarks), nil }, }) @@ -46,7 +49,8 @@ var Base64EncodeFunc = function.New(&function.Spec{ Type: cty.String, }, }, - Type: function.StaticReturnType(cty.String), + Type: function.StaticReturnType(cty.String), + RefineResult: refineNotNull, Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { return cty.StringVal(base64.StdEncoding.EncodeToString([]byte(args[0].AsString()))), nil }, @@ -64,7 +68,8 @@ var TextEncodeBase64Func = function.New(&function.Spec{ Type: cty.String, }, }, - Type: function.StaticReturnType(cty.String), + Type: function.StaticReturnType(cty.String), + RefineResult: refineNotNull, Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { encoding, err := ianaindex.IANA.Encoding(args[1].AsString()) if err != nil || encoding == nil { @@ -107,7 +112,8 @@ var TextDecodeBase64Func = function.New(&function.Spec{ Type: cty.String, }, }, - Type: function.StaticReturnType(cty.String), + Type: function.StaticReturnType(cty.String), + RefineResult: refineNotNull, Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { encoding, err := ianaindex.IANA.Encoding(args[1].AsString()) if err != nil || encoding == nil { @@ -126,7 +132,7 @@ var TextDecodeBase64Func = function.New(&function.Spec{ case base64.CorruptInputError: return cty.UnknownVal(cty.String), function.NewArgErrorf(0, "the given value is has an invalid base64 symbol at offset %d", int(err)) default: - return cty.UnknownVal(cty.String), function.NewArgErrorf(0, "invalid source 
string: %T", err) + return cty.UnknownVal(cty.String), function.NewArgErrorf(0, "invalid source string: %w", err) } } @@ -150,20 +156,21 @@ var Base64GzipFunc = function.New(&function.Spec{ Type: cty.String, }, }, - Type: function.StaticReturnType(cty.String), + Type: function.StaticReturnType(cty.String), + RefineResult: refineNotNull, Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { s := args[0].AsString() var b bytes.Buffer gz := gzip.NewWriter(&b) if _, err := gz.Write([]byte(s)); err != nil { - return cty.UnknownVal(cty.String), fmt.Errorf("failed to write gzip raw data: '%s'", s) + return cty.UnknownVal(cty.String), fmt.Errorf("failed to write gzip raw data: %w", err) } if err := gz.Flush(); err != nil { - return cty.UnknownVal(cty.String), fmt.Errorf("failed to flush gzip writer: '%s'", s) + return cty.UnknownVal(cty.String), fmt.Errorf("failed to flush gzip writer: %w", err) } if err := gz.Close(); err != nil { - return cty.UnknownVal(cty.String), fmt.Errorf("failed to close gzip writer: '%s'", s) + return cty.UnknownVal(cty.String), fmt.Errorf("failed to close gzip writer: %w", err) } return cty.StringVal(base64.StdEncoding.EncodeToString(b.Bytes())), nil }, @@ -177,78 +184,9 @@ var URLEncodeFunc = function.New(&function.Spec{ Type: cty.String, }, }, - Type: function.StaticReturnType(cty.String), + Type: function.StaticReturnType(cty.String), + RefineResult: refineNotNull, Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { return cty.StringVal(url.QueryEscape(args[0].AsString())), nil }, }) - -// Base64Decode decodes a string containing a base64 sequence. -// -// Terraform uses the "standard" Base64 alphabet as defined in RFC 4648 section 4. -// -// Strings in the Terraform language are sequences of unicode characters rather -// than bytes, so this function will also interpret the resulting bytes as -// UTF-8. If the bytes after Base64 decoding are _not_ valid UTF-8, this function -// produces an error. 
-func Base64Decode(str cty.Value) (cty.Value, error) { - return Base64DecodeFunc.Call([]cty.Value{str}) -} - -// Base64Encode applies Base64 encoding to a string. -// -// Terraform uses the "standard" Base64 alphabet as defined in RFC 4648 section 4. -// -// Strings in the Terraform language are sequences of unicode characters rather -// than bytes, so this function will first encode the characters from the string -// as UTF-8, and then apply Base64 encoding to the result. -func Base64Encode(str cty.Value) (cty.Value, error) { - return Base64EncodeFunc.Call([]cty.Value{str}) -} - -// Base64Gzip compresses a string with gzip and then encodes the result in -// Base64 encoding. -// -// Terraform uses the "standard" Base64 alphabet as defined in RFC 4648 section 4. -// -// Strings in the Terraform language are sequences of unicode characters rather -// than bytes, so this function will first encode the characters from the string -// as UTF-8, then apply gzip compression, and then finally apply Base64 encoding. -func Base64Gzip(str cty.Value) (cty.Value, error) { - return Base64GzipFunc.Call([]cty.Value{str}) -} - -// URLEncode applies URL encoding to a given string. -// -// This function identifies characters in the given string that would have a -// special meaning when included as a query string argument in a URL and -// escapes them using RFC 3986 "percent encoding". -// -// If the given string contains non-ASCII characters, these are first encoded as -// UTF-8 and then percent encoding is applied separately to each UTF-8 byte. -func URLEncode(str cty.Value) (cty.Value, error) { - return URLEncodeFunc.Call([]cty.Value{str}) -} - -// TextEncodeBase64 applies Base64 encoding to a string that was encoded before with a target encoding. -// -// Terraform uses the "standard" Base64 alphabet as defined in RFC 4648 section 4. -// -// First step is to apply the target IANA encoding (e.g. UTF-16LE). 
-// Strings in the Terraform language are sequences of unicode characters rather -// than bytes, so this function will first encode the characters from the string -// as UTF-8, and then apply Base64 encoding to the result. -func TextEncodeBase64(str, enc cty.Value) (cty.Value, error) { - return TextEncodeBase64Func.Call([]cty.Value{str, enc}) -} - -// TextDecodeBase64 decodes a string containing a base64 sequence whereas a specific encoding of the string is expected. -// -// Terraform uses the "standard" Base64 alphabet as defined in RFC 4648 section 4. -// -// Strings in the Terraform language are sequences of unicode characters rather -// than bytes, so this function will also interpret the resulting bytes as -// the target encoding. -func TextDecodeBase64(str, enc cty.Value) (cty.Value, error) { - return TextDecodeBase64Func.Call([]cty.Value{str, enc}) -} diff --git a/pkg/iac/scanners/terraform/parser/funcs/filesystem.go b/pkg/iac/scanners/terraform/parser/funcs/filesystem.go index 910e17f325c6..a53e975443f6 100644 --- a/pkg/iac/scanners/terraform/parser/funcs/filesystem.go +++ b/pkg/iac/scanners/terraform/parser/funcs/filesystem.go @@ -3,6 +3,7 @@ package funcs import ( "encoding/base64" + "errors" "fmt" "io" "io/fs" @@ -184,7 +185,7 @@ func MakeTemplateFileFunc(target fs.FS, baseDir string, funcsCb func() map[strin // MakeFileExistsFunc constructs a function that takes a path // and determines whether a file exists at that path -func MakeFileExistsFunc(baseDir string) function.Function { +func MakeFileExistsFunc(target fs.FS, baseDir string) function.Function { return function.New(&function.Spec{ Params: []function.Parameter{ { @@ -204,12 +205,12 @@ func MakeFileExistsFunc(baseDir string) function.Function { path = filepath.Join(baseDir, path) } - // Ensure that the path is canonical for the host OS - path = filepath.Clean(path) + // Trivy uses a virtual file system + path = filepath.ToSlash(path) - fi, err := os.Stat(path) + fi, err := fs.Stat(target, path) 
if err != nil { - if os.IsNotExist(err) { + if errors.Is(err, os.ErrNotExist) { return cty.False, nil } return cty.UnknownVal(cty.Bool), fmt.Errorf("failed to stat %s", path) @@ -227,7 +228,7 @@ func MakeFileExistsFunc(baseDir string) function.Function { // MakeFileSetFunc constructs a function that takes a glob pattern // and enumerates a file set from that pattern -func MakeFileSetFunc(baseDir string) function.Function { +func MakeFileSetFunc(target fs.FS, baseDir string) function.Function { return function.New(&function.Spec{ Params: []function.Parameter{ { @@ -252,8 +253,10 @@ func MakeFileSetFunc(baseDir string) function.Function { // pattern is canonical for the host OS. The joined path is // automatically cleaned during this operation. pattern = filepath.Join(path, pattern) + // Trivy uses a virtual file system + path = filepath.ToSlash(path) - matches, err := doublestar.Glob(os.DirFS(path), pattern) + matches, err := doublestar.Glob(target, pattern) if err != nil { return cty.UnknownVal(cty.Set(cty.String)), fmt.Errorf("failed to glob pattern (%s): %s", pattern, err) } @@ -364,8 +367,8 @@ func openFile(target fs.FS, baseDir, path string) (fs.File, error) { path = filepath.Join(baseDir, path) } - // Ensure that the path is canonical for the host OS - path = filepath.Clean(path) + // Trivy uses a virtual file system + path = filepath.ToSlash(path) if target != nil { return target.Open(path) @@ -402,66 +405,3 @@ func File(target fs.FS, baseDir string, path cty.Value) (cty.Value, error) { fn := MakeFileFunc(target, baseDir, false) return fn.Call([]cty.Value{path}) } - -// FileExists determines whether a file exists at the given path. -// -// The underlying function implementation works relative to a particular base -// directory, so this wrapper takes a base directory string and uses it to -// construct the underlying function before calling it. 
-func FileExists(baseDir string, path cty.Value) (cty.Value, error) { - fn := MakeFileExistsFunc(baseDir) - return fn.Call([]cty.Value{path}) -} - -// FileSet enumerates a set of files given a glob pattern -// -// The underlying function implementation works relative to a particular base -// directory, so this wrapper takes a base directory string and uses it to -// construct the underlying function before calling it. -func FileSet(baseDir string, path, pattern cty.Value) (cty.Value, error) { - fn := MakeFileSetFunc(baseDir) - return fn.Call([]cty.Value{path, pattern}) -} - -// FileBase64 reads the contents of the file at the given path. -// -// The bytes from the file are encoded as base64 before returning. -// -// The underlying function implementation works relative to a particular base -// directory, so this wrapper takes a base directory string and uses it to -// construct the underlying function before calling it. -func FileBase64(target fs.FS, baseDir string, path cty.Value) (cty.Value, error) { - fn := MakeFileFunc(target, baseDir, true) - return fn.Call([]cty.Value{path}) -} - -// Basename takes a string containing a filesystem path and removes all except the last portion from it. -// -// The underlying function implementation works only with the path string and does not access the filesystem itself. -// It is therefore unable to take into account filesystem features such as symlinks. -// -// If the path is empty then the result is ".", representing the current working directory. -func Basename(path cty.Value) (cty.Value, error) { - return BasenameFunc.Call([]cty.Value{path}) -} - -// Dirname takes a string containing a filesystem path and removes the last portion from it. -// -// The underlying function implementation works only with the path string and does not access the filesystem itself. -// It is therefore unable to take into account filesystem features such as symlinks. 
-// -// If the path is empty then the result is ".", representing the current working directory. -func Dirname(path cty.Value) (cty.Value, error) { - return DirnameFunc.Call([]cty.Value{path}) -} - -// Pathexpand takes a string that might begin with a `~` segment, and if so it replaces that segment with -// the current user's home directory path. -// -// The underlying function implementation works only with the path string and does not access the filesystem itself. -// It is therefore unable to take into account filesystem features such as symlinks. -// -// If the leading segment in the path is not `~` then the given path is returned unmodified. -func Pathexpand(path cty.Value) (cty.Value, error) { - return PathExpandFunc.Call([]cty.Value{path}) -} diff --git a/pkg/iac/scanners/terraform/parser/funcs/ip.go b/pkg/iac/scanners/terraform/parser/funcs/ip.go new file mode 100644 index 000000000000..d1cf0352e95f --- /dev/null +++ b/pkg/iac/scanners/terraform/parser/funcs/ip.go @@ -0,0 +1,261 @@ +// Copied from github.com/hashicorp/terraform/internal/ipaddr +package funcs + +import ( + stdnet "net" +) + +// Bigger than we need, not too big to worry about overflow +const bigVal = 0xFFFFFF + +// Decimal to integer. +// Returns number, characters consumed, success. +func dtoi(s string) (n int, i int, ok bool) { + n = 0 + for i = 0; i < len(s) && '0' <= s[i] && s[i] <= '9'; i++ { + n = n*10 + int(s[i]-'0') + if n >= bigVal { + return bigVal, i, false + } + } + if i == 0 { + return 0, 0, false + } + return n, i, true +} + +// Hexadecimal to integer. +// Returns number, characters consumed, success. 
+func xtoi(s string) (n int, i int, ok bool) { + n = 0 + for i = 0; i < len(s); i++ { + if '0' <= s[i] && s[i] <= '9' { + n *= 16 + n += int(s[i] - '0') + } else if 'a' <= s[i] && s[i] <= 'f' { + n *= 16 + n += int(s[i]-'a') + 10 + } else if 'A' <= s[i] && s[i] <= 'F' { + n *= 16 + n += int(s[i]-'A') + 10 + } else { + break + } + if n >= bigVal { + return 0, i, false + } + } + if i == 0 { + return 0, i, false + } + return n, i, true +} + +// +// Lean on the standard net lib as much as possible. +// + +type IP = stdnet.IP +type IPNet = stdnet.IPNet +type ParseError = stdnet.ParseError + +const IPv4len = stdnet.IPv4len +const IPv6len = stdnet.IPv6len + +var CIDRMask = stdnet.CIDRMask +var IPv4 = stdnet.IPv4 + +// Parse IPv4 address (d.d.d.d). +func parseIPv4(s string) IP { + var p [IPv4len]byte + for i := 0; i < IPv4len; i++ { + if len(s) == 0 { + // Missing octets. + return nil + } + if i > 0 { + if s[0] != '.' { + return nil + } + s = s[1:] + } + n, c, ok := dtoi(s) + if !ok || n > 0xFF { + return nil + } + // + // NOTE: This correct check was added for go-1.17, but is a + // backwards-incompatible change for Terraform users, who might have + // already written modules with leading zeroes. + // + // if c > 1 && s[0] == '0' { + // // Reject non-zero components with leading zeroes. + // return nil + //} + s = s[c:] + p[i] = byte(n) + } + if len(s) != 0 { + return nil + } + return IPv4(p[0], p[1], p[2], p[3]) +} + +// parseIPv6 parses s as a literal IPv6 address described in RFC 4291 +// and RFC 5952. +func parseIPv6(s string) (ip IP) { + ip = make(IP, IPv6len) + ellipsis := -1 // position of ellipsis in ip + + // Might have leading ellipsis + if len(s) >= 2 && s[0] == ':' && s[1] == ':' { + ellipsis = 0 + s = s[2:] + // Might be only ellipsis + if len(s) == 0 { + return ip + } + } + + // Loop, parsing hex numbers followed by colon. + i := 0 + for i < IPv6len { + // Hex number. 
+ n, c, ok := xtoi(s) + if !ok || n > 0xFFFF { + return nil + } + + // If followed by dot, might be in trailing IPv4. + if c < len(s) && s[c] == '.' { + if ellipsis < 0 && i != IPv6len-IPv4len { + // Not the right place. + return nil + } + if i+IPv4len > IPv6len { + // Not enough room. + return nil + } + ip4 := parseIPv4(s) + if ip4 == nil { + return nil + } + ip[i] = ip4[12] + ip[i+1] = ip4[13] + ip[i+2] = ip4[14] + ip[i+3] = ip4[15] + s = "" + i += IPv4len + break + } + + // Save this 16-bit chunk. + ip[i] = byte(n >> 8) + ip[i+1] = byte(n) + i += 2 + + // Stop at end of string. + s = s[c:] + if len(s) == 0 { + break + } + + // Otherwise must be followed by colon and more. + if s[0] != ':' || len(s) == 1 { + return nil + } + s = s[1:] + + // Look for ellipsis. + if s[0] == ':' { + if ellipsis >= 0 { // already have one + return nil + } + ellipsis = i + s = s[1:] + if len(s) == 0 { // can be at end + break + } + } + } + + // Must have used entire string. + if len(s) != 0 { + return nil + } + + // If didn't parse enough, expand ellipsis. + if i < IPv6len { + if ellipsis < 0 { + return nil + } + n := IPv6len - i + for j := i - 1; j >= ellipsis; j-- { + ip[j+n] = ip[j] + } + for j := ellipsis + n - 1; j >= ellipsis; j-- { + ip[j] = 0 + } + } else if ellipsis >= 0 { + // Ellipsis must represent at least one 0 group. + return nil + } + return ip +} + +// ParseIP parses s as an IP address, returning the result. +// The string s can be in IPv4 dotted decimal ("192.0.2.1"), IPv6 +// ("2001:db8::68"), or IPv4-mapped IPv6 ("::ffff:192.0.2.1") form. +// If s is not a valid textual representation of an IP address, +// ParseIP returns nil. +func ParseIP(s string) IP { + for i := 0; i < len(s); i++ { + switch s[i] { + case '.': + return parseIPv4(s) + case ':': + return parseIPv6(s) + } + } + return nil +} + +// ParseCIDR parses s as a CIDR notation IP address and prefix length, +// like "192.0.2.0/24" or "2001:db8::/32", as defined in +// RFC 4632 and RFC 4291. 
+// +// It returns the IP address and the network implied by the IP and +// prefix length. +// For example, ParseCIDR("192.0.2.1/24") returns the IP address +// 192.0.2.1 and the network 192.0.2.0/24. +func ParseCIDR(s string) (IP, *IPNet, error) { + i := indexByteString(s, '/') + if i < 0 { + return nil, nil, &ParseError{Type: "CIDR address", Text: s} + } + addr, mask := s[:i], s[i+1:] + iplen := IPv4len + ip := parseIPv4(addr) + if ip == nil { + iplen = IPv6len + ip = parseIPv6(addr) + } + n, i, ok := dtoi(mask) + if ip == nil || !ok || i != len(mask) || n < 0 || n > 8*iplen { + return nil, nil, &ParseError{Type: "CIDR address", Text: s} + } + m := CIDRMask(n, 8*iplen) + return ip, &IPNet{IP: ip.Mask(m), Mask: m}, nil +} + +// This is copied from go/src/internal/bytealg, which includes versions +// optimized for various platforms. Those optimizations are elided here so we +// don't have to maintain them. +func indexByteString(s string, c byte) int { + for i := 0; i < len(s); i++ { + if s[i] == c { + return i + } + } + return -1 +} diff --git a/pkg/iac/scanners/terraform/parser/funcs/marks.go b/pkg/iac/scanners/terraform/parser/funcs/marks.go index ca368c113c5c..abbc397f1e08 100644 --- a/pkg/iac/scanners/terraform/parser/funcs/marks.go +++ b/pkg/iac/scanners/terraform/parser/funcs/marks.go @@ -37,8 +37,8 @@ func Contains(val cty.Value, mark valueMark) bool { // MarkedSensitive indicates that this value is marked as sensitive in the context of // Terraform. -var MarkedSensitive = valueMark("sensitive") +const MarkedSensitive = valueMark("sensitive") // MarkedRaw is used to indicate to the repl that the value should be written without // any formatting. 
-var MarkedRaw = valueMark("raw") +const MarkedRaw = valueMark("raw") diff --git a/pkg/iac/scanners/terraform/parser/funcs/number.go b/pkg/iac/scanners/terraform/parser/funcs/number.go index 012455eb7737..60ebd660bf18 100644 --- a/pkg/iac/scanners/terraform/parser/funcs/number.go +++ b/pkg/iac/scanners/terraform/parser/funcs/number.go @@ -22,7 +22,8 @@ var LogFunc = function.New(&function.Spec{ Type: cty.Number, }, }, - Type: function.StaticReturnType(cty.Number), + Type: function.StaticReturnType(cty.Number), + RefineResult: refineNotNull, Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { var num float64 if err := gocty.FromCtyValue(args[0], &num); err != nil { @@ -50,7 +51,8 @@ var PowFunc = function.New(&function.Spec{ Type: cty.Number, }, }, - Type: function.StaticReturnType(cty.Number), + Type: function.StaticReturnType(cty.Number), + RefineResult: refineNotNull, Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { var num float64 if err := gocty.FromCtyValue(args[0], &num); err != nil { @@ -75,7 +77,8 @@ var SignumFunc = function.New(&function.Spec{ Type: cty.Number, }, }, - Type: function.StaticReturnType(cty.Number), + Type: function.StaticReturnType(cty.Number), + RefineResult: refineNotNull, Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { var num int if err := gocty.FromCtyValue(args[0], &num); err != nil { @@ -96,12 +99,14 @@ var SignumFunc = function.New(&function.Spec{ var ParseIntFunc = function.New(&function.Spec{ Params: []function.Parameter{ { - Name: "number", - Type: cty.DynamicPseudoType, + Name: "number", + Type: cty.DynamicPseudoType, + AllowMarked: true, }, { - Name: "base", - Type: cty.Number, + Name: "base", + Type: cty.Number, + AllowMarked: true, }, }, @@ -111,17 +116,20 @@ var ParseIntFunc = function.New(&function.Spec{ } return cty.Number, nil }, + RefineResult: refineNotNull, Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { var numstr string 
var base int var err error - if err = gocty.FromCtyValue(args[0], &numstr); err != nil { + numArg, numMarks := args[0].Unmark() + if err = gocty.FromCtyValue(numArg, &numstr); err != nil { return cty.UnknownVal(cty.String), function.NewArgError(0, err) } - if err = gocty.FromCtyValue(args[1], &base); err != nil { + baseArg, baseMarks := args[1].Unmark() + if err = gocty.FromCtyValue(baseArg, &base); err != nil { return cty.UnknownVal(cty.Number), function.NewArgError(1, err) } @@ -136,35 +144,14 @@ var ParseIntFunc = function.New(&function.Spec{ if !ok { return cty.UnknownVal(cty.Number), function.NewArgErrorf( 0, - "cannot parse %q as a base %d integer", - numstr, - base, + "cannot parse %s as a base %s integer", + redactIfSensitive(numstr, numMarks), + redactIfSensitive(base, baseMarks), ) } - parsedNum := cty.NumberVal((&big.Float{}).SetInt(num)) + parsedNum := cty.NumberVal((&big.Float{}).SetInt(num)).WithMarks(numMarks, baseMarks) return parsedNum, nil }, }) - -// Log returns returns the logarithm of a given number in a given base. -func Log(num, base cty.Value) (cty.Value, error) { - return LogFunc.Call([]cty.Value{num, base}) -} - -// Pow returns the logarithm of a given number in a given base. -func Pow(num, power cty.Value) (cty.Value, error) { - return PowFunc.Call([]cty.Value{num, power}) -} - -// Signum determines the sign of a number, returning a number between -1 and -// 1 to represent the sign. -func Signum(num cty.Value) (cty.Value, error) { - return SignumFunc.Call([]cty.Value{num}) -} - -// ParseInt parses a string argument and returns an integer of the specified base. 
-func ParseInt(num, base cty.Value) (cty.Value, error) { - return ParseIntFunc.Call([]cty.Value{num, base}) -} diff --git a/pkg/iac/scanners/terraform/parser/funcs/redact.go b/pkg/iac/scanners/terraform/parser/funcs/redact.go new file mode 100644 index 000000000000..f5908fc7da57 --- /dev/null +++ b/pkg/iac/scanners/terraform/parser/funcs/redact.go @@ -0,0 +1,20 @@ +// Copied from github.com/hashicorp/terraform/internal/lang/funcs +package funcs + +import ( + "fmt" + + "github.com/zclconf/go-cty/cty" +) + +func redactIfSensitive(value interface{}, markses ...cty.ValueMarks) string { + if Has(cty.DynamicVal.WithMarks(markses...), MarkedSensitive) { + return "(sensitive value)" + } + switch v := value.(type) { + case string: + return fmt.Sprintf("%q", v) + default: + return fmt.Sprintf("%v", v) + } +} diff --git a/pkg/iac/scanners/terraform/parser/funcs/refinements.go b/pkg/iac/scanners/terraform/parser/funcs/refinements.go new file mode 100644 index 000000000000..de9cb08b1604 --- /dev/null +++ b/pkg/iac/scanners/terraform/parser/funcs/refinements.go @@ -0,0 +1,10 @@ +// Copied from github.com/hashicorp/terraform/internal/lang/funcs +package funcs + +import ( + "github.com/zclconf/go-cty/cty" +) + +func refineNotNull(b *cty.RefinementBuilder) *cty.RefinementBuilder { + return b.NotNull() +} diff --git a/pkg/iac/scanners/terraform/parser/funcs/sensitive.go b/pkg/iac/scanners/terraform/parser/funcs/sensitive.go index c67ed13e6e7b..3566a678fc9b 100644 --- a/pkg/iac/scanners/terraform/parser/funcs/sensitive.go +++ b/pkg/iac/scanners/terraform/parser/funcs/sensitive.go @@ -49,19 +49,26 @@ var NonsensitiveFunc = function.New(&function.Spec{ return args[0].Type(), nil }, Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { - if args[0].IsKnown() && !args[0].HasMark(MarkedSensitive) { - return cty.DynamicVal, function.NewArgErrorf(0, "the given value is not sensitive, so this call is redundant") - } - v, m := args[0].Unmark() - delete(m, 
MarkedSensitive) // remove the sensitive marking - return v.WithMarks(m), nil + v, marks := args[0].Unmark() + delete(marks, MarkedSensitive) // remove the sensitive marking + return v.WithMarks(marks), nil }, }) -func Sensitive(v cty.Value) (cty.Value, error) { - return SensitiveFunc.Call([]cty.Value{v}) -} - -func Nonsensitive(v cty.Value) (cty.Value, error) { - return NonsensitiveFunc.Call([]cty.Value{v}) -} +var IssensitiveFunc = function.New(&function.Spec{ + Params: []function.Parameter{{ + Name: "value", + Type: cty.DynamicPseudoType, + AllowUnknown: true, + AllowNull: true, + AllowMarked: true, + AllowDynamicType: true, + }}, + Type: func(args []cty.Value) (cty.Type, error) { + return cty.Bool, nil + }, + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + s := args[0].HasMark(MarkedSensitive) + return cty.BoolVal(s), nil + }, +}) diff --git a/pkg/iac/scanners/terraform/parser/funcs/string.go b/pkg/iac/scanners/terraform/parser/funcs/string.go index 6fe077c1f586..f859c7af7a31 100644 --- a/pkg/iac/scanners/terraform/parser/funcs/string.go +++ b/pkg/iac/scanners/terraform/parser/funcs/string.go @@ -9,6 +9,83 @@ import ( "github.com/zclconf/go-cty/cty/function" ) +// StartsWithFunc constructs a function that checks if a string starts with +// a specific prefix using strings.HasPrefix +var StartsWithFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "str", + Type: cty.String, + AllowUnknown: true, + }, + { + Name: "prefix", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.Bool), + RefineResult: refineNotNull, + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + prefix := args[1].AsString() + + if !args[0].IsKnown() { + // If the unknown value has a known prefix then we might be + // able to still produce a known result. + if prefix == "" { + // The empty string is a prefix of any string. 
+ return cty.True, nil + } + if knownPrefix := args[0].Range().StringPrefix(); knownPrefix != "" { + if strings.HasPrefix(knownPrefix, prefix) { + return cty.True, nil + } + if len(knownPrefix) >= len(prefix) { + // If the prefix we're testing is no longer than the known + // prefix and it didn't match then the full string with + // that same prefix can't match either. + return cty.False, nil + } + } + return cty.UnknownVal(cty.Bool), nil + } + + str := args[0].AsString() + + if strings.HasPrefix(str, prefix) { + return cty.True, nil + } + + return cty.False, nil + }, +}) + +// EndsWithFunc constructs a function that checks if a string ends with +// a specific suffix using strings.HasSuffix +var EndsWithFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "str", + Type: cty.String, + }, + { + Name: "suffix", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.Bool), + RefineResult: refineNotNull, + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + str := args[0].AsString() + suffix := args[1].AsString() + + if strings.HasSuffix(str, suffix) { + return cty.True, nil + } + + return cty.False, nil + }, +}) + // ReplaceFunc constructs a function that searches a given string for another // given substring, and replaces each occurrence with a given replacement string. 
var ReplaceFunc = function.New(&function.Spec{ @@ -26,7 +103,8 @@ var ReplaceFunc = function.New(&function.Spec{ Type: cty.String, }, }, - Type: function.StaticReturnType(cty.String), + Type: function.StaticReturnType(cty.String), + RefineResult: refineNotNull, Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { str := args[0].AsString() substr := args[1].AsString() @@ -43,12 +121,32 @@ var ReplaceFunc = function.New(&function.Spec{ return cty.StringVal(re.ReplaceAllString(str, replace)), nil } - return cty.StringVal(strings.ReplaceAll(str, substr, replace)), nil + return cty.StringVal(strings.Replace(str, substr, replace, -1)), nil }, }) -// Replace searches a given string for another given substring, -// and replaces all occurrences with a given replacement string. -func Replace(str, substr, replace cty.Value) (cty.Value, error) { - return ReplaceFunc.Call([]cty.Value{str, substr, replace}) -} +// StrContainsFunc searches a given string for another given substring, +// if found the function returns true, otherwise returns false. 
+var StrContainsFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "str", + Type: cty.String, + }, + { + Name: "substr", + Type: cty.String, + }, + }, + Type: function.StaticReturnType(cty.Bool), + Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) { + str := args[0].AsString() + substr := args[1].AsString() + + if strings.Contains(str, substr) { + return cty.True, nil + } + + return cty.False, nil + }, +}) diff --git a/pkg/iac/scanners/terraform/parser/functions.go b/pkg/iac/scanners/terraform/parser/functions.go index 39b6c268b345..6f6406d8ed19 100644 --- a/pkg/iac/scanners/terraform/parser/functions.go +++ b/pkg/iac/scanners/terraform/parser/functions.go @@ -9,7 +9,7 @@ import ( "github.com/zclconf/go-cty/cty/function" "github.com/zclconf/go-cty/cty/function/stdlib" - funcs2 "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser/funcs" + "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser/funcs" ) // Functions returns the set of functions that should be used to when evaluating @@ -17,104 +17,117 @@ import ( func Functions(target fs.FS, baseDir string) map[string]function.Function { return map[string]function.Function{ "abs": stdlib.AbsoluteFunc, - "abspath": funcs2.AbsPathFunc, - "basename": funcs2.BasenameFunc, - "base64decode": funcs2.Base64DecodeFunc, - "base64encode": funcs2.Base64EncodeFunc, - "base64gzip": funcs2.Base64GzipFunc, - "base64sha256": funcs2.Base64Sha256Func, - "base64sha512": funcs2.Base64Sha512Func, - "bcrypt": funcs2.BcryptFunc, + "abspath": funcs.AbsPathFunc, + "alltrue": funcs.AllTrueFunc, + "anytrue": funcs.AnyTrueFunc, + "basename": funcs.BasenameFunc, + "base64decode": funcs.Base64DecodeFunc, + "base64encode": funcs.Base64EncodeFunc, + "base64gzip": funcs.Base64GzipFunc, + "base64sha256": funcs.Base64Sha256Func, + "base64sha512": funcs.Base64Sha512Func, + "bcrypt": funcs.BcryptFunc, "can": tryfunc.CanFunc, "ceil": stdlib.CeilFunc, "chomp": stdlib.ChompFunc, - 
"cidrhost": funcs2.CidrHostFunc, - "cidrnetmask": funcs2.CidrNetmaskFunc, - "cidrsubnet": funcs2.CidrSubnetFunc, - "cidrsubnets": funcs2.CidrSubnetsFunc, - "coalesce": funcs2.CoalesceFunc, + "cidrhost": funcs.CidrHostFunc, + "cidrnetmask": funcs.CidrNetmaskFunc, + "cidrsubnet": funcs.CidrSubnetFunc, + "cidrsubnets": funcs.CidrSubnetsFunc, + "coalesce": funcs.CoalesceFunc, "coalescelist": stdlib.CoalesceListFunc, "compact": stdlib.CompactFunc, "concat": stdlib.ConcatFunc, "contains": stdlib.ContainsFunc, "csvdecode": stdlib.CSVDecodeFunc, - "dirname": funcs2.DirnameFunc, + "dirname": funcs.DirnameFunc, "distinct": stdlib.DistinctFunc, "element": stdlib.ElementFunc, + "endswith": funcs.EndsWithFunc, "chunklist": stdlib.ChunklistFunc, - "file": funcs2.MakeFileFunc(target, baseDir, false), - "fileexists": funcs2.MakeFileExistsFunc(baseDir), - "fileset": funcs2.MakeFileSetFunc(baseDir), - "filebase64": funcs2.MakeFileFunc(target, baseDir, true), - "filebase64sha256": funcs2.MakeFileBase64Sha256Func(target, baseDir), - "filebase64sha512": funcs2.MakeFileBase64Sha512Func(target, baseDir), - "filemd5": funcs2.MakeFileMd5Func(target, baseDir), - "filesha1": funcs2.MakeFileSha1Func(target, baseDir), - "filesha256": funcs2.MakeFileSha256Func(target, baseDir), - "filesha512": funcs2.MakeFileSha512Func(target, baseDir), + "file": funcs.MakeFileFunc(target, baseDir, false), + "fileexists": funcs.MakeFileExistsFunc(target, baseDir), + "fileset": funcs.MakeFileSetFunc(target, baseDir), + "filebase64": funcs.MakeFileFunc(target, baseDir, true), + "filebase64sha256": funcs.MakeFileBase64Sha256Func(target, baseDir), + "filebase64sha512": funcs.MakeFileBase64Sha512Func(target, baseDir), + "filemd5": funcs.MakeFileMd5Func(target, baseDir), + "filesha1": funcs.MakeFileSha1Func(target, baseDir), + "filesha256": funcs.MakeFileSha256Func(target, baseDir), + "filesha512": funcs.MakeFileSha512Func(target, baseDir), "flatten": stdlib.FlattenFunc, "floor": stdlib.FloorFunc, "format": 
stdlib.FormatFunc, "formatdate": stdlib.FormatDateFunc, "formatlist": stdlib.FormatListFunc, "indent": stdlib.IndentFunc, - "index": funcs2.IndexFunc, // stdlib.IndexFunc is not compatible + "index": funcs.IndexFunc, // stdlib.IndexFunc is not compatible "join": stdlib.JoinFunc, "jsondecode": stdlib.JSONDecodeFunc, "jsonencode": stdlib.JSONEncodeFunc, "keys": stdlib.KeysFunc, - "length": funcs2.LengthFunc, - "list": funcs2.ListFunc, + "length": funcs.LengthFunc, + "list": funcs.ListFunc, "log": stdlib.LogFunc, - "lookup": funcs2.LookupFunc, + "lookup": funcs.LookupFunc, "lower": stdlib.LowerFunc, - "map": funcs2.MapFunc, - "matchkeys": funcs2.MatchkeysFunc, + "map": funcs.MapFunc, + "matchkeys": funcs.MatchkeysFunc, "max": stdlib.MaxFunc, - "md5": funcs2.Md5Func, + "md5": funcs.Md5Func, "merge": stdlib.MergeFunc, "min": stdlib.MinFunc, + "one": funcs.OneFunc, "parseint": stdlib.ParseIntFunc, - "pathexpand": funcs2.PathExpandFunc, + "pathexpand": funcs.PathExpandFunc, "pow": stdlib.PowFunc, "range": stdlib.RangeFunc, "regex": stdlib.RegexFunc, "regexall": stdlib.RegexAllFunc, - "replace": funcs2.ReplaceFunc, + "replace": funcs.ReplaceFunc, "reverse": stdlib.ReverseListFunc, - "rsadecrypt": funcs2.RsaDecryptFunc, + "rsadecrypt": funcs.RsaDecryptFunc, + "sensitive": funcs.SensitiveFunc, + "nonsensitive": funcs.NonsensitiveFunc, + "issensitive": funcs.IssensitiveFunc, "setintersection": stdlib.SetIntersectionFunc, "setproduct": stdlib.SetProductFunc, "setsubtract": stdlib.SetSubtractFunc, "setunion": stdlib.SetUnionFunc, - "sha1": funcs2.Sha1Func, - "sha256": funcs2.Sha256Func, - "sha512": funcs2.Sha512Func, + "sha1": funcs.Sha1Func, + "sha256": funcs.Sha256Func, + "sha512": funcs.Sha512Func, "signum": stdlib.SignumFunc, "slice": stdlib.SliceFunc, "sort": stdlib.SortFunc, "split": stdlib.SplitFunc, + "startswith": funcs.StartsWithFunc, + "strcontains": funcs.StrContainsFunc, "strrev": stdlib.ReverseFunc, "substr": stdlib.SubstrFunc, - "timestamp": funcs2.TimestampFunc, 
+ "sum": funcs.SumFunc, + "textdecodebase64": funcs.TextDecodeBase64Func, + "textencodebase64": funcs.TextEncodeBase64Func, + "timestamp": funcs.TimestampFunc, "timeadd": stdlib.TimeAddFunc, + "timecmp": funcs.TimeCmpFunc, "title": stdlib.TitleFunc, - "tostring": funcs2.MakeToFunc(cty.String), - "tonumber": funcs2.MakeToFunc(cty.Number), - "tobool": funcs2.MakeToFunc(cty.Bool), - "toset": funcs2.MakeToFunc(cty.Set(cty.DynamicPseudoType)), - "tolist": funcs2.MakeToFunc(cty.List(cty.DynamicPseudoType)), - "tomap": funcs2.MakeToFunc(cty.Map(cty.DynamicPseudoType)), - "transpose": funcs2.TransposeFunc, + "tostring": funcs.MakeToFunc(cty.String), + "tonumber": funcs.MakeToFunc(cty.Number), + "tobool": funcs.MakeToFunc(cty.Bool), + "toset": funcs.MakeToFunc(cty.Set(cty.DynamicPseudoType)), + "tolist": funcs.MakeToFunc(cty.List(cty.DynamicPseudoType)), + "tomap": funcs.MakeToFunc(cty.Map(cty.DynamicPseudoType)), + "transpose": funcs.TransposeFunc, "trim": stdlib.TrimFunc, "trimprefix": stdlib.TrimPrefixFunc, "trimspace": stdlib.TrimSpaceFunc, "trimsuffix": stdlib.TrimSuffixFunc, "try": tryfunc.TryFunc, "upper": stdlib.UpperFunc, - "urlencode": funcs2.URLEncodeFunc, - "uuid": funcs2.UUIDFunc, - "uuidv5": funcs2.UUIDV5Func, + "urlencode": funcs.URLEncodeFunc, + "uuid": funcs.UUIDFunc, + "uuidv5": funcs.UUIDV5Func, "values": stdlib.ValuesFunc, "yamldecode": ctyyaml.YAMLDecodeFunc, "yamlencode": ctyyaml.YAMLEncodeFunc, From 06b44738e786e455379aa4235c61bd2f34da5d38 Mon Sep 17 00:00:00 2001 From: Teppei Fukuda Date: Mon, 8 Apr 2024 14:53:00 +0400 Subject: [PATCH 54/57] chore: bump Go to 1.22 (#6075) Signed-off-by: knqyf263 Co-authored-by: Simar --- .golangci.yaml | 2 +- Dockerfile.protoc | 2 +- aqua.yaml | 2 +- go.mod | 4 +++- 4 files changed, 6 insertions(+), 4 deletions(-) diff --git a/.golangci.yaml b/.golangci.yaml index 7be028f1e1ce..5f959a4cc339 100644 --- a/.golangci.yaml +++ b/.golangci.yaml @@ -88,7 +88,7 @@ linters: - gocritic run: - go: '1.21' + go: '1.22' 
skip-files: - ".*_mock.go$" - ".*_test.go$" diff --git a/Dockerfile.protoc b/Dockerfile.protoc index 1ee880f8fa5c..9b21ed16a55d 100644 --- a/Dockerfile.protoc +++ b/Dockerfile.protoc @@ -1,4 +1,4 @@ -FROM --platform=linux/amd64 golang:1.21 +FROM --platform=linux/amd64 golang:1.22 # Set environment variable for protoc ENV PROTOC_ZIP=protoc-3.19.4-linux-x86_64.zip diff --git a/aqua.yaml b/aqua.yaml index 0ffe7fbe2b81..af251aeec167 100644 --- a/aqua.yaml +++ b/aqua.yaml @@ -5,6 +5,6 @@ registries: - type: standard ref: v3.157.0 # renovate: depName=aquaproj/aqua-registry packages: -- name: tinygo-org/tinygo@v0.29.0 +- name: tinygo-org/tinygo@v0.31.1 - name: WebAssembly/binaryen@version_112 - name: magefile/mage@v1.14.0 diff --git a/go.mod b/go.mod index 0e585260a019..ee06f719ac44 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,8 @@ module github.com/aquasecurity/trivy -go 1.21 +go 1.22 + +toolchain go1.22.0 require ( github.com/Azure/azure-sdk-for-go v68.0.0+incompatible From 336c47ecc3be1e69795d699534f795c1150a9b94 Mon Sep 17 00:00:00 2001 From: Nikita Pivkin Date: Thu, 11 Apr 2024 05:24:06 +0300 Subject: [PATCH 55/57] docs: update links to IaC schemas (#6477) --- docs/docs/scanner/misconfiguration/custom/schema.md | 11 +++++------ docs/tutorials/misconfiguration/custom-checks.md | 4 ++-- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/docs/docs/scanner/misconfiguration/custom/schema.md b/docs/docs/scanner/misconfiguration/custom/schema.md index 8791d1a22752..99527ffd9920 100644 --- a/docs/docs/scanner/misconfiguration/custom/schema.md +++ b/docs/docs/scanner/misconfiguration/custom/schema.md @@ -4,8 +4,7 @@ Policies can be defined with custom schemas that allow inputs to be verified against them. Adding a policy schema enables Trivy to show more detailed error messages when an invalid input is encountered. 
-In Trivy we have been able to define a schema for a [Dockerfile](https://github.com/aquasecurity/trivy-iac/blob/main/pkg/rego/schemas/dockerfile.json) -Without input schemas, a policy would be as follows: +In Trivy we have been able to define a schema for a [Dockerfile](https://github.com/aquasecurity/trivy/blob/main/pkg/iac/rego/schemas/dockerfile.json). Without input schemas, a policy would be as follows: !!! example ``` @@ -36,7 +35,7 @@ schema as such ``` Here `input: schema["dockerfile"]` points to a schema that expects a valid `Dockerfile` as input. An example of this -can be found [here](https://github.com/aquasecurity/defsec/blob/master/pkg/rego/schemas/dockerfile.json) +can be found [here](https://github.com/aquasecurity/trivy/blob/main/pkg/iac/rego/schemas/dockerfile.json). Now if this policy is evaluated against, a more descriptive error will be available to help fix the problem. @@ -50,9 +49,9 @@ Now if this policy is evaluated against, a more descriptive error will be availa Currently, out of the box the following schemas are supported natively: -1. [Docker](https://github.com/aquasecurity/trivy-iac/blob/main/pkg/rego/schemas/dockerfile.json) -2. [Kubernetes](https://github.com/aquasecurity/trivy-iac/blob/main/pkg/rego/schemas/kubernetes.json) -3. [Cloud](https://github.com/aquasecurity/trivy-iac/blob/main/pkg/rego/schemas/cloud.json) +1. [Docker](https://github.com/aquasecurity/trivy/blob/main/pkg/iac/rego/schemas/dockerfile.json) +2. [Kubernetes](https://github.com/aquasecurity/trivy/blob/main/pkg/iac/rego/schemas/kubernetes.json) +3. 
[Cloud](https://github.com/aquasecurity/trivy/blob/main/pkg/iac/rego/schemas/cloud.json) ## Custom Policies with Custom Schemas diff --git a/docs/tutorials/misconfiguration/custom-checks.md b/docs/tutorials/misconfiguration/custom-checks.md index ecf8f5af1b5a..f36f855185a5 100644 --- a/docs/tutorials/misconfiguration/custom-checks.md +++ b/docs/tutorials/misconfiguration/custom-checks.md @@ -8,8 +8,8 @@ When you are writing a check, it's important to understand the input to the chec Since Rego is primarily tailored to query JSON objects, all incoming configuration files needs to be first converted to structured objects, which is available to the Rego code as the input variable. This is nothing that users have to do manually in Trivy. Instead, Rego makes it possible to pass in custom Schemas that detail how files are converted. Once Rego has access to a custom Schema, it will know in which format to access configuration files such as a Dockerfile. -[Here you can find the schemas](https://github.com/aquasecurity/defsec/tree/master/pkg/rego/schemas) that define how different configuration files are converted to JSON by Trivy. -This tutorial will make use of the [dockerfile.json schema](https://github.com/aquasecurity/defsec/tree/master/pkg/rego/schemas). The schema will need to be parsed into your custom check. +[Here you can find the schemas](https://github.com/aquasecurity/trivy/tree/main/pkg/iac/rego/schemas) that define how different configuration files are converted to JSON by Trivy. +This tutorial will make use of the [dockerfile.json schema](https://github.com/aquasecurity/trivy/blob/main/pkg/iac/rego/schemas/dockerfile.json). The schema will need to be parsed into your custom check. Users can also use the [Schema Explorer](https://aquasecurity.github.io/trivy-schemas/) to view the structure of the data provided to Rego. 
From 94d6e8ced63232728e5b743353871404e5486e9a Mon Sep 17 00:00:00 2001 From: Teppei Fukuda Date: Thu, 11 Apr 2024 22:59:09 +0400 Subject: [PATCH 56/57] refactor: replace zap with slog (#6466) Signed-off-by: knqyf263 Co-authored-by: Nikita Pivkin Co-authored-by: simar7 <1254783+simar7@users.noreply.github.com> --- cmd/trivy/main.go | 2 +- magefiles/cloud_actions.go | 4 +- magefiles/docs.go | 2 +- pkg/attestation/sbom/rekor.go | 2 +- pkg/attestation/sbom/rekor_test.go | 2 +- pkg/cloud/aws/commands/run.go | 27 +- pkg/cloud/aws/scanner/scanner.go | 13 +- pkg/commands/app.go | 16 +- pkg/commands/artifact/run.go | 33 +- pkg/commands/convert/run.go | 2 +- pkg/commands/operation/operation.go | 26 +- pkg/commands/server/run.go | 6 +- pkg/db/db.go | 23 +- pkg/dependency/parser/c/conan/parse.go | 10 +- .../parser/dotnet/core_deps/parse.go | 10 +- pkg/dependency/parser/hex/mix/parse.go | 14 +- pkg/dependency/parser/java/jar/parse.go | 15 +- .../parser/java/jar/sonatype/log.go | 40 ++- .../parser/java/jar/sonatype/sonatype.go | 2 +- pkg/dependency/parser/java/pom/artifact.go | 2 +- pkg/dependency/parser/java/pom/parse.go | 34 +- pkg/dependency/parser/java/pom/pom.go | 5 +- pkg/dependency/parser/nodejs/npm/parse.go | 60 ++-- pkg/dependency/parser/nodejs/pnpm/parse.go | 40 ++- .../parser/nodejs/pnpm/parse_test.go | 3 +- pkg/dependency/parser/nodejs/yarn/parse.go | 18 +- pkg/dependency/parser/php/composer/parse.go | 10 +- .../parser/python/packaging/parse.go | 15 +- pkg/dependency/parser/python/poetry/parse.go | 24 +- .../parser/python/poetry/parse_test.go | 4 +- pkg/dependency/parser/rust/cargo/parse.go | 16 +- .../parser/swift/cocoapods/parse.go | 20 +- pkg/dependency/parser/swift/swift/parse.go | 12 +- pkg/detector/library/compare/compare.go | 4 +- pkg/detector/library/driver.go | 5 +- pkg/detector/ospkg/alma/alma.go | 11 +- pkg/detector/ospkg/alma/alma_test.go | 2 +- pkg/detector/ospkg/alpine/alpine.go | 23 +- pkg/detector/ospkg/alpine/alpine_test.go | 2 +- 
pkg/detector/ospkg/amazon/amazon.go | 15 +- pkg/detector/ospkg/amazon/amazon_test.go | 2 +- pkg/detector/ospkg/chainguard/chainguard.go | 15 +- .../ospkg/chainguard/chainguard_test.go | 2 +- pkg/detector/ospkg/debian/debian.go | 14 +- pkg/detector/ospkg/debian/debian_test.go | 2 +- pkg/detector/ospkg/detect.go | 8 +- pkg/detector/ospkg/mariner/mariner.go | 8 +- pkg/detector/ospkg/mariner/mariner_test.go | 9 +- pkg/detector/ospkg/oracle/oracle.go | 8 +- pkg/detector/ospkg/oracle/oracle_test.go | 2 +- pkg/detector/ospkg/photon/photon.go | 7 +- pkg/detector/ospkg/photon/photon_test.go | 2 +- pkg/detector/ospkg/redhat/redhat.go | 10 +- pkg/detector/ospkg/redhat/redhat_test.go | 2 +- pkg/detector/ospkg/rocky/rocky.go | 11 +- pkg/detector/ospkg/rocky/rocky_test.go | 2 +- pkg/detector/ospkg/suse/suse.go | 7 +- pkg/detector/ospkg/suse/suse_test.go | 2 +- pkg/detector/ospkg/ubuntu/ubuntu.go | 12 +- pkg/detector/ospkg/ubuntu/ubuntu_test.go | 2 +- pkg/detector/ospkg/version/version.go | 3 +- pkg/detector/ospkg/wolfi/wolfi.go | 16 +- pkg/detector/ospkg/wolfi/wolfi_test.go | 2 +- pkg/fanal/analyzer/analyzer.go | 12 +- pkg/fanal/analyzer/config_analyzer.go | 2 +- pkg/fanal/analyzer/imgconf/secret/secret.go | 2 +- pkg/fanal/analyzer/language/analyze.go | 2 +- .../analyzer/language/dart/pub/pubspec.go | 13 +- .../analyzer/language/dotnet/nuget/nuspec.go | 6 +- pkg/fanal/analyzer/language/golang/mod/mod.go | 11 +- .../analyzer/language/java/gradle/lockfile.go | 6 +- .../analyzer/language/java/gradle/pom.go | 12 +- .../language/nodejs/license/license.go | 8 +- pkg/fanal/analyzer/language/nodejs/npm/npm.go | 7 +- .../analyzer/language/nodejs/npm/npm_test.go | 2 +- .../analyzer/language/nodejs/yarn/yarn.go | 9 +- .../language/php/composer/composer.go | 5 +- .../language/python/packaging/packaging.go | 4 +- .../analyzer/language/python/poetry/poetry.go | 7 +- .../analyzer/language/rust/cargo/cargo.go | 11 +- pkg/fanal/analyzer/licensing/license.go | 57 +++- 
pkg/fanal/analyzer/licensing/license_test.go | 2 +- pkg/fanal/analyzer/pkg/apk/apk.go | 22 +- pkg/fanal/analyzer/pkg/apk/apk_test.go | 37 ++- pkg/fanal/analyzer/pkg/dpkg/dpkg.go | 23 +- pkg/fanal/analyzer/pkg/rpm/rpm.go | 21 +- pkg/fanal/analyzer/pkg/rpm/rpm_test.go | 33 +- pkg/fanal/analyzer/secret/secret_test.go | 14 +- pkg/fanal/applier/docker.go | 2 +- pkg/fanal/artifact/image/image.go | 16 +- pkg/fanal/artifact/image/remote_sbom.go | 12 +- pkg/fanal/artifact/image/remote_sbom_test.go | 2 +- pkg/fanal/artifact/local/fs.go | 6 +- pkg/fanal/artifact/sbom/sbom.go | 2 +- pkg/fanal/artifact/vm/ami.go | 2 +- pkg/fanal/artifact/vm/ebs.go | 6 +- pkg/fanal/artifact/vm/file.go | 5 +- pkg/fanal/handler/unpackaged/unpackaged.go | 4 +- .../handler/unpackaged/unpackaged_test.go | 2 +- pkg/fanal/image/daemon/image.go | 2 +- pkg/fanal/image/image.go | 2 +- pkg/fanal/image/registry/token.go | 4 +- pkg/fanal/log/log.go | 17 - pkg/fanal/secret/scanner.go | 51 ++- pkg/fanal/secret/scanner_test.go | 48 ++- pkg/fanal/walker/fs.go | 4 +- pkg/fanal/walker/vm.go | 8 +- pkg/fanal/walker/walk.go | 4 +- pkg/flag/db_flags.go | 11 +- pkg/flag/db_flags_test.go | 17 +- pkg/flag/options.go | 12 +- pkg/flag/options_test.go | 27 ++ pkg/flag/remote_flags.go | 8 +- pkg/flag/remote_flags_test.go | 17 +- pkg/flag/report_flags.go | 20 +- pkg/flag/report_flags_test.go | 24 +- pkg/flag/sbom_flags.go | 2 +- pkg/flag/scan_flags.go | 4 +- pkg/flag/vulnerability_flags.go | 4 +- pkg/flag/vulnerability_flags_test.go | 20 +- pkg/iac/ignore/parse.go | 11 +- pkg/javadb/client.go | 8 +- pkg/k8s/commands/cluster.go | 11 +- pkg/k8s/commands/namespace.go | 9 +- pkg/k8s/commands/resource.go | 6 +- pkg/k8s/commands/run.go | 15 +- pkg/k8s/report/report.go | 3 +- pkg/k8s/scanner/io.go | 4 +- pkg/k8s/scanner/scanner.go | 18 +- pkg/licensing/classifier.go | 2 +- pkg/log/context.go | 47 +++ pkg/log/handler.go | 304 ++++++++++++++++++ pkg/log/handler_test.go | 252 +++++++++++++++ pkg/log/logger.go | 162 ++++------ 
pkg/misconf/scanner.go | 6 +- pkg/module/command.go | 6 +- pkg/module/module.go | 32 +- pkg/parallel/walk.go | 5 +- pkg/plugin/plugin.go | 21 +- pkg/policy/policy.go | 10 +- pkg/rekor/client.go | 12 +- pkg/remote/remote.go | 4 +- pkg/report/table/vulnerability.go | 4 +- pkg/report/template.go | 2 +- pkg/report/writer.go | 2 +- pkg/result/ignore.go | 8 +- pkg/rpc/client/headers.go | 2 +- pkg/rpc/convert.go | 12 +- pkg/rpc/retry.go | 4 +- pkg/rpc/server/listen.go | 18 +- pkg/rpc/server/server.go | 2 +- pkg/sbom/cyclonedx/marshal.go | 4 +- pkg/sbom/cyclonedx/unmarshal.go | 15 +- pkg/sbom/io/decode.go | 13 +- pkg/sbom/spdx/marshal.go | 2 +- pkg/scanner/langpkg/scan.go | 8 +- pkg/scanner/local/scan.go | 22 +- pkg/scanner/ospkg/scan.go | 5 +- pkg/scanner/scan.go | 8 +- pkg/utils/fsutils/fs.go | 3 +- pkg/version/version.go | 8 +- pkg/vex/csaf.go | 21 +- pkg/vex/cyclonedx.go | 13 +- pkg/vulnerability/vulnerability.go | 2 +- 164 files changed, 1657 insertions(+), 884 deletions(-) delete mode 100644 pkg/fanal/log/log.go create mode 100644 pkg/log/context.go create mode 100644 pkg/log/handler.go create mode 100644 pkg/log/handler_test.go diff --git a/cmd/trivy/main.go b/cmd/trivy/main.go index e2c545975315..e3118ae8e97f 100644 --- a/cmd/trivy/main.go +++ b/cmd/trivy/main.go @@ -15,7 +15,7 @@ import ( func main() { if err := run(); err != nil { - log.Fatal(err) + log.Fatal("Fatal error", log.Err(err)) } } diff --git a/magefiles/cloud_actions.go b/magefiles/cloud_actions.go index 045586c8837a..195bdde349e8 100644 --- a/magefiles/cloud_actions.go +++ b/magefiles/cloud_actions.go @@ -206,10 +206,10 @@ func main() { // GenAllowedActions generates the list of valid actions for wildcard support func GenAllowedActions() error { - log.Logger.Info("Start parsing actions") + log.Info("Start parsing actions") startTime := time.Now() defer func() { - log.Logger.Infof("Parsing is completed. 
Duration %fs\n", time.Since(startTime).Seconds()) + log.Info("Parsing is completed", log.Duration(time.Since(startTime).Seconds())) }() doc, err := htmlquery.LoadURL(serviceActionReferencesURL) diff --git a/magefiles/docs.go b/magefiles/docs.go index b69e813690af..1a59007de229 100644 --- a/magefiles/docs.go +++ b/magefiles/docs.go @@ -24,6 +24,6 @@ func main() { cmd := commands.NewApp() cmd.DisableAutoGenTag = true if err := doc.GenMarkdownTree(cmd, "./docs/docs/references/configuration/cli"); err != nil { - log.Fatal(err) + log.Fatal("Fatal error", log.Err(err)) } } diff --git a/pkg/attestation/sbom/rekor.go b/pkg/attestation/sbom/rekor.go index 7336def6b511..a5872152c8f5 100644 --- a/pkg/attestation/sbom/rekor.go +++ b/pkg/attestation/sbom/rekor.go @@ -39,7 +39,7 @@ func (r *Rekor) RetrieveSBOM(ctx context.Context, digest string) ([]byte, error) return nil, ErrNoSBOMAttestation } - log.Logger.Debugf("Found matching Rekor entries: %s", entryIDs) + log.Debug("Found matching Rekor entries", log.Any("entry_ids", entryIDs)) for _, ids := range lo.Chunk[rekor.EntryID](entryIDs, rekor.MaxGetEntriesLimit) { entries, err := r.client.GetEntries(ctx, ids) diff --git a/pkg/attestation/sbom/rekor_test.go b/pkg/attestation/sbom/rekor_test.go index db97d781568c..a5aee5a71707 100644 --- a/pkg/attestation/sbom/rekor_test.go +++ b/pkg/attestation/sbom/rekor_test.go @@ -31,7 +31,7 @@ func TestRekor_RetrieveSBOM(t *testing.T) { }, } - require.NoError(t, log.InitLogger(false, true)) + log.InitLogger(false, true) for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { ts := rekortest.NewServer(t) diff --git a/pkg/cloud/aws/commands/run.go b/pkg/cloud/aws/commands/run.go index 23406aeafda5..58744e752c79 100644 --- a/pkg/cloud/aws/commands/run.go +++ b/pkg/cloud/aws/commands/run.go @@ -23,7 +23,7 @@ import ( var allSupportedServicesFunc = awsScanner.AllSupportedServices func getAccountIDAndRegion(ctx context.Context, region, endpoint string) (string, string, error) { - 
log.Logger.Debug("Looking for AWS credentials provider...") + log.DebugContext(ctx, "Looking for AWS credentials provider...") cfg, err := config.LoadDefaultAWSConfig(ctx, region, endpoint) if err != nil { @@ -32,7 +32,7 @@ func getAccountIDAndRegion(ctx context.Context, region, endpoint string) (string svc := sts.NewFromConfig(cfg) - log.Logger.Debug("Looking up AWS caller identity...") + log.DebugContext(ctx, "Looking up AWS caller identity...") result, err := svc.GetCallerIdentity(ctx, &sts.GetCallerIdentityInput{}) if err != nil { return "", "", xerrors.Errorf("failed to discover AWS caller identity: %w", err) @@ -40,7 +40,7 @@ func getAccountIDAndRegion(ctx context.Context, region, endpoint string) (string if result.Account == nil { return "", "", xerrors.Errorf("missing account id for aws account") } - log.Logger.Debugf("Verified AWS credentials for account %s!", *result.Account) + log.DebugContext(ctx, "Verified AWS credentials for account!", log.String("account", *result.Account)) return *result.Account, cfg.Region, nil } @@ -85,22 +85,22 @@ func processOptions(ctx context.Context, opt *flag.Options) error { } } - err := filterServices(opt) + err := filterServices(ctx, opt) if err != nil { return err } - log.Logger.Debug("scanning services: ", opt.Services) + log.DebugContext(ctx, "Scanning services", log.Any("services", opt.Services)) return nil } -func filterServices(opt *flag.Options) error { +func filterServices(ctx context.Context, opt *flag.Options) error { switch { case len(opt.Services) == 0 && len(opt.SkipServices) == 0: - log.Logger.Debug("No service(s) specified, scanning all services...") + log.DebugContext(ctx, "No service(s) specified, scanning all services...") opt.Services = allSupportedServicesFunc() case len(opt.SkipServices) > 0: - log.Logger.Debug("excluding services: ", opt.SkipServices) + log.DebugContext(ctx, "Excluding services", log.Any("services", opt.SkipServices)) for _, s := range allSupportedServicesFunc() { if 
slices.Contains(opt.SkipServices, s) { continue @@ -110,7 +110,8 @@ func filterServices(opt *flag.Options) error { } } case len(opt.Services) > 0: - log.Logger.Debugf("Specific services were requested: [%s]...", strings.Join(opt.Services, ", ")) + log.DebugContext(ctx, "Specific services were requested...", + log.String("services", strings.Join(opt.Services, ", "))) for _, service := range opt.Services { var found bool supported := allSupportedServicesFunc() @@ -132,10 +133,12 @@ func Run(ctx context.Context, opt flag.Options) error { ctx, cancel := context.WithTimeout(ctx, opt.GlobalOptions.Timeout) defer cancel() + ctx = log.WithContextPrefix(ctx, "aws") + var err error defer func() { if errors.Is(err, context.DeadlineExceeded) { - log.Logger.Warn("Increase --timeout value") + log.Warn("Increase --timeout value") } }() @@ -148,14 +151,14 @@ func Run(ctx context.Context, opt flag.Options) error { var aerr errs.AdapterError if errors.As(err, &aerr) { for _, e := range aerr.Errors() { - log.Logger.Warnf("Adapter error: %s", e) + log.WarnContext(ctx, "Adapter error", log.Err(e)) } } else { return xerrors.Errorf("aws scan error: %w", err) } } - log.Logger.Debug("Writing report to output...") + log.DebugContext(ctx, "Writing report to output...") res := results.GetFailed() if opt.MisconfOptions.IncludeNonFailures { diff --git a/pkg/cloud/aws/scanner/scanner.go b/pkg/cloud/aws/scanner/scanner.go index 84b5cf6c640e..d1efe2b78d4a 100644 --- a/pkg/cloud/aws/scanner/scanner.go +++ b/pkg/cloud/aws/scanner/scanner.go @@ -20,10 +20,13 @@ import ( ) type AWSScanner struct { + logger *log.Logger } func NewScanner() *AWSScanner { - return &AWSScanner{} + return &AWSScanner{ + logger: log.WithPrefix("aws"), + } } func (s *AWSScanner) Scan(ctx context.Context, option flag.Options) (scan.Results, bool, error) { @@ -31,7 +34,7 @@ func (s *AWSScanner) Scan(ctx context.Context, option flag.Options) (scan.Result awsCache := cache.New(option.CacheDir, option.MaxCacheAge, option.Account, 
option.Region) included, missing := awsCache.ListServices(option.Services) - prefixedLogger := &log.PrefixedLogger{Name: "aws"} + prefixedLogger := log.NewWriteLogger(log.WithPrefix("aws")) var scannerOpts []options.ScannerOption if !option.NoProgress { @@ -72,10 +75,10 @@ func (s *AWSScanner) Scan(ctx context.Context, option flag.Options) (scan.Result downloadedPolicyPaths, err = operation.InitBuiltinPolicies(context.Background(), option.CacheDir, option.Quiet, option.SkipPolicyUpdate, option.MisconfOptions.PolicyBundleRepository, option.RegistryOpts()) if err != nil { if !option.SkipPolicyUpdate { - log.Logger.Errorf("Falling back to embedded policies: %s", err) + s.logger.Error("Falling back to embedded policies", log.Err(err)) } } else { - log.Logger.Debug("Policies successfully loaded from disk") + s.logger.Debug("Policies successfully loaded from disk") policyPaths = append(policyPaths, downloadedPolicyPaths...) scannerOpts = append(scannerOpts, options.ScannerWithEmbeddedPolicies(false), @@ -95,7 +98,7 @@ func (s *AWSScanner) Scan(ctx context.Context, option flag.Options) (scan.Result dataFS, dataPaths, err := misconf.CreateDataFS(option.RegoOptions.DataPaths) if err != nil { - log.Logger.Errorf("Could not load config data: %s", err) + s.logger.Error("Could not load config data", err) } scannerOpts = append(scannerOpts, options.ScannerWithDataDirs(dataPaths...), diff --git a/pkg/commands/app.go b/pkg/commands/app.go index 41d1d2ff645d..902b92b78087 100644 --- a/pkg/commands/app.go +++ b/pkg/commands/app.go @@ -114,7 +114,7 @@ func loadPluginCommands() []*cobra.Command { var commands []*cobra.Command plugins, err := plugin.LoadAll() if err != nil { - log.Logger.Debugf("no plugins were loaded") + log.Debug("No plugins loaded") return nil } for _, p := range plugins { @@ -142,12 +142,12 @@ func initConfig(configFile string) error { viper.SetConfigType("yaml") if err := viper.ReadInConfig(); err != nil { if errors.Is(err, os.ErrNotExist) { - 
log.Logger.Debugf("config file %q not found", configFile) + log.Debug("Config file not found", log.String("file_path", configFile)) return nil } return xerrors.Errorf("config file %q loading error: %s", configFile, err) } - log.Logger.Infof("Loaded %s", configFile) + log.Info("Loaded", log.String("file_path", configFile)) return nil } @@ -196,9 +196,7 @@ func NewRootCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command { } // Initialize logger - if err := log.InitLogger(globalOptions.Debug, globalOptions.Quiet); err != nil { - return err - } + log.InitLogger(globalOptions.Debug, globalOptions.Quiet) return nil }, @@ -570,7 +568,7 @@ func NewClientCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command { return validateArgs(cmd, args) }, RunE: func(cmd *cobra.Command, args []string) error { - log.Logger.Warn("'client' subcommand is deprecated now. See https://github.com/aquasecurity/trivy/discussions/2119") + log.Warn("'client' subcommand is deprecated now. See https://github.com/aquasecurity/trivy/discussions/2119") if err := clientFlags.Bind(cmd); err != nil { return xerrors.Errorf("flag bind error: %w", err) @@ -1040,7 +1038,7 @@ The following services are supported: } if opts.Timeout < time.Hour { opts.Timeout = time.Hour - log.Logger.Debug("Timeout is set to less than 1 hour - upgrading to 1 hour for this command.") + log.Info("Timeout is set to less than 1 hour - upgrading to 1 hour for this command.") } return awscommands.Run(cmd.Context(), opts) }, @@ -1106,7 +1104,7 @@ func NewVMCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command { } if options.Timeout < time.Minute*30 { options.Timeout = time.Minute * 30 - log.Logger.Debug("Timeout is set to less than 30 min - upgrading to 30 min for this command.") + log.Info("Timeout is set to less than 30 min - upgrading to 30 min for this command.") } return artifact.Run(cmd.Context(), options, artifact.TargetVM) }, diff --git a/pkg/commands/artifact/run.go b/pkg/commands/artifact/run.go index 
c54f0fe2fe75..b156ce44754a 100644 --- a/pkg/commands/artifact/run.go +++ b/pkg/commands/artifact/run.go @@ -360,7 +360,7 @@ func (r *runner) initCache(opts flag.Options) error { if err != nil { return xerrors.Errorf("unable to initialize the cache: %w", err) } - log.Logger.Debugf("cache dir: %s", fsutils.CacheDir()) + log.Debug("Cache dir", log.String("dir", fsutils.CacheDir())) if opts.Reset { defer cacheClient.Close() @@ -400,12 +400,12 @@ func Run(ctx context.Context, opts flag.Options, targetKind TargetKind) (err err defer func() { if errors.Is(err, context.DeadlineExceeded) { - log.Logger.Warn("Increase --timeout value") + log.Warn("Increase --timeout value") } }() if opts.GenerateDefaultConfig { - log.Logger.Info("Writing the default config to trivy-default.yaml...") + log.Info("Writing the default config to trivy-default.yaml...") return viper.SafeWriteConfigAs("trivy-default.yaml") } @@ -484,7 +484,8 @@ func disabledAnalyzers(opts flag.Options) []analyzer.Type { // Filter only enabled misconfiguration scanners ma, err := filterMisconfigAnalyzers(opts.MisconfigScanners, analyzer.TypeConfigFiles) if err != nil { - log.Logger.Errorf("Invalid misconfig scanners specified: %s defaulting to use all misconfig scanners", opts.MisconfigScanners) + log.Error("Invalid misconfiguration scanners specified, defaulting to use all misconfig scanners", + log.Any("scanners", opts.MisconfigScanners)) } else { analyzers = append(analyzers, ma...) 
} @@ -528,7 +529,7 @@ func filterMisconfigAnalyzers(included, all []analyzer.Type) ([]analyzer.Type, e return nil, xerrors.Errorf("invalid misconfiguration scanner specified %s valid scanners: %s", missing, all) } - log.Logger.Debugf("Enabling misconfiguration scanners: %s", included) + log.Debug("Enabling misconfiguration scanners", log.Any("scanners", included)) return lo.Without(all, included...), nil } @@ -569,28 +570,28 @@ func initScannerConfig(opts flag.Options, cacheClient cache.Cache) (ScannerConfi } if len(opts.ImageConfigScanners) != 0 { - log.Logger.Infof("Container image config scanners: %q", opts.ImageConfigScanners) + log.Info("Container image config scanners", log.Any("scanners", opts.ImageConfigScanners)) } if opts.Scanners.Enabled(types.VulnerabilityScanner) { - log.Logger.Info("Vulnerability scanning is enabled") - log.Logger.Debugf("Vulnerability type: %s", scanOptions.VulnType) + log.Info("Vulnerability scanning is enabled") + log.Debug("Vulnerability type", log.Any("type", scanOptions.VulnType)) } // ScannerOption is filled only when config scanning is enabled. 
var configScannerOptions misconf.ScannerOption if opts.Scanners.Enabled(types.MisconfigScanner) || opts.ImageConfigScanners.Enabled(types.MisconfigScanner) { - log.Logger.Info("Misconfiguration scanning is enabled") + log.Info("Misconfiguration scanning is enabled") var downloadedPolicyPaths []string var disableEmbedded bool downloadedPolicyPaths, err := operation.InitBuiltinPolicies(context.Background(), opts.CacheDir, opts.Quiet, opts.SkipPolicyUpdate, opts.MisconfOptions.PolicyBundleRepository, opts.RegistryOpts()) if err != nil { if !opts.SkipPolicyUpdate { - log.Logger.Errorf("Falling back to embedded policies: %s", err) + log.Error("Falling back to embedded policies", log.Err(err)) } } else { - log.Logger.Debug("Policies successfully loaded from disk") + log.Debug("Policies successfully loaded from disk") disableEmbedded = true } configScannerOptions = misconf.ScannerOption{ @@ -617,18 +618,18 @@ func initScannerConfig(opts flag.Options, cacheClient cache.Cache) (ScannerConfi // Do not load config file for secret scanning if opts.Scanners.Enabled(types.SecretScanner) { ver := canonicalVersion(opts.AppVersion) - log.Logger.Info("Secret scanning is enabled") - log.Logger.Info("If your scanning is slow, please try '--scanners vuln' to disable secret scanning") - log.Logger.Infof("Please see also https://aquasecurity.github.io/trivy/%s/docs/scanner/secret/#recommendation for faster secret detection", ver) + log.Info("Secret scanning is enabled") + log.Info("If your scanning is slow, please try '--scanners vuln' to disable secret scanning") + log.Infof("Please see also https://aquasecurity.github.io/trivy/%s/docs/scanner/secret/#recommendation for faster secret detection", ver) } else { opts.SecretConfigPath = "" } if opts.Scanners.Enabled(types.LicenseScanner) { if opts.LicenseFull { - log.Logger.Info("Full license scanning is enabled") + log.Info("Full license scanning is enabled") } else { - log.Logger.Info("License scanning is enabled") + log.Info("License 
scanning is enabled") } } diff --git a/pkg/commands/convert/run.go b/pkg/commands/convert/run.go index 9045e54bfa3d..34e799f7a061 100644 --- a/pkg/commands/convert/run.go +++ b/pkg/commands/convert/run.go @@ -39,7 +39,7 @@ func Run(ctx context.Context, opts flag.Options) (err error) { return xerrors.Errorf("unable to filter results: %w", err) } - log.Logger.Debug("Writing report to output...") + log.Debug("Writing report to output...") if err = report.Write(ctx, r, opts); err != nil { return xerrors.Errorf("unable to write results: %w", err) } diff --git a/pkg/commands/operation/operation.go b/pkg/commands/operation/operation.go index 8f8561a7c290..2b4e2a7f5ffa 100644 --- a/pkg/commands/operation/operation.go +++ b/pkg/commands/operation/operation.go @@ -42,7 +42,7 @@ type Cache struct { // NewCache is the factory method for Cache func NewCache(c flag.CacheOptions) (Cache, error) { if strings.HasPrefix(c.CacheBackend, "redis://") { - log.Logger.Infof("Redis cache: %s", c.CacheBackendMasked()) + log.Info("Redis cache", log.String("url", c.CacheBackendMasked())) options, err := redis.ParseURL(c.CacheBackend) if err != nil { return Cache{}, err @@ -70,7 +70,7 @@ func NewCache(c flag.CacheOptions) (Cache, error) { } if c.CacheTTL != 0 { - log.Logger.Warn("'--cache-ttl' is only available with Redis cache backend") + log.Warn("'--cache-ttl' is only available with Redis cache backend") } // standalone mode @@ -94,7 +94,7 @@ func (c Cache) Reset() (err error) { // ClearDB clears the DB cache func (c Cache) ClearDB() (err error) { - log.Logger.Info("Removing DB file...") + log.Info("Removing DB file...") if err = os.RemoveAll(fsutils.CacheDir()); err != nil { return xerrors.Errorf("failed to remove the directory (%s) : %w", fsutils.CacheDir(), err) } @@ -103,7 +103,7 @@ func (c Cache) ClearDB() (err error) { // ClearArtifacts clears the artifact cache func (c Cache) ClearArtifacts() error { - log.Logger.Info("Removing artifact caches...") + log.Info("Removing artifact 
caches...") if err := c.Clear(); err != nil { return xerrors.Errorf("failed to remove the cache: %w", err) } @@ -123,9 +123,8 @@ func DownloadDB(ctx context.Context, appVersion, cacheDir string, dbRepository n } if needsUpdate { - log.Logger.Info("Need to update DB") - log.Logger.Infof("DB Repository: %s", dbRepository) - log.Logger.Info("Downloading DB...") + log.Info("Need to update DB") + log.Info("Downloading DB...", log.String("repository", dbRepository.String())) if err = client.Download(ctx, cacheDir, opt); err != nil { return xerrors.Errorf("failed to download vulnerability DB: %w", err) } @@ -144,8 +143,8 @@ func showDBInfo(cacheDir string) error { if err != nil { return xerrors.Errorf("something wrong with DB: %w", err) } - log.Logger.Debugf("DB Schema: %d, UpdatedAt: %s, NextUpdate: %s, DownloadedAt: %s", - meta.Version, meta.UpdatedAt, meta.NextUpdate, meta.DownloadedAt) + log.Debug("DB info", log.Int("schema", meta.Version), log.Time("updated_at", meta.UpdatedAt), + log.Time("next_update", meta.NextUpdate), log.Time("downloaded_at", meta.DownloadedAt)) return nil } @@ -168,8 +167,8 @@ func InitBuiltinPolicies(ctx context.Context, cacheDir string, quiet, skipUpdate } if needsUpdate { - log.Logger.Info("Need to update the built-in policies") - log.Logger.Info("Downloading the built-in policies...") + log.Info("Need to update the built-in policies") + log.Info("Downloading the built-in policies...") if err = client.DownloadBuiltinPolicies(ctx, registryOpts); err != nil { return nil, xerrors.Errorf("failed to download built-in policies: %w", err) } @@ -179,7 +178,7 @@ func InitBuiltinPolicies(ctx context.Context, cacheDir string, quiet, skipUpdate if err != nil { if skipUpdate { msg := "No downloadable policies were loaded as --skip-policy-update is enabled" - log.Logger.Info(msg) + log.Info(msg) return nil, xerrors.Errorf(msg) } return nil, xerrors.Errorf("policy load error: %w", err) @@ -213,7 +212,8 @@ func Exit(opts flag.Options, failedResults bool) { 
func ExitOnEOL(opts flag.Options, m types.Metadata) { if opts.ExitOnEOL != 0 && m.OS != nil && m.OS.Eosl { - log.Logger.Errorf("Detected EOL OS: %s %s", m.OS.Family, m.OS.Name) + log.Error("Detected EOL OS", log.String("family", string(m.OS.Family)), + log.String("version", m.OS.Name)) os.Exit(opts.ExitOnEOL) } } diff --git a/pkg/commands/server/run.go b/pkg/commands/server/run.go index 03b8f144170a..70788db6a6f3 100644 --- a/pkg/commands/server/run.go +++ b/pkg/commands/server/run.go @@ -16,9 +16,7 @@ import ( // Run runs the scan func Run(ctx context.Context, opts flag.Options) (err error) { - if err = log.InitLogger(opts.Debug, opts.Quiet); err != nil { - return xerrors.Errorf("failed to initialize a logger: %w", err) - } + log.InitLogger(opts.Debug, opts.Quiet) // configure cache dir fsutils.SetCacheDir(opts.CacheDir) @@ -27,7 +25,7 @@ func Run(ctx context.Context, opts flag.Options) (err error) { return xerrors.Errorf("server cache error: %w", err) } defer cache.Close() - log.Logger.Debugf("cache dir: %s", fsutils.CacheDir()) + log.Debug("Cache", log.String("dir", fsutils.CacheDir())) if opts.Reset { return cache.ClearDB() diff --git a/pkg/db/db.go b/pkg/db/db.go index 9ecb281b064e..5ac539f203e8 100644 --- a/pkg/db/db.go +++ b/pkg/db/db.go @@ -96,22 +96,22 @@ func NewClient(cacheDir string, quiet bool, opts ...Option) *Client { func (c *Client) NeedsUpdate(cliVersion string, skip bool) (bool, error) { meta, err := c.metadata.Get() if err != nil { - log.Logger.Debugf("There is no valid metadata file: %s", err) + log.Debug("There is no valid metadata file", log.Err(err)) if skip { - log.Logger.Error("The first run cannot skip downloading DB") + log.Error("The first run cannot skip downloading DB") return false, xerrors.New("--skip-update cannot be specified on the first run") } meta = metadata.Metadata{Version: db.SchemaVersion} } if db.SchemaVersion < meta.Version { - log.Logger.Errorf("Trivy version (%s) is old. 
Update to the latest version.", cliVersion) + log.Error("The Trivy version is old. Update to the latest version.", log.String("version", cliVersion)) return false, xerrors.Errorf("the version of DB schema doesn't match. Local DB: %d, Expected: %d", meta.Version, db.SchemaVersion) } if skip { - log.Logger.Debug("Skipping DB update...") + log.Debug("Skipping DB update...") if err = c.validate(meta); err != nil { return false, xerrors.Errorf("validate error: %w", err) } @@ -119,7 +119,8 @@ func (c *Client) NeedsUpdate(cliVersion string, skip bool) (bool, error) { } if db.SchemaVersion != meta.Version { - log.Logger.Debugf("The local DB schema version (%d) does not match with supported version schema (%d).", meta.Version, db.SchemaVersion) + log.Debug("The local DB schema version does not match with supported version schema.", + log.Int("local_version", meta.Version), log.Int("supported_version", db.SchemaVersion)) return true, nil } @@ -128,7 +129,7 @@ func (c *Client) NeedsUpdate(cliVersion string, skip bool) (bool, error) { func (c *Client) validate(meta metadata.Metadata) error { if db.SchemaVersion != meta.Version { - log.Logger.Error("The local DB has an old schema version which is not supported by the current version of Trivy CLI. DB needs to be updated.") + log.Error("The local DB has an old schema version which is not supported by the current version of Trivy CLI. DB needs to be updated.") return xerrors.Errorf("--skip-update cannot be specified with the old DB schema. 
Local DB: %d, Expected: %d", meta.Version, db.SchemaVersion) } @@ -137,12 +138,12 @@ func (c *Client) validate(meta metadata.Metadata) error { func (c *Client) isNewDB(meta metadata.Metadata) bool { if c.clock.Now().Before(meta.NextUpdate) { - log.Logger.Debug("DB update was skipped because the local DB is the latest") + log.Debug("DB update was skipped because the local DB is the latest") return true } if c.clock.Now().Before(meta.DownloadedAt.Add(time.Hour)) { - log.Logger.Debug("DB update was skipped because the local DB was downloaded during the last hour") + log.Debug("DB update was skipped because the local DB was downloaded during the last hour") return true } return false @@ -152,7 +153,7 @@ func (c *Client) isNewDB(meta metadata.Metadata) bool { func (c *Client) Download(ctx context.Context, dst string, opt types.RegistryOptions) error { // Remove the metadata file under the cache directory before downloading DB if err := c.metadata.Delete(); err != nil { - log.Logger.Debug("no metadata file") + log.Debug("No metadata file") } art, err := c.initOCIArtifact(opt) @@ -171,7 +172,7 @@ func (c *Client) Download(ctx context.Context, dst string, opt types.RegistryOpt } func (c *Client) updateDownloadedAt(dst string) error { - log.Logger.Debug("Updating database metadata...") + log.Debug("Updating database metadata...") // We have to initialize a metadata client here // since the destination may be different from the cache directory. 
@@ -201,7 +202,7 @@ func (c *Client) initOCIArtifact(opt types.RegistryOptions) (*oci.Artifact, erro for _, diagnostic := range terr.Errors { // For better user experience if diagnostic.Code == transport.DeniedErrorCode || diagnostic.Code == transport.UnauthorizedErrorCode { - log.Logger.Warn("See https://aquasecurity.github.io/trivy/latest/docs/references/troubleshooting/#db") + log.Warn("See https://aquasecurity.github.io/trivy/latest/docs/references/troubleshooting/#db") break } } diff --git a/pkg/dependency/parser/c/conan/parse.go b/pkg/dependency/parser/c/conan/parse.go index 78e3bdc09636..2020377a4663 100644 --- a/pkg/dependency/parser/c/conan/parse.go +++ b/pkg/dependency/parser/c/conan/parse.go @@ -30,10 +30,14 @@ type Node struct { EndLine int } -type Parser struct{} +type Parser struct { + logger *log.Logger +} func NewParser() types.Parser { - return &Parser{} + return &Parser{ + logger: log.WithPrefix("conan"), + } } func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, error) { @@ -60,7 +64,7 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, } lib, err := parseRef(node) if err != nil { - log.Logger.Debug(err) + p.logger.Debug("Parse ref error", log.Err(err)) continue } diff --git a/pkg/dependency/parser/dotnet/core_deps/parse.go b/pkg/dependency/parser/dotnet/core_deps/parse.go index c4bf533a87df..399c38736779 100644 --- a/pkg/dependency/parser/dotnet/core_deps/parse.go +++ b/pkg/dependency/parser/dotnet/core_deps/parse.go @@ -12,10 +12,14 @@ import ( xio "github.com/aquasecurity/trivy/pkg/x/io" ) -type Parser struct{} +type Parser struct { + logger *log.Logger +} func NewParser() types.Parser { - return &Parser{} + return &Parser{ + logger: log.WithPrefix("dotnet"), + } } func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, error) { @@ -38,7 +42,7 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, split := strings.Split(nameVer, 
"/") if len(split) != 2 { // Invalid name - log.Logger.Warnf("Cannot parse .NET library version from: %s", nameVer) + p.logger.Warn("Cannot parse .NET library version", log.String("library", nameVer)) continue } diff --git a/pkg/dependency/parser/hex/mix/parse.go b/pkg/dependency/parser/hex/mix/parse.go index edc43fd284c6..ed5543ca4507 100644 --- a/pkg/dependency/parser/hex/mix/parse.go +++ b/pkg/dependency/parser/hex/mix/parse.go @@ -14,13 +14,17 @@ import ( ) // Parser is a parser for mix.lock -type Parser struct{} +type Parser struct { + logger *log.Logger +} func NewParser() types.Parser { - return &Parser{} + return &Parser{ + logger: log.WithPrefix("mix"), + } } -func (Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, error) { +func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, error) { var libs []types.Library scanner := bufio.NewScanner(r) var lineNumber int // It is used to save dependency location @@ -43,9 +47,9 @@ func (Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, er // git repository doesn't have dependency version // skip these dependencies if !strings.Contains(ss[0], ":git") { - log.Logger.Warnf("Cannot parse dependency: %s", line) + p.logger.Warn("Cannot parse dependency", log.String("line", line)) } else { - log.Logger.Debugf("Skip git dependencies: %s", name) + p.logger.Debug("Skip git dependencies", log.String("name", name)) } continue } diff --git a/pkg/dependency/parser/java/jar/parse.go b/pkg/dependency/parser/java/jar/parse.go index d5f1f6df0a4d..06f130c6b07c 100644 --- a/pkg/dependency/parser/java/jar/parse.go +++ b/pkg/dependency/parser/java/jar/parse.go @@ -15,7 +15,6 @@ import ( "strings" "github.com/samber/lo" - "go.uber.org/zap" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/dependency/types" @@ -34,6 +33,7 @@ type Client interface { } type Parser struct { + logger *log.Logger rootFilePath string offline bool size int64 @@ -63,6 +63,7 @@ func 
WithSize(size int64) Option { func NewParser(c Client, opts ...Option) types.Parser { p := &Parser{ + logger: log.WithPrefix("jar"), client: c, } @@ -82,7 +83,7 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, } func (p *Parser) parseArtifact(filePath string, size int64, r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, error) { - log.Logger.Debugw("Parsing Java artifacts...", zap.String("file", filePath)) + p.logger.Debug("Parsing Java artifacts...", log.String("file", filePath)) // Try to extract artifactId and version from the file name // e.g. spring-core-5.3.4-SNAPSHOT.jar => sprint-core, 5.3.4-SNAPSHOT @@ -103,7 +104,7 @@ func (p *Parser) parseArtifact(filePath string, size int64, r xio.ReadSeekerAt) if p.offline { // In offline mode, we will not check if the artifact information is correct. if !manifestProps.Valid() { - log.Logger.Debugw("Unable to identify POM in offline mode", zap.String("file", fileName)) + p.logger.Debug("Unable to identify POM in offline mode", log.String("file", fileName)) return libs, nil, nil } return append(libs, manifestProps.Library()), nil, nil @@ -126,7 +127,7 @@ func (p *Parser) parseArtifact(filePath string, size int64, r xio.ReadSeekerAt) return nil, nil, xerrors.Errorf("failed to search by SHA1: %w", err) } - log.Logger.Debugw("No such POM in the central repositories", zap.String("file", fileName)) + p.logger.Debug("No such POM in the central repositories", log.String("file", fileName)) // Return when artifactId or version from the file name are empty if fileProps.ArtifactID == "" || fileProps.Version == "" { @@ -137,8 +138,8 @@ func (p *Parser) parseArtifact(filePath string, size int64, r xio.ReadSeekerAt) // When some artifacts have the same groupIds, it might result in false detection. 
fileProps.GroupID, err = p.client.SearchByArtifactID(fileProps.ArtifactID, fileProps.Version) if err == nil { - log.Logger.Debugw("POM was determined in a heuristic way", zap.String("file", fileName), - zap.String("artifact", fileProps.String())) + p.logger.Debug("POM was determined in a heuristic way", log.String("file", fileName), + log.String("artifact", fileProps.String())) libs = append(libs, fileProps.Library()) } else if !errors.Is(err, ArtifactNotFoundErr) { return nil, nil, xerrors.Errorf("failed to search by artifact id: %w", err) @@ -182,7 +183,7 @@ func (p *Parser) traverseZip(filePath string, size int64, r xio.ReadSeekerAt, fi case isArtifact(fileInJar.Name): innerLibs, _, err := p.parseInnerJar(fileInJar, filePath) // TODO process inner deps if err != nil { - log.Logger.Debugf("Failed to parse %s: %s", fileInJar.Name, err) + p.logger.Debug("Failed to parse", log.String("file", fileInJar.Name), log.Err(err)) continue } libs = append(libs, innerLibs...) diff --git a/pkg/dependency/parser/java/jar/sonatype/log.go b/pkg/dependency/parser/java/jar/sonatype/log.go index 9d4ef0b7db87..78f7f04824c3 100644 --- a/pkg/dependency/parser/java/jar/sonatype/log.go +++ b/pkg/dependency/parser/java/jar/sonatype/log.go @@ -1,32 +1,30 @@ package sonatype -import "github.com/aquasecurity/trivy/pkg/log" +import ( + "context" + "log/slog" + + "github.com/aquasecurity/trivy/pkg/log" +) // logger implements LeveledLogger // https://github.com/hashicorp/go-retryablehttp/blob/991b9d0a42d13014e3689dd49a94c02be01f4237/client.go#L285-L290 -type logger struct{} - -func (logger) Error(msg string, keysAndValues ...interface{}) { - // Use Debugw to suppress errors on failure - if msg == "request failed" { - log.Logger.Debugw(msg, keysAndValues...) - return - } - log.Logger.Errorw(msg, keysAndValues) +type handler struct { + slog.Handler } -func (logger) Info(msg string, keysAndValues ...interface{}) { - log.Logger.Infow(msg, keysAndValues...) 
+func newLogger() *log.Logger { return log.New(&handler{slog.Default().Handler()}).With(log.Prefix("sonatype")) } -func (logger) Debug(msg string, keysAndValues ...interface{}) { - // This message is displayed too much - if msg == "performing request" { return + return nil } - log.Logger.Debugw(msg, keysAndValues...) -} - -func (logger) Warn(msg string, keysAndValues ...interface{}) { - log.Logger.Warnw(msg, keysAndValues...) + return h.Handler.Handle(ctx, r) } diff --git a/pkg/dependency/parser/java/jar/sonatype/sonatype.go b/pkg/dependency/parser/java/jar/sonatype/sonatype.go index 63cae20b670f..ebf13f96e42b 100644 --- a/pkg/dependency/parser/java/jar/sonatype/sonatype.go +++ b/pkg/dependency/parser/java/jar/sonatype/sonatype.go @@ -57,7 +57,7 @@ func WithHTTPClient(client *http.Client) Option { func New(opts ...Option) Sonatype { // for HTTP retry retryClient := retryablehttp.NewClient() - retryClient.Logger = logger{} + retryClient.Logger = newLogger() retryClient.RetryWaitMin = 20 * time.Second retryClient.RetryWaitMax = 5 * time.Minute retryClient.RetryMax = 5 diff --git a/pkg/dependency/parser/java/pom/artifact.go b/pkg/dependency/parser/java/pom/artifact.go index 7cbab3b5b651..5f94dbfc3739 100644 --- a/pkg/dependency/parser/java/pom/artifact.go +++ b/pkg/dependency/parser/java/pom/artifact.go @@ -158,5 +158,5 @@ func printLoopedPropertiesStack(env string, usedProps []string) { for _, prop := range usedProps { s += fmt.Sprintf("%s -> ", prop) } - log.Logger.Warnf("Lopped properties were detected: %s%s", s, env) + log.Warn("Looped properties were detected", log.String("prop", s+env)) } diff --git a/pkg/dependency/parser/java/pom/parse.go b/pkg/dependency/parser/java/pom/parse.go index 
955f8cfd9e33..18a62bffbf94 100644 --- a/pkg/dependency/parser/java/pom/parse.go +++ b/pkg/dependency/parser/java/pom/parse.go @@ -14,7 +14,6 @@ import ( multierror "github.com/hashicorp/go-multierror" "github.com/samber/lo" - "go.uber.org/zap" "golang.org/x/net/html/charset" "golang.org/x/xerrors" @@ -50,6 +49,7 @@ func WithRemoteRepos(repos []string) option { } type parser struct { + logger *log.Logger rootPath string cache pomCache localRepository string @@ -76,6 +76,7 @@ func NewParser(filePath string, opts ...option) types.Parser { } return &parser{ + logger: log.WithPrefix("pom"), rootPath: filepath.Clean(filePath), cache: newPOMCache(), localRepository: localRepository, @@ -186,7 +187,8 @@ func (p *parser) parseRoot(root artifact, uniqModules map[string]struct{}) ([]ty for _, relativePath := range result.modules { moduleArtifact, err := p.parseModule(result.filePath, relativePath) if err != nil { - log.Logger.Debugf("Unable to parse %q module: %s", result.filePath, err) + p.logger.Debug("Unable to parse the module", + log.String("file_path", result.filePath), log.Err(err)) continue } @@ -283,10 +285,11 @@ func (p *parser) resolve(art artifact, rootDepManagement []pomDependency) (analy return *result, nil } - log.Logger.Debugf("Resolving %s:%s:%s...", art.GroupID, art.ArtifactID, art.Version) + p.logger.Debug("Resolving...", log.String("group_id", art.GroupID), + log.String("artifact_id", art.ArtifactID), log.String("version", art.Version.String())) pomContent, err := p.tryRepository(art.GroupID, art.ArtifactID, art.Version.String()) if err != nil { - log.Logger.Debug(err) + p.logger.Debug("Repository error", log.Err(err)) } result, err := p.analyze(pomContent, analysisOptions{ exclusions: art.Exclusions, @@ -472,10 +475,10 @@ func (p *parser) parseParent(currentPath string, parent pomParent) (analysisResu if target.IsEmpty() && !isProperty(parent.Version) { return analysisResult{}, nil } - log.Logger.Debugf("Start parent: %s", target.String()) - defer func() 
{ - log.Logger.Debugf("Exit parent: %s", target.String()) - }() + + logger := p.logger.With("artifact", target.String()) + logger.Debug("Start parent") + defer logger.Debug("Exit parent") // If the artifact is found in cache, it is returned. if result := p.cache.get(target); result != nil { @@ -484,7 +487,7 @@ func (p *parser) parseParent(currentPath string, parent pomParent) (analysisResu parentPOM, err := p.retrieveParent(currentPath, parent.RelativePath, target) if err != nil { - log.Logger.Debugf("parent POM not found: %s", err) + logger.Debug("Parent POM not found", log.Err(err)) } result, err := p.analyze(parentPOM, analysisOptions{}) @@ -630,13 +633,13 @@ func (p *parser) loadPOMFromLocalRepository(paths []string) (*pom, error) { func (p *parser) fetchPOMFromRemoteRepositories(paths []string) (*pom, error) { // Do not try fetching pom.xml from remote repositories in offline mode if p.offline { - log.Logger.Debug("Fetching the remote pom.xml is skipped") + p.logger.Debug("Fetching the remote pom.xml is skipped") return nil, xerrors.New("offline mode") } // try all remoteRepositories for _, repo := range p.remoteRepositories { - fetched, err := fetchPOMFromRemoteRepository(repo, paths) + fetched, err := p.fetchPOMFromRemoteRepository(repo, paths) if err != nil { return nil, xerrors.Errorf("fetch repository error: %w", err) } else if fetched == nil { @@ -647,20 +650,21 @@ func (p *parser) fetchPOMFromRemoteRepositories(paths []string) (*pom, error) { return nil, xerrors.Errorf("the POM was not found in remote remoteRepositories") } -func fetchPOMFromRemoteRepository(repo string, paths []string) (*pom, error) { +func (p *parser) fetchPOMFromRemoteRepository(repo string, paths []string) (*pom, error) { repoURL, err := url.Parse(repo) if err != nil { - log.Logger.Errorw("URL parse error", zap.String("repo", repo)) + p.logger.Error("URL parse error", log.String("repo", repo)) return nil, nil } paths = append([]string{repoURL.Path}, paths...) 
repoURL.Path = path.Join(paths...) + logger := p.logger.With(log.String("host", repoURL.Host), log.String("path", repoURL.Path)) client := &http.Client{} req, err := http.NewRequest("GET", repoURL.String(), http.NoBody) if err != nil { - log.Logger.Debugf("Request failed for %s%s", repoURL.Host, repoURL.Path) + logger.Debug("HTTP request failed") return nil, nil } if repoURL.User != nil { @@ -670,7 +674,7 @@ func fetchPOMFromRemoteRepository(repo string, paths []string) (*pom, error) { resp, err := client.Do(req) if err != nil || resp.StatusCode != http.StatusOK { - log.Logger.Debugf("Failed to fetch from %s%s", repoURL.Host, repoURL.Path) + logger.Debug("Failed to fetch") return nil, nil } defer resp.Body.Close() diff --git a/pkg/dependency/parser/java/pom/pom.go b/pkg/dependency/parser/java/pom/pom.go index 8b610cc5925b..89adabe0ff3e 100644 --- a/pkg/dependency/parser/java/pom/pom.go +++ b/pkg/dependency/parser/java/pom/pom.go @@ -116,6 +116,7 @@ func (p pom) licenses() []string { } func (p pom) repositories(servers []Server) []string { + logger := log.WithPrefix("pom") var urls []string for _, rep := range p.content.Repositories.Repository { // Add only enabled repositories @@ -125,7 +126,7 @@ func (p pom) repositories(servers []Server) []string { repoURL, err := url.Parse(rep.URL) if err != nil { - log.Logger.Debugf("Unable to parse remote repository url: %s", err) + logger.Debug("Unable to parse remote repository url", log.Err(err)) continue } @@ -138,7 +139,7 @@ func (p pom) repositories(servers []Server) []string { } } - log.Logger.Debugf("Adding repository %s: %s", rep.ID, rep.URL) + logger.Debug("Adding repository", log.String("id", rep.ID), log.String("url", rep.URL)) urls = append(urls, repoURL.String()) } return urls diff --git a/pkg/dependency/parser/nodejs/npm/parse.go b/pkg/dependency/parser/nodejs/npm/parse.go index b74cfa5ce2f5..46b62c609502 100644 --- a/pkg/dependency/parser/nodejs/npm/parse.go +++ b/pkg/dependency/parser/nodejs/npm/parse.go @@ 
-52,10 +52,14 @@ type Package struct { EndLine int } -type Parser struct{} +type Parser struct { + logger *log.Logger +} func NewParser() types.Parser { - return &Parser{} + return &Parser{ + logger: log.WithPrefix("npm"), + } } func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, error) { @@ -85,13 +89,14 @@ func (p *Parser) parseV2(packages map[string]Package) ([]types.Library, []types. // Resolve links first // https://docs.npmjs.com/cli/v9/configuring-npm/package-lock-json#packages - resolveLinks(packages) + p.resolveLinks(packages) directDeps := make(map[string]struct{}) for name, version := range lo.Assign(packages[""].Dependencies, packages[""].OptionalDependencies, packages[""].DevDependencies) { pkgPath := joinPaths(nodeModulesDir, name) if _, ok := packages[pkgPath]; !ok { - log.Logger.Debugf("Unable to find the direct dependency: '%s@%s'", name, version) + p.logger.Debug("Unable to find the direct dependency", + log.String("name", name), log.String("version", version)) continue } // Store the package paths of direct dependencies @@ -107,7 +112,7 @@ func (p *Parser) parseV2(packages map[string]Package) ([]types.Library, []types. // pkg.Name exists when package name != folder name pkgName := pkg.Name if pkgName == "" { - pkgName = pkgNameFromPath(pkgPath) + pkgName = p.pkgNameFromPath(pkgPath) } pkgID := packageID(pkgName, pkg.Version) @@ -164,7 +169,8 @@ func (p *Parser) parseV2(packages map[string]Package) ([]types.Library, []types. for depName, depVersion := range dependencies { depID, err := findDependsOn(pkgPath, depName, packages) if err != nil { - log.Logger.Warnf("Cannot resolve the version: '%s@%s'", depName, depVersion) + p.logger.Debug("Unable to resolve the version", + log.String("name", depName), log.String("version", depVersion)) continue } dependsOn = append(dependsOn, depID) @@ -186,7 +192,7 @@ func (p *Parser) parseV2(packages map[string]Package) ([]types.Library, []types. 
// function/func1 -> target of package // node_modules/func1 -> link to target // see `package-lock_v3_with_workspace.json` to better understanding -func resolveLinks(packages map[string]Package) { +func (p *Parser) resolveLinks(packages map[string]Package) { links := lo.PickBy(packages, func(_ string, pkg Package) bool { return pkg.Link }) @@ -218,8 +224,8 @@ func resolveLinks(packages map[string]Package) { // Delete the target package delete(packages, pkgPath) - if isWorkspace(pkgPath, workspaces) { - rootPkg.Dependencies[pkgNameFromPath(linkPath)] = pkg.Version + if p.isWorkspace(pkgPath, workspaces) { + rootPkg.Dependencies[p.pkgNameFromPath(linkPath)] = pkg.Version } break } @@ -227,10 +233,11 @@ func resolveLinks(packages map[string]Package) { packages[""] = rootPkg } -func isWorkspace(pkgPath string, workspaces []string) bool { +func (p *Parser) isWorkspace(pkgPath string, workspaces []string) bool { for _, workspace := range workspaces { if match, err := path.Match(workspace, pkgPath); err != nil { - log.Logger.Debugf("unable to parse workspace %q for %s", workspace, pkgPath) + p.logger.Debug("Unable to parse workspace", + log.String("workspace", workspace), log.String("pkg_path", pkgPath)) } else if match { return true } @@ -309,7 +316,8 @@ func (p *Parser) parseV1(dependencies map[string]Dependency, versions map[string } // It should not reach here. - log.Logger.Warnf("Cannot resolve the version: %s@%s", libName, requiredVer) + p.logger.Warn("Unable to resolve the version", + log.String("name", libName), log.String("version", requiredVer)) } if len(dependsOn) > 0 { @@ -330,6 +338,20 @@ func (p *Parser) parseV1(dependencies map[string]Dependency, versions map[string return libs, deps } +func (p *Parser) pkgNameFromPath(pkgPath string) string { + // lock file contains path to dependency in `node_modules`. 
e.g.: + // node_modules/string-width + // node_modules/string-width/node_modules/strip-ansi + // we renamed to `node_modules` directory prefixes `workspace` when resolving Links + // node_modules/function1 + // node_modules/nested_func/node_modules/debug + if index := strings.LastIndex(pkgPath, nodeModulesDir); index != -1 { + return pkgPath[index+len(nodeModulesDir)+1:] + } + p.logger.Warn("Package path doesn't have `node_modules` prefix", log.String("pkg_path", pkgPath)) + return pkgPath +} + func uniqueDeps(deps []types.Dependency) []types.Dependency { var uniqDeps []types.Dependency unique := make(map[string]struct{}) @@ -357,20 +379,6 @@ func isIndirectLib(pkgPath string, directDeps map[string]struct{}) bool { return true } -func pkgNameFromPath(pkgPath string) string { - // lock file contains path to dependency in `node_modules`. e.g.: - // node_modules/string-width - // node_modules/string-width/node_modules/strip-ansi - // we renamed to `node_modules` directory prefixes `workspace` when resolving Links - // node_modules/function1 - // node_modules/nested_func/node_modules/debug - if index := strings.LastIndex(pkgPath, nodeModulesDir); index != -1 { - return pkgPath[index+len(nodeModulesDir)+1:] - } - log.Logger.Warnf("npm %q package path doesn't have `node_modules` prefix", pkgPath) - return pkgPath -} - func joinPaths(paths ...string) string { return strings.Join(paths, "/") } diff --git a/pkg/dependency/parser/nodejs/pnpm/parse.go b/pkg/dependency/parser/nodejs/pnpm/parse.go index 9e93be6a89c1..26012b747c4e 100644 --- a/pkg/dependency/parser/nodejs/pnpm/parse.go +++ b/pkg/dependency/parser/nodejs/pnpm/parse.go @@ -36,10 +36,14 @@ type LockFile struct { Packages map[string]PackageInfo `yaml:"packages,omitempty"` } -type Parser struct{} +type Parser struct { + logger *log.Logger +} -func NewParser() types.Parser { - return &Parser{} +func NewParser() *Parser { + return &Parser{ + logger: log.WithPrefix("pnpm"), + } } func (p *Parser) Parse(r 
xio.ReadSeekerAt) ([]types.Library, []types.Dependency, error) { @@ -48,7 +52,7 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, return nil, nil, xerrors.Errorf("decode error: %w", err) } - lockVer := parseLockfileVersion(lockFile) + lockVer := p.parseLockfileVersion(lockFile) if lockVer < 0 { return nil, nil, nil } @@ -76,7 +80,7 @@ func (p *Parser) parse(lockVer float64, lockFile LockFile) ([]types.Library, []t version := info.Version if name == "" { - name, version = parsePackage(depPath, lockVer) + name, version = p.parsePackage(depPath, lockVer) } pkgID := packageID(name, version) @@ -103,7 +107,7 @@ func (p *Parser) parse(lockVer float64, lockFile LockFile) ([]types.Library, []t return libs, deps } -func parseLockfileVersion(lockFile LockFile) float64 { +func (p *Parser) parseLockfileVersion(lockFile LockFile) float64 { switch v := lockFile.LockfileVersion.(type) { // v5 case float64: @@ -111,33 +115,29 @@ func parseLockfileVersion(lockFile LockFile) float64 { // v6+ case string: if lockVer, err := strconv.ParseFloat(v, 64); err != nil { - log.Logger.Debugf("Unable to convert the lock file version to float: %s", err) + p.logger.Debug("Unable to convert the lock file version to float", log.Err(err)) return -1 } else { return lockVer } default: - log.Logger.Debugf("Unknown type for the lock file version: %s", lockFile.LockfileVersion) + p.logger.Debug("Unknown type for the lock file version", + log.Any("version", lockFile.LockfileVersion)) return -1 } } -func isIndirectLib(name string, directDeps map[string]interface{}) bool { - _, ok := directDeps[name] - return !ok -} - // cf. https://github.com/pnpm/pnpm/blob/ce61f8d3c29eee46cee38d56ced45aea8a439a53/packages/dependency-path/src/index.ts#L112-L163 -func parsePackage(depPath string, lockFileVersion float64) (string, string) { +func (p *Parser) parsePackage(depPath string, lockFileVersion float64) (string, string) { // The version separator is different between v5 and v6+. 
versionSep := "@" if lockFileVersion < 6 { versionSep = "/" } - return parseDepPath(depPath, versionSep) + return p.parseDepPath(depPath, versionSep) } -func parseDepPath(depPath, versionSep string) (string, string) { +func (p *Parser) parseDepPath(depPath, versionSep string) (string, string) { // Skip registry // e.g. // - "registry.npmjs.org/lodash/4.17.10" => "lodash/4.17.10" @@ -171,12 +171,18 @@ func parseDepPath(depPath, versionSep string) (string, string) { version = version[:idx] } if _, err := semver.Parse(version); err != nil { - log.Logger.Debugf("Skip %q package. %q doesn't match semver: %s", depPath, version, err) + p.logger.Debug("Skip non-semver package", log.String("pkg_path", depPath), + log.String("version", version), log.Err(err)) return "", "" } return name, version } +func isIndirectLib(name string, directDeps map[string]interface{}) bool { + _, ok := directDeps[name] + return !ok +} + func packageID(name, version string) string { return dependency.ID(ftypes.Pnpm, name, version) } diff --git a/pkg/dependency/parser/nodejs/pnpm/parse_test.go b/pkg/dependency/parser/nodejs/pnpm/parse_test.go index 19851a2c21c0..606bc37de54d 100644 --- a/pkg/dependency/parser/nodejs/pnpm/parse_test.go +++ b/pkg/dependency/parser/nodejs/pnpm/parse_test.go @@ -215,7 +215,8 @@ func Test_parsePackage(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - gotName, gotVersion := parsePackage(tt.pkg, tt.lockFileVer) + p := NewParser() + gotName, gotVersion := p.parsePackage(tt.pkg, tt.lockFileVer) assert.Equal(t, tt.wantName, gotName) assert.Equal(t, tt.wantVersion, gotVersion) }) diff --git a/pkg/dependency/parser/nodejs/yarn/parse.go b/pkg/dependency/parser/nodejs/yarn/parse.go index 9b8394eb57c8..d1d195d59641 100644 --- a/pkg/dependency/parser/nodejs/yarn/parse.go +++ b/pkg/dependency/parser/nodejs/yarn/parse.go @@ -142,13 +142,17 @@ func parseResults(patternIDs map[string]string, dependsOn map[string][]string) ( return deps } -type Parser 
struct{} +type Parser struct { + logger *log.Logger +} func NewParser() types.Parser { - return &Parser{} + return &Parser{ + logger: log.WithPrefix("yarn"), + } } -func scanBlocks(data []byte, atEOF bool) (advance int, token []byte, err error) { +func (p *Parser) scanBlocks(data []byte, atEOF bool) (advance int, token []byte, err error) { if atEOF && len(data) == 0 { return 0, nil, nil } @@ -167,7 +171,7 @@ func scanBlocks(data []byte, atEOF bool) (advance int, token []byte, err error) return 0, nil, nil } -func parseBlock(block []byte, lineNum int) (lib Library, deps []string, newLine int, err error) { +func (p *Parser) parseBlock(block []byte, lineNum int) (lib Library, deps []string, newLine int, err error) { var ( emptyLines int // lib can start with empty lines first skipBlock bool @@ -228,7 +232,7 @@ func parseBlock(block []byte, lineNum int) (lib Library, deps []string, newLine // in case an unsupported protocol is detected // show warning and continue parsing if err != nil { - log.Logger.Warnf("Yarn protocol error: %s", err) + p.logger.Warn("Protocol error", log.Err(err)) return Library{}, nil, scanner.LineNum(lineNum), nil } @@ -275,11 +279,11 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, patternIDs := make(map[string]string) scanner := bufio.NewScanner(r) - scanner.Split(scanBlocks) + scanner.Split(p.scanBlocks) dependsOn := make(map[string][]string) for scanner.Scan() { block := scanner.Bytes() - lib, deps, newLine, err := parseBlock(block, lineNumber) + lib, deps, newLine, err := p.parseBlock(block, lineNumber) lineNumber = newLine + 2 if err != nil { return nil, nil, err diff --git a/pkg/dependency/parser/php/composer/parse.go b/pkg/dependency/parser/php/composer/parse.go index 1fbf4316db9a..a41f998a5eb1 100644 --- a/pkg/dependency/parser/php/composer/parse.go +++ b/pkg/dependency/parser/php/composer/parse.go @@ -28,10 +28,14 @@ type packageInfo struct { EndLine int } -type Parser struct{} +type Parser struct { + 
logger *log.Logger +} func NewParser() types.Parser { - return &Parser{} + return &Parser{ + logger: log.WithPrefix("composer"), + } } func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, error) { @@ -85,7 +89,7 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, dependsOn = append(dependsOn, lib.ID) continue } - log.Logger.Debugf("unable to find version of %s", depName) + p.logger.Debug("Unable to find version", log.String("name", depName)) } sort.Strings(dependsOn) deps = append(deps, types.Dependency{ diff --git a/pkg/dependency/parser/python/packaging/parse.go b/pkg/dependency/parser/python/packaging/parse.go index 41514872fbb7..495e0d4d78ab 100644 --- a/pkg/dependency/parser/python/packaging/parse.go +++ b/pkg/dependency/parser/python/packaging/parse.go @@ -14,15 +14,19 @@ import ( xio "github.com/aquasecurity/trivy/pkg/x/io" ) -type Parser struct{} +type Parser struct { + logger *log.Logger +} func NewParser() types.Parser { - return &Parser{} + return &Parser{ + logger: log.WithPrefix("python"), + } } // Parse parses egg and wheel metadata. // e.g. .egg-info/PKG-INFO and dist-info/METADATA -func (*Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, error) { +func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, error) { rd := textproto.NewReader(bufio.NewReader(r)) h, err := rd.ReadMIMEHeader() if e := textproto.ProtocolError(""); errors.As(err, &e) { @@ -30,7 +34,7 @@ func (*Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, e // cf. https://cs.opensource.google/go/go/+/a6642e67e16b9d769a0c08e486ba08408064df19 // However, our required key/value could have been correctly parsed, // so we continue with the subsequent process. 
- log.Logger.Debugf("MIME protocol error: %s", err) + p.logger.Debug("MIME protocol error", log.Err(err)) } else if err != nil && err != io.EOF { return nil, nil, xerrors.Errorf("read MIME error: %w", err) } @@ -65,7 +69,8 @@ func (*Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, e if l := h.Get("License"); l != "" { if len(licenses) != 0 { - log.Logger.Infof("License acquired from METADATA classifiers may be subject to additional terms for [%s:%s]", name, version) + p.logger.Info("License acquired from METADATA classifiers may be subject to additional terms", + log.String("name", name), log.String("version", version)) } else { license = l } diff --git a/pkg/dependency/parser/python/poetry/parse.go b/pkg/dependency/parser/python/poetry/parse.go index e476b8c18d93..30708cc67add 100644 --- a/pkg/dependency/parser/python/poetry/parse.go +++ b/pkg/dependency/parser/python/poetry/parse.go @@ -29,10 +29,14 @@ type Lockfile struct { } `toml:"package"` } -type Parser struct{} +type Parser struct { + logger *log.Logger +} -func NewParser() types.Parser { - return &Parser{} +func NewParser() *Parser { + return &Parser{ + logger: log.WithPrefix("poetry"), + } } func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, error) { @@ -42,7 +46,7 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, } // Keep all installed versions - libVersions := parseVersions(lockfile) + libVersions := p.parseVersions(lockfile) var libs []types.Library var deps []types.Dependency @@ -58,7 +62,7 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, Version: pkg.Version, }) - dependsOn := parseDependencies(pkg.Dependencies, libVersions) + dependsOn := p.parseDependencies(pkg.Dependencies, libVersions) if len(dependsOn) != 0 { deps = append(deps, types.Dependency{ ID: pkgID, @@ -71,7 +75,7 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, // parseVersions 
stores all installed versions of libraries for use in dependsOn // as the dependencies of libraries use version range. -func parseVersions(lockfile Lockfile) map[string][]string { +func (p *Parser) parseVersions(lockfile Lockfile) map[string][]string { libVersions := make(map[string][]string) for _, pkg := range lockfile.Packages { if pkg.Category == "dev" { @@ -86,11 +90,11 @@ func parseVersions(lockfile Lockfile) map[string][]string { return libVersions } -func parseDependencies(deps map[string]any, libVersions map[string][]string) []string { +func (p *Parser) parseDependencies(deps map[string]any, libVersions map[string][]string) []string { var dependsOn []string for name, versRange := range deps { - if dep, err := parseDependency(name, versRange, libVersions); err != nil { - log.Logger.Debugf("failed to parse poetry dependency: %s", err) + if dep, err := p.parseDependency(name, versRange, libVersions); err != nil { + p.logger.Debug("Failed to parse poetry dependency", log.Err(err)) } else if dep != "" { dependsOn = append(dependsOn, dep) } @@ -101,7 +105,7 @@ func parseDependencies(deps map[string]any, libVersions map[string][]string) []s return dependsOn } -func parseDependency(name string, versRange any, libVersions map[string][]string) (string, error) { +func (p *Parser) parseDependency(name string, versRange any, libVersions map[string][]string) (string, error) { name = normalizePkgName(name) vers, ok := libVersions[name] if !ok { diff --git a/pkg/dependency/parser/python/poetry/parse_test.go b/pkg/dependency/parser/python/poetry/parse_test.go index c02999a8eff8..d7f7adf630eb 100644 --- a/pkg/dependency/parser/python/poetry/parse_test.go +++ b/pkg/dependency/parser/python/poetry/parse_test.go @@ -46,7 +46,7 @@ func TestParser_Parse(t *testing.T) { require.NoError(t, err) defer f.Close() - p := &Parser{} + p := NewParser() gotLibs, gotDeps, err := p.Parse(f) if !tt.wantErr(t, err, fmt.Sprintf("Parse(%v)", tt.file)) { return @@ -116,7 +116,7 @@ func 
TestParseDependency(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - got, err := parseDependency(tt.packageName, tt.versionRange, tt.libsVersions) + got, err := NewParser().parseDependency(tt.packageName, tt.versionRange, tt.libsVersions) if tt.wantErr != "" { assert.ErrorContains(t, err, tt.wantErr) return diff --git a/pkg/dependency/parser/rust/cargo/parse.go b/pkg/dependency/parser/rust/cargo/parse.go index 282e25152d04..2fd6686224bc 100644 --- a/pkg/dependency/parser/rust/cargo/parse.go +++ b/pkg/dependency/parser/rust/cargo/parse.go @@ -26,10 +26,14 @@ type Lockfile struct { Packages []cargoPkg `toml:"package"` } -type Parser struct{} +type Parser struct { + logger *log.Logger +} func NewParser() types.Parser { - return &Parser{} + return &Parser{ + logger: log.WithPrefix("cargo"), + } } func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, error) { @@ -71,7 +75,7 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, } libs = append(libs, lib) - dep := parseDependencies(pkgID, pkg, pkgs) + dep := p.parseDependencies(pkgID, pkg, pkgs) if dep != nil { deps = append(deps, *dep) } @@ -80,7 +84,7 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, sort.Sort(types.Dependencies(deps)) return libs, deps, nil } -func parseDependencies(pkgId string, pkg cargoPkg, pkgs map[string]cargoPkg) *types.Dependency { +func (p *Parser) parseDependencies(pkgId string, pkg cargoPkg, pkgs map[string]cargoPkg) *types.Dependency { var dependOn []string for _, pkgDep := range pkg.Dependencies { @@ -99,7 +103,7 @@ func parseDependencies(pkgId string, pkg cargoPkg, pkgs map[string]cargoPkg) *ty name := fields[0] version, ok := pkgs[name] if !ok { - log.Logger.Debugf("can't find version for %s", name) + p.logger.Debug("Cannot find version", log.String("name", name)) continue } dependOn = append(dependOn, packageID(name, version.Version)) @@ -108,7 +112,7 @@ func 
parseDependencies(pkgId string, pkg cargoPkg, pkgs map[string]cargoPkg) *ty case 2, 3: dependOn = append(dependOn, packageID(fields[0], fields[1])) default: - log.Logger.Debugf("wrong dependency format for %s", pkgDep) + p.logger.Debug("Wrong dependency format", log.String("dep", pkgDep)) continue } } diff --git a/pkg/dependency/parser/swift/cocoapods/parse.go b/pkg/dependency/parser/swift/cocoapods/parse.go index 7b2a580fd74c..ae71bc09a3d7 100644 --- a/pkg/dependency/parser/swift/cocoapods/parse.go +++ b/pkg/dependency/parser/swift/cocoapods/parse.go @@ -16,17 +16,21 @@ import ( xio "github.com/aquasecurity/trivy/pkg/x/io" ) -type Parser struct{} +type Parser struct { + logger *log.Logger +} func NewParser() types.Parser { - return &Parser{} + return &Parser{ + logger: log.WithPrefix("cocoapods"), + } } type lockFile struct { Pods []any `yaml:"PODS"` // pod can be string or map[string]interface{} } -func (Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, error) { +func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, error) { lock := &lockFile{} decoder := yaml.NewDecoder(r) if err := decoder.Decode(&lock); err != nil { @@ -36,19 +40,19 @@ func (Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, er parsedDeps := make(map[string]types.Library) // dependency name => Library directDeps := make(map[string][]string) // dependency name => slice of child dependency names for _, pod := range lock.Pods { - switch p := pod.(type) { + switch dep := pod.(type) { case string: // dependency with version number - lib, err := parseDep(p) + lib, err := parseDep(dep) if err != nil { - log.Logger.Debug(err) + p.logger.Debug("Dependency parse error", log.Err(err)) continue } parsedDeps[lib.Name] = lib case map[string]interface{}: // dependency with its child dependencies - for dep, childDeps := range p { + for dep, childDeps := range dep { lib, err := parseDep(dep) if err != nil { - log.Logger.Debug(err) + 
p.logger.Debug("Dependency parse error", log.Err(err)) continue } parsedDeps[lib.Name] = lib diff --git a/pkg/dependency/parser/swift/swift/parse.go b/pkg/dependency/parser/swift/swift/parse.go index daeb8d3ef243..74a507f847fb 100644 --- a/pkg/dependency/parser/swift/swift/parse.go +++ b/pkg/dependency/parser/swift/swift/parse.go @@ -17,13 +17,17 @@ import ( ) // Parser is a parser for Package.resolved files -type Parser struct{} +type Parser struct { + logger *log.Logger +} func NewParser() types.Parser { - return &Parser{} + return &Parser{ + logger: log.WithPrefix("swift"), + } } -func (Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, error) { +func (p *Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, error) { var lockFile LockFile input, err := io.ReadAll(r) if err != nil { @@ -43,7 +47,7 @@ func (Parser) Parse(r xio.ReadSeekerAt) ([]types.Library, []types.Dependency, er // Skip packages for which we cannot resolve the version if pin.State.Version == "" && pin.State.Branch == "" { - log.Logger.Warnf("Unable to resolve %q. Both the version and branch fields are empty.", name) + p.logger.Warn("Unable to resolve. 
Both the version and branch fields are empty.", log.String("name", name)) continue } diff --git a/pkg/detector/library/compare/compare.go b/pkg/detector/library/compare/compare.go index 60829deb8141..9f6f82c26de4 100644 --- a/pkg/detector/library/compare/compare.go +++ b/pkg/detector/library/compare/compare.go @@ -31,7 +31,7 @@ func IsVulnerable(pkgVer string, advisory dbTypes.Advisory, match matchVersion) if len(advisory.VulnerableVersions) != 0 { matched, err = match(pkgVer, strings.Join(advisory.VulnerableVersions, " || ")) if err != nil { - log.Logger.Warn(err) + log.Warn("Version matching error", log.Err(err)) return false } else if !matched { // the version is not vulnerable @@ -48,7 +48,7 @@ func IsVulnerable(pkgVer string, advisory dbTypes.Advisory, match matchVersion) matched, err = match(pkgVer, strings.Join(secureVersions, " || ")) if err != nil { - log.Logger.Warn(err) + log.Warn("Version matching error", log.Err(err)) return false } return !matched diff --git a/pkg/detector/library/driver.go b/pkg/detector/library/driver.go index b2f5b6babc38..e94f2b4db89f 100644 --- a/pkg/detector/library/driver.go +++ b/pkg/detector/library/driver.go @@ -73,7 +73,7 @@ func NewDriver(libType ftypes.LangType) (Driver, bool) { ecosystem = vulnerability.Cocoapods comparer = rubygems.Comparer{} case ftypes.CondaPkg: - log.Logger.Warn("Conda package is supported for SBOM, not for vulnerability scanning") + log.Warn("Conda package is supported for SBOM, not for vulnerability scanning") return Driver{}, false case ftypes.Bitnami: ecosystem = vulnerability.Bitnami @@ -82,7 +82,8 @@ func NewDriver(libType ftypes.LangType) (Driver, bool) { ecosystem = vulnerability.Kubernetes comparer = compare.GenericComparer{} default: - log.Logger.Warnf("The %q library type is not supported for vulnerability scanning", libType) + log.Warn("The library type is not supported for vulnerability scanning", + log.String("type", string(libType))) return Driver{}, false } return Driver{ diff --git 
a/pkg/detector/ospkg/alma/alma.go b/pkg/detector/ospkg/alma/alma.go index 67465f04a1b0..ba38c89b7236 100644 --- a/pkg/detector/ospkg/alma/alma.go +++ b/pkg/detector/ospkg/alma/alma.go @@ -38,12 +38,10 @@ func NewScanner() *Scanner { } // Detect vulnerabilities in package using AlmaLinux scanner -func (s *Scanner) Detect(osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { - log.Logger.Info("Detecting AlmaLinux vulnerabilities...") - +func (s *Scanner) Detect(ctx context.Context, osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { osVer = osver.Major(osVer) - log.Logger.Debugf("AlmaLinux: os version: %s", osVer) - log.Logger.Debugf("AlmaLinux: the number of packages: %d", len(pkgs)) + log.InfoContext(ctx, "Detecting vulnerabilities...", log.String("os_version", osVer), + log.Int("pkg_num", len(pkgs))) var vulns []types.DetectedVulnerability var skipPkgs []string @@ -79,7 +77,8 @@ func (s *Scanner) Detect(osVer string, _ *ftypes.Repository, pkgs []ftypes.Packa } } if len(skipPkgs) > 0 { - log.Logger.Infof("Skipped detection of these packages: %q because modular packages cannot be detected correctly due to a bug in AlmaLinux. See also: https://bugs.almalinux.org/view.php?id=173", skipPkgs) + log.InfoContext(ctx, "Skipped detection of the packages because modular packages cannot be detected correctly due to a bug in AlmaLinux. 
See also: https://bugs.almalinux.org/view.php?id=173", + log.Any("packages", skipPkgs)) } return vulns, nil diff --git a/pkg/detector/ospkg/alma/alma_test.go b/pkg/detector/ospkg/alma/alma_test.go index bd70079fd189..cd7a318d8f10 100644 --- a/pkg/detector/ospkg/alma/alma_test.go +++ b/pkg/detector/ospkg/alma/alma_test.go @@ -162,7 +162,7 @@ func TestScanner_Detect(t *testing.T) { defer db.Close() s := alma.NewScanner() - got, err := s.Detect(tt.args.osVer, nil, tt.args.pkgs) + got, err := s.Detect(nil, tt.args.osVer, nil, tt.args.pkgs) if tt.wantErr != "" { require.Error(t, err) assert.Contains(t, err.Error(), tt.wantErr) diff --git a/pkg/detector/ospkg/alpine/alpine.go b/pkg/detector/ospkg/alpine/alpine.go index 4be5cf128431..48f7abcfbec6 100644 --- a/pkg/detector/ospkg/alpine/alpine.go +++ b/pkg/detector/ospkg/alpine/alpine.go @@ -64,21 +64,20 @@ func NewScanner() *Scanner { } // Detect vulnerabilities in package using Alpine scanner -func (s *Scanner) Detect(osVer string, repo *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { - log.Logger.Info("Detecting Alpine vulnerabilities...") +func (s *Scanner) Detect(ctx context.Context, osVer string, repo *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { osVer = osver.Minor(osVer) repoRelease := s.repoRelease(repo) - log.Logger.Debugf("alpine: os version: %s", osVer) - log.Logger.Debugf("alpine: package repository: %s", repoRelease) - log.Logger.Debugf("alpine: the number of packages: %d", len(pkgs)) + log.InfoContext(ctx, "Detecting vulnerabilities...", log.String("os_version", osVer), + log.String("repository", repoRelease), log.Int("pkg_num", len(pkgs))) stream := osVer if repoRelease != "" && osVer != repoRelease { // Prefer the repository release. Use OS version only when the repository is not detected. stream = repoRelease if repoRelease != "edge" { // TODO: we should detect the current edge version. 
- log.Logger.Warnf("Mixing Alpine versions is unsupported, OS: '%s', repository: '%s'", osVer, repoRelease) + log.WarnContext(ctx, "Mixing Alpine versions is unsupported", + log.String("os", osVer), log.String("repository", repoRelease)) } } @@ -95,12 +94,12 @@ func (s *Scanner) Detect(osVer string, repo *ftypes.Repository, pkgs []ftypes.Pa sourceVersion, err := version.NewVersion(utils.FormatSrcVersion(pkg)) if err != nil { - log.Logger.Debugf("failed to parse Alpine Linux installed package version: %s", err) + log.DebugContext(ctx, "Failed to parse the installed package version", log.Err(err)) continue } for _, adv := range advisories { - if !s.isVulnerable(sourceVersion, adv) { + if !s.isVulnerable(ctx, sourceVersion, adv) { continue } vulns = append(vulns, types.DetectedVulnerability{ @@ -119,7 +118,7 @@ func (s *Scanner) Detect(osVer string, repo *ftypes.Repository, pkgs []ftypes.Pa return vulns, nil } -func (s *Scanner) isVulnerable(installedVersion version.Version, adv dbTypes.Advisory) bool { +func (s *Scanner) isVulnerable(ctx context.Context, installedVersion version.Version, adv dbTypes.Advisory) bool { // This logic is for unfixed vulnerabilities, but Trivy DB doesn't have advisories for unfixed vulnerabilities for now // because Alpine just provides potentially vulnerable packages. It will cause a lot of false positives. // This is for Aqua commercial products. @@ -127,7 +126,8 @@ func (s *Scanner) isVulnerable(installedVersion version.Version, adv dbTypes.Adv // AffectedVersion means which version introduced this vulnerability. 
affectedVersion, err := version.NewVersion(adv.AffectedVersion) if err != nil { - log.Logger.Debugf("failed to parse Alpine Linux affected package version: %s", err) + log.DebugContext(ctx, "Failed to parse the affected package version", + log.String("version", adv.AffectedVersion), log.Err(err)) return false } if affectedVersion.GreaterThan(installedVersion) { @@ -144,7 +144,8 @@ func (s *Scanner) isVulnerable(installedVersion version.Version, adv dbTypes.Adv // Compare versions for fixed vulnerabilities fixedVersion, err := version.NewVersion(adv.FixedVersion) if err != nil { - log.Logger.Debugf("failed to parse Alpine Linux fixed version: %s", err) + log.DebugContext(ctx, "Failed to parse the fixed version", + log.String("version", adv.FixedVersion), log.Err(err)) return false } diff --git a/pkg/detector/ospkg/alpine/alpine_test.go b/pkg/detector/ospkg/alpine/alpine_test.go index f420cf5576ab..eba7258e1392 100644 --- a/pkg/detector/ospkg/alpine/alpine_test.go +++ b/pkg/detector/ospkg/alpine/alpine_test.go @@ -251,7 +251,7 @@ func TestScanner_Detect(t *testing.T) { defer db.Close() s := alpine.NewScanner() - got, err := s.Detect(tt.args.osVer, tt.args.repo, tt.args.pkgs) + got, err := s.Detect(nil, tt.args.osVer, tt.args.repo, tt.args.pkgs) if tt.wantErr != "" { require.Error(t, err) assert.Contains(t, err.Error(), tt.wantErr) diff --git a/pkg/detector/ospkg/amazon/amazon.go b/pkg/detector/ospkg/amazon/amazon.go index 0c5a35f13a8f..c1ec250ec45b 100644 --- a/pkg/detector/ospkg/amazon/amazon.go +++ b/pkg/detector/ospkg/amazon/amazon.go @@ -40,17 +40,16 @@ func NewScanner() *Scanner { } // Detect scans the packages using amazon scanner -func (s *Scanner) Detect(osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { - log.Logger.Info("Detecting Amazon Linux vulnerabilities...") - +func (s *Scanner) Detect(ctx context.Context, osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) 
{ osVer = strings.Fields(osVer)[0] // The format `2023.xxx.xxxx` can be used. osVer = osver.Major(osVer) if osVer != "2" && osVer != "2022" && osVer != "2023" { osVer = "1" } - log.Logger.Debugf("amazon: os version: %s", osVer) - log.Logger.Debugf("amazon: the number of packages: %d", len(pkgs)) + + log.InfoContext(ctx, "Detecting vulnerabilities...", log.String("os_version", osVer), + log.Int("pkg_num", len(pkgs))) var vulns []types.DetectedVulnerability for _, pkg := range pkgs { @@ -66,14 +65,16 @@ func (s *Scanner) Detect(osVer string, _ *ftypes.Repository, pkgs []ftypes.Packa installedVersion, err := version.NewVersion(installed) if err != nil { - log.Logger.Debugf("failed to parse Amazon Linux installed package version: %s", err) + log.DebugContext(ctx, "Failed to parse the installed package version", + log.String("version", installed), log.Err(err)) continue } for _, adv := range advisories { fixedVersion, err := version.NewVersion(adv.FixedVersion) if err != nil { - log.Logger.Debugf("failed to parse Amazon Linux package version: %s", err) + log.DebugContext(ctx, "Failed to parse the fixed version", + log.String("version", adv.FixedVersion), log.Err(err)) continue } diff --git a/pkg/detector/ospkg/amazon/amazon_test.go b/pkg/detector/ospkg/amazon/amazon_test.go index c9c5a3c65840..83100e98c72c 100644 --- a/pkg/detector/ospkg/amazon/amazon_test.go +++ b/pkg/detector/ospkg/amazon/amazon_test.go @@ -177,7 +177,7 @@ func TestScanner_Detect(t *testing.T) { defer db.Close() s := amazon.NewScanner() - got, err := s.Detect(tt.args.osVer, nil, tt.args.pkgs) + got, err := s.Detect(nil, tt.args.osVer, nil, tt.args.pkgs) if tt.wantErr != "" { require.Error(t, err) assert.Contains(t, err.Error(), tt.wantErr) diff --git a/pkg/detector/ospkg/chainguard/chainguard.go b/pkg/detector/ospkg/chainguard/chainguard.go index bba9642c481c..6874c4c93b91 100644 --- a/pkg/detector/ospkg/chainguard/chainguard.go +++ b/pkg/detector/ospkg/chainguard/chainguard.go @@ -27,10 +27,8 @@ func 
NewScanner() *Scanner { } // Detect vulnerabilities in package using Chainguard scanner -func (s *Scanner) Detect(_ string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { - log.Logger.Info("Detecting Chainguard vulnerabilities...") - - log.Logger.Debugf("chainguard: the number of packages: %d", len(pkgs)) +func (s *Scanner) Detect(ctx context.Context, _ string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { + log.InfoContext(ctx, "Detecting Chainguard vulnerabilities...", log.Int("pkg_num", len(pkgs))) var vulns []types.DetectedVulnerability for _, pkg := range pkgs { @@ -46,12 +44,12 @@ func (s *Scanner) Detect(_ string, _ *ftypes.Repository, pkgs []ftypes.Package) installed := utils.FormatVersion(pkg) installedVersion, err := version.NewVersion(installed) if err != nil { - log.Logger.Debugf("failed to parse Chainguard installed package version: %s", err) + log.DebugContext(ctx, "Failed to parse the installed package version", log.Err(err)) continue } for _, adv := range advisories { - if !s.isVulnerable(installedVersion, adv) { + if !s.isVulnerable(ctx, installedVersion, adv) { continue } vulns = append(vulns, types.DetectedVulnerability{ @@ -70,11 +68,12 @@ func (s *Scanner) Detect(_ string, _ *ftypes.Repository, pkgs []ftypes.Package) return vulns, nil } -func (s *Scanner) isVulnerable(installedVersion version.Version, adv dbTypes.Advisory) bool { +func (s *Scanner) isVulnerable(ctx context.Context, installedVersion version.Version, adv dbTypes.Advisory) bool { // Compare versions for fixed vulnerabilities fixedVersion, err := version.NewVersion(adv.FixedVersion) if err != nil { - log.Logger.Debugf("failed to parse Chainguard fixed version: %s", err) + log.DebugContext(ctx, "Failed to parse the fixed version", + log.String("version", adv.FixedVersion), log.Err(err)) return false } diff --git a/pkg/detector/ospkg/chainguard/chainguard_test.go 
b/pkg/detector/ospkg/chainguard/chainguard_test.go index 446693ce2170..27758c191636 100644 --- a/pkg/detector/ospkg/chainguard/chainguard_test.go +++ b/pkg/detector/ospkg/chainguard/chainguard_test.go @@ -194,7 +194,7 @@ func TestScanner_Detect(t *testing.T) { defer db.Close() s := chainguard.NewScanner() - got, err := s.Detect("", tt.args.repo, tt.args.pkgs) + got, err := s.Detect(nil, "", tt.args.repo, tt.args.pkgs) if tt.wantErr != "" { require.Error(t, err) assert.Contains(t, err.Error(), tt.wantErr) diff --git a/pkg/detector/ospkg/debian/debian.go b/pkg/detector/ospkg/debian/debian.go index c350f6d2c281..e347146f0747 100644 --- a/pkg/detector/ospkg/debian/debian.go +++ b/pkg/detector/ospkg/debian/debian.go @@ -54,18 +54,17 @@ func NewScanner() *Scanner { } // Detect scans and return vulnerabilities using Debian scanner -func (s *Scanner) Detect(osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { - log.Logger.Info("Detecting Debian vulnerabilities...") - +func (s *Scanner) Detect(ctx context.Context, osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { osVer = osver.Major(osVer) - log.Logger.Debugf("debian: os version: %s", osVer) - log.Logger.Debugf("debian: the number of packages: %d", len(pkgs)) + + log.InfoContext(ctx, "Detecting vulnerabilities...", log.String("os_version", osVer), + log.Int("pkg_num", len(pkgs))) var vulns []types.DetectedVulnerability for _, pkg := range pkgs { sourceVersion, err := version.NewVersion(utils.FormatSrcVersion(pkg)) if err != nil { - log.Logger.Debugf("Debian installed package version error: %s", err) + log.DebugContext(ctx, "Installed package version error", log.Err(err)) continue } @@ -106,7 +105,8 @@ func (s *Scanner) Detect(osVer string, _ *ftypes.Repository, pkgs []ftypes.Packa var fixedVersion version.Version fixedVersion, err = version.NewVersion(adv.FixedVersion) if err != nil { - log.Logger.Debugf("Debian advisory package 
version error: %s", err) + log.DebugContext(ctx, "Failed to parse the fixed version", + log.String("version", adv.FixedVersion), log.Err(err)) continue } diff --git a/pkg/detector/ospkg/debian/debian_test.go b/pkg/detector/ospkg/debian/debian_test.go index 8c22a386a74d..2f5c2b3595ab 100644 --- a/pkg/detector/ospkg/debian/debian_test.go +++ b/pkg/detector/ospkg/debian/debian_test.go @@ -115,7 +115,7 @@ func TestScanner_Detect(t *testing.T) { defer db.Close() s := debian.NewScanner() - got, err := s.Detect(tt.args.osVer, nil, tt.args.pkgs) + got, err := s.Detect(nil, tt.args.osVer, nil, tt.args.pkgs) if tt.wantErr != "" { require.Error(t, err) assert.Contains(t, err.Error(), tt.wantErr) diff --git a/pkg/detector/ospkg/detect.go b/pkg/detector/ospkg/detect.go index 32ed2ff9c5ab..bbeb8e8649d8 100644 --- a/pkg/detector/ospkg/detect.go +++ b/pkg/detector/ospkg/detect.go @@ -55,12 +55,14 @@ func RegisterDriver(name ftypes.OSType, driver Driver) { // Driver defines operations for OS package scan type Driver interface { - Detect(string, *ftypes.Repository, []ftypes.Package) ([]types.DetectedVulnerability, error) + Detect(context.Context, string, *ftypes.Repository, []ftypes.Package) ([]types.DetectedVulnerability, error) IsSupportedVersion(context.Context, ftypes.OSType, string) bool } // Detect detects the vulnerabilities func Detect(ctx context.Context, _, osFamily ftypes.OSType, osName string, repo *ftypes.Repository, _ time.Time, pkgs []ftypes.Package) ([]types.DetectedVulnerability, bool, error) { + ctx = log.WithContextPrefix(ctx, string(osFamily)) + driver, err := newDriver(osFamily) if err != nil { return nil, false, ErrUnsupportedOS @@ -73,7 +75,7 @@ func Detect(ctx context.Context, _, osFamily ftypes.OSType, osName string, repo filteredPkgs := lo.Filter(pkgs, func(pkg ftypes.Package, index int) bool { return pkg.Name != "gpg-pubkey" }) - vulns, err := driver.Detect(osName, repo, filteredPkgs) + vulns, err := driver.Detect(ctx, osName, repo, filteredPkgs) if err != 
nil { return nil, false, xerrors.Errorf("failed detection: %w", err) } @@ -86,6 +88,6 @@ func newDriver(osFamily ftypes.OSType) (Driver, error) { return driver, nil } - log.Logger.Warnf("unsupported os : %s", osFamily) + log.Warn("Unsupported os", log.String("family", string(osFamily))) return nil, ErrUnsupportedOS } diff --git a/pkg/detector/ospkg/mariner/mariner.go b/pkg/detector/ospkg/mariner/mariner.go index 6e1054151518..ae9d80157381 100644 --- a/pkg/detector/ospkg/mariner/mariner.go +++ b/pkg/detector/ospkg/mariner/mariner.go @@ -27,14 +27,12 @@ func NewScanner() *Scanner { } // Detect vulnerabilities in package using CBL-Mariner scanner -func (s *Scanner) Detect(osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { - log.Logger.Info("Detecting CBL-Mariner vulnerabilities...") - +func (s *Scanner) Detect(ctx context.Context, osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { // e.g. 1.0.20210127 osVer = osver.Minor(osVer) - log.Logger.Debugf("CBL-Mariner: os version: %s", osVer) - log.Logger.Debugf("CBL-Mariner: the number of packages: %d", len(pkgs)) + log.InfoContext(ctx, "Detecting vulnerabilities...", log.String("os_version", osVer), + log.Int("pkg_num", len(pkgs))) var vulns []types.DetectedVulnerability for _, pkg := range pkgs { diff --git a/pkg/detector/ospkg/mariner/mariner_test.go b/pkg/detector/ospkg/mariner/mariner_test.go index 262f211d8401..199ab1dd996a 100644 --- a/pkg/detector/ospkg/mariner/mariner_test.go +++ b/pkg/detector/ospkg/mariner/mariner_test.go @@ -105,8 +105,11 @@ func TestScanner_Detect(t *testing.T) { }, }, { - name: "broken advisory", - fixtures: []string{"testdata/fixtures/invalid.yaml", "testdata/fixtures/data-source.yaml"}, + name: "broken advisory", + fixtures: []string{ + "testdata/fixtures/invalid.yaml", + "testdata/fixtures/data-source.yaml", + }, args: args{ osVer: "1.0", pkgs: []ftypes.Package{ @@ -134,7 +137,7 @@ func 
TestScanner_Detect(t *testing.T) { defer db.Close() s := mariner.NewScanner() - got, err := s.Detect(tt.args.osVer, nil, tt.args.pkgs) + got, err := s.Detect(nil, tt.args.osVer, nil, tt.args.pkgs) if tt.wantErr != "" { require.Error(t, err) assert.Contains(t, err.Error(), tt.wantErr) diff --git a/pkg/detector/ospkg/oracle/oracle.go b/pkg/detector/ospkg/oracle/oracle.go index 186e0d2734dc..2b234698e38a 100644 --- a/pkg/detector/ospkg/oracle/oracle.go +++ b/pkg/detector/ospkg/oracle/oracle.go @@ -54,12 +54,10 @@ func extractKsplice(v string) string { } // Detect scans and return vulnerability in Oracle scanner -func (s *Scanner) Detect(osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { - log.Logger.Info("Detecting Oracle Linux vulnerabilities...") - +func (s *Scanner) Detect(ctx context.Context, osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { osVer = osver.Major(osVer) - log.Logger.Debugf("Oracle Linux: os version: %s", osVer) - log.Logger.Debugf("Oracle Linux: the number of packages: %d", len(pkgs)) + log.InfoContext(ctx, "Detecting vulnerabilities...", log.String("os_version", osVer), + log.Int("pkg_num", len(pkgs))) var vulns []types.DetectedVulnerability for _, pkg := range pkgs { diff --git a/pkg/detector/ospkg/oracle/oracle_test.go b/pkg/detector/ospkg/oracle/oracle_test.go index 40c8f26f2d9a..530639c9ea73 100644 --- a/pkg/detector/ospkg/oracle/oracle_test.go +++ b/pkg/detector/ospkg/oracle/oracle_test.go @@ -250,7 +250,7 @@ func TestScanner_Detect(t *testing.T) { defer db.Close() s := NewScanner() - got, err := s.Detect(tt.args.osVer, nil, tt.args.pkgs) + got, err := s.Detect(nil, tt.args.osVer, nil, tt.args.pkgs) if tt.wantErr != "" { require.NotNil(t, err) assert.Contains(t, err.Error(), tt.wantErr) diff --git a/pkg/detector/ospkg/photon/photon.go b/pkg/detector/ospkg/photon/photon.go index b6c00f3c24f8..135ac0d5b283 100644 --- 
a/pkg/detector/ospkg/photon/photon.go +++ b/pkg/detector/ospkg/photon/photon.go @@ -39,10 +39,9 @@ func NewScanner() *Scanner { } // Detect scans and returns vulnerabilities using photon scanner -func (s *Scanner) Detect(osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { - log.Logger.Info("Detecting Photon Linux vulnerabilities...") - log.Logger.Debugf("Photon Linux: os version: %s", osVer) - log.Logger.Debugf("Photon Linux: the number of packages: %d", len(pkgs)) +func (s *Scanner) Detect(ctx context.Context, osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { + log.InfoContext(ctx, "Detecting vulnerabilities...", log.String("os_version", osVer), + log.Int("pkg_num", len(pkgs))) var vulns []types.DetectedVulnerability for _, pkg := range pkgs { diff --git a/pkg/detector/ospkg/photon/photon_test.go b/pkg/detector/ospkg/photon/photon_test.go index b81b0fd30d6d..a85957f6c516 100644 --- a/pkg/detector/ospkg/photon/photon_test.go +++ b/pkg/detector/ospkg/photon/photon_test.go @@ -94,7 +94,7 @@ func TestScanner_Detect(t *testing.T) { defer db.Close() s := photon.NewScanner() - got, err := s.Detect(tt.args.osVer, nil, tt.args.pkgs) + got, err := s.Detect(nil, tt.args.osVer, nil, tt.args.pkgs) if tt.wantErr != "" { require.Error(t, err) assert.Contains(t, err.Error(), tt.wantErr) diff --git a/pkg/detector/ospkg/redhat/redhat.go b/pkg/detector/ospkg/redhat/redhat.go index fe7581dbf481..277fa6203424 100644 --- a/pkg/detector/ospkg/redhat/redhat.go +++ b/pkg/detector/ospkg/redhat/redhat.go @@ -77,17 +77,15 @@ func NewScanner() *Scanner { } // Detect scans and returns redhat vulnerabilities -func (s *Scanner) Detect(osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { - log.Logger.Info("Detecting RHEL/CentOS vulnerabilities...") - +func (s *Scanner) Detect(ctx context.Context, osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) 
([]types.DetectedVulnerability, error) { osVer = osver.Major(osVer) - log.Logger.Debugf("Red Hat: os version: %s", osVer) - log.Logger.Debugf("Red Hat: the number of packages: %d", len(pkgs)) + log.InfoContext(ctx, "Detecting RHEL/CentOS vulnerabilities...", log.String("os_version", osVer), + log.Int("pkg_num", len(pkgs))) var vulns []types.DetectedVulnerability for _, pkg := range pkgs { if !isFromSupportedVendor(pkg) { - log.Logger.Debugf("Skipping %s: unsupported vendor", pkg.Name) + log.DebugContext(ctx, "Skipping the package with unsupported vendor", log.String("package", pkg.Name)) continue } diff --git a/pkg/detector/ospkg/redhat/redhat_test.go b/pkg/detector/ospkg/redhat/redhat_test.go index 3910b87f9cac..fe24220490a8 100644 --- a/pkg/detector/ospkg/redhat/redhat_test.go +++ b/pkg/detector/ospkg/redhat/redhat_test.go @@ -369,7 +369,7 @@ func TestScanner_Detect(t *testing.T) { defer func() { _ = dbtest.Close() }() s := redhat.NewScanner() - got, err := s.Detect(tt.args.osVer, nil, tt.args.pkgs) + got, err := s.Detect(nil, tt.args.osVer, nil, tt.args.pkgs) require.Equal(t, tt.wantErr, err != nil, err) assert.Equal(t, tt.want, got) }) diff --git a/pkg/detector/ospkg/rocky/rocky.go b/pkg/detector/ospkg/rocky/rocky.go index 49aaa4d0a543..51efbdd2e01b 100644 --- a/pkg/detector/ospkg/rocky/rocky.go +++ b/pkg/detector/ospkg/rocky/rocky.go @@ -37,12 +37,10 @@ func NewScanner() *Scanner { } // Detect vulnerabilities in package using Rocky Linux scanner -func (s *Scanner) Detect(osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { - log.Logger.Info("Detecting Rocky Linux vulnerabilities...") - +func (s *Scanner) Detect(ctx context.Context, osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { osVer = osver.Major(osVer) - log.Logger.Debugf("Rocky Linux: os version: %s", osVer) - log.Logger.Debugf("Rocky Linux: the number of packages: %d", len(pkgs)) + log.InfoContext(ctx, 
"Detecting vulnerabilities...", log.String("os_version", osVer), + log.Int("pkg_num", len(pkgs))) var vulns []types.DetectedVulnerability var skipPkgs []string @@ -79,7 +77,8 @@ func (s *Scanner) Detect(osVer string, _ *ftypes.Repository, pkgs []ftypes.Packa } } if len(skipPkgs) > 0 { - log.Logger.Infof("Skipped detection of these packages: %q because modular packages cannot be detected correctly due to a bug in Rocky Linux Errata. See also: https://forums.rockylinux.org/t/some-errata-missing-in-comparison-with-rhel-and-almalinux/3843", skipPkgs) + log.InfoContext(ctx, "Skipped detection of the packages because modular packages cannot be detected correctly due to a bug in Rocky Linux Errata. See also: https://forums.rockylinux.org/t/some-errata-missing-in-comparison-with-rhel-and-almalinux/3843", + log.Any("packages", skipPkgs)) } return vulns, nil diff --git a/pkg/detector/ospkg/rocky/rocky_test.go b/pkg/detector/ospkg/rocky/rocky_test.go index dddba1df850a..91250aa73776 100644 --- a/pkg/detector/ospkg/rocky/rocky_test.go +++ b/pkg/detector/ospkg/rocky/rocky_test.go @@ -122,7 +122,7 @@ func TestScanner_Detect(t *testing.T) { defer db.Close() s := rocky.NewScanner() - got, err := s.Detect(tt.args.osVer, nil, tt.args.pkgs) + got, err := s.Detect(nil, tt.args.osVer, nil, tt.args.pkgs) if tt.wantErr != "" { require.Error(t, err) assert.Contains(t, err.Error(), tt.wantErr) diff --git a/pkg/detector/ospkg/suse/suse.go b/pkg/detector/ospkg/suse/suse.go index 617161df87aa..a5ccade5c813 100644 --- a/pkg/detector/ospkg/suse/suse.go +++ b/pkg/detector/ospkg/suse/suse.go @@ -89,10 +89,9 @@ func NewScanner(t Type) *Scanner { } // Detect scans and returns the vulnerabilities -func (s *Scanner) Detect(osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { - log.Logger.Info("Detecting SUSE vulnerabilities...") - log.Logger.Debugf("SUSE: os version: %s", osVer) - log.Logger.Debugf("SUSE: the number of packages: %d", len(pkgs)) +func (s 
*Scanner) Detect(ctx context.Context, osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { + log.InfoContext(ctx, "Detecting vulnerabilities...", log.String("os_version", osVer), + log.Int("pkg_num", len(pkgs))) var vulns []types.DetectedVulnerability for _, pkg := range pkgs { diff --git a/pkg/detector/ospkg/suse/suse_test.go b/pkg/detector/ospkg/suse/suse_test.go index 7766863e2ec4..eb3e7d9673b4 100644 --- a/pkg/detector/ospkg/suse/suse_test.go +++ b/pkg/detector/ospkg/suse/suse_test.go @@ -97,7 +97,7 @@ func TestScanner_Detect(t *testing.T) { defer db.Close() s := suse.NewScanner(tt.distribution) - got, err := s.Detect(tt.args.osVer, nil, tt.args.pkgs) + got, err := s.Detect(nil, tt.args.osVer, nil, tt.args.pkgs) if tt.wantErr != "" { require.Error(t, err) assert.Contains(t, err.Error(), tt.wantErr) diff --git a/pkg/detector/ospkg/ubuntu/ubuntu.go b/pkg/detector/ospkg/ubuntu/ubuntu.go index 46948806a64d..7c6453050992 100644 --- a/pkg/detector/ospkg/ubuntu/ubuntu.go +++ b/pkg/detector/ospkg/ubuntu/ubuntu.go @@ -76,10 +76,9 @@ func NewScanner() *Scanner { } // Detect scans and returns the vulnerabilities -func (s *Scanner) Detect(osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { - log.Logger.Info("Detecting Ubuntu vulnerabilities...") - log.Logger.Debugf("ubuntu: os version: %s", osVer) - log.Logger.Debugf("ubuntu: the number of packages: %d", len(pkgs)) +func (s *Scanner) Detect(ctx context.Context, osVer string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { + log.InfoContext(ctx, "Detecting vulnerabilities...", log.String("os_version", osVer), + log.Int("pkg_num", len(pkgs))) var vulns []types.DetectedVulnerability for _, pkg := range pkgs { @@ -91,7 +90,7 @@ func (s *Scanner) Detect(osVer string, _ *ftypes.Repository, pkgs []ftypes.Packa sourceVersion, err := version.NewVersion(utils.FormatSrcVersion(pkg)) if err != nil { - 
log.Logger.Debugf("failed to parse Ubuntu installed package version: %w", err) + log.DebugContext(ctx, "Failed to parse the installed package version", log.Err(err)) continue } @@ -115,7 +114,8 @@ func (s *Scanner) Detect(osVer string, _ *ftypes.Repository, pkgs []ftypes.Packa fixedVersion, err := version.NewVersion(adv.FixedVersion) if err != nil { - log.Logger.Debugf("failed to parse Ubuntu package version: %w", err) + log.DebugContext(ctx, "Failed to parse the fixed version", + log.String("version", adv.FixedVersion), log.Err(err)) continue } diff --git a/pkg/detector/ospkg/ubuntu/ubuntu_test.go b/pkg/detector/ospkg/ubuntu/ubuntu_test.go index a2218e211b77..a95c48bb24ee 100644 --- a/pkg/detector/ospkg/ubuntu/ubuntu_test.go +++ b/pkg/detector/ospkg/ubuntu/ubuntu_test.go @@ -179,7 +179,7 @@ func TestScanner_Detect(t *testing.T) { defer db.Close() s := ubuntu.NewScanner() - got, err := s.Detect(tt.args.osVer, nil, tt.args.pkgs) + got, err := s.Detect(nil, tt.args.osVer, nil, tt.args.pkgs) if tt.wantErr != "" { require.Error(t, err) assert.Contains(t, err.Error(), tt.wantErr) diff --git a/pkg/detector/ospkg/version/version.go b/pkg/detector/ospkg/version/version.go index dc47ffd88409..8ed344f1f3ee 100644 --- a/pkg/detector/ospkg/version/version.go +++ b/pkg/detector/ospkg/version/version.go @@ -31,7 +31,8 @@ func Minor(osVer string) string { func Supported(ctx context.Context, eolDates map[string]time.Time, osFamily ftypes.OSType, osVer string) bool { eol, ok := eolDates[osVer] if !ok { - log.Logger.Warnf("This OS version is not on the EOL list: %s %s", osFamily, osVer) + log.Warn("This OS version is not on the EOL list", + log.String("family", string(osFamily)), log.String("version", osVer)) return true // can be the latest version } return clock.Now(ctx).Before(eol) diff --git a/pkg/detector/ospkg/wolfi/wolfi.go b/pkg/detector/ospkg/wolfi/wolfi.go index 9757fc6aa637..c77c968f993f 100644 --- a/pkg/detector/ospkg/wolfi/wolfi.go +++ 
b/pkg/detector/ospkg/wolfi/wolfi.go @@ -27,10 +27,8 @@ func NewScanner() *Scanner { } // Detect vulnerabilities in package using Wolfi scanner -func (s *Scanner) Detect(_ string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { - log.Logger.Info("Detecting Wolfi vulnerabilities...") - - log.Logger.Debugf("wolfi: the number of packages: %d", len(pkgs)) +func (s *Scanner) Detect(ctx context.Context, _ string, _ *ftypes.Repository, pkgs []ftypes.Package) ([]types.DetectedVulnerability, error) { + log.InfoContext(ctx, "Detecting vulnerabilities...", log.Int("pkg_num", len(pkgs))) var vulns []types.DetectedVulnerability for _, pkg := range pkgs { @@ -46,12 +44,13 @@ func (s *Scanner) Detect(_ string, _ *ftypes.Repository, pkgs []ftypes.Package) installed := utils.FormatVersion(pkg) installedVersion, err := version.NewVersion(installed) if err != nil { - log.Logger.Debugf("failed to parse Wolfi Linux installed package version: %s", err) + log.DebugContext(ctx, "Failed to parse the installed package version", + log.String("version", installed), log.Err(err)) continue } for _, adv := range advisories { - if !s.isVulnerable(installedVersion, adv) { + if !s.isVulnerable(ctx, installedVersion, adv) { continue } vulns = append(vulns, types.DetectedVulnerability{ @@ -70,11 +69,12 @@ func (s *Scanner) Detect(_ string, _ *ftypes.Repository, pkgs []ftypes.Package) return vulns, nil } -func (s *Scanner) isVulnerable(installedVersion version.Version, adv dbTypes.Advisory) bool { +func (s *Scanner) isVulnerable(ctx context.Context, installedVersion version.Version, adv dbTypes.Advisory) bool { // Compare versions for fixed vulnerabilities fixedVersion, err := version.NewVersion(adv.FixedVersion) if err != nil { - log.Logger.Debugf("failed to parse Wolfi Linux fixed version: %s", err) + log.DebugContext(ctx, "Failed to parse the fixed version", + log.String("version", adv.FixedVersion), log.Err(err)) return false } diff --git 
a/pkg/detector/ospkg/wolfi/wolfi_test.go b/pkg/detector/ospkg/wolfi/wolfi_test.go index 78c1e4818c31..2ef5fb664f55 100644 --- a/pkg/detector/ospkg/wolfi/wolfi_test.go +++ b/pkg/detector/ospkg/wolfi/wolfi_test.go @@ -194,7 +194,7 @@ func TestScanner_Detect(t *testing.T) { defer db.Close() s := wolfi.NewScanner() - got, err := s.Detect("", tt.args.repo, tt.args.pkgs) + got, err := s.Detect(nil, "", tt.args.repo, tt.args.pkgs) if tt.wantErr != "" { require.Error(t, err) assert.Contains(t, err.Error(), tt.wantErr) diff --git a/pkg/fanal/analyzer/analyzer.go b/pkg/fanal/analyzer/analyzer.go index c5f55fd5fa15..9312a90ad283 100644 --- a/pkg/fanal/analyzer/analyzer.go +++ b/pkg/fanal/analyzer/analyzer.go @@ -16,8 +16,8 @@ import ( "golang.org/x/xerrors" fos "github.com/aquasecurity/trivy/pkg/fanal/analyzer/os" - "github.com/aquasecurity/trivy/pkg/fanal/log" "github.com/aquasecurity/trivy/pkg/fanal/types" + "github.com/aquasecurity/trivy/pkg/log" "github.com/aquasecurity/trivy/pkg/misconf" xio "github.com/aquasecurity/trivy/pkg/x/io" ) @@ -92,7 +92,7 @@ const GroupBuiltin Group = "builtin" func RegisterAnalyzer(analyzer analyzer) { if _, ok := analyzers[analyzer.Type()]; ok { - log.Logger.Fatalf("analyzer %s is registered twice", analyzer.Type()) + log.Fatal("Analyzer is registered twice", log.String("type", string(analyzer.Type()))) } analyzers[analyzer.Type()] = analyzer } @@ -101,7 +101,7 @@ type postAnalyzerInitialize func(options AnalyzerOptions) (PostAnalyzer, error) func RegisterPostAnalyzer(t Type, initializer postAnalyzerInitialize) { if _, ok := postAnalyzers[t]; ok { - log.Logger.Fatalf("analyzer %s is registered twice", t) + log.Fatal("Analyzer is registered twice", log.String("type", string(t))) } postAnalyzers[t] = initializer } @@ -120,6 +120,7 @@ type CustomGroup interface { type Opener func() (xio.ReadSeekCloserAt, error) type AnalyzerGroup struct { + logger *log.Logger analyzers []analyzer postAnalyzers []PostAnalyzer filePatterns 
map[Type][]*regexp.Regexp @@ -318,6 +319,7 @@ func NewAnalyzerGroup(opt AnalyzerOptions) (AnalyzerGroup, error) { } group := AnalyzerGroup{ + logger: log.WithPrefix("analyzer"), filePatterns: make(map[Type][]*regexp.Regexp), } for _, p := range opt.FilePatterns { @@ -411,7 +413,7 @@ func (ag AnalyzerGroup) AnalyzeFile(ctx context.Context, wg *sync.WaitGroup, lim } rc, err := opener() if errors.Is(err, fs.ErrPermission) { - log.Logger.Debugf("Permission error: %s", filePath) + ag.logger.Debug("Permission error", log.String("file_path", filePath)) break } else if err != nil { return xerrors.Errorf("unable to open %s: %w", filePath, err) @@ -435,7 +437,7 @@ func (ag AnalyzerGroup) AnalyzeFile(ctx context.Context, wg *sync.WaitGroup, lim Options: opts, }) if err != nil && !errors.Is(err, fos.AnalyzeOSError) { - log.Logger.Debugf("Analysis error: %s", err) + ag.logger.Debug("Analysis error", log.Err(err)) return } result.Merge(ret) diff --git a/pkg/fanal/analyzer/config_analyzer.go b/pkg/fanal/analyzer/config_analyzer.go index 651b936f1ac7..c5682694a5c3 100644 --- a/pkg/fanal/analyzer/config_analyzer.go +++ b/pkg/fanal/analyzer/config_analyzer.go @@ -114,7 +114,7 @@ func (ag *ConfigAnalyzerGroup) AnalyzeImageConfig(ctx context.Context, targetOS r, err := a.Analyze(ctx, input) if err != nil { - log.Logger.Debugf("Image config analysis error: %s", err) + log.Debug("Image config analysis error", log.Err(err)) continue } diff --git a/pkg/fanal/analyzer/imgconf/secret/secret.go b/pkg/fanal/analyzer/imgconf/secret/secret.go index 3d153e5f1b1f..8d59726281b6 100644 --- a/pkg/fanal/analyzer/imgconf/secret/secret.go +++ b/pkg/fanal/analyzer/imgconf/secret/secret.go @@ -52,7 +52,7 @@ func (a *secretAnalyzer) Analyze(_ context.Context, input analyzer.ConfigAnalysi }) if len(result.Findings) == 0 { - log.Logger.Debug("No secrets found in container image config") + log.Debug("No secrets found in container image config") return nil, nil } diff --git 
a/pkg/fanal/analyzer/language/analyze.go b/pkg/fanal/analyzer/language/analyze.go index d31cbb2dfb0f..8a156bece710 100644 --- a/pkg/fanal/analyzer/language/analyze.go +++ b/pkg/fanal/analyzer/language/analyze.go @@ -84,7 +84,7 @@ func toApplication(fileType types.LangType, filePath, libFilePath string, r xio. // Calculate the file digest when one of `spdx` formats is selected d, err := calculateDigest(r) if err != nil { - log.Logger.Warnf("Unable to get checksum for %s: %s", filePath, err) + log.Warn("Unable to get checksum", log.String("file_path", filePath), log.Err(err)) } deps := make(map[string][]string) diff --git a/pkg/fanal/analyzer/language/dart/pub/pubspec.go b/pkg/fanal/analyzer/language/dart/pub/pubspec.go index ab924cafd191..1981e08a023d 100644 --- a/pkg/fanal/analyzer/language/dart/pub/pubspec.go +++ b/pkg/fanal/analyzer/language/dart/pub/pubspec.go @@ -35,11 +35,13 @@ const ( // pubSpecLockAnalyzer analyzes `pubspec.lock` type pubSpecLockAnalyzer struct { + logger *log.Logger parser godeptypes.Parser } func newPubSpecLockAnalyzer(_ analyzer.AnalyzerOptions) (analyzer.PostAnalyzer, error) { return pubSpecLockAnalyzer{ + logger: log.WithPrefix("pub"), parser: pub.NewParser(), }, nil } @@ -49,9 +51,9 @@ func (a pubSpecLockAnalyzer) PostAnalyze(_ context.Context, input analyzer.PostA // get all DependsOn from cache dir // lib ID -> DependsOn names - allDependsOn, err := findDependsOn() + allDependsOn, err := a.findDependsOn() if err != nil { - log.Logger.Warnf("Unable to parse cache dir: %s", err) + a.logger.Warn("Unable to parse cache dir", log.Err(err)) } required := func(path string, d fs.DirEntry) bool { @@ -98,10 +100,11 @@ func (a pubSpecLockAnalyzer) PostAnalyze(_ context.Context, input analyzer.PostA }, nil } -func findDependsOn() (map[string][]string, error) { +func (a pubSpecLockAnalyzer) findDependsOn() (map[string][]string, error) { dir := cacheDir() if !fsutils.DirExists(dir) { - log.Logger.Debugf("Cache dir (%s) not found. 
Need 'dart pub get' to fill dependency relationships", dir) + a.logger.Debug("Cache dir not found. Need 'dart pub get' to fill dependency relationships", + log.String("dir", dir)) return nil, nil } @@ -113,7 +116,7 @@ func findDependsOn() (map[string][]string, error) { if err := fsutils.WalkDir(os.DirFS(dir), ".", required, func(path string, d fs.DirEntry, r io.Reader) error { id, dependsOn, err := parsePubSpecYaml(r) if err != nil { - log.Logger.Debugf("Unable to parse %q: %s", path, err) + a.logger.Debug("Unable to parse pubspec.yaml", log.String("path", path), log.Err(err)) return nil } if id != "" { diff --git a/pkg/fanal/analyzer/language/dotnet/nuget/nuspec.go b/pkg/fanal/analyzer/language/dotnet/nuget/nuspec.go index b42d1cec38d2..6f25c91e565a 100644 --- a/pkg/fanal/analyzer/language/dotnet/nuget/nuspec.go +++ b/pkg/fanal/analyzer/language/dotnet/nuget/nuspec.go @@ -30,10 +30,13 @@ type License struct { } type nuspecParser struct { + logger *log.Logger packagesDir string // global packages folder - https: //learn.microsoft.com/en-us/nuget/consume-packages/managing-the-global-packages-and-cache-folders } func newNuspecParser() nuspecParser { + logger := log.WithPrefix("nuget") + // cf. https: //learn.microsoft.com/en-us/nuget/consume-packages/managing-the-global-packages-and-cache-folders packagesDir := os.Getenv("NUGET_PACKAGES") if packagesDir == "" { @@ -41,11 +44,12 @@ func newNuspecParser() nuspecParser { } if !fsutils.DirExists(packagesDir) { - log.Logger.Debugf("The nuget packages directory couldn't be found. License search disabled") + logger.Debug("The nuget packages directory couldn't be found. 
License search disabled") return nuspecParser{} } return nuspecParser{ + logger: logger, packagesDir: packagesDir, } } diff --git a/pkg/fanal/analyzer/language/golang/mod/mod.go b/pkg/fanal/analyzer/language/golang/mod/mod.go index 5aa0ae2293fe..cc9b1b439a95 100644 --- a/pkg/fanal/analyzer/language/golang/mod/mod.go +++ b/pkg/fanal/analyzer/language/golang/mod/mod.go @@ -52,6 +52,8 @@ type gomodAnalyzer struct { leafModParser godeptypes.Parser licenseClassifierConfidenceLevel float64 + + logger *log.Logger } func newGoModAnalyzer(opt analyzer.AnalyzerOptions) (analyzer.PostAnalyzer, error) { @@ -60,6 +62,7 @@ func newGoModAnalyzer(opt analyzer.AnalyzerOptions) (analyzer.PostAnalyzer, erro sumParser: sum.NewParser(), leafModParser: mod.NewParser(false), licenseClassifierConfidenceLevel: opt.LicenseScannerOption.ClassifierConfidenceLevel, + logger: log.WithPrefix("golang"), }, nil } @@ -97,7 +100,7 @@ func (a *gomodAnalyzer) PostAnalyze(_ context.Context, input analyzer.PostAnalys } if err = a.fillAdditionalData(apps); err != nil { - log.Logger.Warnf("Unable to collect additional info: %s", err) + a.logger.Warn("Unable to collect additional info", log.Err(err)) } return &analyzer.AnalysisResult{ @@ -128,7 +131,8 @@ func (a *gomodAnalyzer) fillAdditionalData(apps []types.Application) error { // $GOPATH/pkg/mod modPath := filepath.Join(gopath, "pkg", "mod") if !fsutils.DirExists(modPath) { - log.Logger.Debugf("GOPATH (%s) not found. Need 'go mod download' to fill licenses and dependency relationships", modPath) + a.logger.Debug("GOPATH not found. 
Need 'go mod download' to fill licenses and dependency relationships", + log.String("GOPATH", modPath)) return nil } @@ -185,7 +189,8 @@ func (a *gomodAnalyzer) collectDeps(modDir, pkgID string) (godeptypes.Dependency modPath := filepath.Join(modDir, "go.mod") f, err := os.Open(modPath) if errors.Is(err, fs.ErrNotExist) { - log.Logger.Debugf("Unable to identify dependencies of %s as it doesn't support Go modules", pkgID) + a.logger.Debug("Unable to identify dependencies as it doesn't support Go modules", + log.String("module", pkgID)) return godeptypes.Dependency{}, nil } else if err != nil { return godeptypes.Dependency{}, xerrors.Errorf("file open error: %w", err) diff --git a/pkg/fanal/analyzer/language/java/gradle/lockfile.go b/pkg/fanal/analyzer/language/java/gradle/lockfile.go index 5dddb0b49c3c..2426722c31ca 100644 --- a/pkg/fanal/analyzer/language/java/gradle/lockfile.go +++ b/pkg/fanal/analyzer/language/java/gradle/lockfile.go @@ -32,19 +32,21 @@ const ( // gradleLockAnalyzer analyzes '*gradle.lockfile' type gradleLockAnalyzer struct { + logger *log.Logger parser godeptypes.Parser } func newGradleLockAnalyzer(_ analyzer.AnalyzerOptions) (analyzer.PostAnalyzer, error) { return &gradleLockAnalyzer{ + logger: log.WithPrefix("gradle"), parser: lockfile.NewParser(), }, nil } func (a gradleLockAnalyzer) PostAnalyze(_ context.Context, input analyzer.PostAnalysisInput) (*analyzer.AnalysisResult, error) { - poms, err := parsePoms() + poms, err := a.parsePoms() if err != nil { - log.Logger.Warnf("Unable to get licenses and dependsOn: %s", err) + a.logger.Warn("Unable to get licenses and dependencies", log.Err(err)) } required := func(path string, d fs.DirEntry) bool { diff --git a/pkg/fanal/analyzer/language/java/gradle/pom.go b/pkg/fanal/analyzer/language/java/gradle/pom.go index 638b5c9fd61b..cf24e4716054 100644 --- a/pkg/fanal/analyzer/language/java/gradle/pom.go +++ b/pkg/fanal/analyzer/language/java/gradle/pom.go @@ -12,7 +12,7 @@ import ( 
"golang.org/x/net/html/charset" "golang.org/x/xerrors" - "github.com/aquasecurity/trivy/pkg/fanal/log" + "github.com/aquasecurity/trivy/pkg/log" "github.com/aquasecurity/trivy/pkg/utils/fsutils" ) @@ -65,8 +65,8 @@ func (props *Properties) UnmarshalXML(d *xml.Decoder, _ xml.StartElement) error return nil } -func parsePoms() (map[string]pomXML, error) { - cacheDir := detectCacheDir() +func (a gradleLockAnalyzer) parsePoms() (map[string]pomXML, error) { + cacheDir := a.detectCacheDir() // Cache dir is not found if cacheDir == "" { return nil, nil @@ -80,7 +80,7 @@ func parsePoms() (map[string]pomXML, error) { err := fsutils.WalkDir(os.DirFS(cacheDir), ".", required, func(path string, _ fs.DirEntry, r io.Reader) error { pom, err := parsePom(r, path) if err != nil { - log.Logger.Debugf("Unable to parse %q: %s", path, err) + a.logger.Debug("Unable to parse pom", log.String("file_path", path), log.Err(err)) return nil } @@ -146,7 +146,7 @@ func (pom *pomXML) resolveDependencyVersions() error { return nil } -func detectCacheDir() string { +func (a gradleLockAnalyzer) detectCacheDir() string { // https://docs.gradle.org/current/userguide/directory_layout.html dir := os.Getenv("GRADLE_USER_HOME") if dir == "" { @@ -159,7 +159,7 @@ func detectCacheDir() string { dir = filepath.Join(dir, "caches") if !fsutils.DirExists(dir) { - log.Logger.Debug("Unable to get licenses and dependsOn. Gradle cache dir doesn't exist.") + a.logger.Debug("Unable to get licenses and dependencies. 
Gradle cache dir doesn't exist.") return "" } return dir diff --git a/pkg/fanal/analyzer/language/nodejs/license/license.go b/pkg/fanal/analyzer/language/nodejs/license/license.go index 7889a2b4e9a8..0a797c558e56 100644 --- a/pkg/fanal/analyzer/language/nodejs/license/license.go +++ b/pkg/fanal/analyzer/language/nodejs/license/license.go @@ -17,12 +17,14 @@ import ( ) type License struct { + logger *log.Logger parser *packagejson.Parser classifierConfidenceLevel float64 } func NewLicense(classifierConfidenceLevel float64) *License { return &License{ + logger: log.WithPrefix("npm"), parser: packagejson.NewParser(), classifierConfidenceLevel: classifierConfidenceLevel, } @@ -42,7 +44,8 @@ func (l *License) Traverse(fsys fs.FS, root string) (map[string][]string, error) return nil } - log.Logger.Debugf("License names are missing in %q, an attempt to find them in the %q file", pkgJSONPath, licenseFileName) + l.logger.Debug("License names are missing, an attempt to find them in the license file", + log.String("file", pkgJSONPath), log.String("license_file", licenseFileName)) licenseFilePath := path.Join(path.Dir(pkgJSONPath), licenseFileName) if findings, err := classifyLicense(licenseFilePath, l.classifierConfidenceLevel, fsys); err != nil { @@ -51,7 +54,8 @@ func (l *License) Traverse(fsys fs.FS, root string) (map[string][]string, error) // License found licenses[pkg.ID] = findings.Names() } else { - log.Logger.Debugf("The license file %q was not found or the license could not be classified", licenseFilePath) + l.logger.Debug("The license file was not found or the license could not be classified", + log.String("license_file", licenseFilePath)) } return nil } diff --git a/pkg/fanal/analyzer/language/nodejs/npm/npm.go b/pkg/fanal/analyzer/language/nodejs/npm/npm.go index c5dd5d26eed0..44123eade970 100644 --- a/pkg/fanal/analyzer/language/nodejs/npm/npm.go +++ b/pkg/fanal/analyzer/language/nodejs/npm/npm.go @@ -32,12 +32,14 @@ const ( ) type npmLibraryAnalyzer struct { + 
logger *log.Logger lockParser godeptypes.Parser packageParser *packagejson.Parser } func newNpmLibraryAnalyzer(_ analyzer.AnalyzerOptions) (analyzer.PostAnalyzer, error) { return &npmLibraryAnalyzer{ + logger: log.WithPrefix("npm"), lockParser: npm.NewParser(), packageParser: packagejson.NewParser(), }, nil @@ -54,7 +56,7 @@ func (a npmLibraryAnalyzer) PostAnalyze(_ context.Context, input analyzer.PostAn // Find all licenses from package.json files under node_modules dirs licenses, err := a.findLicenses(input.FS, filePath) if err != nil { - log.Logger.Errorf("Unable to collect licenses: %s", err) + a.logger.Error("Unable to collect licenses", log.Err(err)) licenses = make(map[string]string) } @@ -127,7 +129,8 @@ func (a npmLibraryAnalyzer) findLicenses(fsys fs.FS, lockPath string) (map[strin dir := path.Dir(lockPath) root := path.Join(dir, "node_modules") if _, err := fs.Stat(fsys, root); errors.Is(err, fs.ErrNotExist) { - log.Logger.Infof(`To collect the license information of packages in %q, "npm install" needs to be performed beforehand`, lockPath) + a.logger.Info(`To collect the license information of packages, "npm install" needs to be performed beforehand`, + log.String("dir", root)) return nil, nil } diff --git a/pkg/fanal/analyzer/language/nodejs/npm/npm_test.go b/pkg/fanal/analyzer/language/nodejs/npm/npm_test.go index 7635e0266729..9c1cc51a55d8 100644 --- a/pkg/fanal/analyzer/language/nodejs/npm/npm_test.go +++ b/pkg/fanal/analyzer/language/nodejs/npm/npm_test.go @@ -15,7 +15,7 @@ import ( ) func TestMain(m *testing.M) { - _ = log.InitLogger(false, true) + log.InitLogger(false, true) os.Exit(m.Run()) } diff --git a/pkg/fanal/analyzer/language/nodejs/yarn/yarn.go b/pkg/fanal/analyzer/language/nodejs/yarn/yarn.go index 1cbd6b4896aa..3d654662078b 100644 --- a/pkg/fanal/analyzer/language/nodejs/yarn/yarn.go +++ b/pkg/fanal/analyzer/language/nodejs/yarn/yarn.go @@ -42,6 +42,7 @@ const version = 2 var fragmentRegexp = 
regexp.MustCompile(`(\S+):(@?.*?)(@(.*?)|)$`) type yarnAnalyzer struct { + logger *log.Logger packageJsonParser *packagejson.Parser lockParser godeptypes.Parser comparer npm.Comparer @@ -50,6 +51,7 @@ type yarnAnalyzer struct { func newYarnAnalyzer(opt analyzer.AnalyzerOptions) (analyzer.PostAnalyzer, error) { return &yarnAnalyzer{ + logger: log.WithPrefix("yarn"), packageJsonParser: packagejson.NewParser(), lockParser: yarn.NewParser(), comparer: npm.Comparer{}, @@ -75,12 +77,13 @@ func (a yarnAnalyzer) PostAnalyze(_ context.Context, input analyzer.PostAnalysis licenses, err := a.traverseLicenses(input.FS, filePath) if err != nil { - log.Logger.Debugf("Unable to traverse licenses: %s", err) + a.logger.Debug("Unable to traverse licenses", log.Err(err)) } // Parse package.json alongside yarn.lock to find direct deps and mark dev deps if err = a.analyzeDependencies(input.FS, path.Dir(filePath), app); err != nil { - log.Logger.Warnf("Unable to parse %q to remove dev dependencies: %s", path.Join(path.Dir(filePath), types.NpmPkg), err) + a.logger.Warn("Unable to parse package.json to remove dev dependencies", + log.String("file_path", path.Join(path.Dir(filePath), types.NpmPkg)), log.Err(err)) } // Fill licenses @@ -156,7 +159,7 @@ func (a yarnAnalyzer) analyzeDependencies(fsys fs.FS, dir string, app *types.App packageJsonPath := path.Join(dir, types.NpmPkg) directDeps, directDevDeps, err := a.parsePackageJsonDependencies(fsys, packageJsonPath) if errors.Is(err, fs.ErrNotExist) { - log.Logger.Debugf("Yarn: %s not found", packageJsonPath) + a.logger.Debug("package.json not found", log.String("path", packageJsonPath)) return nil } else if err != nil { return xerrors.Errorf("unable to parse %s: %w", dir, err) diff --git a/pkg/fanal/analyzer/language/php/composer/composer.go b/pkg/fanal/analyzer/language/php/composer/composer.go index d0244514a0e7..6fffecf05a50 100644 --- a/pkg/fanal/analyzer/language/php/composer/composer.go +++ 
b/pkg/fanal/analyzer/language/php/composer/composer.go @@ -62,7 +62,8 @@ func (a composerAnalyzer) PostAnalyze(_ context.Context, input analyzer.PostAnal // Parse composer.json alongside composer.lock to identify the direct dependencies if err = a.mergeComposerJson(input.FS, filepath.Dir(path), app); err != nil { - log.Logger.Warnf("Unable to parse %q to identify direct dependencies: %s", filepath.Join(filepath.Dir(path), types.ComposerJson), err) + log.Warn("Unable to parse composer.json to identify direct dependencies", + log.String("path", filepath.Join(filepath.Dir(path), types.ComposerJson)), log.Err(err)) } sort.Sort(app.Libraries) apps = append(apps, *app) @@ -109,7 +110,7 @@ func (a composerAnalyzer) mergeComposerJson(fsys fs.FS, dir string, app *types.A p, err := a.parseComposerJson(fsys, path) if errors.Is(err, fs.ErrNotExist) { // Assume all the packages are direct dependencies as it cannot identify them from composer.lock - log.Logger.Debugf("Unable to determine the direct dependencies: %s not found", path) + log.Debug("Unable to determine the direct dependencies, composer.json not found", log.String("path", path)) return nil } else if err != nil { return xerrors.Errorf("unable to parse %s: %w", path, err) diff --git a/pkg/fanal/analyzer/language/python/packaging/packaging.go b/pkg/fanal/analyzer/language/python/packaging/packaging.go index c45a4f5aef0f..6f2c508b5404 100644 --- a/pkg/fanal/analyzer/language/python/packaging/packaging.go +++ b/pkg/fanal/analyzer/language/python/packaging/packaging.go @@ -34,6 +34,7 @@ const version = 1 func newPackagingAnalyzer(opt analyzer.AnalyzerOptions) (analyzer.PostAnalyzer, error) { return &packagingAnalyzer{ + logger: log.WithPrefix("python"), pkgParser: packaging.NewParser(), licenseClassifierConfidenceLevel: opt.LicenseScannerOption.ClassifierConfidenceLevel, }, nil @@ -54,6 +55,7 @@ var ( ) type packagingAnalyzer struct { + logger *log.Logger pkgParser godeptypes.Parser licenseClassifierConfidenceLevel float64 
} @@ -99,7 +101,7 @@ func (a packagingAnalyzer) PostAnalyze(_ context.Context, input analyzer.PostAna } if err := a.fillAdditionalData(input.FS, app); err != nil { - log.Logger.Warnf("Unable to collect additional info: %s", err) + a.logger.Warn("Unable to collect additional info", log.Err(err)) } apps = append(apps, *app) diff --git a/pkg/fanal/analyzer/language/python/poetry/poetry.go b/pkg/fanal/analyzer/language/python/poetry/poetry.go index 90897e8f12ba..8a68a61439c3 100644 --- a/pkg/fanal/analyzer/language/python/poetry/poetry.go +++ b/pkg/fanal/analyzer/language/python/poetry/poetry.go @@ -27,12 +27,14 @@ func init() { const version = 1 type poetryAnalyzer struct { + logger *log.Logger pyprojectParser *pyproject.Parser lockParser godeptypes.Parser } func newPoetryAnalyzer(_ analyzer.AnalyzerOptions) (analyzer.PostAnalyzer, error) { return &poetryAnalyzer{ + logger: log.WithPrefix("poetry"), pyprojectParser: pyproject.NewParser(), lockParser: poetry.NewParser(), }, nil @@ -56,7 +58,8 @@ func (a poetryAnalyzer) PostAnalyze(_ context.Context, input analyzer.PostAnalys // Parse pyproject.toml alongside poetry.lock to identify the direct dependencies if err = a.mergePyProject(input.FS, filepath.Dir(path), app); err != nil { - log.Logger.Warnf("Unable to parse %q to identify direct dependencies: %s", filepath.Join(filepath.Dir(path), types.PyProject), err) + a.logger.Warn("Unable to parse pyproject.toml to identify direct dependencies", + log.String("path", filepath.Join(filepath.Dir(path), types.PyProject)), log.Err(err)) } apps = append(apps, *app) @@ -94,7 +97,7 @@ func (a poetryAnalyzer) mergePyProject(fsys fs.FS, dir string, app *types.Applic p, err := a.parsePyProject(fsys, path) if errors.Is(err, fs.ErrNotExist) { // Assume all the packages are direct dependencies as it cannot identify them from poetry.lock - log.Logger.Debugf("Poetry: %s not found", path) + a.logger.Debug("pyproject.toml not found", log.String("path", path)) return nil } else if err != nil 
{ return xerrors.Errorf("unable to parse %s: %w", path, err) diff --git a/pkg/fanal/analyzer/language/rust/cargo/cargo.go b/pkg/fanal/analyzer/language/rust/cargo/cargo.go index f487ba0c46e8..ba0654a4942f 100644 --- a/pkg/fanal/analyzer/language/rust/cargo/cargo.go +++ b/pkg/fanal/analyzer/language/rust/cargo/cargo.go @@ -41,12 +41,14 @@ var requiredFiles = []string{ } type cargoAnalyzer struct { + logger *log.Logger lockParser godeptypes.Parser comparer compare.GenericComparer } func newCargoAnalyzer(_ analyzer.AnalyzerOptions) (analyzer.PostAnalyzer, error) { return &cargoAnalyzer{ + logger: log.WithPrefix("cargo"), lockParser: cargo.NewParser(), comparer: compare.GenericComparer{}, }, nil @@ -70,7 +72,8 @@ func (a cargoAnalyzer) PostAnalyze(_ context.Context, input analyzer.PostAnalysi // Parse Cargo.toml alongside Cargo.lock to identify the direct dependencies if err = a.removeDevDependencies(input.FS, path.Dir(filePath), app); err != nil { - log.Logger.Warnf("Unable to parse %q to identify direct dependencies: %s", path.Join(path.Dir(filePath), types.CargoToml), err) + a.logger.Warn("Unable to parse Cargo.toml to identify direct dependencies", + log.String("path", path.Join(path.Dir(filePath), types.CargoToml)), log.Err(err)) } sort.Sort(app.Libraries) apps = append(apps, *app) @@ -107,7 +110,7 @@ func (a cargoAnalyzer) removeDevDependencies(fsys fs.FS, dir string, app *types. cargoTOMLPath := path.Join(dir, types.CargoToml) directDeps, err := a.parseRootCargoTOML(fsys, cargoTOMLPath) if errors.Is(err, fs.ErrNotExist) { - log.Logger.Debugf("Cargo: %s not found", cargoTOMLPath) + a.logger.Debug("Cargo.toml not found", log.String("path", cargoTOMLPath)) return nil } else if err != nil { return xerrors.Errorf("unable to parse %s: %w", cargoTOMLPath, err) @@ -128,7 +131,7 @@ func (a cargoAnalyzer) removeDevDependencies(fsys fs.FS, dir string, app *types.
} if match, err := a.matchVersion(pkg.Version, constraint); err != nil { - log.Logger.Warnf("Unable to match Cargo version: package: %s, error: %s", pkg.ID, err) + a.logger.Warn("Unable to match Cargo version", log.String("package", pkg.ID), log.Err(err)) continue } else if match { // Mark as a direct dependency @@ -179,7 +182,7 @@ func (a cargoAnalyzer) parseRootCargoTOML(fsys fs.FS, filePath string) (map[stri memberPath := path.Join(path.Dir(filePath), member, types.CargoToml) memberDeps, _, err := parseCargoTOML(fsys, memberPath) if err != nil { - log.Logger.Warnf("Unable to parse %q: %s", memberPath, err) + a.logger.Warn("Unable to parse Cargo.toml", log.String("member_path", memberPath), log.Err(err)) continue } // Member dependencies shouldn't overwrite dependencies from root cargo.toml file diff --git a/pkg/fanal/analyzer/licensing/license.go b/pkg/fanal/analyzer/licensing/license.go index 3e3986d732f8..42872b1c8474 100644 --- a/pkg/fanal/analyzer/licensing/license.go +++ b/pkg/fanal/analyzer/licensing/license.go @@ -12,9 +12,9 @@ import ( "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" - "github.com/aquasecurity/trivy/pkg/fanal/log" "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/licensing" + "github.com/aquasecurity/trivy/pkg/log" xio "github.com/aquasecurity/trivy/pkg/x/io" ) @@ -37,18 +37,48 @@ var ( } acceptedExtensions = []string{ - ".asp", ".aspx", ".bas", ".bat", ".b", ".c", ".cue", ".cgi", ".cs", ".css", ".fish", ".html", ".h", ".ini", - ".java", ".js", ".jsx", ".markdown", ".md", ".py", ".php", ".pl", ".r", ".rb", ".sh", ".sql", ".ts", - ".tsx", ".txt", ".vue", ".zsh", + ".asp", + ".aspx", + ".bas", + ".bat", + ".b", + ".c", + ".cue", + ".cgi", + ".cs", + ".css", + ".fish", + ".html", + ".h", + ".ini", + ".java", + ".js", + ".jsx", + ".markdown", + ".md", + ".py", + ".php", + ".pl", + ".r", + ".rb", + ".sh", + ".sql", + ".ts", + ".tsx", + ".txt", + ".vue", + ".zsh", } acceptedFileNames = 
[]string{ - "license", "licence", "copyright", + "license", + "licence", + "copyright", } ) func init() { - analyzer.RegisterAnalyzer(&licenseFileAnalyzer{}) + analyzer.RegisterAnalyzer(newLicenseFileAnalyzer()) } // licenseFileAnalyzer is an analyzer for file headers and license files @@ -56,8 +86,13 @@ type licenseFileAnalyzer struct { classifierConfidenceLevel float64 } -func (a licenseFileAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) { - log.Logger.Debugf("License scanning: %s", input.FilePath) +func newLicenseFileAnalyzer() *licenseFileAnalyzer { + return &licenseFileAnalyzer{} +} + +func (a *licenseFileAnalyzer) Analyze(ctx context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) { + ctx = log.WithContextPrefix(ctx, "license") + log.DebugContext(ctx, "License scanning", log.String("file_path", input.FilePath)) // need files to be text based, readable files readable, err := isHumanReadable(input.Content, input.Info.Size()) @@ -81,7 +116,7 @@ func (a *licenseFileAnalyzer) Init(opt analyzer.AnalyzerOptions) error { return nil } -func (a licenseFileAnalyzer) Required(filePath string, _ os.FileInfo) bool { +func (a *licenseFileAnalyzer) Required(filePath string, _ os.FileInfo) bool { for _, skipDir := range skipDirs { if strings.Contains(filePath, skipDir) { return false @@ -116,10 +151,10 @@ func isHumanReadable(content xio.ReadSeekerAt, fileSize int64) (bool, error) { return true, nil } -func (a licenseFileAnalyzer) Type() analyzer.Type { +func (a *licenseFileAnalyzer) Type() analyzer.Type { return analyzer.TypeLicenseFile } -func (a licenseFileAnalyzer) Version() int { +func (a *licenseFileAnalyzer) Version() int { return version } diff --git a/pkg/fanal/analyzer/licensing/license_test.go b/pkg/fanal/analyzer/licensing/license_test.go index fd04029873d9..292b6aea98be 100644 --- a/pkg/fanal/analyzer/licensing/license_test.go +++ b/pkg/fanal/analyzer/licensing/license_test.go @@ -53,7 
+53,7 @@ func Test_licenseAnalyzer_Analyze(t *testing.T) { fi, err := f.Stat() require.NoError(t, err) - a := licenseFileAnalyzer{} + a := newLicenseFileAnalyzer() got, err := a.Analyze(context.TODO(), analyzer.AnalysisInput{ FilePath: tt.filePath, Content: f, diff --git a/pkg/fanal/analyzer/pkg/apk/apk.go b/pkg/fanal/analyzer/pkg/apk/apk.go index 5f3b82dbbe76..bb2007470b1b 100644 --- a/pkg/fanal/analyzer/pkg/apk/apk.go +++ b/pkg/fanal/analyzer/pkg/apk/apk.go @@ -23,7 +23,7 @@ import ( ) func init() { - analyzer.RegisterAnalyzer(&alpinePkgAnalyzer{}) + analyzer.RegisterAnalyzer(newAlpinePkgAnalyzer()) } const analyzerVersion = 2 @@ -32,9 +32,12 @@ var requiredFiles = []string{"lib/apk/db/installed"} type alpinePkgAnalyzer struct{} -func (a alpinePkgAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) { +func newAlpinePkgAnalyzer() *alpinePkgAnalyzer { return &alpinePkgAnalyzer{} } + +func (a alpinePkgAnalyzer) Analyze(ctx context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) { + ctx = log.WithContextPrefix(ctx, "apk") scanner := bufio.NewScanner(input.Content) - parsedPkgs, installedFiles := a.parseApkInfo(scanner) + parsedPkgs, installedFiles := a.parseApkInfo(ctx, scanner) return &analyzer.AnalysisResult{ PackageInfos: []types.PackageInfo{ @@ -47,7 +50,7 @@ func (a alpinePkgAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisInp }, nil } -func (a alpinePkgAnalyzer) parseApkInfo(scanner *bufio.Scanner) ([]types.Package, []string) { +func (a alpinePkgAnalyzer) parseApkInfo(ctx context.Context, scanner *bufio.Scanner) ([]types.Package, []string) { var ( pkgs []types.Package pkg types.Package @@ -76,7 +79,8 @@ func (a alpinePkgAnalyzer) parseApkInfo(scanner *bufio.Scanner) ([]types.Package case "V:": version = line[2:] if !apkVersion.Valid(version) { - log.Logger.Warnf("Invalid Version Found : OS %s, Package %s, Version %s", "alpine", pkg.Name, version) + log.WarnContext(ctx, "Invalid 
version found", + log.String("name", pkg.Name), log.String("version", version)) continue } pkg.Version = version @@ -99,7 +103,7 @@ func (a alpinePkgAnalyzer) parseApkInfo(scanner *bufio.Scanner) ([]types.Package case "A:": pkg.Arch = line[2:] case "C:": - d := decodeChecksumLine(line) + d := a.decodeChecksumLine(ctx, line) if d != "" { pkg.Digest = d } @@ -223,9 +227,9 @@ func (a alpinePkgAnalyzer) Version() int { } // decodeChecksumLine decodes checksum line -func decodeChecksumLine(line string) digest.Digest { +func (a alpinePkgAnalyzer) decodeChecksumLine(ctx context.Context, line string) digest.Digest { if len(line) < 2 { - log.Logger.Debugf("Unable to decode checksum line of apk package: %s", line) + log.DebugContext(ctx, "Unable to decode checksum line of apk package", log.String("line", line)) return "" } // https://wiki.alpinelinux.org/wiki/Apk_spec#Package_Checksum_Field @@ -239,7 +243,7 @@ func decodeChecksumLine(line string) digest.Digest { decodedDigestString, err := base64.StdEncoding.DecodeString(d) if err != nil { - log.Logger.Debugf("unable to decode digest: %s", err) + log.DebugContext(ctx, "Unable to decode digest", log.Err(err)) return "" } h := hex.EncodeToString(decodedDigestString) diff --git a/pkg/fanal/analyzer/pkg/apk/apk_test.go b/pkg/fanal/analyzer/pkg/apk/apk_test.go index 948cd1775da0..08a5d302d324 100644 --- a/pkg/fanal/analyzer/pkg/apk/apk_test.go +++ b/pkg/fanal/analyzer/pkg/apk/apk_test.go @@ -2,6 +2,7 @@ package apk import ( "bufio" + "context" "os" "testing" @@ -51,9 +52,12 @@ var pkgs = []types.Package{ SrcName: "alpine-baselayout", SrcVersion: "3.0.3-r0", Licenses: []string{"GPL-2.0"}, - DependsOn: []string{"busybox@1.24.2-r9", "musl@1.1.14-r10"}, - Arch: "x86_64", - Digest: "sha1:a214896150411d72dd1fafdb32d1c6c4855cccfa", + DependsOn: []string{ + "busybox@1.24.2-r9", + "musl@1.1.14-r10", + }, + Arch: "x86_64", + Digest: "sha1:a214896150411d72dd1fafdb32d1c6c4855cccfa", InstalledFiles: []string{ "etc/hosts", "etc/sysctl.conf", 
@@ -121,9 +125,12 @@ var pkgs = []types.Package{ SrcName: "openssl", SrcVersion: "1.0.2h-r1", Licenses: []string{"openssl"}, - DependsOn: []string{"musl@1.1.14-r10", "zlib@1.2.8-r2"}, - Arch: "x86_64", - Digest: "sha1:65c860ff8f103b664f40ba849a3f5a51c69c8beb", + DependsOn: []string{ + "musl@1.1.14-r10", + "zlib@1.2.8-r2", + }, + Arch: "x86_64", + Digest: "sha1:65c860ff8f103b664f40ba849a3f5a51c69c8beb", InstalledFiles: []string{ "lib/libcrypto.so.1.0.0", "usr/bin/c_rehash", @@ -199,8 +206,12 @@ var pkgs = []types.Package{ Version: "1.1.14-r10", SrcName: "musl", SrcVersion: "1.1.14-r10", - Licenses: []string{"MIT", "BSD-3-Clause", "GPL-2.0"}, - Digest: "sha1:608aa1dd39eff7bc6615d3e5e33383750f8f5ecc", + Licenses: []string{ + "MIT", + "BSD-3-Clause", + "GPL-2.0", + }, + Digest: "sha1:608aa1dd39eff7bc6615d3e5e33383750f8f5ecc", DependsOn: []string{ "musl@1.1.14-r10", "scanelf@1.1.6-r0", @@ -288,8 +299,12 @@ var pkgs = []types.Package{ Version: "2.7.4-r0", SrcName: "ada", SrcVersion: "2.7.4-r0", - Licenses: []string{"Apache-2.0", "MIT", "MPL-2.0"}, - Digest: "sha1:593154f80c440685448e0f52479725d7bc9b678d", + Licenses: []string{ + "Apache-2.0", + "MIT", + "MPL-2.0", + }, + Digest: "sha1:593154f80c440685448e0f52479725d7bc9b678d", DependsOn: []string{ "musl@1.1.14-r10", }, @@ -431,7 +446,7 @@ func TestParseApkInfo(t *testing.T) { require.NoError(t, err) defer f.Close() scanner := bufio.NewScanner(f) - gotPkgs, gotFiles := a.parseApkInfo(scanner) + gotPkgs, gotFiles := a.parseApkInfo(context.Background(), scanner) assert.Equal(t, tt.wantPkgs, gotPkgs) assert.Equal(t, tt.wantFiles, gotFiles) diff --git a/pkg/fanal/analyzer/pkg/dpkg/dpkg.go b/pkg/fanal/analyzer/pkg/dpkg/dpkg.go index 74fd4d82ef7b..d73c905fd413 100644 --- a/pkg/fanal/analyzer/pkg/dpkg/dpkg.go +++ b/pkg/fanal/analyzer/pkg/dpkg/dpkg.go @@ -16,7 +16,6 @@ import ( debVersion "github.com/knqyf263/go-deb-version" "github.com/samber/lo" - "go.uber.org/zap" "golang.org/x/exp/slices" "golang.org/x/xerrors" @@ -31,10 
+30,14 @@ func init() { analyzer.RegisterPostAnalyzer(analyzer.TypeDpkg, newDpkgAnalyzer) } -type dpkgAnalyzer struct{} +type dpkgAnalyzer struct { + logger *log.Logger +} func newDpkgAnalyzer(_ analyzer.AnalyzerOptions) (analyzer.PostAnalyzer, error) { - return &dpkgAnalyzer{}, nil + return &dpkgAnalyzer{ + logger: log.WithPrefix("dpkg"), + }, nil } const ( @@ -58,7 +61,7 @@ func (a dpkgAnalyzer) PostAnalyze(_ context.Context, input analyzer.PostAnalysis // parse `available` file to get digest for packages digests, err := a.parseDpkgAvailable(input.FS) if err != nil { - log.Logger.Debugf("Unable to parse %q file: %s", availableFile, err) + a.logger.Debug("Unable to parse the available file", log.String("file", availableFile), log.Err(err)) } required := func(path string, d fs.DirEntry) bool { @@ -155,7 +158,7 @@ func (a dpkgAnalyzer) parseDpkgAvailable(fsys fs.FS) (map[string]digest.Digest, for scanner.Scan() { header, err := scanner.Header() if !errors.Is(err, io.EOF) && err != nil { - log.Logger.Warnw("Parse error", zap.String("file", availableFile), zap.Error(err)) + a.logger.Warn("Parse error", log.String("file", availableFile), log.Err(err)) continue } name, version, checksum := header.Get("Package"), header.Get("Version"), header.Get("SHA256") @@ -181,7 +184,7 @@ func (a dpkgAnalyzer) parseDpkgStatus(filePath string, r io.Reader, digests map[ for scanner.Scan() { header, err := scanner.Header() if !errors.Is(err, io.EOF) && err != nil { - log.Logger.Warnw("Parse error", zap.String("file", filePath), zap.Error(err)) + a.logger.Warn("Parse error", log.String("file", filePath), log.Err(err)) continue } @@ -251,8 +254,8 @@ func (a dpkgAnalyzer) parseDpkgPkg(header textproto.MIMEHeader) *types.Package { } if v, err := debVersion.NewVersion(pkg.Version); err != nil { - log.Logger.Warnw("Invalid version", zap.String("OS", "debian"), - zap.String("package", pkg.Name), zap.String("version", pkg.Version)) + a.logger.Warn("Invalid version", log.String("OS", "debian"), 
+ log.String("package", pkg.Name), log.String("version", pkg.Version)) return nil } else { pkg.ID = a.pkgID(pkg.Name, pkg.Version) @@ -262,8 +265,8 @@ func (a dpkgAnalyzer) parseDpkgPkg(header textproto.MIMEHeader) *types.Package { } if v, err := debVersion.NewVersion(pkg.SrcVersion); err != nil { - log.Logger.Warnw("Invalid source version", zap.String("OS", "debian"), - zap.String("package", pkg.Name), zap.String("version", pkg.SrcVersion)) + a.logger.Warn("Invalid source version", log.String("OS", "debian"), + log.String("package", pkg.Name), log.String("version", pkg.SrcVersion)) return nil } else { pkg.SrcVersion = v.Version() diff --git a/pkg/fanal/analyzer/pkg/rpm/rpm.go b/pkg/fanal/analyzer/pkg/rpm/rpm.go index f3a52286e578..70d5b9dcd26a 100644 --- a/pkg/fanal/analyzer/pkg/rpm/rpm.go +++ b/pkg/fanal/analyzer/pkg/rpm/rpm.go @@ -16,13 +16,13 @@ import ( "github.com/aquasecurity/trivy/pkg/digest" "github.com/aquasecurity/trivy/pkg/fanal/analyzer" - "github.com/aquasecurity/trivy/pkg/fanal/log" "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/fanal/utils" + "github.com/aquasecurity/trivy/pkg/log" ) func init() { - analyzer.RegisterAnalyzer(&rpmPkgAnalyzer{}) + analyzer.RegisterAnalyzer(newRPMPkgAnalyzer()) } const version = 3 @@ -63,12 +63,17 @@ var osVendors = []string{ type rpmPkgAnalyzer struct{} +func newRPMPkgAnalyzer() *rpmPkgAnalyzer { + return &rpmPkgAnalyzer{} +} + type RPMDB interface { ListPackages() ([]*rpmdb.PackageInfo, error) } -func (a rpmPkgAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) { - parsedPkgs, installedFiles, err := a.parsePkgInfo(input.Content) +func (a rpmPkgAnalyzer) Analyze(ctx context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) { + ctx = log.WithContextPrefix(ctx, "rpm") + parsedPkgs, installedFiles, err := a.parsePkgInfo(ctx, input.Content) if err != nil { return nil, xerrors.Errorf("failed to parse rpmdb: %w", err) 
} @@ -84,7 +89,7 @@ func (a rpmPkgAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisInput) }, nil } -func (a rpmPkgAnalyzer) parsePkgInfo(rc io.Reader) (types.Packages, []string, error) { +func (a rpmPkgAnalyzer) parsePkgInfo(ctx context.Context, rc io.Reader) (types.Packages, []string, error) { filePath, err := writeToTempFile(rc) if err != nil { return nil, nil, xerrors.Errorf("temp file error: %w", err) @@ -99,10 +104,10 @@ func (a rpmPkgAnalyzer) parsePkgInfo(rc io.Reader) (types.Packages, []string, er } defer db.Close() - return a.listPkgs(db) + return a.listPkgs(ctx, db) } -func (a rpmPkgAnalyzer) listPkgs(db RPMDB) (types.Packages, []string, error) { +func (a rpmPkgAnalyzer) listPkgs(ctx context.Context, db RPMDB) (types.Packages, []string, error) { // equivalent: // new version: rpm -qa --qf "%{NAME} %{EPOCHNUM} %{VERSION} %{RELEASE} %{SOURCERPM} %{ARCH}\n" // old version: rpm -qa --qf "%{NAME} %{EPOCH} %{VERSION} %{RELEASE} %{SOURCERPM} %{ARCH}\n" @@ -126,7 +131,7 @@ func (a rpmPkgAnalyzer) listPkgs(db RPMDB) (types.Packages, []string, error) { // source epoch is not included in SOURCERPM srcName, srcVer, srcRel, err = splitFileName(pkg.SourceRpm) if err != nil { - log.Logger.Debugf("Invalid Source RPM Found: %s", pkg.SourceRpm) + log.DebugContext(ctx, "Invalid Source RPM Found", log.String("sourcerpm", pkg.SourceRpm)) } } diff --git a/pkg/fanal/analyzer/pkg/rpm/rpm_test.go b/pkg/fanal/analyzer/pkg/rpm/rpm_test.go index 2ef2a0b46219..7c1ec8ca77c8 100644 --- a/pkg/fanal/analyzer/pkg/rpm/rpm_test.go +++ b/pkg/fanal/analyzer/pkg/rpm/rpm_test.go @@ -125,13 +125,20 @@ func Test_rpmPkgAnalyzer_listPkgs(t *testing.T) { mock: mock{ packages: []*rpmdb.PackageInfo{ { - Name: "glibc", - Version: "2.17", - Release: "307.el7.1", - Arch: "x86_64", - SourceRpm: "glibc-2.17-317.el7.src.rpm", - DirNames: []string{"/etc", "/lib64"}, - DirIndexes: []int32{0, 0, 1}, + Name: "glibc", + Version: "2.17", + Release: "307.el7.1", + Arch: "x86_64", + SourceRpm: 
"glibc-2.17-317.el7.src.rpm", + DirNames: []string{ + "/etc", + "/lib64", + }, + DirIndexes: []int32{ + 0, + 0, + 1, + }, BaseNames: []string{ "ld.so.conf", "rpc", @@ -182,7 +189,13 @@ func Test_rpmPkgAnalyzer_listPkgs(t *testing.T) { "/usr/lib/.build-id/aa/", "/usr/share/man/man1/", }, - DirIndexes: []int32{0, 1, 2, 3, 4}, + DirIndexes: []int32{ + 0, + 1, + 2, + 3, + 4, + }, BaseNames: []string{ "curl", ".build-id", @@ -259,8 +272,8 @@ func Test_rpmPkgAnalyzer_listPkgs(t *testing.T) { err: tt.mock.err, } - a := rpmPkgAnalyzer{} - gotPkgs, gotFiles, err := a.listPkgs(m) + a := newRPMPkgAnalyzer() + gotPkgs, gotFiles, err := a.listPkgs(context.Background(), m) if tt.wantErr != "" { assert.ErrorContains(t, err, tt.wantErr) return diff --git a/pkg/fanal/analyzer/secret/secret_test.go b/pkg/fanal/analyzer/secret/secret_test.go index 4f499c4d59c3..7cba1d137e8f 100644 --- a/pkg/fanal/analyzer/secret/secret_test.go +++ b/pkg/fanal/analyzer/secret/secret_test.go @@ -111,7 +111,10 @@ func TestSecretAnalyzer(t *testing.T) { Secrets: []types.Secret{ { FilePath: "testdata/secret.txt", - Findings: []types.SecretFinding{wantFinding1, wantFinding2}, + Findings: []types.SecretFinding{ + wantFinding1, + wantFinding2, + }, }, }, }, @@ -124,7 +127,10 @@ func TestSecretAnalyzer(t *testing.T) { Secrets: []types.Secret{ { FilePath: "/testdata/secret.txt", - Findings: []types.SecretFinding{wantFinding1, wantFinding2}, + Findings: []types.SecretFinding{ + wantFinding1, + wantFinding2, + }, }, }, }, @@ -151,7 +157,7 @@ func TestSecretAnalyzer(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - a := &secret.SecretAnalyzer{} + a := secret.SecretAnalyzer{} err := a.Init(analyzer.AnalyzerOptions{ SecretScannerOption: analyzer.SecretScannerOption{ConfigPath: tt.configPath}, }) @@ -161,7 +167,7 @@ func TestSecretAnalyzer(t *testing.T) { fi, err := content.Stat() require.NoError(t, err) - got, err := a.Analyze(context.TODO(), analyzer.AnalysisInput{ + got, err := 
a.Analyze(context.Background(), analyzer.AnalysisInput{ FilePath: tt.filePath, Dir: tt.dir, Content: content, diff --git a/pkg/fanal/applier/docker.go b/pkg/fanal/applier/docker.go index abcc1ce51958..0e9781edbca6 100644 --- a/pkg/fanal/applier/docker.go +++ b/pkg/fanal/applier/docker.go @@ -253,7 +253,7 @@ func ApplyLayers(layers []ftypes.BlobInfo) ftypes.ArtifactDetail { func newPURL(pkgType ftypes.TargetType, metadata types.Metadata, pkg ftypes.Package) *packageurl.PackageURL { p, err := purl.New(pkgType, metadata, pkg) if err != nil { - log.Logger.Errorf("Failed to create PackageURL: %s", err) + log.Error("Failed to create PackageURL", log.Err(err)) return nil } return p.Unwrap() diff --git a/pkg/fanal/artifact/image/image.go b/pkg/fanal/artifact/image/image.go index 782d13a86097..e22f8d7bb0a2 100644 --- a/pkg/fanal/artifact/image/image.go +++ b/pkg/fanal/artifact/image/image.go @@ -19,14 +19,15 @@ import ( "github.com/aquasecurity/trivy/pkg/fanal/cache" "github.com/aquasecurity/trivy/pkg/fanal/handler" "github.com/aquasecurity/trivy/pkg/fanal/image" - "github.com/aquasecurity/trivy/pkg/fanal/log" "github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/fanal/walker" + "github.com/aquasecurity/trivy/pkg/log" "github.com/aquasecurity/trivy/pkg/parallel" "github.com/aquasecurity/trivy/pkg/semaphore" ) type Artifact struct { + logger *log.Logger image types.Image cache cache.ArtifactCache walker walker.LayerTar @@ -60,6 +61,7 @@ func NewArtifact(img types.Image, c cache.ArtifactCache, opt artifact.Option) (a } return Artifact{ + logger: log.WithPrefix("image"), image: img, cache: c, walker: walker.NewLayerTar(opt.SkipFiles, opt.SkipDirs), @@ -76,6 +78,7 @@ func (a Artifact) Inspect(ctx context.Context) (types.ArtifactReference, error) if err != nil { return types.ArtifactReference{}, xerrors.Errorf("unable to get the image ID: %w", err) } + a.logger.Debug("Detected image ID", log.String("image_id", imageID)) configFile, err := 
a.image.ConfigFile() if err != nil { @@ -83,10 +86,7 @@ func (a Artifact) Inspect(ctx context.Context) (types.ArtifactReference, error) } diffIDs := a.diffIDs(configFile) - - // Debug - log.Logger.Debugf("Image ID: %s", imageID) - log.Logger.Debugf("Diff IDs: %v", diffIDs) + a.logger.Debug("Detected diff ID", log.Any("diff_ids", diffIDs)) // Try retrieving a remote SBOM document if res, err := a.retrieveRemoteSBOM(ctx); err == nil { @@ -99,7 +99,7 @@ func (a Artifact) Inspect(ctx context.Context) (types.ArtifactReference, error) // Try to detect base layers. baseDiffIDs := a.guessBaseLayers(diffIDs, configFile) - log.Logger.Debugf("Base Layers: %v", baseDiffIDs) + a.logger.Debug("Detected base layers", log.Any("diff_ids", baseDiffIDs)) // Convert image ID and layer IDs to cache keys imageKey, layerKeys, err := a.calcCacheKeys(imageID, diffIDs) @@ -117,7 +117,7 @@ func (a Artifact) Inspect(ctx context.Context) (types.ArtifactReference, error) missingImageKey := imageKey if missingImage { - log.Logger.Debugf("Missing image ID in cache: %s", imageID) + a.logger.Debug("Missing image ID in cache", log.String("image_id", imageID)) } else { missingImageKey = "" } @@ -239,7 +239,7 @@ func (a Artifact) inspect(ctx context.Context, missingImage string, layerKeys, b } func (a Artifact) inspectLayer(ctx context.Context, layerInfo LayerInfo, disabled []analyzer.Type) (types.BlobInfo, error) { - log.Logger.Debugf("Missing diff ID in cache: %s", layerInfo.DiffID) + a.logger.Debug("Missing diff ID in cache", log.String("diff_id", layerInfo.DiffID)) layerDigest, rc, err := a.uncompressedLayer(layerInfo.DiffID) if err != nil { diff --git a/pkg/fanal/artifact/image/remote_sbom.go b/pkg/fanal/artifact/image/remote_sbom.go index f0c9ae26bfeb..9bb609e64c3b 100644 --- a/pkg/fanal/artifact/image/remote_sbom.go +++ b/pkg/fanal/artifact/image/remote_sbom.go @@ -15,8 +15,8 @@ import ( sbomatt "github.com/aquasecurity/trivy/pkg/attestation/sbom" 
"github.com/aquasecurity/trivy/pkg/fanal/artifact/sbom" - "github.com/aquasecurity/trivy/pkg/fanal/log" ftypes "github.com/aquasecurity/trivy/pkg/fanal/types" + "github.com/aquasecurity/trivy/pkg/log" "github.com/aquasecurity/trivy/pkg/oci" "github.com/aquasecurity/trivy/pkg/remote" "github.com/aquasecurity/trivy/pkg/types" @@ -42,7 +42,7 @@ func (a Artifact) retrieveRemoteSBOM(ctx context.Context) (ftypes.ArtifactRefere ref, err := inspect(ctx) if errors.Is(err, errNoSBOMFound) { // Try the next SBOM source - log.Logger.Debugf("No SBOM found in the source: %s", sbomSource) + a.logger.Debug("No SBOM found in the source", log.String("source", sbomSource)) continue } else if err != nil { return ftypes.ArtifactReference{}, xerrors.Errorf("SBOM searching error: %w", err) @@ -74,7 +74,8 @@ func (a Artifact) inspectOCIReferrerSBOM(ctx context.Context) (ftypes.ArtifactRe } res, err := a.parseReferrer(ctx, digest.Context().String(), m) if err != nil { - log.Logger.Warnf("Error with SBOM via OCI referrers (%s): %s", m.Digest.String(), err) + a.logger.Warn("Error with SBOM via OCI referrers", + log.String("digest", m.Digest.String()), log.Err(err)) continue } return res, nil @@ -110,7 +111,7 @@ func (a Artifact) parseReferrer(ctx context.Context, repo string, desc v1.Descri } // Found SBOM - log.Logger.Infof("Found SBOM (%s) in the OCI referrers", res.Type) + a.logger.Info("Found SBOM in the OCI referrers", log.String("type", string(res.Type))) return res, nil } @@ -151,7 +152,8 @@ func (a Artifact) inspectRekorSBOMAttestation(ctx context.Context) (ftypes.Artif } // Found SBOM - log.Logger.Infof("Found SBOM (%s) in Rekor (%s)", res.Type, a.artifactOption.RekorURL) + a.logger.Info("Found SBOM in Rekor", log.String("type", string(res.Type)), + log.String("url", a.artifactOption.RekorURL)) return res, nil } diff --git a/pkg/fanal/artifact/image/remote_sbom_test.go b/pkg/fanal/artifact/image/remote_sbom_test.go index c9255c2057bd..ef777fe5c641 100644 --- 
a/pkg/fanal/artifact/image/remote_sbom_test.go +++ b/pkg/fanal/artifact/image/remote_sbom_test.go @@ -141,7 +141,7 @@ func TestArtifact_InspectRekorAttestation(t *testing.T) { }, } - require.NoError(t, log.InitLogger(false, true)) + log.InitLogger(false, true) for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { ts := rekortest.NewServer(t) diff --git a/pkg/fanal/artifact/local/fs.go b/pkg/fanal/artifact/local/fs.go index 8d7409cdfae3..ff72d01a72fd 100644 --- a/pkg/fanal/artifact/local/fs.go +++ b/pkg/fanal/artifact/local/fs.go @@ -61,7 +61,7 @@ func buildPathsToSkip(base string, paths []string) []string { var relativePaths []string absBase, err := filepath.Abs(base) if err != nil { - log.Logger.Warnf("Failed to get an absolute path of %s: %s", base, err) + log.Warn("Failed to get an absolute path", log.String("base", base), log.Err(err)) return nil } for _, path := range paths { @@ -84,12 +84,12 @@ func buildPathsToSkip(base string, paths []string) []string { absSkipPath, err := filepath.Abs(path) if err != nil { - log.Logger.Warnf("Failed to get an absolute path of %s: %s", base, err) + log.Warn("Failed to get an absolute path", log.String("base", base), log.Err(err)) continue } rel, err := filepath.Rel(absBase, absSkipPath) if err != nil { - log.Logger.Warnf("Failed to get a relative path from %s to %s: %s", base, path, err) + log.Warn("Failed to get a relative path", log.String("base", base), log.Err(err)) continue } diff --git a/pkg/fanal/artifact/sbom/sbom.go b/pkg/fanal/artifact/sbom/sbom.go index 2be0ce31e6a4..90eed8c89e1b 100644 --- a/pkg/fanal/artifact/sbom/sbom.go +++ b/pkg/fanal/artifact/sbom/sbom.go @@ -49,7 +49,7 @@ func (a Artifact) Inspect(_ context.Context) (types.ArtifactReference, error) { if err != nil { return types.ArtifactReference{}, xerrors.Errorf("failed to detect SBOM format: %w", err) } - log.Logger.Infof("Detected SBOM format: %s", format) + log.Info("Detected SBOM format", log.String("format", string(format))) bom, err := 
sbom.Decode(f, format) if err != nil { diff --git a/pkg/fanal/artifact/vm/ami.go b/pkg/fanal/artifact/vm/ami.go index 6ce2dbd4ef30..791c41d52896 100644 --- a/pkg/fanal/artifact/vm/ami.go +++ b/pkg/fanal/artifact/vm/ami.go @@ -41,7 +41,7 @@ func newAMI(imageID string, storage Storage, region, endpoint string) (*AMI, err if snapshotID == "" { continue } - log.Logger.Infof("Snapshot %s found", snapshotID) + log.WithPrefix("ami").Info("Snapshot found", log.String("snapshot_id", snapshotID)) ebs, err := newEBS(snapshotID, storage, region, endpoint) if err != nil { return nil, xerrors.Errorf("new EBS error: %w", err) diff --git a/pkg/fanal/artifact/vm/ebs.go b/pkg/fanal/artifact/vm/ebs.go index 879d5e9b424c..d9881edf193a 100644 --- a/pkg/fanal/artifact/vm/ebs.go +++ b/pkg/fanal/artifact/vm/ebs.go @@ -21,6 +21,7 @@ const storageEBSCacheSize = 128 // EBS represents an artifact for AWS EBS snapshots type EBS struct { Storage + logger *log.Logger snapshotID string ebs ebsfile.EBSAPI } @@ -33,6 +34,7 @@ func newEBS(snapshotID string, vm Storage, region, endpoint string) (*EBS, error return &EBS{ Storage: vm, + logger: log.WithPrefix("ebs"), snapshotID: snapshotID, ebs: ebs, }, nil @@ -107,7 +109,7 @@ func (a *EBS) calcCacheKey(key string) (string, error) { func (a *EBS) hasCache(cacheKey string) bool { _, missingCacheKeys, err := a.cache.MissingBlobs(cacheKey, []string{cacheKey}) if err != nil { - log.Logger.Debugf("Unable to query missing cache: %s", err) + a.logger.Debug("Unable to query missing cache", log.Err(err)) return false } @@ -116,6 +118,6 @@ func (a *EBS) hasCache(cacheKey string) bool { return true } - log.Logger.Debugf("Missing virtual machine cache: %s", cacheKey) + a.logger.Debug("Missing virtual machine cache", log.String("key", cacheKey)) return false } diff --git a/pkg/fanal/artifact/vm/file.go b/pkg/fanal/artifact/vm/file.go index cecddf57e472..58fd4d46c96a 100644 --- a/pkg/fanal/artifact/vm/file.go +++ b/pkg/fanal/artifact/vm/file.go @@ -49,8 +49,9 @@ 
func newFile(filePath string, storage Storage) (*ImageFile, error) { return nil, err } - log.Logger.Debugf("VM image not detected: %s", err) - log.Logger.Debugf("Assume raw image") + logger := log.WithPrefix("vm") + logger.Debug("VM image not detected", log.Err(err)) + logger.Debug("Assume raw image") fi, err := f.Stat() if err != nil { return nil, xerrors.Errorf("file stat error: %w", err) diff --git a/pkg/fanal/handler/unpackaged/unpackaged.go b/pkg/fanal/handler/unpackaged/unpackaged.go index 5f450c7923cb..119cae3e7dad 100644 --- a/pkg/fanal/handler/unpackaged/unpackaged.go +++ b/pkg/fanal/handler/unpackaged/unpackaged.go @@ -24,6 +24,7 @@ func init() { const version = 1 type unpackagedHook struct { + logger *log.Logger client sbomatt.Rekor } @@ -33,6 +34,7 @@ func NewUnpackagedHandler(opt artifact.Option) (handler.PostHandler, error) { return nil, xerrors.Errorf("rekor client error: %w", err) } return unpackagedHook{ + logger: log.WithPrefix("unpackaged"), client: c, }, nil } @@ -68,7 +70,7 @@ func (h unpackagedHook) Handle(ctx context.Context, res *analyzer.AnalysisResult } if len(bom.Applications) > 0 { - log.Logger.Infof("Found SBOM attestation in Rekor: %s", filePath) + h.logger.Info("Found SBOM attestation in Rekor", log.String("file_path", filePath)) // Take the first app since this SBOM should contain a single application. app := bom.Applications[0] app.FilePath = filePath // Use the original file path rather than the one in the SBOM. 
diff --git a/pkg/fanal/handler/unpackaged/unpackaged_test.go b/pkg/fanal/handler/unpackaged/unpackaged_test.go index b466aa83282a..685af042d131 100644 --- a/pkg/fanal/handler/unpackaged/unpackaged_test.go +++ b/pkg/fanal/handler/unpackaged/unpackaged_test.go @@ -74,7 +74,7 @@ func Test_unpackagedHook_Handle(t *testing.T) { }, } - require.NoError(t, log.InitLogger(false, true)) + log.InitLogger(false, true) for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { ts := rekortest.NewServer(t) diff --git a/pkg/fanal/image/daemon/image.go b/pkg/fanal/image/daemon/image.go index 23adf966f84b..5d80cb93eee4 100644 --- a/pkg/fanal/image/daemon/image.go +++ b/pkg/fanal/image/daemon/image.go @@ -132,7 +132,7 @@ func (img *image) ConfigFile() (*v1.ConfigFile, error) { } func (img *image) configFile() (*v1.ConfigFile, error) { - log.Logger.Debug("Saving the container image to a local file to obtain the image config...") + log.Debug("Saving the container image to a local file to obtain the image config...") // Need to fall back into expensive operations like "docker save" // because the config file cannot be generated properly from container engine API for some reason. 
diff --git a/pkg/fanal/image/image.go b/pkg/fanal/image/image.go index fc8e14ab37e1..2a8862a89ab5 100644 --- a/pkg/fanal/image/image.go +++ b/pkg/fanal/image/image.go @@ -42,7 +42,7 @@ func NewContainerImage(ctx context.Context, imageName string, opt types.ImageOpt for _, src := range opt.ImageSources { trySrc, ok := imageSourceFuncs[src] if !ok { - log.Logger.Warnf("Unknown image source: '%s'", src) + log.Warn("Unknown image source", log.String("source", string(src))) continue } diff --git a/pkg/fanal/image/registry/token.go b/pkg/fanal/image/registry/token.go index 1e51b0fd31a4..b959c6cc7bbc 100644 --- a/pkg/fanal/image/registry/token.go +++ b/pkg/fanal/image/registry/token.go @@ -8,8 +8,8 @@ import ( "github.com/aquasecurity/trivy/pkg/fanal/image/registry/azure" "github.com/aquasecurity/trivy/pkg/fanal/image/registry/ecr" "github.com/aquasecurity/trivy/pkg/fanal/image/registry/google" - "github.com/aquasecurity/trivy/pkg/fanal/log" "github.com/aquasecurity/trivy/pkg/fanal/types" + "github.com/aquasecurity/trivy/pkg/log" ) var ( @@ -41,7 +41,7 @@ func GetToken(ctx context.Context, domain string, opt types.RegistryOptions) (au username, password, err := registry.GetCredential(ctx) if err != nil { // only skip check registry if error occurred - log.Logger.Debug(err) + log.Debug("Credential error", log.Err(err)) break } return authn.Basic{ diff --git a/pkg/fanal/log/log.go b/pkg/fanal/log/log.go deleted file mode 100644 index 93344390d03e..000000000000 --- a/pkg/fanal/log/log.go +++ /dev/null @@ -1,17 +0,0 @@ -package log - -import ( - "go.uber.org/zap" -) - -var Logger *zap.SugaredLogger - -func init() { - if logger, err := zap.NewProduction(); err == nil { - Logger = logger.Sugar() - } -} - -func SetLogger(l *zap.SugaredLogger) { - Logger = l -} diff --git a/pkg/fanal/secret/scanner.go b/pkg/fanal/secret/scanner.go index c773b9707ae3..ef10ad45bb0c 100644 --- a/pkg/fanal/secret/scanner.go +++ b/pkg/fanal/secret/scanner.go @@ -14,13 +14,14 @@ import ( 
"golang.org/x/xerrors" "gopkg.in/yaml.v3" - "github.com/aquasecurity/trivy/pkg/fanal/log" "github.com/aquasecurity/trivy/pkg/fanal/types" + "github.com/aquasecurity/trivy/pkg/log" ) var lineSep = []byte{'\n'} type Scanner struct { + logger *log.Logger *Global } @@ -123,7 +124,8 @@ func (s *Scanner) FindSubmatchLocations(r Rule, content []byte) []Location { var submatchLocations []Location matchsIndices := r.Regex.FindAllSubmatchIndex(content, -1) for _, matchIndices := range matchsIndices { - matchLocation := Location{ // first two indexes are always start and end of the whole match + matchLocation := Location{ + // first two indexes are always start and end of the whole match Start: matchIndices[0], End: matchIndices[1], } @@ -151,7 +153,10 @@ func (r *Rule) getMatchSubgroupsLocations(matchLocs []int) []Location { if name == r.SecretGroupName { startLocIndex := 2 * i endLocIndex := startLocIndex + 1 - locations = append(locations, Location{Start: matchLocs[startLocIndex], End: matchLocs[endLocIndex]}) + locations = append(locations, Location{ + Start: matchLocs[startLocIndex], + End: matchLocs[endLocIndex], + }) } } return locations @@ -270,17 +275,18 @@ func ParseConfig(configPath string) (*Config, error) { return nil, nil } + logger := log.WithPrefix("secret").With("config_path", configPath) f, err := os.Open(configPath) if errors.Is(err, os.ErrNotExist) { // If the specified file doesn't exist, it just uses built-in rules and allow rules. 
- log.Logger.Debugf("No secret config detected: %s", configPath) + logger.Debug("No secret config detected") return nil, nil } else if err != nil { return nil, xerrors.Errorf("file open error %s: %w", configPath, err) } defer f.Close() - log.Logger.Infof("Loading %s for secret scanning...", configPath) + logger.Info("Loading the config file for secret scanning...") var config Config if err = yaml.NewDecoder(f).Decode(&config); err != nil { @@ -291,12 +297,17 @@ func ParseConfig(configPath string) (*Config, error) { } func NewScanner(config *Config) Scanner { + logger := log.WithPrefix("secret") + // Use the default rules if config == nil { - return Scanner{Global: &Global{ - Rules: builtinRules, - AllowRules: builtinAllowRules, - }} + return Scanner{ + logger: logger, + Global: &Global{ + Rules: builtinRules, + AllowRules: builtinAllowRules, + }, + } } enabledRules := builtinRules @@ -321,11 +332,14 @@ func NewScanner(config *Config) Scanner { return !slices.Contains(config.DisableAllowRuleIDs, v.ID) }) - return Scanner{Global: &Global{ - Rules: rules, - AllowRules: allowRules, - ExcludeBlock: config.ExcludeBlock, - }} + return Scanner{ + logger: logger, + Global: &Global{ + Rules: rules, + AllowRules: allowRules, + ExcludeBlock: config.ExcludeBlock, + }, + } } type ScanArgs struct { @@ -339,9 +353,11 @@ type Match struct { } func (s *Scanner) Scan(args ScanArgs) types.Secret { + logger := s.logger.With("file_path", args.FilePath) + // Global allowed paths if s.AllowPath(args.FilePath) { - log.Logger.Debugf("Skipped secret scanning on %q matching allowed paths", args.FilePath) + logger.Debug("Skipped secret scanning matching allowed paths") return types.Secret{ FilePath: args.FilePath, } @@ -354,15 +370,16 @@ func (s *Scanner) Scan(args ScanArgs) types.Secret { var findings []types.SecretFinding globalExcludedBlocks := newBlocks(args.Content, s.ExcludeBlock.Regexes) for _, rule := range s.Rules { + ruleLogger := logger.With("rule_id", rule.ID) // Check if the 
file path should be scanned by this rule if !rule.MatchPath(args.FilePath) { - log.Logger.Debugf("Skipped secret scanning on %q as non-compliant to the rule %q", args.FilePath, rule.ID) + ruleLogger.Debug("Skipped secret scanning as non-compliant to the rule") continue } // Check if the file path should be allowed if rule.AllowPath(args.FilePath) { - log.Logger.Debugf("Skipped secret scanning on %q as allowed", args.FilePath) + ruleLogger.Debug("Skipped secret scanning as allowed") continue } diff --git a/pkg/fanal/secret/scanner_test.go b/pkg/fanal/secret/scanner_test.go index 2e43047f9d53..fe73270b9ae7 100644 --- a/pkg/fanal/secret/scanner_test.go +++ b/pkg/fanal/secret/scanner_test.go @@ -6,19 +6,15 @@ import ( "path/filepath" "testing" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "go.uber.org/zap" - "go.uber.org/zap/zapcore" - - "github.com/aquasecurity/trivy/pkg/fanal/log" "github.com/aquasecurity/trivy/pkg/fanal/secret" "github.com/aquasecurity/trivy/pkg/fanal/types" + "github.com/aquasecurity/trivy/pkg/log" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestMain(m *testing.M) { - logger, _ := zap.NewDevelopment(zap.IncreaseLevel(zapcore.FatalLevel)) - log.SetLogger(logger.Sugar()) + log.InitLogger(false, true) os.Exit(m.Run()) } @@ -675,7 +671,10 @@ func TestSecretScanner(t *testing.T) { inputFilePath: filepath.Join("testdata", "secret.txt"), want: types.Secret{ FilePath: filepath.Join("testdata", "secret.txt"), - Findings: []types.SecretFinding{wantFinding1, wantFinding2}, + Findings: []types.SecretFinding{ + wantFinding1, + wantFinding2, + }, }, }, { @@ -684,7 +683,11 @@ func TestSecretScanner(t *testing.T) { inputFilePath: filepath.Join("testdata", "aws-secrets.txt"), want: types.Secret{ FilePath: filepath.Join("testdata", "aws-secrets.txt"), - Findings: []types.SecretFinding{wantFinding5, wantFinding10, wantFinding9}, + Findings: []types.SecretFinding{ + wantFinding5, + 
wantFinding10, + wantFinding9, + }, }, }, { @@ -720,7 +723,10 @@ func TestSecretScanner(t *testing.T) { inputFilePath: filepath.Join("testdata", "docker-secrets.txt"), want: types.Secret{ FilePath: filepath.Join("testdata", "docker-secrets.txt"), - Findings: []types.SecretFinding{wantFindingDockerKey1, wantFindingDockerKey2}, + Findings: []types.SecretFinding{ + wantFindingDockerKey1, + wantFindingDockerKey2, + }, }, }, { @@ -738,7 +744,10 @@ func TestSecretScanner(t *testing.T) { inputFilePath: filepath.Join("testdata", "secret.txt"), want: types.Secret{ FilePath: filepath.Join("testdata", "secret.txt"), - Findings: []types.SecretFinding{wantFinding1, wantFinding2}, + Findings: []types.SecretFinding{ + wantFinding1, + wantFinding2, + }, }, }, { @@ -761,7 +770,10 @@ func TestSecretScanner(t *testing.T) { inputFilePath: filepath.Join("testdata", "secret.md"), want: types.Secret{ FilePath: filepath.Join("testdata", "secret.md"), - Findings: []types.SecretFinding{wantFinding1, wantFinding2}, + Findings: []types.SecretFinding{ + wantFinding1, + wantFinding2, + }, }, }, { @@ -770,7 +782,10 @@ func TestSecretScanner(t *testing.T) { inputFilePath: filepath.Join("testdata", "builtin-rule-secret.txt"), want: types.Secret{ FilePath: filepath.Join("testdata", "builtin-rule-secret.txt"), - Findings: []types.SecretFinding{wantFinding5a, wantFinding6}, + Findings: []types.SecretFinding{ + wantFinding5a, + wantFinding6, + }, }, }, { @@ -877,7 +892,10 @@ func TestSecretScanner(t *testing.T) { inputFilePath: filepath.Join("testdata", "secret.txt"), want: types.Secret{ FilePath: filepath.Join("testdata", "secret.txt"), - Findings: []types.SecretFinding{wantFinding3, wantFinding4}, + Findings: []types.SecretFinding{ + wantFinding3, + wantFinding4, + }, }, }, { diff --git a/pkg/fanal/walker/fs.go b/pkg/fanal/walker/fs.go index 5397c1bdfc10..88a623ed1792 100644 --- a/pkg/fanal/walker/fs.go +++ b/pkg/fanal/walker/fs.go @@ -88,7 +88,7 @@ func (w FS) walkFast(root string, walkFn 
fastWalkFunc) error { // Multiple goroutines stat the filesystem concurrently. The provided // walkFn must be safe for concurrent use. - log.Logger.Debugf("Walk the file tree rooted at '%s' in parallel", root) + log.Debug("Walking the file tree in parallel", log.String("root", root)) if err := swalker.Walk(root, walkFn, errorCallbackOption); err != nil { return xerrors.Errorf("walk error: %w", err) } @@ -96,7 +96,7 @@ func (w FS) walkFast(root string, walkFn fastWalkFunc) error { } func (w FS) walkSlow(root string, walkFn fastWalkFunc) error { - log.Logger.Debugf("Walk the file tree rooted at '%s' in series", root) + log.Debug("Walking the file tree in series", log.String("root", root)) err := filepath.WalkDir(root, func(path string, d fs.DirEntry, err error) error { if err != nil { return w.errCallback(path, err) diff --git a/pkg/fanal/walker/vm.go b/pkg/fanal/walker/vm.go index 8d13e0aee57c..42206fa5efcc 100644 --- a/pkg/fanal/walker/vm.go +++ b/pkg/fanal/walker/vm.go @@ -35,6 +35,7 @@ func AppendPermitDiskName(s ...string) { type VM struct { walker + logger *log.Logger threshold int64 analyzeFn WalkFunc } @@ -42,6 +43,7 @@ type VM struct { func NewVM(skipFiles, skipDirs []string) *VM { threshold := defaultSizeThreshold return &VM{ + logger: log.WithPrefix("vm"), walker: newWalker(skipFiles, skipDirs), threshold: threshold, } @@ -72,7 +74,7 @@ func (w *VM) Walk(vreader *io.SectionReader, root string, fn WalkFunc) error { // Walk each partition if err = w.diskWalk(root, partition); err != nil { - log.Logger.Warnf("Partition error: %s", err.Error()) + w.logger.Warn("Partition error", log.Err(err)) } } return nil @@ -80,7 +82,7 @@ func (w *VM) Walk(vreader *io.SectionReader, root string, fn WalkFunc) error { // Inject disk partitioning processes from externally with diskWalk. 
func (w *VM) diskWalk(root string, partition types.Partition) error { - log.Logger.Debugf("Found partition: %s", partition.Name()) + w.logger.Debug("Found partition", log.String("name", partition.Name())) sr := partition.GetSectionReader() @@ -89,7 +91,7 @@ func (w *VM) diskWalk(root string, partition types.Partition) error { if err != nil { return xerrors.Errorf("LVM detection error: %w", err) } else if foundLVM { - log.Logger.Errorf("LVM is not supported, skip %s.img", partition.Name()) + w.logger.Error("LVM is not supported, skipping", log.String("name", partition.Name()+".img")) return nil } diff --git a/pkg/fanal/walker/walk.go b/pkg/fanal/walker/walk.go index cebc86ee76d7..7d7d71e702f7 100644 --- a/pkg/fanal/walker/walk.go +++ b/pkg/fanal/walker/walk.go @@ -60,7 +60,7 @@ func (w *walker) shouldSkipFile(filePath string) bool { if err != nil { return false // return early if bad pattern } else if match { - log.Logger.Debugf("Skipping file: %s", filePath) + log.Debug("Skipping file", log.String("file_path", filePath)) return true } } @@ -81,7 +81,7 @@ func (w *walker) shouldSkipDir(dir string) bool { if match, err := doublestar.Match(pattern, dir); err != nil { return false // return early if bad pattern } else if match { - log.Logger.Debugf("Skipping directory: %s", dir) + log.Debug("Skipping directory", log.String("dir", dir)) return true } } diff --git a/pkg/flag/db_flags.go b/pkg/flag/db_flags.go index 58e7809a2152..fd426ae9ccbb 100644 --- a/pkg/flag/db_flags.go +++ b/pkg/flag/db_flags.go @@ -4,7 +4,6 @@ import ( "fmt" "github.com/google/go-containerregistry/pkg/name" - "go.uber.org/zap" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/db" @@ -145,7 +144,7 @@ func (f *DBFlagGroup) ToOptions() (DBOptions, error) { return DBOptions{}, xerrors.New("--skip-java-db-update and --download-java-db-only options can not be specified both") } if light { - log.Logger.Warn("'--light' option is deprecated and will be removed. 
See also: https://github.com/aquasecurity/trivy/discussions/1649") + log.Warn("'--light' option is deprecated and will be removed. See also: https://github.com/aquasecurity/trivy/discussions/1649") } var dbRepository, javaDBRepository name.Reference @@ -157,8 +156,8 @@ func (f *DBFlagGroup) ToOptions() (DBOptions, error) { // Add the schema version if the tag is not specified for backward compatibility. if t, ok := dbRepository.(name.Tag); ok && t.TagStr() == "" { dbRepository = t.Tag(fmt.Sprint(db.SchemaVersion)) - log.Logger.Infow("Adding schema version to the DB repository for backward compatibility", - zap.String("repository", dbRepository.String())) + log.Info("Adding schema version to the DB repository for backward compatibility", + log.String("repository", dbRepository.String())) } } @@ -169,8 +168,8 @@ func (f *DBFlagGroup) ToOptions() (DBOptions, error) { // Add the schema version if the tag is not specified for backward compatibility. if t, ok := javaDBRepository.(name.Tag); ok && t.TagStr() == "" { javaDBRepository = t.Tag(fmt.Sprint(javadb.SchemaVersion)) - log.Logger.Infow("Adding schema version to the Java DB repository for backward compatibility", - zap.String("repository", javaDBRepository.String())) + log.Info("Adding schema version to the Java DB repository for backward compatibility", + log.String("repository", javaDBRepository.String())) } } diff --git a/pkg/flag/db_flags_test.go b/pkg/flag/db_flags_test.go index b53f29135d74..5d121033fd3b 100644 --- a/pkg/flag/db_flags_test.go +++ b/pkg/flag/db_flags_test.go @@ -1,17 +1,14 @@ package flag_test import ( + "github.com/aquasecurity/trivy/pkg/log" "github.com/google/go-containerregistry/pkg/name" "testing" + "github.com/aquasecurity/trivy/pkg/flag" "github.com/spf13/viper" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "go.uber.org/zap" - "go.uber.org/zap/zaptest/observer" - - "github.com/aquasecurity/trivy/pkg/flag" - "github.com/aquasecurity/trivy/pkg/log" ) func 
TestDBFlagGroup_ToOptions(t *testing.T) { @@ -86,9 +83,7 @@ func TestDBFlagGroup_ToOptions(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - level := zap.WarnLevel - core, obs := observer.New(level) - log.Logger = zap.New(core).Sugar() + out := newLogger(log.LevelWarn) viper.Set(flag.SkipDBUpdateFlag.ConfigName, tt.fields.SkipDBUpdate) viper.Set(flag.DownloadDBOnlyFlag.ConfigName, tt.fields.DownloadDBOnly) @@ -109,11 +104,7 @@ func TestDBFlagGroup_ToOptions(t *testing.T) { assert.EqualExportedValues(t, tt.want, got) // Assert log messages - var gotMessages []string - for _, entry := range obs.AllUntimed() { - gotMessages = append(gotMessages, entry.Message) - } - assert.Equal(t, tt.wantLogs, gotMessages, tt.name) + assert.Equal(t, tt.wantLogs, out.Messages(), tt.name) }) } } diff --git a/pkg/flag/options.go b/pkg/flag/options.go index cfdce46e2240..9448f7c0f2fa 100644 --- a/pkg/flag/options.go +++ b/pkg/flag/options.go @@ -121,7 +121,7 @@ func (f *Flag[T]) parse() any { } v = viper.Get(alias.ConfigName) if v != nil { - log.Logger.Warnf("'%s' in config file is deprecated. Use '%s' instead.", alias.ConfigName, f.ConfigName) + log.Warnf("'%s' in config file is deprecated. Use '%s' instead.", alias.ConfigName, f.ConfigName) return v } } @@ -276,7 +276,7 @@ func (f *Flag[T]) BindEnv() error { } if alias.Deprecated { if _, ok := os.LookupEnv(envAlias); ok { - log.Logger.Warnf("'%s' is deprecated. Use '%s' instead.", envAlias, envName) + log.Warnf("'%s' is deprecated. Use '%s' instead.", envAlias, envName) } } } @@ -355,18 +355,18 @@ type Options struct { // Align takes consistency of options func (o *Options) Align() { if o.Format == types.FormatSPDX || o.Format == types.FormatSPDXJSON { - log.Logger.Info(`"--format spdx" and "--format spdx-json" disable security scanning`) + log.Info(`"--format spdx" and "--format spdx-json" disable security scanning`) o.Scanners = nil } // Vulnerability scanning is disabled by default for CycloneDX. 
if o.Format == types.FormatCycloneDX && !viper.IsSet(ScannersFlag.ConfigName) && len(o.K8sOptions.Components) == 0 { // remove K8sOptions.Components validation check when vuln scan is supported for k8s report with cycloneDX - log.Logger.Info(`"--format cyclonedx" disables security scanning. Specify "--scanners vuln" explicitly if you want to include vulnerabilities in the CycloneDX report.`) + log.Info(`"--format cyclonedx" disables security scanning. Specify "--scanners vuln" explicitly if you want to include vulnerabilities in the CycloneDX report.`) o.Scanners = nil } if o.Format == types.FormatCycloneDX && len(o.K8sOptions.Components) > 0 { - log.Logger.Info(`"k8s with --format cyclonedx" disable security scanning`) + log.Info(`"k8s with --format cyclonedx" disable security scanning`) o.Scanners = nil } } @@ -736,7 +736,7 @@ func (a flagAliases) NormalizeFunc() func(*pflag.FlagSet, string) pflag.Normaliz if alias.deprecated { // NormalizeFunc is called several times alias.once.Do(func() { - log.Logger.Warnf("'--%s' is deprecated. Use '--%s' instead.", name, alias.formalName) + log.Warnf("'--%s' is deprecated. 
Use '--%s' instead.", name, alias.formalName) }) } name = alias.formalName diff --git a/pkg/flag/options_test.go b/pkg/flag/options_test.go index 092e09d7b411..7827b4303b7d 100644 --- a/pkg/flag/options_test.go +++ b/pkg/flag/options_test.go @@ -1,11 +1,15 @@ package flag_test import ( + "bytes" "github.com/aquasecurity/trivy/pkg/flag" + "github.com/aquasecurity/trivy/pkg/log" "github.com/aquasecurity/trivy/pkg/types" "github.com/samber/lo" "github.com/spf13/cobra" "github.com/stretchr/testify/require" + "log/slog" + "strings" "testing" "github.com/spf13/viper" @@ -125,3 +129,26 @@ func setSliceValue[T any](key string, value []T) { viper.Set(key, value) } } + +type Output struct { + b *bytes.Buffer +} + +func (o Output) Messages() []string { + var messages []string + for _, line := range strings.Split(o.b.String(), "\n") { + if line == "" { + continue + } + ss := strings.Split(line, "\t") + messages = append(messages, strings.Join(ss[2:], "\t")) + } + return messages +} + +func newLogger(level slog.Level) Output { + out := bytes.NewBuffer(nil) + logger := log.New(log.NewHandler(out, &log.Options{Level: level})) + log.SetDefault(logger) + return Output{b: out} +} diff --git a/pkg/flag/remote_flags.go b/pkg/flag/remote_flags.go index 9277f2db908f..2348ef649e66 100644 --- a/pkg/flag/remote_flags.go +++ b/pkg/flag/remote_flags.go @@ -110,16 +110,16 @@ func (f *RemoteFlagGroup) ToOptions() (RemoteOptions, error) { if serverAddr == "" && listen == "" { switch { case len(customHeaders) > 0: - log.Logger.Warn(`"--custom-header" can be used only with "--server"`) + log.Warn(`"--custom-header" can be used only with "--server"`) case token != "": - log.Logger.Warn(`"--token" can be used only with "--server"`) + log.Warn(`"--token" can be used only with "--server"`) case tokenHeader != "" && tokenHeader != DefaultTokenHeader: - log.Logger.Warn(`"--token-header" can be used only with "--server"`) + log.Warn(`"--token-header" can be used only with "--server"`) } } if token == "" 
&& tokenHeader != DefaultTokenHeader { - log.Logger.Warn(`"--token-header" should be used with "--token"`) + log.Warn(`"--token-header" should be used with "--token"`) } if token != "" && tokenHeader != "" { diff --git a/pkg/flag/remote_flags_test.go b/pkg/flag/remote_flags_test.go index 4500b0bb5ca9..d6a7a95387db 100644 --- a/pkg/flag/remote_flags_test.go +++ b/pkg/flag/remote_flags_test.go @@ -1,17 +1,14 @@ package flag_test import ( + "github.com/aquasecurity/trivy/pkg/log" "github.com/stretchr/testify/require" "net/http" "testing" + "github.com/aquasecurity/trivy/pkg/flag" "github.com/spf13/viper" "github.com/stretchr/testify/assert" - "go.uber.org/zap" - "go.uber.org/zap/zaptest/observer" - - "github.com/aquasecurity/trivy/pkg/flag" - "github.com/aquasecurity/trivy/pkg/log" ) func TestRemoteFlagGroup_ToOptions(t *testing.T) { @@ -98,9 +95,7 @@ func TestRemoteFlagGroup_ToOptions(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - level := zap.WarnLevel - core, obs := observer.New(level) - log.Logger = zap.New(core).Sugar() + out := newLogger(log.LevelWarn) viper.Set(flag.ServerAddrFlag.ConfigName, tt.fields.Server) viper.Set(flag.ServerCustomHeadersFlag.ConfigName, tt.fields.CustomHeaders) @@ -119,11 +114,7 @@ func TestRemoteFlagGroup_ToOptions(t *testing.T) { assert.Equalf(t, tt.want, got, "ToOptions()") // Assert log messages - var gotMessages []string - for _, entry := range obs.AllUntimed() { - gotMessages = append(gotMessages, entry.Message) - } - assert.Equal(t, tt.wantLogs, gotMessages, tt.name) + assert.Equal(t, tt.wantLogs, out.Messages(), tt.name) }) } } diff --git a/pkg/flag/report_flags.go b/pkg/flag/report_flags.go index 94b8c2ff689d..c079e6bb256a 100644 --- a/pkg/flag/report_flags.go +++ b/pkg/flag/report_flags.go @@ -198,29 +198,29 @@ func (f *ReportFlagGroup) ToOptions() (ReportOptions, error) { if template != "" { if format == "" { - log.Logger.Warn("'--template' is ignored because '--format template' is not 
specified. Use '--template' option with '--format template' option.") + log.Warn("'--template' is ignored because '--format template' is not specified. Use '--template' option with '--format template' option.") } else if format != "template" { - log.Logger.Warnf("'--template' is ignored because '--format %s' is specified. Use '--template' option with '--format template' option.", format) + log.Warnf("'--template' is ignored because '--format %s' is specified. Use '--template' option with '--format template' option.", format) } } else { if format == types.FormatTemplate { - log.Logger.Warn("'--format template' is ignored because '--template' is not specified. Specify '--template' option when you use '--format template'.") + log.Warn("'--format template' is ignored because '--template' is not specified. Specify '--template' option when you use '--format template'.") } } // "--list-all-pkgs" option is unavailable with "--format table". // If user specifies "--list-all-pkgs" with "--format table", we should warn it. if listAllPkgs && format == types.FormatTable { - log.Logger.Warn(`"--list-all-pkgs" cannot be used with "--format table". Try "--format json" or other formats.`) + log.Warn(`"--list-all-pkgs" cannot be used with "--format table". Try "--format json" or other formats.`) } // "--dependency-tree" option is available only with "--format table". if dependencyTree { - log.Logger.Infof(`"--dependency-tree" only shows the dependents of vulnerable packages. ` + + log.Info(`"--dependency-tree" only shows the dependents of vulnerable packages. ` + `Note that it is the reverse of the usual dependency tree, which shows the packages that depend on the vulnerable package. ` + `It supports limited package managers. 
Please see the document for the detail.`) if format != types.FormatTable { - log.Logger.Warn(`"--dependency-tree" can be used only with "--format table".`) + log.Warn(`"--dependency-tree" can be used only with "--format table".`) } } @@ -275,16 +275,16 @@ func loadComplianceTypes(compliance string) (spec.ComplianceSpec, error) { func (f *ReportFlagGroup) forceListAllPkgs(format types.Format, listAllPkgs, dependencyTree bool) bool { if slices.Contains(types.SupportedSBOMFormats, format) && !listAllPkgs { - log.Logger.Debugf("%q automatically enables '--list-all-pkgs'.", types.SupportedSBOMFormats) + log.Debugf("%q automatically enables '--list-all-pkgs'.", types.SupportedSBOMFormats) return true } // We need this flag to insert dependency locations into Sarif('Package' struct contains 'Locations') if format == types.FormatSarif && !listAllPkgs { - log.Logger.Debugf("Sarif format automatically enables '--list-all-pkgs' to get locations") + log.Debug("Sarif format automatically enables '--list-all-pkgs' to get locations") return true } if dependencyTree && !listAllPkgs { - log.Logger.Debugf("'--dependency-tree' enables '--list-all-pkgs'.") + log.Debug("'--dependency-tree' enables '--list-all-pkgs'.") return true } return false @@ -300,6 +300,6 @@ func toSeverity(severity []string) []dbTypes.Severity { sev, _ := dbTypes.NewSeverity(s) return sev }) - log.Logger.Debugf("Severities: %q", severities) + log.Debug("Parsed severities", log.Any("severities", severities)) return severities } diff --git a/pkg/flag/report_flags_test.go b/pkg/flag/report_flags_test.go index 1d230398d5c7..37ba3c81b84f 100644 --- a/pkg/flag/report_flags_test.go +++ b/pkg/flag/report_flags_test.go @@ -1,19 +1,16 @@ package flag_test import ( + "github.com/aquasecurity/trivy/pkg/log" "testing" - "github.com/spf13/viper" - "github.com/stretchr/testify/assert" - "go.uber.org/zap" - "go.uber.org/zap/zaptest/observer" - dbTypes "github.com/aquasecurity/trivy-db/pkg/types" 
"github.com/aquasecurity/trivy/pkg/compliance/spec" "github.com/aquasecurity/trivy/pkg/flag" iacTypes "github.com/aquasecurity/trivy/pkg/iac/types" - "github.com/aquasecurity/trivy/pkg/log" "github.com/aquasecurity/trivy/pkg/types" + "github.com/spf13/viper" + "github.com/stretchr/testify/assert" ) func TestReportFlagGroup_ToOptions(t *testing.T) { @@ -67,7 +64,7 @@ func TestReportFlagGroup_ToOptions(t *testing.T) { }, wantLogs: []string{ `["cyclonedx" "spdx" "spdx-json" "github"] automatically enables '--list-all-pkgs'.`, - `Severities: ["CRITICAL"]`, + `Parsed severities severities=[CRITICAL]`, }, want: flag.ReportOptions{ Severities: []dbTypes.Severity{ @@ -187,12 +184,11 @@ func TestReportFlagGroup_ToOptions(t *testing.T) { t.Run(tt.name, func(t *testing.T) { t.Cleanup(viper.Reset) - level := zap.WarnLevel + level := log.LevelWarn if tt.fields.debug { - level = zap.DebugLevel + level = log.LevelDebug } - core, obs := observer.New(level) - log.Logger = zap.New(core).Sugar() + out := newLogger(level) setValue(flag.FormatFlag.ConfigName, string(tt.fields.format)) setValue(flag.TemplateFlag.ConfigName, tt.fields.template) @@ -229,11 +225,7 @@ func TestReportFlagGroup_ToOptions(t *testing.T) { assert.Equalf(t, tt.want, got, "ToOptions()") // Assert log messages - var gotMessages []string - for _, entry := range obs.AllUntimed() { - gotMessages = append(gotMessages, entry.Message) - } - assert.Equal(t, tt.wantLogs, gotMessages, tt.name) + assert.Equal(t, tt.wantLogs, out.Messages(), tt.name) }) } } diff --git a/pkg/flag/sbom_flags.go b/pkg/flag/sbom_flags.go index f5ab1aff3189..388911abd83e 100644 --- a/pkg/flag/sbom_flags.go +++ b/pkg/flag/sbom_flags.go @@ -56,7 +56,7 @@ func (f *SBOMFlagGroup) ToOptions() (SBOMOptions, error) { sbomFormat := f.SBOMFormat.Value() if artifactType != "" || sbomFormat != "" { - log.Logger.Error("'trivy sbom' is now for scanning SBOM. " + + log.Error("'trivy sbom' is now for scanning SBOM. 
" + "See https://github.com/aquasecurity/trivy/discussions/2407 for the detail") return SBOMOptions{}, xerrors.New("'--artifact-type' and '--sbom-format' are no longer available") } diff --git a/pkg/flag/scan_flags.go b/pkg/flag/scan_flags.go index aba3961b0243..e2128816e849 100644 --- a/pkg/flag/scan_flags.go +++ b/pkg/flag/scan_flags.go @@ -48,7 +48,7 @@ var ( case "misconf", "misconfiguration": return string(types.MisconfigScanner) case "config": - log.Logger.Warn("'--scanners config' is deprecated. Use '--scanners misconfig' instead. See https://github.com/aquasecurity/trivy/discussions/5586 for the detail.") + log.Warn("'--scanners config' is deprecated. Use '--scanners misconfig' instead. See https://github.com/aquasecurity/trivy/discussions/5586 for the detail.") return string(types.MisconfigScanner) } return s @@ -175,7 +175,7 @@ func (f *ScanFlagGroup) ToOptions(args []string) (ScanOptions, error) { parallel := f.Parallel.Value() if f.Parallel != nil && parallel == 0 { - log.Logger.Infof("Set '--parallel' to the number of CPUs (%d)", runtime.NumCPU()) + log.Info("Set '--parallel' to the number of CPUs", log.Int("parallel", runtime.NumCPU())) parallel = runtime.NumCPU() } diff --git a/pkg/flag/vulnerability_flags.go b/pkg/flag/vulnerability_flags.go index 3989fbfa1c51..f0db9e7b70c3 100644 --- a/pkg/flag/vulnerability_flags.go +++ b/pkg/flag/vulnerability_flags.go @@ -89,7 +89,7 @@ func (f *VulnerabilityFlagGroup) ToOptions() (VulnerabilityOptions, error) { switch { case ignoreUnfixed && len(ignoreStatuses) > 0: - log.Logger.Warn("'--ignore-unfixed' is ignored because '--ignore-status' is specified") + log.Warn("'--ignore-unfixed' is ignored because '--ignore-status' is specified") case ignoreUnfixed: // '--ignore-unfixed' is a shorthand of '--ignore-status'. 
ignoreStatuses = lo.FilterMap(dbTypes.Statuses, func(s string, _ int) (dbTypes.Status, bool) { @@ -102,7 +102,7 @@ func (f *VulnerabilityFlagGroup) ToOptions() (VulnerabilityOptions, error) { case len(ignoreStatuses) == 0: ignoreStatuses = nil } - log.Logger.Debugw("Ignore statuses", "statuses", ignoreStatuses) + log.Debug("Ignore statuses", log.Any("statuses", ignoreStatuses)) return VulnerabilityOptions{ VulnType: f.VulnType.Value(), diff --git a/pkg/flag/vulnerability_flags_test.go b/pkg/flag/vulnerability_flags_test.go index 02ee3c8d9605..4f4490753aff 100644 --- a/pkg/flag/vulnerability_flags_test.go +++ b/pkg/flag/vulnerability_flags_test.go @@ -1,17 +1,14 @@ package flag_test import ( + "github.com/aquasecurity/trivy/pkg/log" "github.com/stretchr/testify/require" "testing" - "github.com/spf13/viper" - "github.com/stretchr/testify/assert" - "go.uber.org/zap" - "go.uber.org/zap/zaptest/observer" - "github.com/aquasecurity/trivy/pkg/flag" - "github.com/aquasecurity/trivy/pkg/log" "github.com/aquasecurity/trivy/pkg/types" + "github.com/spf13/viper" + "github.com/stretchr/testify/assert" ) func TestVulnerabilityFlagGroup_ToOptions(t *testing.T) { @@ -49,10 +46,7 @@ func TestVulnerabilityFlagGroup_ToOptions(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - level := zap.WarnLevel - - core, obs := observer.New(level) - log.Logger = zap.New(core).Sugar() + out := newLogger(log.LevelWarn) viper.Set(flag.VulnTypeFlag.ConfigName, tt.fields.vulnType) @@ -66,11 +60,7 @@ func TestVulnerabilityFlagGroup_ToOptions(t *testing.T) { assert.Equalf(t, tt.want, got, "ToOptions()") // Assert log messages - var gotMessages []string - for _, entry := range obs.AllUntimed() { - gotMessages = append(gotMessages, entry.Message) - } - assert.Equal(t, tt.wantLogs, gotMessages, tt.name) + assert.Equal(t, tt.wantLogs, out.Messages(), tt.name) }) } diff --git a/pkg/iac/ignore/parse.go b/pkg/iac/ignore/parse.go index 075f1f621203..bb2f13603263 100644 --- 
a/pkg/iac/ignore/parse.go +++ b/pkg/iac/ignore/parse.go @@ -50,7 +50,7 @@ func parseLine(line string, rng types.Range, parsers []RuleSectionParser) []Rule rule, err := parseComment(section, rng, parsers) if err != nil { - log.Logger.Debugf("Failed to parse rule at %s: %s", rng.String(), err.Error()) + log.Debug("Failed to parse rule", log.String("range", rng.String()), log.Err(err)) continue } rules = append(rules, rule) @@ -60,7 +60,10 @@ func parseLine(line string, rng types.Range, parsers []RuleSectionParser) []Rule } func hasIgnoreRulePrefix(s string) (string, bool) { - for _, prefix := range []string{"tfsec:", "trivy:"} { + for _, prefix := range []string{ + "tfsec:", + "trivy:", + } { if after, found := strings.CutPrefix(s, prefix); found { return after, true } @@ -153,10 +156,10 @@ func (s *expiryDateParser) Key() string { func (s *expiryDateParser) Parse(str string) bool { parsed, err := time.Parse("2006-01-02", str) if err != nil { - log.Logger.Debugf("Incorrect time to ignore is specified: %s", str) + log.Debug("Incorrect time to ignore is specified", log.String("time", str)) parsed = time.Time{} } else if time.Now().After(parsed) { - log.Logger.Debug("Ignore rule time has expired for location: %s", s.rng.String()) + log.Debug("Ignore rule time has expired for location", log.String("range", s.rng.String())) } s.expiry = parsed diff --git a/pkg/javadb/client.go b/pkg/javadb/client.go index 86194b263569..408456e16500 100644 --- a/pkg/javadb/client.go +++ b/pkg/javadb/client.go @@ -48,15 +48,15 @@ func (u *Updater) Update() error { if !errors.Is(err, os.ErrNotExist) { return xerrors.Errorf("Java DB metadata error: %w", err) } else if u.skip { - log.Logger.Error("The first run cannot skip downloading Java DB") + log.Error("The first run cannot skip downloading Java DB") return xerrors.New("'--skip-java-db-update' cannot be specified on the first run") } } if (meta.Version != SchemaVersion || meta.NextUpdate.Before(time.Now().UTC())) && !u.skip { // Download 
DB - log.Logger.Infof("Java DB Repository: %s", u.repo) - log.Logger.Info("Downloading the Java DB...") + log.Info("Java DB Repository", log.Any("repository", u.repo)) + log.Info("Downloading the Java DB...") // TODO: support remote options var a *oci.Artifact @@ -78,7 +78,7 @@ func (u *Updater) Update() error { if err = metac.Update(meta); err != nil { return xerrors.Errorf("Java DB metadata update error: %w", err) } - log.Logger.Info("The Java DB is cached for 3 days. If you want to update the database more frequently, " + + log.Info("The Java DB is cached for 3 days. If you want to update the database more frequently, " + "the '--reset' flag clears the DB cache.") } diff --git a/pkg/k8s/commands/cluster.go b/pkg/k8s/commands/cluster.go index bf28f26f5d7f..b4ad9be78347 100644 --- a/pkg/k8s/commands/cluster.go +++ b/pkg/k8s/commands/cluster.go @@ -3,6 +3,7 @@ package commands import ( "context" + "go.uber.org/zap" "golang.org/x/exp/slices" "golang.org/x/xerrors" @@ -10,12 +11,14 @@ import ( "github.com/aquasecurity/trivy-kubernetes/pkg/k8s" "github.com/aquasecurity/trivy-kubernetes/pkg/trivyk8s" "github.com/aquasecurity/trivy/pkg/flag" - "github.com/aquasecurity/trivy/pkg/log" "github.com/aquasecurity/trivy/pkg/types" ) // clusterRun runs scan on kubernetes cluster func clusterRun(ctx context.Context, opts flag.Options, cluster k8s.Cluster) error { + // TODO: replace with log.Logger + logger, _ := zap.NewProduction() + if err := validateReportArguments(opts); err != nil { return err } @@ -23,13 +26,13 @@ func clusterRun(ctx context.Context, opts flag.Options, cluster k8s.Cluster) err var err error switch opts.Format { case types.FormatCycloneDX: - artifacts, err = trivyk8s.New(cluster, log.Logger).ListClusterBomInfo(ctx) + artifacts, err = trivyk8s.New(cluster, logger.Sugar()).ListClusterBomInfo(ctx) if err != nil { return xerrors.Errorf("get k8s artifacts with node info error: %w", err) } case types.FormatJSON, types.FormatTable: if 
opts.Scanners.AnyEnabled(types.MisconfigScanner) && slices.Contains(opts.Components, "infra") { - artifacts, err = trivyk8s.New(cluster, log.Logger, trivyk8s.WithExcludeOwned(opts.ExcludeOwned)).ListArtifactAndNodeInfo(ctx, + artifacts, err = trivyk8s.New(cluster, logger.Sugar(), trivyk8s.WithExcludeOwned(opts.ExcludeOwned)).ListArtifactAndNodeInfo(ctx, trivyk8s.WithScanJobNamespace(opts.NodeCollectorNamespace), trivyk8s.WithIgnoreLabels(opts.ExcludeNodes), trivyk8s.WithScanJobImageRef(opts.NodeCollectorImageRef), @@ -38,7 +41,7 @@ func clusterRun(ctx context.Context, opts flag.Options, cluster k8s.Cluster) err return xerrors.Errorf("get k8s artifacts with node info error: %w", err) } } else { - artifacts, err = trivyk8s.New(cluster, log.Logger).ListArtifacts(ctx) + artifacts, err = trivyk8s.New(cluster, logger.Sugar()).ListArtifacts(ctx) if err != nil { return xerrors.Errorf("get k8s artifacts error: %w", err) } diff --git a/pkg/k8s/commands/namespace.go b/pkg/k8s/commands/namespace.go index a748bf38dd9d..6d828d4efdf9 100644 --- a/pkg/k8s/commands/namespace.go +++ b/pkg/k8s/commands/namespace.go @@ -3,24 +3,27 @@ package commands import ( "context" + "go.uber.org/zap" "golang.org/x/xerrors" "github.com/aquasecurity/trivy-kubernetes/pkg/k8s" "github.com/aquasecurity/trivy-kubernetes/pkg/trivyk8s" "github.com/aquasecurity/trivy/pkg/flag" - "github.com/aquasecurity/trivy/pkg/log" ) // namespaceRun runs scan on kubernetes cluster func namespaceRun(ctx context.Context, opts flag.Options, cluster k8s.Cluster) error { + // TODO: replace with slog.Logger + logger, _ := zap.NewProduction() + if err := validateReportArguments(opts); err != nil { return err } var trivyk trivyk8s.TrivyK8S if opts.AllNamespaces { - trivyk = trivyk8s.New(cluster, log.Logger).AllNamespaces() + trivyk = trivyk8s.New(cluster, logger.Sugar()).AllNamespaces() } else { - trivyk = trivyk8s.New(cluster, log.Logger).Namespace(getNamespace(opts, cluster.GetCurrentNamespace())) + trivyk = 
trivyk8s.New(cluster, logger.Sugar()).Namespace(getNamespace(opts, cluster.GetCurrentNamespace())) } artifacts, err := trivyk.ListArtifacts(ctx) diff --git a/pkg/k8s/commands/resource.go b/pkg/k8s/commands/resource.go index 10557e5a62f0..1662fe25d4d8 100644 --- a/pkg/k8s/commands/resource.go +++ b/pkg/k8s/commands/resource.go @@ -4,13 +4,13 @@ import ( "context" "strings" + "go.uber.org/zap" "golang.org/x/xerrors" "github.com/aquasecurity/trivy-kubernetes/pkg/artifacts" "github.com/aquasecurity/trivy-kubernetes/pkg/k8s" "github.com/aquasecurity/trivy-kubernetes/pkg/trivyk8s" "github.com/aquasecurity/trivy/pkg/flag" - "github.com/aquasecurity/trivy/pkg/log" ) // resourceRun runs scan on kubernetes cluster @@ -24,7 +24,9 @@ func resourceRun(ctx context.Context, args []string, opts flag.Options, cluster var trivyk trivyk8s.TrivyK8S - trivyk = trivyk8s.New(cluster, log.Logger, trivyk8s.WithExcludeOwned(opts.ExcludeOwned)) + // TODO: replace with slog.Logger + logger, _ := zap.NewProduction() + trivyk = trivyk8s.New(cluster, logger.Sugar(), trivyk8s.WithExcludeOwned(opts.ExcludeOwned)) if opts.AllNamespaces { trivyk = trivyk.AllNamespaces() diff --git a/pkg/k8s/commands/run.go b/pkg/k8s/commands/run.go index e9e3510f6bce..567c63fa461e 100644 --- a/pkg/k8s/commands/run.go +++ b/pkg/k8s/commands/run.go @@ -27,6 +27,11 @@ const ( // Run runs a k8s scan func Run(ctx context.Context, args []string, opts flag.Options) error { + ctx, cancel := context.WithTimeout(ctx, opts.Timeout) + defer cancel() + + ctx = log.WithContextPrefix(ctx, "k8s") + cluster, err := k8s.GetCluster( k8s.WithContext(opts.K8sOptions.ClusterContext), k8s.WithKubeConfig(opts.K8sOptions.KubeConfig), @@ -36,12 +41,10 @@ func Run(ctx context.Context, args []string, opts flag.Options) error { if err != nil { return xerrors.Errorf("failed getting k8s cluster: %w", err) } - ctx, cancel := context.WithTimeout(ctx, opts.Timeout) - defer cancel() defer func() { if errors.Is(err, context.DeadlineExceeded) { - 
log.Logger.Warn("Increase --timeout value") + log.Warn("Increase --timeout value") } }() opts.K8sVersion = cluster.GetClusterVersion() @@ -68,8 +71,8 @@ type runner struct { func newRunner(flagOpts flag.Options, cluster string) *runner { return &runner{ - flagOpts, - cluster, + flagOpts: flagOpts, + cluster: cluster, } } @@ -83,7 +86,7 @@ func (r *runner) run(ctx context.Context, artifacts []*k8sArtifacts.Artifact) er } defer func() { if err := runner.Close(ctx); err != nil { - log.Logger.Errorf("failed to close runner: %s", err) + log.ErrorContext(ctx, "Failed to close runner", log.Err(err)) } }() diff --git a/pkg/k8s/report/report.go b/pkg/k8s/report/report.go index 5de332a703bc..0861a8669143 100644 --- a/pkg/k8s/report/report.go +++ b/pkg/k8s/report/report.go @@ -1,6 +1,7 @@ package report import ( + "errors" "fmt" "io" "strings" @@ -260,7 +261,7 @@ func createK8sResource(artifact *artifacts.Artifact, scanResults types.Results) func (r Report) PrintErrors() { for _, resource := range r.Resources { if resource.Error != "" { - log.Logger.Errorf("Error during vulnerabilities or misconfiguration scan: %s", resource.Error) + log.Error("Error during vulnerabilities or misconfiguration scan", log.Err(errors.New(resource.Error))) } } } diff --git a/pkg/k8s/scanner/io.go b/pkg/k8s/scanner/io.go index 09709db6441e..9c32699ddd63 100644 --- a/pkg/k8s/scanner/io.go +++ b/pkg/k8s/scanner/io.go @@ -28,7 +28,7 @@ func createTempFile(artifact *artifacts.Artifact) (string, error) { } defer func() { if err := file.Close(); err != nil { - log.Logger.Errorf("failed to close temp file %s: %s:", file.Name(), err) + log.Error("Failed to close temp file", log.String("path", file.Name()), log.Err(err)) } }() @@ -42,7 +42,7 @@ func createTempFile(artifact *artifacts.Artifact) (string, error) { func removeFile(filename string) { if err := os.Remove(filename); err != nil { - log.Logger.Errorf("failed to remove temp file %s: %s:", filename, err) + log.Error("Failed to remove temp file", 
log.String("path", filename), log.Err(err)) } } diff --git a/pkg/k8s/scanner/scanner.go b/pkg/k8s/scanner/scanner.go index 55fe4c1e9386..68698ea1d3d2 100644 --- a/pkg/k8s/scanner/scanner.go +++ b/pkg/k8s/scanner/scanner.go @@ -53,21 +53,12 @@ func NewScanner(cluster string, runner cmd.Runner, opts flag.Options) *Scanner { func (s *Scanner) Scan(ctx context.Context, artifactsData []*artifacts.Artifact) (report.Report, error) { // disable logs before scanning - err := log.InitLogger(s.opts.Debug, true) - if err != nil { - return report.Report{}, xerrors.Errorf("logger error: %w", err) - } + log.InitLogger(s.opts.Debug, true) // enable log, this is done in a defer function, // to enable logs even when the function returns earlier // due to an error - defer func() { - err = log.InitLogger(s.opts.Debug, false) - if err != nil { - // we use log.Fatal here because the error was to enable the logger - log.Fatal(xerrors.Errorf("can't enable logger error: %w", err)) - } - }() + defer log.InitLogger(s.opts.Debug, false) if s.opts.Format == types.FormatCycloneDX { kbom, err := s.clusterInfoToReportResources(artifactsData) @@ -139,8 +130,7 @@ func (s *Scanner) Scan(ctx context.Context, artifactsData []*artifacts.Artifact) } p := parallel.NewPipeline(s.opts.Parallel, !s.opts.Quiet, resourceArtifacts, onItem, onResult) - err = p.Do(ctx) - if err != nil { + if err := p.Do(ctx); err != nil { return report.Report{}, err } if s.opts.Scanners.AnyEnabled(types.VulnerabilityScanner) { @@ -168,7 +158,6 @@ func (s *Scanner) scanVulns(ctx context.Context, artifact *artifacts.Artifact, o imageReport, err := s.runner.ScanImage(ctx, opts) if err != nil { - log.Logger.Warnf("failed to scan image %s: %s", image, err) resources = append(resources, report.CreateResource(artifact, imageReport, err)) continue } @@ -196,7 +185,6 @@ func (s *Scanner) scanMisconfigs(ctx context.Context, artifact *artifacts.Artifa // remove config file after scanning removeFile(configFile) if err != nil { - 
log.Logger.Debugf("failed to scan config %s/%s: %s", artifact.Kind, artifact.Name, err) return report.CreateResource(artifact, configReport, err), err } diff --git a/pkg/licensing/classifier.go b/pkg/licensing/classifier.go index 0af770ef987a..74f825f303a7 100644 --- a/pkg/licensing/classifier.go +++ b/pkg/licensing/classifier.go @@ -25,7 +25,7 @@ func initGoogleClassifier() error { // This loading is expensive and should be called only when the license classification is needed. var err error classifierOnce.Do(func() { - log.Logger.Debug("Loading the default license classifier...") + log.Debug("Loading the default license classifier...") cf, err = assets.DefaultClassifier() }) return err diff --git a/pkg/log/context.go b/pkg/log/context.go new file mode 100644 index 000000000000..67a161875d0f --- /dev/null +++ b/pkg/log/context.go @@ -0,0 +1,47 @@ +package log + +import ( + "context" + "log/slog" +) + +// prefixContextKey is the context key for logger. +// It is unexported to prevent collisions with context keys defined in other packages. +type prefixContextKey struct{} + +// WithContextPrefix returns a new context with the given prefix. +func WithContextPrefix(ctx context.Context, prefix string) context.Context { + if prefix == "" { + return ctx + } + return context.WithValue(ctx, prefixContextKey{}, "["+prefix+"] ") +} + +func contextualPrefix(ctx context.Context) string { + if prefix, ok := ctx.Value(prefixContextKey{}).(string); ok { + return prefix + } + return "" +} + +// attrContextKey is the context key for logger. +// It is unexported to prevent collisions with context keys defined in other packages. +type attrContextKey struct{} + +// WithContextAttrs returns a new context with the given attrs. +func WithContextAttrs(ctx context.Context, attrs ...slog.Attr) context.Context { + if len(attrs) == 0 { + return ctx + } + if ctxAttrs := contextualAttrs(ctx); ctxAttrs != nil { + attrs = append(ctxAttrs, attrs...) 
+ } + return context.WithValue(ctx, attrContextKey{}, attrs) +} + +func contextualAttrs(ctx context.Context) []slog.Attr { + if attrs, ok := ctx.Value(attrContextKey{}).([]slog.Attr); ok { + return attrs + } + return nil +} diff --git a/pkg/log/handler.go b/pkg/log/handler.go new file mode 100644 index 000000000000..2e9bbe4a67dd --- /dev/null +++ b/pkg/log/handler.go @@ -0,0 +1,304 @@ +package log + +import ( + "bytes" + "context" + "fmt" + "io" + "log/slog" + "slices" + "strconv" + "sync" + "time" + + "github.com/fatih/color" + "github.com/samber/lo" + "golang.org/x/xerrors" +) + +const ( + errKey = "err" + prefixKey = "prefix" +) + +type ColorHandler struct { + opts Options + prefix string + preformatted []byte // data from WithGroup and WithAttrs + groups []string // groups from WithGroup + mu *sync.Mutex + out io.Writer +} + +type Options struct { + // Level reports the minimum level to log. + // Levels with lower levels are discarded. + // If nil, the Handler uses [slog.LevelInfo]. + Level slog.Leveler +} + +func NewHandler(out io.Writer, opts *Options) *ColorHandler { + h := &ColorHandler{ + out: out, + mu: &sync.Mutex{}, + } + if opts != nil { + h.opts = *opts + } + if h.opts.Level == nil { + h.opts.Level = slog.LevelInfo + } + return h +} + +func (h *ColorHandler) Enabled(_ context.Context, level slog.Level) bool { + return level >= h.opts.Level.Level() +} + +func (h *ColorHandler) WithGroup(name string) slog.Handler { + if name == "" { + return h + } + h2 := *h + // Add an unopened group to h2 without modifying h. + h2.groups = make([]string, len(h.groups)+1) + copy(h2.groups, h.groups) + h2.groups[len(h2.groups)-1] = name + return &h2 +} + +func (h *ColorHandler) WithAttrs(attrs []slog.Attr) slog.Handler { + if len(attrs) == 0 { + return h + } + h2 := *h + + // Force an append to copy the underlying array. + h2.preformatted = slices.Clip(h.preformatted) + + // Pre-format the attributes. 
+ for _, a := range attrs { + if isLogPrefix(a) { + h2.prefix = string(a.Value.Any().(logPrefix)) + continue + } + h2.preformatted = h2.appendAttr(h2.preformatted, a, h.groups) + } + return &h2 +} + +func (h *ColorHandler) appendAttr(buf []byte, a slog.Attr, groups []string) []byte { + // Resolve the Attr's value before doing anything else. + a.Value = a.Value.Resolve() + // Ignore empty Attrs and log prefixes. + if a.Equal(slog.Attr{}) || isLogPrefix(a) { + return buf + } + + var key string + for _, g := range groups { + key += g + "." + } + key += a.Key + + switch a.Value.Kind() { + case slog.KindString: + // Quote string values, to make them easy to parse. + buf = append(buf, key...) + buf = append(buf, '=') + buf = strconv.AppendQuote(buf, a.Value.String()) + case slog.KindTime: + // Write times in a standard way, without the monotonic time. + buf = append(buf, key...) + buf = append(buf, '=') + buf = a.Value.Time().AppendFormat(buf, time.RFC3339Nano) + case slog.KindGroup: + attrs := a.Value.Group() + // Ignore empty groups. + if len(attrs) == 0 { + return buf + } + if a.Key != "" { + groups = append(groups, a.Key) + } + for _, ga := range attrs { + buf = h.appendAttr(buf, ga, groups) + } + buf = bytes.TrimRight(buf, " ") // Trim the trailing space. + default: + buf = append(buf, key...) + buf = append(buf, '=') + if err, ok := a.Value.Any().(error); ok { + buf = strconv.AppendQuote(buf, color.HiRedString(err.Error())) + } else { + buf = append(buf, a.Value.String()...) 
+ } + } + return append(buf, ' ') +} + +func (h *ColorHandler) Handle(ctx context.Context, r slog.Record) error { + bufp := allocBuf() + buf := *bufp + defer func() { + *bufp = buf + freeBuf(bufp) + }() + + buf = h.handle(ctx, buf, r) + + h.mu.Lock() + defer h.mu.Unlock() + + if _, err := h.out.Write(buf); err != nil { + return xerrors.Errorf("failed to write log: %w", err) + } + + return nil +} + +func (h *ColorHandler) handle(ctx context.Context, buf []byte, r slog.Record) []byte { + colorize := color.New() + switch r.Level { + case slog.LevelDebug: + colorize = colorize.Add(color.FgHiBlack) + case slog.LevelInfo: + colorize = colorize.Add(color.FgHiBlue) + case slog.LevelWarn: + colorize = colorize.Add(color.FgHiYellow) + case slog.LevelError: + colorize = colorize.Add(color.FgHiRed) + case LevelFatal: + colorize = colorize.Add(color.FgRed) + } + + // Timestamp + if !r.Time.IsZero() { + buf = append(buf, r.Time.Format(time.RFC3339)...) + buf = append(buf, '\t') + } + + // Level + buf = append(buf, colorize.Sprint(levelString(r.Level))...) + buf = append(buf, '\t') + + // Message + buf = append(buf, h.Prefix(ctx, r)+r.Message...) + if r.Level == LevelFatal { + // Show the error and return early. + format := lo.Ternary(h.opts.Level == slog.LevelDebug, "\n - %+v\n", "\t%v\n") + return fmt.Appendf(buf, format, h.Err(r)) + } + + // Attrs + var preformatted []byte + for _, a := range contextualAttrs(ctx) { + preformatted = h.appendAttr(preformatted, a, h.groups) + } + preformatted = append(preformatted, h.preformatted...) + + if len(preformatted) > 0 || r.NumAttrs() > 0 { + buf = append(buf, '\t') + } + + if len(preformatted) > 0 { + buf = append(buf, preformatted...) + } + r.Attrs(func(a slog.Attr) bool { + buf = h.appendAttr(buf, a, h.groups) + return true + }) + + // Trim the trailing space. + buf = bytes.TrimRight(buf, " ") + buf = append(buf, '\n') + + return buf +} + +// Err returns the error from the attrs, if any. 
+func (h *ColorHandler) Err(r slog.Record) error { + return findKey[error](errKey, r) +} + +// Prefix returns the prefix from the attrs, if any. +func (h *ColorHandler) Prefix(ctx context.Context, r slog.Record) string { + if attrPrefix := string(findKey[logPrefix](prefixKey, r)); attrPrefix != "" { + return attrPrefix + } + if ctxPrefix := contextualPrefix(ctx); ctxPrefix != "" { + return ctxPrefix + } + return h.prefix +} + +func findKey[T any](key string, r slog.Record) T { + var v T + r.Attrs(func(a slog.Attr) bool { + if a.Key != key { + return true + } + + var ok bool + if v, ok = a.Value.Any().(T); !ok { + return true + } + return false + }) + return v +} + +var ( + String = slog.String + Int64 = slog.Int64 + Int = slog.Int + Bool = slog.Bool + Time = slog.Time + Duration = slog.Duration + Group = slog.Group + Any = slog.Any +) + +// Err returns an Attr that represents an error. +func Err(err error) slog.Attr { + return slog.Any(errKey, err) +} + +type logPrefix string + +// Prefix returns an Attr that represents a prefix. +func Prefix(prefix string) slog.Attr { + return slog.Any(prefixKey, logPrefix("["+prefix+"] ")) +} + +func isLogPrefix(a slog.Attr) bool { + _, ok := a.Value.Any().(logPrefix) + return ok +} + +func levelString(level slog.Level) string { + if level == LevelFatal { + return "FATAL" + } + return level.String() +} + +var bufPool = sync.Pool{ + New: func() any { + b := make([]byte, 0, 1024) + return &b + }, +} + +func allocBuf() *[]byte { + return bufPool.Get().(*[]byte) +} + +func freeBuf(b *[]byte) { + // To reduce peak allocation, return only smaller buffers to the pool. 
+ const maxBufferSize = 16 << 10 + if cap(*b) <= maxBufferSize { + *b = (*b)[:0] + bufPool.Put(b) + } +} diff --git a/pkg/log/handler_test.go b/pkg/log/handler_test.go new file mode 100644 index 000000000000..4d106535b8d8 --- /dev/null +++ b/pkg/log/handler_test.go @@ -0,0 +1,252 @@ +package log_test + +import ( + "bytes" + "context" + "errors" + "fmt" + "github.com/aquasecurity/trivy/pkg/log" + "github.com/stretchr/testify/assert" + "log/slog" + "os" + "strings" + "testing" + "testing/slogtest" + "time" +) + +func TestColorHandler(t *testing.T) { + var buf bytes.Buffer + logger := slog.New(log.NewHandler(&buf, &log.Options{Level: slog.LevelDebug})) + + // Test logging with different levels, attributes, and groups. + logger.Debug("debug message", "key1", "value1", "key2", 2) + logger.Info("info message", "key3", true) + logger.Warn("warn message", slog.Group("group1", slog.Int("key4", 42))) + logger.Error("error message", slog.Group("group2", slog.String("key5", "value5"))) + + got := buf.String() + + wantLines := []string{ + `DEBUG debug message key1="value1" key2=2`, + `INFO info message key3=true`, + `WARN warn message group1.key4=42`, + `ERROR error message group2.key5="value5"`, + } + compareLines(t, got, wantLines) +} + +func TestSlog(t *testing.T) { + logger := slog.New(log.NewHandler(os.Stdout, &log.Options{Level: slog.LevelWarn})) + logger.Info("foo") + logger.Warn("warn message", slog.Group("group2", slog.String("key5", "value5"))) + logger.Error("error", slog.Int("key3", 3), slog.Group("group3", slog.String("key4", "value4"))) +} + +func TestWithAttrsAndWithGroup(t *testing.T) { + t.Run("single group", func(t *testing.T) { + var buf bytes.Buffer + baseLogger := log.New(log.NewHandler(&buf, &log.Options{Level: slog.LevelWarn})) + + // Test logging with WithContextAttrs and WithGroup. + logger := baseLogger. + With("key1", "value1"). + WithGroup("group1"). 
+ With("key2", "value2") + + logger.Debug("debug message") + logger.Info("info message", "key3", true) + logger.Warn("warn message", log.Err(errors.New("error"))) + logger.Error("error message", slog.Group("group2", slog.Int("key4", 4))) + + got := buf.String() + wantLines := []string{ + `WARN warn message key1="value1" group1.key2="value2" group1.err="error"`, + `ERROR error message key1="value1" group1.key2="value2" group1.group2.key4=4`, + } + compareLines(t, got, wantLines) + }) + + t.Run("multiple groups", func(t *testing.T) { + var buf bytes.Buffer + baseLogger := log.New(log.NewHandler(&buf, &log.Options{Level: slog.LevelWarn})) + + // Test logging with WithContextAttrs and WithGroup. + logger := baseLogger. + WithGroup("group1"). + With("key1", "value1"). + WithGroup("group2") + + logger.Error("error message", slog.Group("group3", slog.Int("key2", 2))) + + got := buf.String() + wantLines := []string{ + `ERROR error message group1.key1="value1" group1.group2.group3.key2=2`, + } + compareLines(t, got, wantLines) + }) + + t.Run("prefix", func(t *testing.T) { + var buf bytes.Buffer + logger := log.New(log.NewHandler(&buf, &log.Options{Level: slog.LevelWarn})) + logger.Error("error message", log.Prefix("prefix1"), log.String("key1", "value1")) + + wantLines := []string{ + `ERROR [prefix1] error message key1="value1"`, + } + compareLines(t, buf.String(), wantLines) + + buf.Reset() + log.SetDefault(logger) + log.WithPrefix("prefix2").Error("error message", log.String("key1", "value1")) + + wantLines = []string{ + `ERROR [prefix2] error message key1="value1"`, + } + compareLines(t, buf.String(), wantLines) + }) +} + +func TestContext(t *testing.T) { + t.Run("with context prefix", func(t *testing.T) { + var buf bytes.Buffer + baseLogger := log.New(log.NewHandler(&buf, &log.Options{Level: slog.LevelInfo})) + + // Test logging with WithContextPrefix + ctx := context.Background() + ctx = log.WithContextPrefix(ctx, "prefix1") + + logger := baseLogger.With("key1", 
"value1").WithGroup("group1") + logger.InfoContext(ctx, "info message", "key2", true) + + got := buf.String() + wantLines := []string{ + `INFO [prefix1] info message key1="value1" group1.key2=true`, + } + compareLines(t, got, wantLines) + }) + + t.Run("with context attrs", func(t *testing.T) { + var buf bytes.Buffer + baseLogger := log.New(log.NewHandler(&buf, &log.Options{Level: slog.LevelInfo})) + + // Test logging with WithContextAttrs + ctx := context.Background() + ctx = log.WithContextAttrs(ctx, log.String("key1", "value1")) + + logger := baseLogger.WithGroup("group1") + logger.InfoContext(ctx, "info message", "key2", true) + + got := buf.String() + wantLines := []string{ + `INFO info message group1.key1="value1" group1.key2=true`, + } + compareLines(t, got, wantLines) + }) +} + +func compareLines(t *testing.T, got string, wantLines []string) { + // Strip color codes from the output. + got = stripColorCodes(got) + + // Split the output into lines. + gotLines := strings.Split(got, "\n") + + assert.Len(t, gotLines, len(wantLines)+1) // Expecting log lines and an empty line. + + for i, wantLine := range wantLines { + if i >= len(gotLines) { + break + } + + ss := strings.Split(gotLines[i], "\t") + gotLevel, gotMessage, gotAttrs := ss[1], ss[2], ss[3] + + ss = strings.Split(wantLine, "\t") + wantLevel, wantMessage, wantAttrs := ss[0], ss[1], ss[2] + + assert.Equal(t, wantLevel, gotLevel) + assert.Equal(t, wantMessage, gotMessage) + assert.Equal(t, wantAttrs, gotAttrs) + } + assert.Empty(t, strings.TrimSpace(gotLines[len(gotLines)-1])) // Last line should be empty. +} + +func stripColorCodes(s string) string { + // This is a simplified version that only handles the color codes used in ColorHandler. 
+ s = strings.ReplaceAll(s, "\x1b[90m", "") // FgHiBlack + s = strings.ReplaceAll(s, "\x1b[94m", "") // FgHiBlue + s = strings.ReplaceAll(s, "\x1b[93m", "") // FgHiYellow + s = strings.ReplaceAll(s, "\x1b[91m", "") // FgHiRed + s = strings.ReplaceAll(s, "\x1b[96m", "") // FgHiCyan + s = strings.ReplaceAll(s, "\x1b[95m", "") // FgHiMagenta + s = strings.ReplaceAll(s, "\x1b[97m", "") // FgWhite + s = strings.ReplaceAll(s, "\x1b[0m", "") // Reset + return s +} + +// TODO: slogtest.Run was added in Go 1.22. Waiting for https://github.com/aquasecurity/trivy/pull/6075. +func TestSlogtest(t *testing.T) { + var buf bytes.Buffer + newHandler := func(*testing.T) slog.Handler { + buf.Reset() + return log.NewHandler(&buf, &log.Options{Level: slog.LevelDebug}) + } + + results := func(*testing.T) map[string]any { + for _, line := range strings.Split(buf.String(), "\n") { + if len(line) == 0 { + continue + } + m, err := parseLogLine(line) + if err != nil { + t.Fatalf("Failed to parse log line: %v", err) + } + return m + } + return nil + } + + slogtest.Run(t, newHandler, results) +} + +func parseLogLine(line string) (map[string]any, error) { + parts := strings.SplitN(line, "\t", 4) + if len(parts) < 2 { + return nil, fmt.Errorf("invalid log line format: %s", line) + } + + m := make(map[string]any) + if t, err := time.Parse(time.RFC3339, parts[0]); err == nil { + m["time"] = t + parts = parts[1:] + } + m["level"] = parts[0] + m["msg"] = parts[1] + + if len(parts) == 3 { + for _, attr := range strings.Split(parts[2], " ") { + kv := strings.SplitN(attr, "=", 2) + if len(kv) == 2 { + parseAttr(m, kv[0], kv[1]) + } + } + } + + return m, nil +} + +func parseAttr(attrs map[string]any, key, value string) { + parts := strings.Split(key, ".") + currentMap := attrs + for i, part := range parts { + if i == len(parts)-1 { + currentMap[part] = strings.Trim(value, `"`) + } else { + if _, ok := currentMap[part]; !ok { + currentMap[part] = make(map[string]any) + } + currentMap = 
currentMap[part].(map[string]any) + } + } +} diff --git a/pkg/log/logger.go b/pkg/log/logger.go index 89354def9185..efee7ef8800d 100644 --- a/pkg/log/logger.go +++ b/pkg/log/logger.go @@ -1,129 +1,87 @@ package log import ( + "context" + "fmt" + "io" + "log/slog" "os" - "runtime" "strings" - xlog "github.com/masahiro331/go-xfs-filesystem/log" - "go.uber.org/zap" - "go.uber.org/zap/zapcore" - "golang.org/x/xerrors" - - flog "github.com/aquasecurity/trivy/pkg/fanal/log" + "github.com/samber/lo" ) -var ( - // Logger is the global variable for logging - Logger *zap.SugaredLogger - debugOption bool +const ( + LevelDebug = slog.LevelDebug + LevelInfo = slog.LevelInfo + LevelWarn = slog.LevelWarn + LevelError = slog.LevelError + LevelFatal = slog.Level(12) ) -func init() { - // Set the default logger - Logger, _ = NewLogger(false, false) // nolint: errcheck +// Logger is an alias of slog.Logger +type Logger = slog.Logger + +// New creates a new Logger with the given non-nil Handler. +func New(h slog.Handler) *Logger { + return slog.New(h) } // InitLogger initialize the logger variable -func InitLogger(debug, disable bool) (err error) { - debugOption = debug - Logger, err = NewLogger(debug, disable) - if err != nil { - return xerrors.Errorf("failed to initialize a logger: %w", err) - } - - // Set logger for fanal - flog.SetLogger(Logger) - - // Set logger for go-xfs-filesystem - xlog.SetLogger(Logger) - - return nil - +func InitLogger(debug, disable bool) { + level := lo.Ternary(debug, slog.LevelDebug, slog.LevelInfo) + out := lo.Ternary(disable, io.Discard, io.Writer(os.Stderr)) + slog.SetDefault(New(NewHandler(out, &Options{Level: level}))) } -// NewLogger is the factory method to return the instance of logger -func NewLogger(debug, disable bool) (*zap.SugaredLogger, error) { - // First, define our level-handling logic. 
- errorPriority := zap.LevelEnablerFunc(func(lvl zapcore.Level) bool { - return lvl >= zapcore.ErrorLevel - }) - logPriority := zap.LevelEnablerFunc(func(lvl zapcore.Level) bool { - if debug { - return lvl < zapcore.ErrorLevel - } - // Not enable debug level - return zapcore.DebugLevel < lvl && lvl < zapcore.ErrorLevel - }) - - encoderLevel := zapcore.CapitalColorLevelEncoder - // when running on Windows, don't log with color - if runtime.GOOS == "windows" { - encoderLevel = zapcore.CapitalLevelEncoder - } - - encoderConfig := zapcore.EncoderConfig{ - TimeKey: "Time", - LevelKey: "Level", - NameKey: "Name", - CallerKey: "Caller", - MessageKey: "Msg", - StacktraceKey: "St", - EncodeLevel: encoderLevel, - EncodeTime: zapcore.ISO8601TimeEncoder, - EncodeDuration: zapcore.StringDurationEncoder, - EncodeCaller: zapcore.ShortCallerEncoder, - } - - consoleEncoder := zapcore.NewConsoleEncoder(encoderConfig) - - // High-priority output should also go to standard error, and low-priority - // output should also go to standard out. - consoleLogs := zapcore.Lock(os.Stderr) - consoleErrors := zapcore.Lock(os.Stderr) - if disable { - devNull, err := os.Create(os.DevNull) - if err != nil { - return nil, err - } - // Discard low-priority output - consoleLogs = zapcore.Lock(devNull) - } - - core := zapcore.NewTee( - zapcore.NewCore(consoleEncoder, consoleErrors, errorPriority), - zapcore.NewCore(consoleEncoder, consoleLogs, logPriority), - ) - - opts := []zap.Option{zap.ErrorOutput(zapcore.Lock(os.Stderr))} - if debug { - opts = append(opts, zap.Development()) - } - logger := zap.New(core, opts...) +var ( + // With calls [Logger.With] on the default logger. 
+ With = slog.With + + SetDefault = slog.SetDefault + + Debug = slog.Debug + DebugContext = slog.DebugContext + Info = slog.Info + InfoContext = slog.InfoContext + Warn = slog.Warn + WarnContext = slog.WarnContext + Error = slog.Error + ErrorContext = slog.ErrorContext +) - return logger.Sugar(), nil +// WithPrefix calls [Logger.With] with the prefix on the default logger. +// +// Note: If WithPrefix is called within init() or during global variable +// initialization, it will use the default logger of log/slog package +// before Trivy's logger is set up. In such cases, it's recommended to pass the prefix +// via WithContextPrefix to ensure the correct logger is used. +func WithPrefix(prefix string) *Logger { + return slog.Default().With(Prefix(prefix)) } +func Debugf(format string, args ...any) { slog.Default().Debug(fmt.Sprintf(format, args...)) } +func Infof(format string, args ...any) { slog.Default().Info(fmt.Sprintf(format, args...)) } +func Warnf(format string, args ...any) { slog.Default().Warn(fmt.Sprintf(format, args...)) } +func Errorf(format string, args ...any) { slog.Default().Error(fmt.Sprintf(format, args...)) } + // Fatal for logging fatal errors -func Fatal(err error) { - if debugOption { - Logger.Fatalf("%+v", err) - } - Logger.Fatal(err) +func Fatal(msg string, args ...any) { + slog.Default().Log(context.Background(), LevelFatal, msg, args...) 
+ os.Exit(1) } -func String(key, val string) zap.Field { - if key == "" || val == "" { - return zap.Skip() - } - return zap.String(key, val) +// WriteLogger is a wrapper around Logger to implement io.Writer +type WriteLogger struct { + logger *Logger } -type PrefixedLogger struct { - Name string +// NewWriteLogger creates a new WriteLogger +func NewWriteLogger(logger *Logger) *WriteLogger { + return &WriteLogger{logger: logger} } -func (d *PrefixedLogger) Write(p []byte) (n int, err error) { - Logger.Debugf("[%s] %s", d.Name, strings.TrimSpace(string(p))) +func (l *WriteLogger) Write(p []byte) (n int, err error) { + l.logger.Debug(strings.TrimSpace(string(p))) return len(p), nil } diff --git a/pkg/misconf/scanner.go b/pkg/misconf/scanner.go index 6a30c9b69ec4..950ad73cbca6 100644 --- a/pkg/misconf/scanner.go +++ b/pkg/misconf/scanner.go @@ -153,12 +153,12 @@ func (s *Scanner) Scan(ctx context.Context, fsys fs.FS) ([]types.Misconfiguratio return nil, nil } - log.Logger.Debugf("Scanning %s files for misconfigurations...", s.scanner.Name()) + log.Debug("Scanning files for misconfigurations...", log.String("scanner", s.scanner.Name())) results, err := s.scanner.ScanFS(ctx, newfs, ".") if err != nil { var invalidContentError *cfparser.InvalidContentError if errors.As(err, &invalidContentError) { - log.Logger.Errorf("scan %q was broken with InvalidContentError: %v", s.scanner.Name(), err) + log.Error("scan was broken with InvalidContentError", s.scanner.Name(), log.Err(err)) return nil, nil } return nil, xerrors.Errorf("scan config error: %w", err) @@ -237,7 +237,7 @@ func scannerOptions(t detection.FileType, opt ScannerOption) ([]options.ScannerO ) if opt.Debug { - opts = append(opts, options.ScannerWithDebug(&log.PrefixedLogger{Name: "misconf"})) + opts = append(opts, options.ScannerWithDebug(log.NewWriteLogger(log.WithPrefix("misconf")))) } if opt.Trace { diff --git a/pkg/module/command.go b/pkg/module/command.go index 9114aea1ced6..87a87e5d6209 100644 --- 
a/pkg/module/command.go +++ b/pkg/module/command.go @@ -22,14 +22,14 @@ func Install(ctx context.Context, dir, repo string, quiet bool, opt types.Regist return xerrors.Errorf("repository parse error: %w", err) } - log.Logger.Infof("Installing the module from %s...", repo) + log.Info("Installing the module from the repository...", log.String("repo", repo)) artifact, err := oci.NewArtifact(repo, quiet, opt) if err != nil { return xerrors.Errorf("module initialize error: %w", err) } dst := filepath.Join(dir, ref.Context().Name()) - log.Logger.Debugf("Installing the module to %s...", dst) + log.Debug("Installing the module...", log.String("dst", dst)) if err = artifact.Download(ctx, dst, oci.DownloadOption{MediaType: mediaType}); err != nil { return xerrors.Errorf("module download error: %w", err) @@ -45,7 +45,7 @@ func Uninstall(_ context.Context, dir, repo string) error { return xerrors.Errorf("repository parse error: %w", err) } - log.Logger.Infof("Uninstalling %s ...", repo) + log.Info("Uninstalling the module ...", log.String("module", repo)) dst := filepath.Join(dir, ref.Context().Name()) if err = os.RemoveAll(dst); err != nil { return xerrors.Errorf("remove error: %w", err) diff --git a/pkg/module/module.go b/pkg/module/module.go index 3d670999e1b9..f573c20597c1 100644 --- a/pkg/module/module.go +++ b/pkg/module/module.go @@ -3,6 +3,7 @@ package module import ( "context" "encoding/json" + "fmt" "io/fs" "os" "path/filepath" @@ -44,7 +45,7 @@ func logDebug(_ context.Context, mod api.Module, params []uint64) { buf := readMemory(mod.Memory(), offset, size) if buf != nil { - log.Logger.Debug(string(buf)) + log.Debug(string(buf)) } return @@ -56,7 +57,7 @@ func logInfo(_ context.Context, mod api.Module, params []uint64) { buf := readMemory(mod.Memory(), offset, size) if buf != nil { - log.Logger.Info(string(buf)) + log.Info(string(buf)) } return @@ -68,7 +69,7 @@ func logWarn(_ context.Context, mod api.Module, params []uint64) { buf := readMemory(mod.Memory(), offset, 
size) if buf != nil { - log.Logger.Warn(string(buf)) + log.Warn(string(buf)) } return @@ -80,7 +81,7 @@ func logError(_ context.Context, mod api.Module, params []uint64) { buf := readMemory(mod.Memory(), offset, size) if buf != nil { - log.Logger.Error(string(buf)) + log.Error(string(buf)) } return @@ -89,7 +90,8 @@ func logError(_ context.Context, mod api.Module, params []uint64) { func readMemory(mem api.Memory, offset, size uint32) []byte { buf, ok := mem.Read(offset, size) if !ok { - log.Logger.Errorf("Memory.Read(%d, %d) out of range", offset, size) + log.Error("Memory.Read() out of range", + log.Int("offset", int(offset)), log.Int("size", int(size))) return nil } return buf @@ -129,7 +131,7 @@ func (m *Manager) loadModules(ctx context.Context) error { if os.IsNotExist(err) { return nil } - log.Logger.Debugf("Module dir: %s", m.dir) + log.Debug("Module dir", log.String("dir", m.dir)) err = filepath.Walk(m.dir, func(path string, info fs.FileInfo, err error) error { if err != nil { @@ -143,7 +145,7 @@ func (m *Manager) loadModules(ctx context.Context) error { return xerrors.Errorf("failed to get a relative path: %w", err) } - log.Logger.Infof("Reading %s...", rel) + log.Info("Reading a module...", log.String("path", rel)) wasmCode, err := os.ReadFile(path) if err != nil { return xerrors.Errorf("file read error: %w", err) @@ -159,7 +161,7 @@ func (m *Manager) loadModules(ctx context.Context) error { return nil } - log.Logger.Infof("%s loaded", rel) + log.Info("Module loaded", log.String("path", rel)) m.modules = append(m.modules, p) return nil @@ -341,8 +343,9 @@ func newWASMPlugin(ctx context.Context, ccache wazero.CompilationCache, code []b } if apiVersion != tapi.Version { - log.Logger.Infof("Ignore %s@v%d module due to API version mismatch, got: %d, want: %d", - name, version, apiVersion, tapi.Version) + log.Info("Ignore the module due to API version mismatch", + log.String("module", fmt.Sprintf("%s@v%d", name, version)), + log.Int("got", apiVersion), 
log.Int("want", tapi.Version)) return nil, nil } @@ -403,13 +406,14 @@ func newWASMPlugin(ctx context.Context, ccache wazero.CompilationCache, code []b } func (m *wasmModule) Register() { - log.Logger.Infof("Registering WASM module: %s@v%d", m.name, m.version) + logger := log.With(log.String("name", m.name), log.Int("version", m.version)) + logger.Info("Registering WASM module") if m.isAnalyzer { - log.Logger.Debugf("Registering custom analyzer in %s@v%d", m.name, m.version) + logger.Debug("Registering custom analyzer") analyzer.RegisterAnalyzer(m) } if m.isPostScanner { - log.Logger.Debugf("Registering custom post scanner in %s@v%d", m.name, m.version) + logger.Debug("Registering custom post scanner") post.RegisterPostScanner(m) } } @@ -441,7 +445,7 @@ func (m *wasmModule) Required(filePath string, _ os.FileInfo) bool { func (m *wasmModule) Analyze(ctx context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) { filePath := "/" + filepath.ToSlash(input.FilePath) - log.Logger.Debugf("Module %s: analyzing %s...", m.name, filePath) + log.Debug("Module analyzing...", log.String("module", m.name), log.String("file_path", filePath)) // Wasm module instances are not Goroutine safe, so we take look here since Analyze might be called concurrently. // TODO: This is temporary solution and we could improve the Analyze performance by having module instance pool. 
diff --git a/pkg/parallel/walk.go b/pkg/parallel/walk.go index 1156560a7212..2797bf98391a 100644 --- a/pkg/parallel/walk.go +++ b/pkg/parallel/walk.go @@ -4,7 +4,6 @@ import ( "context" "io/fs" - "go.uber.org/zap" "golang.org/x/sync/errgroup" "golang.org/x/xerrors" @@ -37,7 +36,7 @@ func WalkDir[T any](ctx context.Context, fsys fs.FS, root string, parallel int, if err != nil { return err } else if info.Size() == 0 { - log.Logger.Debugf("%s is empty, skip this file", path) + log.Debug("Skip the empty file", log.String("file_path", path)) return nil } @@ -106,7 +105,7 @@ func walk[T any](ctx context.Context, fsys fs.FS, path string, c chan T, onFile } res, err := onFile(path, info, rsa) if err != nil { - log.Logger.Debugw("Walk error", zap.String("file_path", path), zap.Error(err)) + log.Debug("Walk error", log.String("file_path", path), log.Err(err)) return nil } diff --git a/pkg/plugin/plugin.go b/pkg/plugin/plugin.go index f96f02a80c00..cbff3f4d01f7 100644 --- a/pkg/plugin/plugin.go +++ b/pkg/plugin/plugin.go @@ -136,7 +136,8 @@ func (p Plugin) selectPlatform() (Platform, error) { selector := platform.Selector if (selector.OS == "" || p.GOOS == selector.OS) && (selector.Arch == "" || p.GOARCH == selector.Arch) { - log.Logger.Debugf("Platform found, os: %s, arch: %s", selector.OS, selector.Arch) + log.Debug("Platform found", + log.String("os", selector.OS), log.String("arch", selector.Arch)) return platform, nil } } @@ -144,13 +145,13 @@ func (p Plugin) selectPlatform() (Platform, error) { } func (p Plugin) install(ctx context.Context, dst, pwd string) error { - log.Logger.Debugf("Installing the plugin to %s...", dst) + log.Debug("Installing the plugin...", log.String("path", dst)) platform, err := p.selectPlatform() if err != nil { return xerrors.Errorf("platform selection error: %w", err) } - log.Logger.Debugf("Downloading the execution file from %s...", platform.URI) + log.Debug("Downloading the execution file...", log.String("uri", platform.URI)) if err = 
downloader.Download(ctx, platform.URI, dst, pwd); err != nil { return xerrors.Errorf("unable to download the execution file (%s): %w", platform.URI, err) } @@ -181,14 +182,14 @@ func Install(ctx context.Context, url string, force bool) (Plugin, error) { } } - log.Logger.Infof("Installing the plugin from %s...", url) + log.Info("Installing the plugin...", log.String("url", url)) tempDir, err := downloader.DownloadToTempDir(ctx, url) if err != nil { return Plugin{}, xerrors.Errorf("download failed: %w", err) } defer os.RemoveAll(tempDir) - log.Logger.Info("Loading the plugin metadata...") + log.Info("Loading the plugin metadata...") plugin, err := loadMetadata(tempDir) if err != nil { return Plugin{}, xerrors.Errorf("failed to load the plugin metadata: %w", err) @@ -259,16 +260,18 @@ func Update(name string) error { return xerrors.Errorf("plugin load error: %w", err) } - log.Logger.Infof("Updating plugin '%s'", name) + logger := log.With("name", name) + logger.Info("Updating plugin...") updated, err := Install(nil, plugin.Repository, true) if err != nil { return xerrors.Errorf("unable to perform an update installation: %w", err) } if plugin.Version == updated.Version { - log.Logger.Infof("The %s plugin is the latest version. 
[%s]", name, plugin.Version) + logger.Info("The plugin is up-to-date", log.String("version", plugin.Version)) } else { - log.Logger.Infof("Updated '%s' from %s to %s", name, plugin.Version, updated.Version) + logger.Info("Plugin updated", + log.String("from", plugin.Version), log.String("to", updated.Version)) } return nil } @@ -288,7 +291,7 @@ func LoadAll() ([]Plugin, error) { } plugin, err := loadMetadata(filepath.Join(pluginsDir, d.Name())) if err != nil { - log.Logger.Warnf("plugin load error: %s", err) + log.Warn("Plugin load error", log.Err(err)) continue } plugins = append(plugins, plugin) diff --git a/pkg/policy/policy.go b/pkg/policy/policy.go index 9dc802c8207e..6d7aadfc8e3d 100644 --- a/pkg/policy/policy.go +++ b/pkg/policy/policy.go @@ -91,7 +91,7 @@ func NewClient(cacheDir string, quiet bool, policyBundleRepo string, opts ...Opt func (c *Client) populateOCIArtifact(registryOpts types.RegistryOptions) error { if c.artifact == nil { - log.Logger.Debugf("Using URL: %s to load policy bundle", c.policyBundleRepo) + log.Debug("Loading policy bundle", log.String("repository", c.policyBundleRepo)) art, err := oci.NewArtifact(c.policyBundleRepo, c.quiet, registryOpts) if err != nil { return xerrors.Errorf("OCI artifact error: %w", err) @@ -116,7 +116,7 @@ func (c *Client) DownloadBuiltinPolicies(ctx context.Context, registryOpts types if err != nil { return xerrors.Errorf("digest error: %w", err) } - log.Logger.Debugf("Digest of the built-in policies: %s", digest) + log.Debug("Digest of the built-in policies", log.String("digest", digest)) // Update metadata.json with the new digest and the current date if err = c.updateMetadata(digest, c.clock.Now()); err != nil { @@ -222,14 +222,14 @@ func (c *Client) updateMetadata(digest string, now time.Time) error { func (c *Client) GetMetadata() (*Metadata, error) { f, err := os.Open(c.metadataPath()) if err != nil { - log.Logger.Debugf("Failed to open the policy metadata: %s", err) + log.Debug("Failed to open the 
policy metadata", log.Err(err)) return nil, err } defer f.Close() var meta Metadata if err = json.NewDecoder(f).Decode(&meta); err != nil { - log.Logger.Warnf("Policy metadata decode error: %s", err) + log.Warn("Policy metadata decode error", log.Err(err)) return nil, err } @@ -237,7 +237,7 @@ func (c *Client) GetMetadata() (*Metadata, error) { } func (c *Client) Clear() error { - log.Logger.Info("Removing policy bundle...") + log.Info("Removing policy bundle...") if err := os.RemoveAll(c.policyDir); err != nil { return xerrors.Errorf("failed to remove policy bundle: %w", err) } diff --git a/pkg/rekor/client.go b/pkg/rekor/client.go index a902c3a6f865..d7b0a35dd781 100644 --- a/pkg/rekor/client.go +++ b/pkg/rekor/client.go @@ -37,9 +37,15 @@ type EntryID struct { func NewEntryID(entryID string) (EntryID, error) { switch len(entryID) { case treeIDLen + uuidLen: - return EntryID{TreeID: entryID[:treeIDLen], UUID: entryID[treeIDLen:]}, nil + return EntryID{ + TreeID: entryID[:treeIDLen], + UUID: entryID[treeIDLen:], + }, nil case uuidLen: - return EntryID{TreeID: "", UUID: entryID}, nil + return EntryID{ + TreeID: "", + UUID: entryID, + }, nil default: return EntryID{}, xerrors.New("invalid Entry ID length") } @@ -71,7 +77,7 @@ func NewClient(rekorURL string) (*Client, error) { } func (c *Client) Search(ctx context.Context, hash string) ([]EntryID, error) { - log.Logger.Debugf("Search for %s in Rekor", hash) + log.Debug("Searching index in Rekor", log.String("hash", hash)) params := index.NewSearchIndexParamsWithContext(ctx).WithQuery(&models.SearchIndex{Hash: hash}) resp, err := c.Index.SearchIndex(params) if err != nil { diff --git a/pkg/remote/remote.go b/pkg/remote/remote.go index 08924704a7d5..b989df18a35b 100644 --- a/pkg/remote/remote.go +++ b/pkg/remote/remote.go @@ -185,7 +185,7 @@ func resolvePlatform(ref name.Reference, p types.Platform, options []remote.Opti switch d.MediaType { case v1types.OCIManifestSchema1, v1types.DockerManifestSchema2: // We want an 
index but the registry has an image, not multi-arch. We just ignore "--platform". - log.Logger.Debug("Ignore --platform as the image is not multi-arch") + log.Debug("Ignore `--platform` as the image is not multi-arch") return types.Platform{}, nil case v1types.OCIImageIndex, v1types.DockerManifestList: // These are expected. @@ -201,7 +201,7 @@ func resolvePlatform(ref name.Reference, p types.Platform, options []remote.Opti return types.Platform{}, xerrors.Errorf("remote index manifest error: %w", err) } if len(m.Manifests) == 0 { - log.Logger.Debug("Ignore '--platform' as the image is not multi-arch") + log.Debug("Ignore '--platform' as the image is not multi-arch") return types.Platform{}, nil } if m.Manifests[0].Platform != nil { diff --git a/pkg/report/table/vulnerability.go b/pkg/report/table/vulnerability.go index bdfa9bf1af1d..9b85a1f4aab1 100644 --- a/pkg/report/table/vulnerability.go +++ b/pkg/report/table/vulnerability.go @@ -22,7 +22,7 @@ import ( ) var showSuppressedOnce = sync.OnceFunc(func() { - log.Logger.Info(`Some vulnerabilities have been ignored/suppressed. Use the "--show-suppressed" flag to display them.`) + log.Info(`Some vulnerabilities have been ignored/suppressed. Use the "--show-suppressed" flag to display them.`) }) type vulnerabilityRenderer struct { @@ -111,7 +111,7 @@ func (r *vulnerabilityRenderer) setVulnerabilityRows(tw *table.Table, vulns []ty fileName := filepath.Base(pkgPath) lib = fmt.Sprintf("%s (%s)", v.PkgName, fileName) r.once.Do(func() { - log.Logger.Infof("Table result includes only package filenames. Use '--format json' option to get the full path to the package file.") + log.Info("Table result includes only package filenames. 
Use '--format json' option to get the full path to the package file.") }) } diff --git a/pkg/report/template.go b/pkg/report/template.go index 7a28a65cfbb2..1ebabdb89cfe 100644 --- a/pkg/report/template.go +++ b/pkg/report/template.go @@ -40,7 +40,7 @@ func NewTemplateWriter(output io.Writer, outputTemplate, appVersion string) (*Te templateFuncMap["escapeXML"] = func(input string) string { escaped := &bytes.Buffer{} if err := xml.EscapeText(escaped, []byte(input)); err != nil { - log.Logger.Error("error while escapeString to XML: %s", err) + log.Error("Error while escapeString to XML", log.Err(err)) return input } return escaped.String() diff --git a/pkg/report/writer.go b/pkg/report/writer.go index 274688591151..d732ec397a1f 100644 --- a/pkg/report/writer.go +++ b/pkg/report/writer.go @@ -69,7 +69,7 @@ func Write(ctx context.Context, report types.Report, option flag.Options) (err e case types.FormatTemplate: // We keep `sarif.tpl` template working for backward compatibility for a while. if strings.HasPrefix(option.Template, "@") && strings.HasSuffix(option.Template, "sarif.tpl") { - log.Logger.Warn("Using `--template sarif.tpl` is deprecated. Please migrate to `--format sarif`. See https://github.com/aquasecurity/trivy/discussions/1571") + log.Warn("Using `--template sarif.tpl` is deprecated. Please migrate to `--format sarif`. 
See https://github.com/aquasecurity/trivy/discussions/1571") writer = &SarifWriter{ Output: output, Version: option.AppVersion, diff --git a/pkg/result/ignore.go b/pkg/result/ignore.go index 25f7d03837d7..941680b3086f 100644 --- a/pkg/result/ignore.go +++ b/pkg/result/ignore.go @@ -92,7 +92,7 @@ func (f *IgnoreFindings) Match(id, path string, pkg *packageurl.PackageURL) *Ign continue } - log.Logger.Debugw("Ignored", log.String("id", id), log.String("path", path)) + log.Debug("Ignored", log.String("id", id), log.String("target", path)) return &finding } @@ -223,7 +223,7 @@ func parseIgnoreYAML(ignoreFile string) (IgnoreConfig, error) { return IgnoreConfig{}, xerrors.Errorf("file open error: %w", err) } defer f.Close() - log.Logger.Debugf("Found an ignore yaml: %s", ignoreFile) + log.Debug("Found an ignore yaml", log.String("path", ignoreFile)) // Parse the YAML content var ignoreConfig IgnoreConfig @@ -239,7 +239,7 @@ func parseIgnore(ignoreFile string) (IgnoreFindings, error) { return nil, xerrors.Errorf("file open error: %w", err) } defer f.Close() - log.Logger.Debugf("Found an ignore file: %s", ignoreFile) + log.Debug("Found an ignore file", log.String("path", ignoreFile)) var ignoredFindings IgnoreFindings scanner := bufio.NewScanner(f) @@ -255,7 +255,7 @@ func parseIgnore(ignoreFile string) (IgnoreFindings, error) { if len(fields) > 1 { exp, err = getExpirationDate(fields) if err != nil { - log.Logger.Warnf("Error while parsing expiration date in .trivyignore file: %s", err) + log.Warn("Error while parsing expiration date in .trivyignore file", log.Err(err)) continue } } diff --git a/pkg/rpc/client/headers.go b/pkg/rpc/client/headers.go index 463fb60149a4..0ebac7bf046e 100644 --- a/pkg/rpc/client/headers.go +++ b/pkg/rpc/client/headers.go @@ -14,7 +14,7 @@ func WithCustomHeaders(ctx context.Context, customHeaders http.Header) context.C // Attach the headers to a context ctxWithToken, err := twirp.WithHTTPRequestHeaders(ctx, customHeaders) if err != nil { - 
log.Logger.Warnf("twirp error setting headers: %s", err) + log.Warn("twirp error setting headers", log.Err(err)) return ctx } return ctxWithToken diff --git a/pkg/rpc/convert.go b/pkg/rpc/convert.go index 4edec4f1f24e..5e21e4e1ce4c 100644 --- a/pkg/rpc/convert.go +++ b/pkg/rpc/convert.go @@ -107,7 +107,7 @@ func ConvertToRPCCustomResources(resources []ftypes.CustomResource) []*common.Cu for _, r := range resources { data, err := structpb.NewValue(r.Data) if err != nil { - log.Logger.Warn(err) + log.Warn("Custom resource conversion error", log.Err(err)) } rpcResources = append(rpcResources, &common.CustomResource{ Type: r.Type, @@ -242,7 +242,7 @@ func ConvertFromRPCPkgIdentifier(pkg *common.PkgIdentifier) ftypes.PkgIdentifier if pkg.Purl != "" { pu, err := packageurl.FromString(pkg.Purl) if err != nil { - log.Logger.Error("Failed to parse PURL (%s): %s", pkg.Purl, err) + log.Error("Failed to parse PURL", log.String("purl", pkg.Purl), log.Err(err)) } pkgID.PURL = &pu } @@ -267,7 +267,7 @@ func ConvertToRPCVulns(vulns []types.DetectedVulnerability) []*common.Vulnerabil for _, vuln := range vulns { severity, err := dbTypes.NewSeverity(vuln.Severity) if err != nil { - log.Logger.Warn(err) + log.Warn("Severity error", log.Err(err)) } cvssMap := make(map[string]*common.CVSS) // This is needed because protobuf generates a map[string]*CVSS type for vendor, vendorSeverity := range vuln.CVSS { @@ -336,7 +336,7 @@ func ConvertToRPCMisconfs(misconfs []types.DetectedMisconfiguration) []*common.D for _, m := range misconfs { severity, err := dbTypes.NewSeverity(m.Severity) if err != nil { - log.Logger.Warn(err) + log.Warn("Severity conversion error", log.Err(err)) } rpcMisconfs = append(rpcMisconfs, &common.DetectedMisconfiguration{ @@ -834,7 +834,7 @@ func ConvertToRPCArtifactInfo(imageID string, imageInfo ftypes.ArtifactInfo) *ca t := timestamppb.New(imageInfo.Created) if err := t.CheckValid(); err != nil { - log.Logger.Warnf("invalid timestamp: %s", err) + log.Warn("Invalid 
timestamp", log.Err(err)) } return &cache.PutArtifactRequest{ @@ -973,7 +973,7 @@ func ConvertToRPCLicenses(licenses []types.DetectedLicense) []*common.DetectedLi for _, l := range licenses { severity, err := dbTypes.NewSeverity(l.Severity) if err != nil { - log.Logger.Warn(err) + log.Warn("Severity conversion error", log.Err(err)) } rpcLicenses = append(rpcLicenses, &common.DetectedLicense{ Severity: common.Severity(severity), diff --git a/pkg/rpc/retry.go b/pkg/rpc/retry.go index 1c807a04b465..31a7760cb770 100644 --- a/pkg/rpc/retry.go +++ b/pkg/rpc/retry.go @@ -32,8 +32,8 @@ func Retry(f func() error) error { b := backoff.WithMaxRetries(backoff.NewExponentialBackOff(), maxRetries) err := backoff.RetryNotify(operation, b, func(err error, _ time.Duration) { - log.Logger.Warn(err) - log.Logger.Info("Retrying HTTP request...") + log.Warn("HTTP error", log.Err(err)) + log.Info("Retrying HTTP request...") }) if err != nil { return err diff --git a/pkg/rpc/server/listen.go b/pkg/rpc/server/listen.go index 7433bf20a560..802afe68ae3d 100644 --- a/pkg/rpc/server/listen.go +++ b/pkg/rpc/server/listen.go @@ -63,13 +63,13 @@ func (s Server) ListenAndServe(ctx context.Context, serverCache cache.Cache, ski for { time.Sleep(updateInterval) if err := worker.update(ctx, s.appVersion, s.cacheDir, skipDBUpdate, dbUpdateWg, requestWg, s.RegistryOptions); err != nil { - log.Logger.Errorf("%+v\n", err) + log.Errorf("%+v\n", err) } } }() mux := newServeMux(ctx, serverCache, dbUpdateWg, requestWg, s.token, s.tokenHeader, s.cacheDir) - log.Logger.Infof("Listening %s...", s.addr) + log.Infof("Listening %s...", s.addr) return http.ListenAndServe(s.addr, mux) } @@ -102,7 +102,7 @@ func newServeMux(ctx context.Context, serverCache cache.Cache, dbUpdateWg, reque mux.HandleFunc("/healthz", func(rw http.ResponseWriter, r *http.Request) { if _, err := rw.Write([]byte("ok")); err != nil { - log.Logger.Errorf("health check error: %s", err) + log.Error("Health check error", log.Err(err)) } }) @@ 
-110,7 +110,7 @@ func newServeMux(ctx context.Context, serverCache cache.Cache, dbUpdateWg, reque w.Header().Add("Content-Type", "application/json") if err := json.NewEncoder(w).Encode(version.NewVersionInfo(cacheDir)); err != nil { - log.Logger.Errorf("get version error: %s", err) + log.Error("Version error", log.Err(err)) } }) @@ -137,7 +137,7 @@ func newDBWorker(dbClient dbc.Operation) dbWorker { func (w dbWorker) update(ctx context.Context, appVersion, cacheDir string, skipDBUpdate bool, dbUpdateWg, requestWg *sync.WaitGroup, opt types.RegistryOptions) error { - log.Logger.Debug("Check for DB update...") + log.Debug("Check for DB update...") needsUpdate, err := w.dbClient.NeedsUpdate(appVersion, skipDBUpdate) if err != nil { return xerrors.Errorf("failed to check if db needs an update") @@ -145,7 +145,7 @@ func (w dbWorker) update(ctx context.Context, appVersion, cacheDir string, return nil } - log.Logger.Info("Updating DB...") + log.Info("Updating DB...") if err = w.hotUpdate(ctx, cacheDir, dbUpdateWg, requestWg, opt); err != nil { return xerrors.Errorf("failed DB hot update: %w", err) } @@ -163,11 +163,11 @@ func (w dbWorker) hotUpdate(ctx context.Context, cacheDir string, dbUpdateWg, re return xerrors.Errorf("failed to download vulnerability DB: %w", err) } - log.Logger.Info("Suspending all requests during DB update") + log.Info("Suspending all requests during DB update") dbUpdateWg.Add(1) defer dbUpdateWg.Done() - log.Logger.Info("Waiting for all requests to be processed before DB update...") + log.Info("Waiting for all requests to be processed before DB update...") requestWg.Wait() if err = db.Close(); err != nil { @@ -184,7 +184,7 @@ func (w dbWorker) hotUpdate(ctx context.Context, cacheDir string, dbUpdateWg, re return xerrors.Errorf("failed to copy the metadata file: %w", err) } - log.Logger.Info("Reopening DB...") + log.Info("Reopening DB...") if err = db.Init(cacheDir); err != nil { return xerrors.Errorf("failed to open DB: %w", err) } diff --git 
a/pkg/rpc/server/server.go b/pkg/rpc/server/server.go index d4cb91294def..051eab982255 100644 --- a/pkg/rpc/server/server.go +++ b/pkg/rpc/server/server.go @@ -37,7 +37,7 @@ func NewScanServer(s scanner.Driver) *ScanServer { // Log and return an error func teeError(err error) error { - log.Logger.Errorf("%+v", err) + log.Errorf("%+v", err) return err } diff --git a/pkg/sbom/cyclonedx/marshal.go b/pkg/sbom/cyclonedx/marshal.go index 684b1b7d235d..9465b790bdd2 100644 --- a/pkg/sbom/cyclonedx/marshal.go +++ b/pkg/sbom/cyclonedx/marshal.go @@ -268,7 +268,7 @@ func (*Marshaler) Hashes(files []core.File) *[]cdx.Hash { case digest.MD5: alg = cdx.HashAlgoMD5 default: - log.Logger.Debugf("Unable to convert %q algorithm to CycloneDX format", d.Algorithm()) + log.Debug("Unable to convert algorithm to CycloneDX format", log.Any("alg", d.Algorithm())) continue } @@ -390,7 +390,7 @@ func (*Marshaler) cwes(cweIDs []string) *[]int { for _, cweID := range cweIDs { number, err := strconv.Atoi(strings.TrimPrefix(strings.ToLower(cweID), "cwe-")) if err != nil { - log.Logger.Debugf("cwe id parse error: %s", err) + log.Debug("CWE-ID parse error", log.Err(err)) continue } ret = append(ret, number) diff --git a/pkg/sbom/cyclonedx/unmarshal.go b/pkg/sbom/cyclonedx/unmarshal.go index 8821fe8b111a..9450a78a455c 100644 --- a/pkg/sbom/cyclonedx/unmarshal.go +++ b/pkg/sbom/cyclonedx/unmarshal.go @@ -9,7 +9,6 @@ import ( cdx "github.com/CycloneDX/cyclonedx-go" "github.com/package-url/packageurl-go" "github.com/samber/lo" - "go.uber.org/zap" "golang.org/x/xerrors" "github.com/aquasecurity/trivy/pkg/digest" @@ -35,7 +34,7 @@ func DecodeJSON(r io.Reader) (*cdx.BOM, error) { } func (b *BOM) UnmarshalJSON(data []byte) error { - log.Logger.Debug("Unmarshalling CycloneDX JSON...") + log.Debug("Unmarshalling CycloneDX JSON...") if b.BOM == nil { b.BOM = core.NewBOM(core.Options{GenerateBOMRef: true}) } @@ -46,8 +45,8 @@ func (b *BOM) UnmarshalJSON(data []byte) error { } if !IsTrivySBOM(cdxBOM) { - 
log.Logger.Warnf("Third-party SBOM may lead to inaccurate vulnerability detection") - log.Logger.Warnf("Recommend using Trivy to generate SBOMs") + log.Warn("Third-party SBOM may lead to inaccurate vulnerability detection") + log.Warn("Recommend using Trivy to generate SBOMs") } if err = b.parseBOM(cdxBOM); err != nil { @@ -108,11 +107,11 @@ func (b *BOM) parseComponents(cdxComponents *[]cdx.Component) map[string]*core.C for _, component := range lo.FromPtr(cdxComponents) { c, err := b.parseComponent(component) if errors.Is(err, ErrUnsupportedType) { - log.Logger.Infow("Skipping the component with the unsupported type", - zap.String("bom-ref", component.BOMRef), zap.String("type", string(component.Type))) + log.Info("Skipping the component with the unsupported type", + log.String("bom-ref", component.BOMRef), log.String("type", string(component.Type))) continue } else if err != nil { - log.Logger.Warnw("Failed to parse component: %s", zap.Error(err)) + log.Warn("Failed to parse component", log.Err(err)) continue } @@ -217,7 +216,7 @@ func (b *BOM) unmarshalHashes(hashes *[]cdx.Hash) []digest.Digest { case cdx.HashAlgoMD5: alg = digest.MD5 default: - log.Logger.Warnf("Unsupported hash algorithm: %s", h.Algorithm) + log.Warn("Unsupported hash algorithm", log.String("algorithm", string(h.Algorithm))) } digests = append(digests, digest.NewDigestFromString(alg, h.Value)) } diff --git a/pkg/sbom/io/decode.go b/pkg/sbom/io/decode.go index af61f41b5a8c..3ebc06d30362 100644 --- a/pkg/sbom/io/decode.go +++ b/pkg/sbom/io/decode.go @@ -9,7 +9,6 @@ import ( debver "github.com/knqyf263/go-deb-version" rpmver "github.com/knqyf263/go-rpm-version" "github.com/package-url/packageurl-go" - "go.uber.org/zap" "golang.org/x/exp/maps" "golang.org/x/xerrors" @@ -178,15 +177,15 @@ func (m *Decoder) decodeApplication(c *core.Component) *ftypes.Application { func (m *Decoder) decodeLibrary(c *core.Component) (*ftypes.Package, error) { p := (*purl.PackageURL)(c.PkgID.PURL) if p == nil { - 
log.Logger.Debugw("Skipping a component without PURL", - zap.String("name", c.Name), zap.String("version", c.Version)) + log.Debug("Skipping a component without PURL", + log.String("name", c.Name), log.String("version", c.Version)) return nil, ErrPURLEmpty } pkg := p.Package() if p.Class() == types.ClassUnknown { - log.Logger.Debugw("Skipping a component with an unsupported type", - zap.String("name", c.Name), zap.String("version", c.Version), zap.String("type", p.Type)) + log.Debug("Skipping a component with an unsupported type", + log.String("name", c.Name), log.String("version", c.Version), log.String("type", p.Type)) return nil, ErrUnsupportedType } pkg.Name = m.pkgName(pkg, c) @@ -292,7 +291,7 @@ func (m *Decoder) parseSrcVersion(pkg *ftypes.Package, ver string) { case packageurl.TypeDebian: v, err := debver.NewVersion(ver) if err != nil { - log.Logger.Debugw("Failed to parse Debian version", zap.Error(err)) + log.Debug("Failed to parse Debian version", log.Err(err)) return } pkg.SrcEpoch = v.Epoch() @@ -356,7 +355,7 @@ func (m *Decoder) addOrphanPkgs(sbom *types.SBOM) error { // Add OS packages only when OS is detected. 
for _, pkgs := range osPkgMap { if sbom.Metadata.OS == nil || !sbom.Metadata.OS.Detected() { - log.Logger.Warn("Ignore the OS package as no OS is detected.") + log.Warn("Ignore the OS package as no OS is detected.") break } diff --git a/pkg/sbom/spdx/marshal.go b/pkg/sbom/spdx/marshal.go index 6c1490fe1aec..3f72c6d69a20 100644 --- a/pkg/sbom/spdx/marshal.go +++ b/pkg/sbom/spdx/marshal.go @@ -531,7 +531,7 @@ func NormalizeLicense(licenses []string) string { s, err := expression.Normalize(license, licensing.Normalize, expression.NormalizeForSPDX) if err != nil { // Not fail on the invalid license - log.Logger.Warnf("Unable to marshal SPDX licenses %q", license) + log.Warn("Unable to marshal SPDX licenses", log.String("license", license)) return "" } return s diff --git a/pkg/scanner/langpkg/scan.go b/pkg/scanner/langpkg/scan.go index 9480c8a13614..a0ea1de5e68e 100644 --- a/pkg/scanner/langpkg/scan.go +++ b/pkg/scanner/langpkg/scan.go @@ -52,7 +52,7 @@ func (s *scanner) Packages(target types.ScanTarget, _ types.ScanOptions) types.R func (s *scanner) Scan(target types.ScanTarget, _ types.ScanOptions) (types.Results, error) { apps := target.Applications - log.Logger.Infof("Number of language-specific files: %d", len(apps)) + log.Info("Number of language-specific files", log.Int("num", len(apps))) if len(apps) == 0 { return nil, nil } @@ -64,13 +64,15 @@ func (s *scanner) Scan(target types.ScanTarget, _ types.ScanOptions) (types.Resu continue } + logger := log.WithPrefix(string(app.Type)) + // Prevent the same log messages from being displayed many times for the same type. 
if _, ok := printedTypes[app.Type]; !ok { - log.Logger.Infof("Detecting %s vulnerabilities...", app.Type) + logger.Info("Detecting vulnerabilities...") printedTypes[app.Type] = struct{}{} } - log.Logger.Debugf("Detecting library vulnerabilities, type: %s, path: %s", app.Type, app.FilePath) + logger.Debug("Scanning packages from the file", log.String("file_path", app.FilePath)) vulns, err := library.Detect(app.Type, app.Libraries) if err != nil { return nil, xerrors.Errorf("failed vulnerability detection of libraries: %w", err) diff --git a/pkg/scanner/local/scan.go b/pkg/scanner/local/scan.go index b5437549c5cc..19f673f54158 100644 --- a/pkg/scanner/local/scan.go +++ b/pkg/scanner/local/scan.go @@ -63,7 +63,7 @@ func (s Scanner) Scan(ctx context.Context, targetName, artifactKey string, blobK detail, err := s.applier.ApplyLayers(artifactKey, blobKeys) switch { case errors.Is(err, analyzer.ErrUnknownOS): - log.Logger.Debug("OS is not detected.") + log.Debug("OS is not detected.") // Packages may contain OS-independent binary information even though OS is not detected. if len(detail.Packages) != 0 { @@ -72,16 +72,18 @@ func (s Scanner) Scan(ctx context.Context, targetName, artifactKey string, blobK // If OS is not detected and repositories are detected, we'll try to use repositories as OS. 
if detail.Repository != nil { - log.Logger.Debugf("Package repository: %s %s", detail.Repository.Family, detail.Repository.Release) - log.Logger.Debugf("Assuming OS is %s %s.", detail.Repository.Family, detail.Repository.Release) + log.Debug("Package repository", log.String("family", string(detail.Repository.Family)), + log.String("version", detail.Repository.Release)) + log.Debug("Assuming OS", log.String("family", string(detail.Repository.Family)), + log.String("version", detail.Repository.Release)) detail.OS = ftypes.OS{ Family: detail.Repository.Family, Name: detail.Repository.Release, } } case errors.Is(err, analyzer.ErrNoPkgsDetected): - log.Logger.Warn("No OS package is detected. Make sure you haven't deleted any files that contain information about the installed packages.") - log.Logger.Warn(`e.g. files under "/lib/apk/db/", "/var/lib/dpkg/" and "/var/lib/rpm"`) + log.Warn("No OS package is detected. Make sure you haven't deleted any files that contain information about the installed packages.") + log.Warn(`e.g. 
files under "/lib/apk/db/", "/var/lib/dpkg/" and "/var/lib/rpm"`) case err != nil: return nil, ftypes.OS{}, xerrors.Errorf("failed to apply layers: %w", err) } @@ -222,10 +224,10 @@ func (s Scanner) misconfsToResults(misconfs []ftypes.Misconfiguration, options t // MisconfsToResults is exported for trivy-plugin-aqua purposes only func (s Scanner) MisconfsToResults(misconfs []ftypes.Misconfiguration) types.Results { - log.Logger.Infof("Detected config files: %d", len(misconfs)) + log.Info("Detected config files", log.Int("num", len(misconfs))) var results types.Results for _, misconf := range misconfs { - log.Logger.Debugf("Scanned config file: %s", misconf.FilePath) + log.Debug("Scanned config file", log.String("path", misconf.FilePath)) var detected []types.DetectedMisconfiguration @@ -264,7 +266,7 @@ func (s Scanner) secretsToResults(secrets []ftypes.Secret, options types.ScanOpt var results types.Results for _, secret := range secrets { - log.Logger.Debugf("Secret file: %s", secret.FilePath) + log.Debug("Secret file", log.String("path", secret.FilePath)) results = append(results, types.Result{ Target: secret.FilePath, @@ -367,7 +369,7 @@ func toDetectedMisconfiguration(res ftypes.MisconfResult, defaultSeverity dbType severity := defaultSeverity sev, err := dbTypes.NewSeverity(res.Severity) if err != nil { - log.Logger.Warnf("severity must be %s, but %s", dbTypes.SeverityNames, res.Severity) + log.Warn("Unsupported severity", log.String("severity", res.Severity)) } else { severity = sev } @@ -429,7 +431,7 @@ func excludeDevDeps(apps []ftypes.Application, include bool) { } onceInfo := sync.OnceFunc(func() { - log.Logger.Info("Suppressing dependencies for development and testing. To display them, try the '--include-dev-deps' flag.") + log.Info("Suppressing dependencies for development and testing. 
To display them, try the '--include-dev-deps' flag.") }) for i := range apps { apps[i].Libraries = lo.Filter(apps[i].Libraries, func(lib ftypes.Package, index int) bool { diff --git a/pkg/scanner/ospkg/scan.go b/pkg/scanner/ospkg/scan.go index ebc94b1dab9c..8edfc1b1d786 100644 --- a/pkg/scanner/ospkg/scan.go +++ b/pkg/scanner/ospkg/scan.go @@ -41,10 +41,11 @@ func (s *scanner) Packages(target types.ScanTarget, _ types.ScanOptions) types.R func (s *scanner) Scan(ctx context.Context, target types.ScanTarget, _ types.ScanOptions) (types.Result, bool, error) { if !target.OS.Detected() { - log.Logger.Debug("Detected OS: unknown") + log.Debug("Detected OS: unknown") return types.Result{}, false, nil } - log.Logger.Infof("Detected OS: %s", target.OS.Family) + log.Info("Detected OS", log.String("family", + string(target.OS.Family)), log.String("version", target.OS.Name)) if target.OS.Extended { // TODO: move the logic to each detector diff --git a/pkg/scanner/scan.go b/pkg/scanner/scan.go index 4cf647b66d13..4964ef1f2ad5 100644 --- a/pkg/scanner/scan.go +++ b/pkg/scanner/scan.go @@ -149,7 +149,8 @@ func (s Scanner) ScanArtifact(ctx context.Context, options types.ScanOptions) (t } defer func() { if err := s.artifact.Clean(artifactInfo); err != nil { - log.Logger.Warnf("Failed to clean the artifact %q: %v", artifactInfo.Name, err) + log.Warn("Failed to clean the artifact", + log.String("artifact", artifactInfo.Name), log.Err(err)) } }() @@ -160,8 +161,9 @@ func (s Scanner) ScanArtifact(ctx context.Context, options types.ScanOptions) (t ptros := &osFound if osFound.Detected() && osFound.Eosl { - log.Logger.Warnf("This OS version is no longer supported by the distribution: %s %s", osFound.Family, osFound.Name) - log.Logger.Warnf("The vulnerability detection may be insufficient because security updates are not provided") + log.Warn("This OS version is no longer supported by the distribution", + log.String("family", string(osFound.Family)), log.String("version", osFound.Name)) + 
log.Warn("The vulnerability detection may be insufficient because security updates are not provided") } else if !osFound.Detected() { ptros = nil } diff --git a/pkg/utils/fsutils/fs.go b/pkg/utils/fsutils/fs.go index 8e15a575a753..915581f08ad9 100644 --- a/pkg/utils/fsutils/fs.go +++ b/pkg/utils/fsutils/fs.go @@ -7,7 +7,6 @@ import ( "os" "path/filepath" - "go.uber.org/zap" "golang.org/x/exp/slices" "golang.org/x/xerrors" @@ -104,7 +103,7 @@ func WalkDir(fsys fs.FS, root string, required WalkDirRequiredFunc, fn WalkDirFu defer f.Close() if err = fn(path, d, f); err != nil { - log.Logger.Debugw("Walk error", zap.String("file_path", path), zap.Error(err)) + log.Debug("Walk error", log.String("file_path", path), log.Err(err)) } return nil }) diff --git a/pkg/version/version.go b/pkg/version/version.go index 421fff6f1e6e..54914c563c87 100644 --- a/pkg/version/version.go +++ b/pkg/version/version.go @@ -55,7 +55,7 @@ func NewVersionInfo(cacheDir string) VersionInfo { mc := metadata.NewClient(cacheDir) meta, err := mc.Get() if err != nil { - log.Logger.Debugw("Failed to get DB metadata", "error", err) + log.Debug("Failed to get DB metadata", log.Err(err)) } if !meta.UpdatedAt.IsZero() && !meta.NextUpdate.IsZero() && meta.Version != 0 { dbMeta = &metadata.Metadata{ @@ -69,7 +69,7 @@ func NewVersionInfo(cacheDir string) VersionInfo { mcJava := javadb.NewMetadata(filepath.Join(cacheDir, "java-db")) metaJava, err := mcJava.Get() if err != nil { - log.Logger.Debugw("Failed to get Java DB metadata", "error", err) + log.Debug("Failed to get Java DB metadata", log.Err(err)) } if !metaJava.UpdatedAt.IsZero() && !metaJava.NextUpdate.IsZero() && metaJava.Version != 0 { javadbMeta = &metadata.Metadata{ @@ -83,13 +83,13 @@ func NewVersionInfo(cacheDir string) VersionInfo { var pbMeta *policy.Metadata pc, err := policy.NewClient(cacheDir, false, "") if err != nil { - log.Logger.Debugw("Failed to instantiate policy client", "error", err) + log.Debug("Failed to instantiate policy 
client", log.Err(err)) } if pc != nil && err == nil { pbMetaRaw, err := pc.GetMetadata() if err != nil { - log.Logger.Debugw("Failed to get policy metadata", "error", err) + log.Debug("Failed to get policy metadata", log.Err(err)) } else { pbMeta = &policy.Metadata{ Digest: pbMetaRaw.Digest, diff --git a/pkg/vex/csaf.go b/pkg/vex/csaf.go index d5d68f76adb9..3e43503b042b 100644 --- a/pkg/vex/csaf.go +++ b/pkg/vex/csaf.go @@ -4,7 +4,6 @@ import ( csaf "github.com/csaf-poc/csaf_distribution/v3/csaf" "github.com/package-url/packageurl-go" "github.com/samber/lo" - "go.uber.org/zap" "github.com/aquasecurity/trivy/pkg/log" "github.com/aquasecurity/trivy/pkg/purl" @@ -14,13 +13,13 @@ import ( type CSAF struct { advisory csaf.Advisory - logger *zap.SugaredLogger + logger *log.Logger } func newCSAF(advisory csaf.Advisory) VEX { return &CSAF{ advisory: advisory, - logger: log.Logger.With(zap.String("VEX format", "CSAF")), + logger: log.WithPrefix("vex").With(log.String("format", "CSAF")), } } @@ -63,17 +62,17 @@ func (v *CSAF) match(vuln *csaf.Vulnerability, pkgURL *packageurl.PackageURL) ty for status, productRange := range productStatusMap { for _, product := range productRange { if matchProduct(v.getProductPurls(lo.FromPtr(product)), pkgURL) { - v.logger.Infow("Filtered out the detected vulnerability", - zap.String("vulnerability-id", string(*vuln.CVE)), - zap.String("status", string(status))) + v.logger.Info("Filtered out the detected vulnerability", + log.String("vulnerability-id", string(*vuln.CVE)), + log.String("status", string(status))) return status } for relationship, purls := range v.inspectProductRelationships(lo.FromPtr(product)) { if matchProduct(purls, pkgURL) { - v.logger.Warnw("Filtered out the detected vulnerability", - zap.String("vulnerability-id", string(*vuln.CVE)), - zap.String("status", string(status)), - zap.String("relationship", string(relationship))) + v.logger.Warn("Filtered out the detected vulnerability", + log.String("vulnerability-id", 
string(*vuln.CVE)), + log.String("status", string(status)), + log.String("relationship", string(relationship))) return status } } @@ -130,7 +129,7 @@ func purlsFromProductIdentificationHelpers(helpers []*csaf.ProductIdentification } p, err := purl.FromString(string(*helper.PURL)) if err != nil { - log.Logger.Errorw("Invalid PURL", zap.String("purl", string(*helper.PURL)), zap.Error(err)) + log.Error("Invalid PURL", log.String("purl", string(*helper.PURL)), log.Err(err)) return nil, false } return p, true diff --git a/pkg/vex/cyclonedx.go b/pkg/vex/cyclonedx.go index 685fefebf304..7bee16d32c81 100644 --- a/pkg/vex/cyclonedx.go +++ b/pkg/vex/cyclonedx.go @@ -3,7 +3,6 @@ package vex import ( cdx "github.com/CycloneDX/cyclonedx-go" "github.com/samber/lo" - "go.uber.org/zap" "github.com/aquasecurity/trivy/pkg/log" "github.com/aquasecurity/trivy/pkg/sbom/core" @@ -13,7 +12,7 @@ import ( type CycloneDX struct { sbom *core.BOM statements []Statement - logger *zap.SugaredLogger + logger *log.Logger } type Statement struct { @@ -41,7 +40,7 @@ func newCycloneDX(sbom *core.BOM, vex *cdx.BOM) *CycloneDX { return &CycloneDX{ sbom: sbom, statements: stmts, - logger: log.Logger.With(zap.String("VEX format", "CycloneDX")), + logger: log.WithPrefix("vex").With(log.String("format", "CycloneDX")), } } @@ -67,13 +66,13 @@ func (v *CycloneDX) affected(vuln types.DetectedVulnerability, stmt Statement) b // Affect must be BOM-Link at the moment link, err := cdx.ParseBOMLink(affect) if err != nil { - v.logger.Warnw("Unable to parse BOM-Link", zap.String("affect", affect)) + v.logger.Warn("Unable to parse BOM-Link", log.String("affect", affect)) continue } if v.sbom.SerialNumber != link.SerialNumber() || v.sbom.Version != link.Version() { - v.logger.Warnw("URN doesn't match with SBOM", - zap.String("serial number", link.SerialNumber()), - zap.Int("version", link.Version())) + v.logger.Warn("URN doesn't match with SBOM", + log.String("serial number", link.SerialNumber()), + 
log.Int("version", link.Version())) continue } if vuln.PkgIdentifier.Match(link.Reference()) && (stmt.Status == types.FindingStatusNotAffected || stmt.Status == types.FindingStatusFixed) { diff --git a/pkg/vulnerability/vulnerability.go b/pkg/vulnerability/vulnerability.go index 504d9293e873..56dfb7e1f1b3 100644 --- a/pkg/vulnerability/vulnerability.go +++ b/pkg/vulnerability/vulnerability.go @@ -72,7 +72,7 @@ func (c Client) FillInfo(vulns []types.DetectedVulnerability) { vulnID := vulns[i].VulnerabilityID vuln, err := c.dbc.GetVulnerability(vulnID) if err != nil { - log.Logger.Warnf("Error while getting vulnerability details: %s", err) + log.Warn("Error while getting vulnerability details", log.Err(err)) continue } From 183eaafb4e42593c8e56fe3388519af7776b790c Mon Sep 17 00:00:00 2001 From: Nikita Pivkin Date: Fri, 12 Apr 2024 22:52:50 +0300 Subject: [PATCH 57/57] docs: fix links to OPA docs (#6480) --- docs/docs/advanced/air-gap.md | 2 +- docs/docs/scanner/misconfiguration/custom/index.md | 3 +-- docs/docs/scanner/misconfiguration/custom/schema.md | 2 +- 3 files changed, 3 insertions(+), 4 deletions(-) diff --git a/docs/docs/advanced/air-gap.md b/docs/docs/advanced/air-gap.md index 8793defbb5c5..3cb7eff65c9f 100644 --- a/docs/docs/advanced/air-gap.md +++ b/docs/docs/advanced/air-gap.md @@ -137,6 +137,6 @@ $ trivy conf --skip-policy-update /path/to/conf ``` [allowlist]: ../references/troubleshooting.md -[oras]: https://oras.land/cli/ +[oras]: https://oras.land/docs/installation [^1]: This is only required to scan `jar` files. More information about `Java index db` [here](../coverage/language/java.md) diff --git a/docs/docs/scanner/misconfiguration/custom/index.md b/docs/docs/scanner/misconfiguration/custom/index.md index b1c16219e8f8..8b08c5e41292 100644 --- a/docs/docs/scanner/misconfiguration/custom/index.md +++ b/docs/docs/scanner/misconfiguration/custom/index.md @@ -101,9 +101,8 @@ In this case, `user.*` will be evaluated. 
Any package prefixes such as `main` and `user` are allowed. ### Metadata -Metadata helps enrich Trivy's scan results with useful information. -The annotation format is described in the [OPA documentation](https://www.openpolicyagent.org/docs/latest/annotations/). +The check must contain a [Rego Metadata](https://www.openpolicyagent.org/docs/latest/policy-language/#metadata) section. Trivy uses standard rego metadata to define the new policy and general information about it. Trivy supports extra fields in the `custom` section as described below. diff --git a/docs/docs/scanner/misconfiguration/custom/schema.md b/docs/docs/scanner/misconfiguration/custom/schema.md index 99527ffd9920..ea5efeb7b3e2 100644 --- a/docs/docs/scanner/misconfiguration/custom/schema.md +++ b/docs/docs/scanner/misconfiguration/custom/schema.md @@ -88,4 +88,4 @@ To use such a policy with Trivy, use the `--config-policy` flag that points to t $ trivy --config-policy=/Users/user/my-custom-policies ``` -For more details on how to define schemas within Rego policies, please see the [OPA guide](https://www.openpolicyagent.org/docs/latest/schemas/#schema-annotations) that describes it in more detail. \ No newline at end of file +For more details on how to define schemas within Rego policies, please see the [OPA guide](https://www.openpolicyagent.org/docs/latest/policy-language/#schema-annotations) that describes it in more detail. \ No newline at end of file