diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 82d1f53e..d0720a3f 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -9,7 +9,7 @@ jobs: - uses: actions/checkout@main - uses: actions/setup-go@v2 with: - go-version: '1.14' + go-version: '1.15' - name: Run tests run: go test -v -covermode=count -coverprofile=profile.cov ./... - name: Send coverage report to coveralls diff --git a/.gitignore b/.gitignore index 38da0f8c..042880dd 100644 --- a/.gitignore +++ b/.gitignore @@ -5,4 +5,5 @@ .DS_Store scratch/* *.swp +profile.cov diff --git a/README.md b/README.md index 3dd0e732..f4a2200d 100644 --- a/README.md +++ b/README.md @@ -27,6 +27,8 @@ tools-golang provides the following packages: * *tvsaver* - tag-value document saver * *rdfloader* - RDF document loader * *json* - JSON document parser and writer +* *spreadsheet* - Spreadsheet (XLS/XLSX) parser and writer +* *yaml* - YAML document parser and writer * *builder* - builds "empty" SPDX document (with hashes) for directory contents * *idsearcher* - searches for [SPDX short-form IDs](https://spdx.org/ids/) and builds SPDX document * *licensediff* - compares concluded licenses between files in two packages diff --git a/builder/build_test.go b/builder/build_test.go index be4fd42b..072d2e2b 100644 --- a/builder/build_test.go +++ b/builder/build_test.go @@ -84,7 +84,7 @@ func TestBuild2_1CreatesDocument(t *testing.T) { if pkg.PackageDownloadLocation != "NOASSERTION" { t.Errorf("expected %v, got %v", "NOASSERTION", pkg.PackageDownloadLocation) } - if pkg.FilesAnalyzed != true { + if *pkg.FilesAnalyzed != true { t.Errorf("expected %v, got %v", true, pkg.FilesAnalyzed) } if pkg.PackageVerificationCode.Value != wantVerificationCode.Value { @@ -495,7 +495,7 @@ func TestBuild2_2CreatesDocument(t *testing.T) { if pkg.PackageDownloadLocation != "NOASSERTION" { t.Errorf("expected %v, got %v", "NOASSERTION", pkg.PackageDownloadLocation) } - if pkg.FilesAnalyzed != true { + if !*pkg.FilesAnalyzed { t.Errorf("expected %v, got %v", true, pkg.FilesAnalyzed) } if pkg.PackageVerificationCode.Value != wantVerificationCode.Value { diff --git a/builder/builder2v1/build_package.go b/builder/builder2v1/build_package.go index f39bb536..d28e97d6 100644 --- a/builder/builder2v1/build_package.go +++ b/builder/builder2v1/build_package.go @@ -57,13 +57,13 @@ func BuildPackageSection2_1(packageName string, dirRoot string, pathsIgnore []st return nil, err } + analyzed := true // now build the package section pkg := &spdx.Package2_1{ PackageName: packageName, PackageSPDXIdentifier: spdx.ElementID(fmt.Sprintf("Package-%s", packageName)), PackageDownloadLocation: "NOASSERTION", - FilesAnalyzed: true, - IsFilesAnalyzedTagPresent: true, + FilesAnalyzed: &analyzed, PackageVerificationCode: code, PackageLicenseConcluded: "NOASSERTION", PackageLicenseInfoFromFiles: []string{}, diff --git a/builder/builder2v1/build_package_test.go b/builder/builder2v1/build_package_test.go index 14586371..b9ef7cb8 100644 --- a/builder/builder2v1/build_package_test.go +++ b/builder/builder2v1/build_package_test.go @@ -32,12 +32,9 @@ func TestBuilder2_1CanBuildPackageSection(t *testing.T) { if pkg.PackageDownloadLocation != "NOASSERTION" { t.Errorf("expected %v, got %v", "NOASSERTION", pkg.PackageDownloadLocation) } - if pkg.FilesAnalyzed != true { + if *pkg.FilesAnalyzed != true { t.Errorf("expected %v, got %v", true, pkg.FilesAnalyzed) } - if pkg.IsFilesAnalyzedTagPresent != true { - t.Errorf("expected %v, got %v", true, 
pkg.IsFilesAnalyzedTagPresent) - } if pkg.PackageVerificationCode.Value != wantVerificationCode.Value { t.Errorf("expected %v, got %v", wantVerificationCode, pkg.PackageVerificationCode) } diff --git a/builder/builder2v2/build_package.go b/builder/builder2v2/build_package.go index 9c460da8..a5834e37 100644 --- a/builder/builder2v2/build_package.go +++ b/builder/builder2v2/build_package.go @@ -59,12 +59,12 @@ func BuildPackageSection2_2(packageName string, dirRoot string, pathsIgnore []st } // now build the package section + truthy := true pkg := &spdx.Package2_2{ PackageName: packageName, PackageSPDXIdentifier: spdx.ElementID(fmt.Sprintf("Package-%s", packageName)), PackageDownloadLocation: "NOASSERTION", - FilesAnalyzed: true, - IsFilesAnalyzedTagPresent: true, + FilesAnalyzed: &truthy, PackageVerificationCode: code, PackageLicenseConcluded: "NOASSERTION", PackageLicenseInfoFromFiles: []string{}, diff --git a/builder/builder2v2/build_package_test.go b/builder/builder2v2/build_package_test.go index 3ab88d24..c2d82d70 100644 --- a/builder/builder2v2/build_package_test.go +++ b/builder/builder2v2/build_package_test.go @@ -32,12 +32,9 @@ func TestBuilder2_2CanBuildPackageSection(t *testing.T) { if pkg.PackageDownloadLocation != "NOASSERTION" { t.Errorf("expected %v, got %v", "NOASSERTION", pkg.PackageDownloadLocation) } - if pkg.FilesAnalyzed != true { + if !*pkg.FilesAnalyzed { t.Errorf("expected %v, got %v", true, pkg.FilesAnalyzed) } - if pkg.IsFilesAnalyzedTagPresent != true { - t.Errorf("expected %v, got %v", true, pkg.IsFilesAnalyzedTagPresent) - } if pkg.PackageVerificationCode.Value != wantVerificationCode.Value { t.Errorf("expected %v, got %v", wantVerificationCode, pkg.PackageVerificationCode) } diff --git a/examples/1-load/example_load.go b/examples/1-load/example_load.go index 328d349c..1374660d 100644 --- a/examples/1-load/example_load.go +++ b/examples/1-load/example_load.go @@ -83,7 +83,7 @@ func main() { pkgID := pkg.PackageSPDXIdentifier // check whether the package had its files analyzed - if !pkg.FilesAnalyzed { + if pkg.FilesAnalyzed != nil && !*pkg.FilesAnalyzed { fmt.Printf("Package %s (%s) had FilesAnalyzed: false\n", string(pkgID), pkg.PackageName) continue } diff --git a/examples/5-report/example_report.go b/examples/5-report/example_report.go index 1197547a..da1337b5 100644 --- a/examples/5-report/example_report.go +++ b/examples/5-report/example_report.go @@ -75,7 +75,7 @@ func main() { pkgID := pkg.PackageSPDXIdentifier // check whether the package had its files analyzed - if !pkg.FilesAnalyzed { + if !*pkg.FilesAnalyzed { fmt.Printf("Package %s (%s) had FilesAnalyzed: false\n", string(pkgID), pkg.PackageName) return } diff --git a/go.mod b/go.mod index 213c1297..1a546af4 100644 --- a/go.mod +++ b/go.mod @@ -5,4 +5,6 @@ go 1.13 require ( github.com/google/go-cmp v0.5.7 github.com/spdx/gordf v0.0.0-20201111095634-7098f93598fb + github.com/xuri/excelize/v2 v2.6.0 + sigs.k8s.io/yaml v1.3.0 ) diff --git a/go.sum b/go.sum index 4355c263..83c095a6 100644 --- a/go.sum +++ b/go.sum @@ -1,6 +1,51 @@ +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/google/go-cmp v0.5.7 h1:81/ik6ipDQS2aGcBfIN5dHDB36BwrStyeAQquSYCV4o= github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= +github.com/mohae/deepcopy 
v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw= +github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/richardlehane/mscfb v1.0.4 h1:WULscsljNPConisD5hR0+OyZjwK46Pfyr6mPu5ZawpM= +github.com/richardlehane/mscfb v1.0.4/go.mod h1:YzVpcZg9czvAuhk9T+a3avCpcFPMUWm7gK3DypaEsUk= +github.com/richardlehane/msoleps v1.0.1 h1:RfrALnSNXzmXLbGct/P2b4xkFz4e8Gmj/0Vj9M9xC1o= +github.com/richardlehane/msoleps v1.0.1/go.mod h1:BWev5JBpU9Ko2WAgmZEuiz4/u3ZYTKbjLycmwiWUfWg= github.com/spdx/gordf v0.0.0-20201111095634-7098f93598fb h1:bLo8hvc8XFm9J47r690TUKBzcjSWdJDxmjXJZ+/f92U= github.com/spdx/gordf v0.0.0-20201111095634-7098f93598fb/go.mod h1:uKWaldnbMnjsSAXRurWqqrdyZen1R7kxl8TkmWk2OyM= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/xuri/efp v0.0.0-20220407160117-ad0f7a785be8 h1:3X7aE0iLKJ5j+tz58BpvIZkXNV7Yq4jC93Z/rbN2Fxk= +github.com/xuri/efp v0.0.0-20220407160117-ad0f7a785be8/go.mod h1:ybY/Jr0T0GTCnYjKqmdwxyxn2BQf2RcQIIvex5QldPI= +github.com/xuri/excelize/v2 v2.6.0 h1:m/aXAzSAqxgt74Nfd+sNzpzVKhTGl7+S9nbG4A57mF4= +github.com/xuri/excelize/v2 v2.6.0/go.mod h1:Q1YetlHesXEKwGFfeJn7PfEZz2IvHb6wdOeYjBxVcVs= +github.com/xuri/nfp v0.0.0-20220409054826-5e722a1d9e22 h1:OAmKAfT06//esDdpi/DZ8Qsdt4+M5+ltca05dA5bG2M= +github.com/xuri/nfp v0.0.0-20220409054826-5e722a1d9e22/go.mod h1:WwHg+CVyzlv/TX9xqBFXEZAuxOPxn2k1GNHwG41IIUQ= +golang.org/x/crypto v0.0.0-20220408190544-5352b0902921 h1:iU7T1X1J6yxDr0rda54sWGkHgOp5XJrqm79gcNlC2VM= +golang.org/x/crypto v0.0.0-20220408190544-5352b0902921/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/image v0.0.0-20211028202545-6944b10bf410 h1:hTftEOvwiOq2+O8k2D5/Q7COC7k5Qcrgc2TFURJYnvQ= +golang.org/x/image v0.0.0-20211028202545-6944b10bf410/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM= +golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220407224826-aac1ed45d8e3 h1:EN5+DfgmRMvRUrMGERW2gQl3Vc+Z7ZMnI/xdEpPSf0c= +golang.org/x/net v0.0.0-20220407224826-aac1ed45d8e3/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod 
h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +sigs.k8s.io/yaml v1.3.0 h1:a2VclLzOGrwOHDiV8EfBGhvjHvP46CtW5j6POvhYGGo= +sigs.k8s.io/yaml v1.3.0/go.mod h1:GeOyir5tyXNByN85N/dRIT9es5UQNerPYEKK56eTBm8= diff --git a/json/json_test.go b/json/json_test.go index c78013cb..86f8d07a 100644 --- a/json/json_test.go +++ b/json/json_test.go @@ -55,13 +55,16 @@ func TestWrite2_2(t *testing.T) { } } +var truthy = true +var falsy = false + // want is handwritten translation of the official example JSON SPDX v2.2 document into a Go struct. // We expect that the result of parsing the official document should be this value. // We expect that the result of writing this struct should match the official example document. var want = spdx.Document2_2{ DataLicense: "CC0-1.0", SPDXVersion: "SPDX-2.2", - SPDXIdentifier: "SPDXRef-DOCUMENT", + SPDXIdentifier: "DOCUMENT", DocumentName: "SPDX-Tools-v2.0", DocumentNamespace: "http://spdx.org/spdxdocs/spdx-example-444504E0-4F89-41D3-9A0C-0305E82C3301", CreationInfo: &spdx.CreationInfo2_2{ @@ -77,7 +80,7 @@ var want = spdx.Document2_2{ DocumentComment: "This document was created using SPDX 2.0 using licenses from the web site.", ExternalDocumentReferences: []spdx.ExternalDocumentRef2_2{ { - DocumentRefID: "DocumentRef-spdx-tool-1.2", + DocumentRefID: spdx.MakeDocElementID("spdx-tool-1.2", ""), URI: "http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301", Checksum: spdx.Checksum{ Algorithm: spdx.SHA1, @@ -148,7 +151,7 @@ var want = spdx.Document2_2{ Packages: []*spdx.Package2_2{ { PackageName: "glibc", - PackageSPDXIdentifier: "SPDXRef-Package", + PackageSPDXIdentifier: "Package", PackageVersion: "2.11.1", PackageFileName: "glibc-2.11.1.tar.gz", PackageSupplier: &spdx.Supplier{ @@ -160,7 +163,7 @@ var want = spdx.Document2_2{ OriginatorType: "Organization", }, PackageDownloadLocation: "http://ftp.gnu.org/gnu/glibc/glibc-ports-2.15.tar.gz", - FilesAnalyzed: true, + FilesAnalyzed: &truthy, PackageVerificationCode: spdx.PackageVerificationCode{ Value: "d6a770ba38583ed4bb4525bd96e50461655d2758", ExcludedFiles: []string{"./package.spdx"}, @@ -223,10 +226,10 @@ var want = spdx.Document2_2{ }, }, { - PackageSPDXIdentifier: "SPDXRef-fromDoap-1", + PackageSPDXIdentifier: "fromDoap-1", PackageCopyrightText: "NOASSERTION", PackageDownloadLocation: "NOASSERTION", - FilesAnalyzed: false, + FilesAnalyzed: &falsy, PackageHomePage: "http://commons.apache.org/proper/commons-lang/", PackageLicenseConcluded: "NOASSERTION", PackageLicenseDeclared: "NOASSERTION", @@ -234,7 +237,7 @@ var want = spdx.Document2_2{ }, { PackageName: "Jena", - PackageSPDXIdentifier: "SPDXRef-fromDoap-0", + PackageSPDXIdentifier: "fromDoap-0", PackageCopyrightText: "NOASSERTION", PackageDownloadLocation: 
"https://search.maven.org/remotecontent?filepath=org/apache/jena/apache-jena/3.12.0/apache-jena-3.12.0.tar.gz", PackageExternalReferences: []*spdx.PackageExternalReference2_2{ @@ -244,14 +247,14 @@ var want = spdx.Document2_2{ Locator: "pkg:maven/org.apache.jena/apache-jena@3.12.0", }, }, - FilesAnalyzed: false, + FilesAnalyzed: &falsy, PackageHomePage: "http://www.openjena.org/", PackageLicenseConcluded: "NOASSERTION", PackageLicenseDeclared: "NOASSERTION", PackageVersion: "3.12.0", }, { - PackageSPDXIdentifier: "SPDXRef-Saxon", + PackageSPDXIdentifier: "Saxon", PackageChecksums: []spdx.Checksum{ { Algorithm: "SHA1", @@ -261,7 +264,7 @@ var want = spdx.Document2_2{ PackageCopyrightText: "Copyright Saxonica Ltd", PackageDescription: "The Saxon package is a collection of tools for processing XML documents.", PackageDownloadLocation: "https://sourceforge.net/projects/saxon/files/Saxon-B/8.8.0.7/saxonb8-8-0-7j.zip/download", - FilesAnalyzed: false, + FilesAnalyzed: &falsy, PackageHomePage: "http://saxon.sourceforge.net/", PackageLicenseComments: "Other versions available for a commercial license", PackageLicenseConcluded: "MPL-1.0", @@ -274,7 +277,7 @@ var want = spdx.Document2_2{ Files: []*spdx.File2_2{ { FileName: "./src/org/spdx/parser/DOAPProject.java", - FileSPDXIdentifier: "SPDXRef-DoapSource", + FileSPDXIdentifier: "DoapSource", FileTypes: []string{ "SOURCE", }, @@ -298,7 +301,7 @@ var want = spdx.Document2_2{ }, }, { - FileSPDXIdentifier: "SPDXRef-CommonsLangSrc", + FileSPDXIdentifier: "CommonsLangSrc", Checksums: []spdx.Checksum{ { Algorithm: "SHA1", @@ -315,7 +318,7 @@ var want = spdx.Document2_2{ FileNotice: "Apache Commons Lang\nCopyright 2001-2011 The Apache Software Foundation\n\nThis product includes software developed by\nThe Apache Software Foundation (http://www.apache.org/).\n\nThis product includes software from the Spring Framework,\nunder the Apache License 2.0 (see: StringUtils.containsWhitespace())", }, { - FileSPDXIdentifier: "SPDXRef-JenaLib", + FileSPDXIdentifier: "JenaLib", Checksums: []spdx.Checksum{ { Algorithm: "SHA1", @@ -332,7 +335,7 @@ var want = spdx.Document2_2{ LicenseInfoInFiles: []string{"LicenseRef-1"}, }, { - FileSPDXIdentifier: "SPDXRef-File", + FileSPDXIdentifier: "File", Annotations: []spdx.Annotation2_2{ { Annotator: spdx.Annotator{ @@ -367,27 +370,27 @@ var want = spdx.Document2_2{ }, Snippets: []spdx.Snippet2_2{ { - SnippetSPDXIdentifier: "SPDXRef-Snippet", - SnippetFromFileSPDXIdentifier: "SPDXRef-DoapSource", + SnippetSPDXIdentifier: "Snippet", + SnippetFromFileSPDXIdentifier: "DoapSource", Ranges: []spdx.SnippetRange{ { StartPointer: spdx.SnippetRangePointer{ Offset: 310, - FileSPDXIdentifier: "SPDXRef-DoapSource", + FileSPDXIdentifier: "DoapSource", }, EndPointer: spdx.SnippetRangePointer{ Offset: 420, - FileSPDXIdentifier: "SPDXRef-DoapSource", + FileSPDXIdentifier: "DoapSource", }, }, { StartPointer: spdx.SnippetRangePointer{ LineNumber: 5, - FileSPDXIdentifier: "SPDXRef-DoapSource", + FileSPDXIdentifier: "DoapSource", }, EndPointer: spdx.SnippetRangePointer{ LineNumber: 23, - FileSPDXIdentifier: "SPDXRef-DoapSource", + FileSPDXIdentifier: "DoapSource", }, }, }, diff --git a/licensediff/licensediff_test.go b/licensediff/licensediff_test.go index 2142efc0..d8afec5f 100644 --- a/licensediff/licensediff_test.go +++ b/licensediff/licensediff_test.go @@ -116,13 +116,13 @@ func Test2_1DifferCanCreateDiffPairs(t *testing.T) { FileCopyrightText: "NOASSERTION", } + truthy := true // create Packages p1 := &spdx.Package2_1{ - PackageName: "p1", - 
PackageSPDXIdentifier: spdx.ElementID("p1"), - PackageDownloadLocation: "NOASSERTION", - FilesAnalyzed: true, - IsFilesAnalyzedTagPresent: true, + PackageName: "p1", + PackageSPDXIdentifier: spdx.ElementID("p1"), + PackageDownloadLocation: "NOASSERTION", + FilesAnalyzed: &truthy, // fake the verification code for present purposes PackageVerificationCode: spdx.PackageVerificationCode{Value: "abc123abc123"}, PackageLicenseConcluded: "NOASSERTION", @@ -140,11 +140,10 @@ func Test2_1DifferCanCreateDiffPairs(t *testing.T) { }, } p2 := &spdx.Package2_1{ - PackageName: "p2", - PackageSPDXIdentifier: spdx.ElementID("p2"), - PackageDownloadLocation: "NOASSERTION", - FilesAnalyzed: true, - IsFilesAnalyzedTagPresent: true, + PackageName: "p2", + PackageSPDXIdentifier: spdx.ElementID("p2"), + PackageDownloadLocation: "NOASSERTION", + FilesAnalyzed: &truthy, // fake the verification code for present purposes PackageVerificationCode: spdx.PackageVerificationCode{Value: "def456def456"}, PackageLicenseConcluded: "NOASSERTION", @@ -355,13 +354,13 @@ func Test2_1DifferCanCreateDiffStructuredResults(t *testing.T) { FileCopyrightText: "NOASSERTION", } + truthy := true // create Packages p1 := &spdx.Package2_1{ - PackageName: "p1", - PackageSPDXIdentifier: spdx.ElementID("p1"), - PackageDownloadLocation: "NOASSERTION", - FilesAnalyzed: true, - IsFilesAnalyzedTagPresent: true, + PackageName: "p1", + PackageSPDXIdentifier: spdx.ElementID("p1"), + PackageDownloadLocation: "NOASSERTION", + FilesAnalyzed: &truthy, // fake the verification code for present purposes PackageVerificationCode: spdx.PackageVerificationCode{Value: "abc123abc123"}, PackageLicenseConcluded: "NOASSERTION", @@ -379,11 +378,10 @@ func Test2_1DifferCanCreateDiffStructuredResults(t *testing.T) { }, } p2 := &spdx.Package2_1{ - PackageName: "p2", - PackageSPDXIdentifier: spdx.ElementID("p2"), - PackageDownloadLocation: "NOASSERTION", - FilesAnalyzed: true, - IsFilesAnalyzedTagPresent: true, + PackageName: "p2", + PackageSPDXIdentifier: spdx.ElementID("p2"), + PackageDownloadLocation: "NOASSERTION", + FilesAnalyzed: &truthy, // fake the verification code for present purposes PackageVerificationCode: spdx.PackageVerificationCode{Value: "def456def456"}, PackageLicenseConcluded: "NOASSERTION", @@ -638,13 +636,13 @@ func Test2_2DifferCanCreateDiffPairs(t *testing.T) { FileCopyrightText: "NOASSERTION", } + truthy := true // create Packages p1 := &spdx.Package2_2{ - PackageName: "p1", - PackageSPDXIdentifier: spdx.ElementID("p1"), - PackageDownloadLocation: "NOASSERTION", - FilesAnalyzed: true, - IsFilesAnalyzedTagPresent: true, + PackageName: "p1", + PackageSPDXIdentifier: spdx.ElementID("p1"), + PackageDownloadLocation: "NOASSERTION", + FilesAnalyzed: &truthy, // fake the verification code for present purposes PackageVerificationCode: spdx.PackageVerificationCode{Value: "abc123abc123"}, PackageLicenseConcluded: "NOASSERTION", @@ -662,11 +660,10 @@ func Test2_2DifferCanCreateDiffPairs(t *testing.T) { }, } p2 := &spdx.Package2_2{ - PackageName: "p2", - PackageSPDXIdentifier: spdx.ElementID("p2"), - PackageDownloadLocation: "NOASSERTION", - FilesAnalyzed: true, - IsFilesAnalyzedTagPresent: true, + PackageName: "p2", + PackageSPDXIdentifier: spdx.ElementID("p2"), + PackageDownloadLocation: "NOASSERTION", + FilesAnalyzed: &truthy, // fake the verification code for present purposes PackageVerificationCode: spdx.PackageVerificationCode{Value: "def456def456"}, PackageLicenseConcluded: "NOASSERTION", @@ -909,13 +906,13 @@ func 
Test2_2DifferCanCreateDiffStructuredResults(t *testing.T) { FileCopyrightText: "NOASSERTION", } + truthy := true // create Packages p1 := &spdx.Package2_2{ - PackageName: "p1", - PackageSPDXIdentifier: spdx.ElementID("p1"), - PackageDownloadLocation: "NOASSERTION", - FilesAnalyzed: true, - IsFilesAnalyzedTagPresent: true, + PackageName: "p1", + PackageSPDXIdentifier: spdx.ElementID("p1"), + PackageDownloadLocation: "NOASSERTION", + FilesAnalyzed: &truthy, // fake the verification code for present purposes PackageVerificationCode: spdx.PackageVerificationCode{Value: "abc123abc123"}, PackageLicenseConcluded: "NOASSERTION", @@ -933,11 +930,10 @@ func Test2_2DifferCanCreateDiffStructuredResults(t *testing.T) { }, } p2 := &spdx.Package2_2{ - PackageName: "p2", - PackageSPDXIdentifier: spdx.ElementID("p2"), - PackageDownloadLocation: "NOASSERTION", - FilesAnalyzed: true, - IsFilesAnalyzedTagPresent: true, + PackageName: "p2", + PackageSPDXIdentifier: spdx.ElementID("p2"), + PackageDownloadLocation: "NOASSERTION", + FilesAnalyzed: &truthy, // fake the verification code for present purposes PackageVerificationCode: spdx.PackageVerificationCode{Value: "def456def456"}, PackageLicenseConcluded: "NOASSERTION", diff --git a/rdfloader/parser2v2/parse_package.go b/rdfloader/parser2v2/parse_package.go index 41ccab30..f6110500 100644 --- a/rdfloader/parser2v2/parse_package.go +++ b/rdfloader/parser2v2/parse_package.go @@ -11,7 +11,8 @@ import ( ) func (parser *rdfParser2_2) getPackageFromNode(packageNode *gordfParser.Node) (pkg *spdx.Package2_2, err error) { - pkg = &spdx.Package2_2{} // new package which will be returned + truthy := true + pkg = &spdx.Package2_2{FilesAnalyzed: &truthy} // new package which will be returned currState := parser.cache[packageNode.ID] if currState == nil { @@ -310,10 +311,14 @@ func setDocumentLocationFromURI(pkg *spdx.Package2_2, locationURI string) error // sets the FilesAnalyzed attribute to the given package // boolValue is a string of type "true" or "false" -func setFilesAnalyzed(pkg *spdx.Package2_2, boolValue string) (err error) { - pkg.IsFilesAnalyzedTagPresent = true - pkg.FilesAnalyzed, err = boolFromString(boolValue) - return err +func setFilesAnalyzed(pkg *spdx.Package2_2, boolValue string) error { + value, err := boolFromString(boolValue) + if err != nil { + return err + } + + pkg.FilesAnalyzed = &value + return nil } func (parser *rdfParser2_2) setPackageChecksum(pkg *spdx.Package2_2, node *gordfParser.Node) error { diff --git a/rdfloader/parser2v2/parse_package_test.go b/rdfloader/parser2v2/parse_package_test.go index c1bc7ed6..18d5c24a 100644 --- a/rdfloader/parser2v2/parse_package_test.go +++ b/rdfloader/parser2v2/parse_package_test.go @@ -758,10 +758,7 @@ func Test_setFilesAnalyzed(t *testing.T) { if err != nil { t.Fatalf("unexpected error: %v", err) } - if !pkg.IsFilesAnalyzedTagPresent { - t.Errorf("should've set IsFilesAnalyzedTagPresent, got: %t", pkg.IsFilesAnalyzedTagPresent) - } - if !pkg.FilesAnalyzed { - t.Errorf("expected: %t, got: %t", true, pkg.FilesAnalyzed) + if !*pkg.FilesAnalyzed { + t.Errorf("expected: %t, got: %t", true, *pkg.FilesAnalyzed) } } diff --git a/rdfloader/parser2v2/parse_spdx_document.go b/rdfloader/parser2v2/parse_spdx_document.go index 61593172..17f16696 100644 --- a/rdfloader/parser2v2/parse_spdx_document.go +++ b/rdfloader/parser2v2/parse_spdx_document.go @@ -94,7 +94,7 @@ func (parser *rdfParser2_2) getExternalDocumentRefFromNode(node *gordfParser.Nod switch triple.Predicate.ID { case SPDX_EXTERNAL_DOCUMENT_ID: // 
cardinality: exactly 1 - edr.DocumentRefID = triple.Object.ID + edr.DocumentRefID = spdx.MakeDocElementID(triple.Object.ID, "") case SPDX_SPDX_DOCUMENT: // cardinality: exactly 1 // assumption: "spdxDocument" property of an external document diff --git a/reporter/reporter.go b/reporter/reporter.go index acb47b5b..d6bd783e 100644 --- a/reporter/reporter.go +++ b/reporter/reporter.go @@ -19,8 +19,8 @@ import ( // io.Writer, and outputs to the io.Writer a tabulated count of // the number of Files for each unique LicenseConcluded in the set. func Generate2_1(pkg *spdx.Package2_1, w io.Writer) error { - if pkg.FilesAnalyzed == false { - return fmt.Errorf("Package FilesAnalyzed is false") + if pkg.FilesAnalyzed != nil && !*pkg.FilesAnalyzed { + return fmt.Errorf("package FilesAnalyzed is false") } totalFound, totalNotFound, foundCounts := countLicenses2_1(pkg) @@ -82,7 +82,7 @@ func countLicenses2_1(pkg *spdx.Package2_1) (int, int, map[string]int) { // io.Writer, and outputs to the io.Writer a tabulated count of // the number of Files for each unique LicenseConcluded in the set. func Generate2_2(pkg *spdx.Package2_2, w io.Writer) error { - if pkg.FilesAnalyzed == false { + if pkg.FilesAnalyzed != nil && !*pkg.FilesAnalyzed { return fmt.Errorf("Package FilesAnalyzed is false") } totalFound, totalNotFound, foundCounts := countLicenses2_2(pkg) diff --git a/reporter/reporter_test.go b/reporter/reporter_test.go index eceeb7b0..22213289 100644 --- a/reporter/reporter_test.go +++ b/reporter/reporter_test.go @@ -11,8 +11,9 @@ import ( // ===== 2.1 Reporter top-level function tests ===== func Test2_1ReporterCanMakeReportFromPackage(t *testing.T) { + truthy := true pkg := &spdx.Package2_1{ - FilesAnalyzed: true, + FilesAnalyzed: &truthy, Files: []*spdx.File2_1{ {FileSPDXIdentifier: "File0", LicenseConcluded: "MIT"}, {FileSPDXIdentifier: "File1", LicenseConcluded: "NOASSERTION"}, @@ -54,8 +55,9 @@ func Test2_1ReporterCanMakeReportFromPackage(t *testing.T) { } func Test2_1ReporterReturnsErrorIfPackageFilesNotAnalyzed(t *testing.T) { + falsy := false pkg := &spdx.Package2_1{ - FilesAnalyzed: false, + FilesAnalyzed: &falsy, } // render as buffer of bytes @@ -69,8 +71,9 @@ func Test2_1ReporterReturnsErrorIfPackageFilesNotAnalyzed(t *testing.T) { // ===== 2.1 Utility functions ===== func Test2_1CanGetCountsOfLicenses(t *testing.T) { + truthy := true pkg := &spdx.Package2_1{ - FilesAnalyzed: true, + FilesAnalyzed: &truthy, Files: []*spdx.File2_1{ {FileSPDXIdentifier: "File0", LicenseConcluded: "MIT"}, {FileSPDXIdentifier: "File1", LicenseConcluded: "NOASSERTION"}, @@ -135,8 +138,9 @@ func Test2_1NilPackageReturnsZeroCountsOfLicenses(t *testing.T) { // ===== 2.2 Reporter top-level function tests ===== func Test2_2ReporterCanMakeReportFromPackage(t *testing.T) { + truthy := true pkg := &spdx.Package2_2{ - FilesAnalyzed: true, + FilesAnalyzed: &truthy, Files: []*spdx.File2_2{ {FileSPDXIdentifier: "File0", LicenseConcluded: "MIT"}, {FileSPDXIdentifier: "File1", LicenseConcluded: "NOASSERTION"}, @@ -178,8 +182,9 @@ func Test2_2ReporterCanMakeReportFromPackage(t *testing.T) { } func Test2_2ReporterReturnsErrorIfPackageFilesNotAnalyzed(t *testing.T) { + falsy := false pkg := &spdx.Package2_2{ - FilesAnalyzed: false, + FilesAnalyzed: &falsy, } // render as buffer of bytes @@ -193,8 +198,9 @@ func Test2_2ReporterReturnsErrorIfPackageFilesNotAnalyzed(t *testing.T) { // ===== 2.2 Utility functions ===== func Test2_2CanGetCountsOfLicenses(t *testing.T) { + truthy := true pkg := &spdx.Package2_2{ - FilesAnalyzed: true, + 
FilesAnalyzed: &truthy, Files: []*spdx.File2_2{ {FileSPDXIdentifier: "File0", LicenseConcluded: "MIT"}, {FileSPDXIdentifier: "File1", LicenseConcluded: "NOASSERTION"}, diff --git a/spdx/annotation.go b/spdx/annotation.go index 560b6f00..37958efd 100644 --- a/spdx/annotation.go +++ b/spdx/annotation.go @@ -14,17 +14,22 @@ type Annotator struct { AnnotatorType string } -// UnmarshalJSON takes an annotator in the typical one-line format and parses it into an Annotator struct. -// This function is also used when unmarshalling YAML -func (a *Annotator) UnmarshalJSON(data []byte) error { - // annotator will simply be a string - annotatorStr := string(data) - annotatorStr = strings.Trim(annotatorStr, "\"") +// Validate verifies that all the required fields are present. +// Returns an error if the object is invalid. +func (a Annotator) Validate() error { + if a.Annotator == "" || a.AnnotatorType == "" { + return fmt.Errorf("invalid Annotator, missing fields. %+v", a) + } - annotatorFields := strings.SplitN(annotatorStr, ": ", 2) + return nil +} + +// FromString parses an Annotator string into an Annotator struct. +func (a *Annotator) FromString(value string) error { + annotatorFields := strings.SplitN(value, ": ", 2) if len(annotatorFields) != 2 { - return fmt.Errorf("failed to parse Annotator '%s'", annotatorStr) + return fmt.Errorf("failed to parse Annotator '%s'", value) } a.AnnotatorType = annotatorFields[0] @@ -33,14 +38,29 @@ func (a *Annotator) UnmarshalJSON(data []byte) error { return nil } +// String converts the receiver into a string. +func (a Annotator) String() string { + return fmt.Sprintf("%s: %s", a.AnnotatorType, a.Annotator) +} + +// UnmarshalJSON takes an annotator in the typical one-line format and parses it into an Annotator struct. +// This function is also used when unmarshalling YAML +func (a *Annotator) UnmarshalJSON(data []byte) error { + // annotator will simply be a string + annotatorStr := string(data) + annotatorStr = strings.Trim(annotatorStr, "\"") + + return a.FromString(annotatorStr) +} + // MarshalJSON converts the receiver into a slice of bytes representing an Annotator in string form. // This function is also used when marshalling to YAML func (a Annotator) MarshalJSON() ([]byte, error) { - if a.Annotator != "" { - return json.Marshal(fmt.Sprintf("%s: %s", a.AnnotatorType, a.Annotator)) + if err := a.Validate(); err != nil { + return nil, err } - return []byte{}, nil + return json.Marshal(a.String()) } // Annotation2_1 is an Annotation section of an SPDX Document for version 2.1 of the spec. diff --git a/spdx/checksum.go b/spdx/checksum.go index 3295969a..43f43ceb 100644 --- a/spdx/checksum.go +++ b/spdx/checksum.go @@ -2,6 +2,11 @@ package spdx +import ( + "fmt" + "strings" +) + // ChecksumAlgorithm represents the algorithm used to generate the file checksum in the Checksum struct. type ChecksumAlgorithm string @@ -18,9 +23,69 @@ const ( MD6 ChecksumAlgorithm = "MD6" ) +// Validate verifies that the ChecksumAlgorithm is valid (i.e. 
that it is one of the known checksum types) +func (c ChecksumAlgorithm) Validate() error { + validChecksumAlgorithms := []ChecksumAlgorithm{ + SHA224, + SHA1, + SHA256, + SHA384, + SHA512, + MD2, + MD4, + MD5, + MD6, + } + + var found bool + for _, alg := range validChecksumAlgorithms { + if c == alg { + found = true + break + } + } + + if !found { + return fmt.Errorf("checksum algorithm %s is not supported", c) + } + + return nil +} + // Checksum provides a unique identifier to match analysis information on each specific file in a package. // The Algorithm field describes the ChecksumAlgorithm used and the Value represents the file checksum type Checksum struct { Algorithm ChecksumAlgorithm `json:"algorithm"` Value string `json:"checksumValue"` } + +// FromString parses a Checksum string into a spdx.Checksum. +// These strings take the following form: +// SHA1: d6a770ba38583ed4bb4525bd96e50461655d2759 +func (c *Checksum) FromString(value string) error { + fields := strings.Split(value, ": ") + if len(fields) != 2 { + return fmt.Errorf("invalid checksum: %s", value) + } + + c.Algorithm = ChecksumAlgorithm(fields[0]) + c.Value = fields[1] + + return nil +} + +// String converts the Checksum to its string form. +// e.g. "SHA1: d6a770ba38583ed4bb4525bd96e50461655d2759" +func (c Checksum) String() string { + return fmt.Sprintf("%s: %s", c.Algorithm, c.Value) +} + +// Validate verifies that all the required fields are present. +// Returns an error if the object is invalid. +func (c Checksum) Validate() error { + if c.Algorithm == "" || c.Value == "" { + return fmt.Errorf("invalid checksum, missing field(s). %+v", c) + } + + return nil +} diff --git a/spdx/creation_info.go b/spdx/creation_info.go index c0b6f636..3f121173 100644 --- a/spdx/creation_info.go +++ b/spdx/creation_info.go @@ -16,11 +16,18 @@ type Creator struct { CreatorType string } -// UnmarshalJSON takes an annotator in the typical one-line format and parses it into a Creator struct. -// This function is also used when unmarshalling YAML -func (c *Creator) UnmarshalJSON(data []byte) error { - str := string(data) - str = strings.Trim(str, "\"") +// Validate verifies that all the required fields are present. +// Returns an error if the object is invalid. +func (c Creator) Validate() error { + if c.CreatorType == "" || c.Creator == "" { + return fmt.Errorf("invalid Creator, missing fields. %+v", c) + } + + return nil +} + +// FromString takes a Creator in the typical one-line format and parses it into a Creator struct. +func (c *Creator) FromString(str string) error { fields := strings.SplitN(str, ": ", 2) if len(fields) != 2 { @@ -33,14 +40,27 @@ func (c *Creator) UnmarshalJSON(data []byte) error { return nil } +// String converts the Creator into a string. +func (c Creator) String() string { + return fmt.Sprintf("%s: %s", c.CreatorType, c.Creator) +} + +// UnmarshalJSON takes a Creator in the typical one-line string format and parses it into a Creator struct. +func (c *Creator) UnmarshalJSON(data []byte) error { + str := string(data) + str = strings.Trim(str, "\"") + + return c.FromString(str) +} + // MarshalJSON converts the receiver into a slice of bytes representing a Creator in string form. 
// This function is also used with marshalling to YAML func (c Creator) MarshalJSON() ([]byte, error) { - if c.Creator != "" { - return json.Marshal(fmt.Sprintf("%s: %s", c.CreatorType, c.Creator)) + if err := c.Validate(); err != nil { + return nil, err } - return []byte{}, nil + return json.Marshal(c.String()) } // CreationInfo2_1 is a Document Creation Information section of an @@ -48,7 +68,7 @@ func (c Creator) MarshalJSON() ([]byte, error) { type CreationInfo2_1 struct { // 2.7: License List Version // Cardinality: optional, one - LicenseListVersion string `json:"licenseListVersion"` + LicenseListVersion string `json:"licenseListVersion,omitempty"` // 2.8: Creators: may have multiple keys for Person, Organization // and/or Tool @@ -61,7 +81,7 @@ type CreationInfo2_1 struct { // 2.10: Creator Comment // Cardinality: optional, one - CreatorComment string `json:"comment"` + CreatorComment string `json:"comment,omitempty"` } // CreationInfo2_2 is a Document Creation Information section of an diff --git a/spdx/document.go b/spdx/document.go index a3117cb7..a14104f0 100644 --- a/spdx/document.go +++ b/spdx/document.go @@ -3,13 +3,18 @@ // SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later package spdx +import ( + "fmt" + "strings" +) + // ExternalDocumentRef2_1 is a reference to an external SPDX document // as defined in section 2.6 for version 2.1 of the spec. type ExternalDocumentRef2_1 struct { // DocumentRefID is the ID string defined in the start of the // reference. It should _not_ contain the "DocumentRef-" part // of the mandatory ID string. - DocumentRefID string `json:"externalDocumentId"` + DocumentRefID DocElementID `json:"externalDocumentId"` // URI is the URI defined for the external document URI string `json:"spdxDocument"` @@ -24,7 +29,7 @@ type ExternalDocumentRef2_2 struct { // DocumentRefID is the ID string defined in the start of the // reference. It should _not_ contain the "DocumentRef-" part // of the mandatory ID string. - DocumentRefID string `json:"externalDocumentId"` + DocumentRefID DocElementID `json:"externalDocumentId"` // URI is the URI defined for the external document URI string `json:"spdxDocument"` @@ -33,6 +38,92 @@ type ExternalDocumentRef2_2 struct { Checksum Checksum `json:"checksum"` } +// Validate verifies that all the required fields are present. +// Returns an error if the object is invalid. +func (e ExternalDocumentRef2_1) Validate() error { + if err := e.Checksum.Validate(); err != nil { + return fmt.Errorf("invalid Checksum in External Document Reference: %w", err) + } + + if e.DocumentRefID.Validate() != nil || e.URI == "" { + return fmt.Errorf("invalid DocElementID, missing fields. %+v", e) + } + + return nil +} + +// Validate verifies that all the required fields are present. +// Returns an error if the object is invalid. +func (e ExternalDocumentRef2_2) Validate() error { + if err := e.Checksum.Validate(); err != nil { + return fmt.Errorf("invalid Checksum in External Document Reference: %w", err) + } + + if e.DocumentRefID.Validate() != nil || e.URI == "" { + return fmt.Errorf("invalid DocElementID, missing fields. %+v", e) + } + + return nil +} + +// String converts an ExternalDocumentRef2_1 object to a string. +// These strings take the form: "<DocumentRef-ID> <URI> <Checksum>" +func (e ExternalDocumentRef2_1) String() string { + return fmt.Sprintf("%s %s %s", e.DocumentRefID, e.URI, e.Checksum) +} + +// String converts an ExternalDocumentRef2_2 object to a string.
+// These strings take the form: "<DocumentRef-ID> <URI> <Checksum>" +func (e ExternalDocumentRef2_2) String() string { + return fmt.Sprintf("%s %s %s", e.DocumentRefID, e.URI, e.Checksum) +} + +// FromString parses a string into a spdx.ExternalDocumentRef2_1. +// These strings take the following form: "<DocumentRef-ID> <URI> <Checksum>" +func (e *ExternalDocumentRef2_1) FromString(value string) error { + fields := strings.SplitN(value, " ", 3) + if len(fields) != 3 { + return fmt.Errorf("invalid external document reference: %s", value) + } + + e.DocumentRefID = MakeDocElementID(fields[0], "") + e.URI = fields[1] + + // the checksum is special and needs further processing + var checksum Checksum + err := checksum.FromString(fields[2]) + if err != nil { + return err + } + + e.Checksum = checksum + + return nil +} + +// FromString parses a string into a spdx.ExternalDocumentRef2_2. +// These strings take the following form: "<DocumentRef-ID> <URI> <Checksum>" +func (e *ExternalDocumentRef2_2) FromString(value string) error { + fields := strings.SplitN(value, " ", 3) + if len(fields) != 3 { + return fmt.Errorf("invalid external document reference: %s", value) + } + + e.DocumentRefID = MakeDocElementID(fields[0], "") + e.URI = fields[1] + + // the checksum is special and needs further processing + var checksum Checksum + err := checksum.FromString(fields[2]) + if err != nil { + return err + } + + e.Checksum = checksum + + return nil +} + // Document2_1 is an SPDX Document for version 2.1 of the spec. // See https://spdx.org/sites/cpstandard/files/pages/files/spdxversion2.1.pdf type Document2_1 struct { diff --git a/spdx/identifier.go b/spdx/identifier.go index 56f8ffc8..64cbf339 100644 --- a/spdx/identifier.go +++ b/spdx/identifier.go @@ -14,13 +14,63 @@ import ( // ElementIDs should NOT contain the mandatory 'SPDXRef-' portion. type ElementID string +// Validate verifies that all the required fields are present. +// Returns an error if the object is invalid. +func (e ElementID) Validate() error { + if e == "" { + return fmt.Errorf("invalid ElementID, must not be blank") + } + + return nil +} + +func (e ElementID) String() string { + return fmt.Sprintf("SPDXRef-%s", string(e)) +} + +// FromString parses an SPDX Identifier string into an ElementID. +// These strings take the form: "SPDXRef-some-identifier" +func (e *ElementID) FromString(idStr string) error { + idFields := strings.SplitN(idStr, "SPDXRef-", 2) + switch len(idFields) { + case 2: + // "SPDXRef-" prefix was present + *e = ElementID(idFields[1]) + case 1: + // prefix was not present + *e = ElementID(idFields[0]) + } + + return nil +} + +// UnmarshalJSON takes a SPDX Identifier string parses it into an ElementID. +// This function is also used when unmarshalling YAML +func (e *ElementID) UnmarshalJSON(data []byte) error { + // SPDX identifier will simply be a string + idStr := string(data) + idStr = strings.Trim(idStr, "\"") + + return e.FromString(idStr) +} + +// MarshalJSON converts the receiver into a slice of bytes representing an ElementID in string form. +// This function is also used when marshalling to YAML +func (e ElementID) MarshalJSON() ([]byte, error) { + if err := e.Validate(); err != nil { + return nil, err + } + + return json.Marshal(e.String()) +} + // DocElementID represents an SPDX element identifier that could be defined // in a different SPDX document, and therefore could have a "DocumentRef-" // portion, such as Relationships and Annotations.
// ElementID is used for attributes in which a "DocumentRef-" portion cannot // appear, such as a Package or File definition (since it is necessarily // being defined in the present document). -// DocumentRefID will be the empty string for elements defined in the +// DocumentRefID will be an empty string for elements defined in the // present document. // DocElementIDs should NOT contain the mandatory 'DocumentRef-' or // 'SPDXRef-' portions. @@ -34,13 +84,22 @@ type DocElementID struct { SpecialID string } -// UnmarshalJSON takes a SPDX Identifier string parses it into a DocElementID struct. -// This function is also used when unmarshalling YAML -func (d *DocElementID) UnmarshalJSON(data []byte) error { - // SPDX identifier will simply be a string - idStr := string(data) - idStr = strings.Trim(idStr, "\"") +// Validate verifies that all the required fields are present. +// Returns an error if the object is invalid. +func (d DocElementID) Validate() error { + if d.DocumentRefID == "" && d.ElementRefID.Validate() != nil && d.SpecialID == "" { + return fmt.Errorf("invalid DocElementID, missing fields. %+v", d) + } + return nil +} + +// FromString parses an SPDX Identifier string into a DocElementID struct. +// These strings take one of the following forms: +// - "DocumentRef-other-document:SPDXRef-some-identifier" +// - "SPDXRef-some-identifier" +// - "NOASSERTION" or "NONE" +func (d *DocElementID) FromString(idStr string) error { // handle special cases if idStr == "NONE" || idStr == "NOASSERTION" { d.SpecialID = idStr @@ -66,37 +125,57 @@ func (d *DocElementID) UnmarshalJSON(data []byte) error { } // handle SPDXRef- - idFields = strings.SplitN(idStr, "SPDXRef-", 2) - if len(idFields) != 2 { - return fmt.Errorf("failed to parse SPDX Identifier '%s'", idStr) + err := d.ElementRefID.FromString(idStr) + if err != nil { + return err } - d.ElementRefID = ElementID(idFields[1]) - return nil } -// MarshalJSON converts the receiver into a slice of bytes representing a DocElementID in string form. -// This function is also used when marshalling to YAML -func (d DocElementID) MarshalJSON() ([]byte, error) { +// MarshalString converts the receiver into a string representing a DocElementID. +// This is used when writing a spreadsheet SPDX file, for example. +func (d DocElementID) String() string { if d.DocumentRefID != "" && d.ElementRefID != "" { - return json.Marshal(fmt.Sprintf("DocumentRef-%s:SPDXRef-%s", d.DocumentRefID, d.ElementRefID)) + return fmt.Sprintf("DocumentRef-%s:%s", d.DocumentRefID, d.ElementRefID) + } else if d.DocumentRefID != "" { + return fmt.Sprintf("DocumentRef-%s", d.DocumentRefID) } else if d.ElementRefID != "" { - return json.Marshal(fmt.Sprintf("SPDXRef-%s", d.ElementRefID)) + return d.ElementRefID.String() } else if d.SpecialID != "" { - return json.Marshal(d.SpecialID) + return d.SpecialID } - return []byte{}, fmt.Errorf("failed to marshal empty DocElementID") + return "" } -// TODO: add equivalents for LicenseRef- identifiers +// UnmarshalJSON takes a SPDX Identifier string parses it into a DocElementID struct. +// This function is also used when unmarshalling YAML +func (d *DocElementID) UnmarshalJSON(data []byte) error { + // SPDX identifier will simply be a string + idStr := string(data) + idStr = strings.Trim(idStr, "\"") -// MakeDocElementID takes strings (without prefixes) for the DocumentRef- -// and SPDXRef- identifiers, and returns a DocElementID. An empty string -// should be used for the DocumentRef- portion if it is referring to the -// present document. 
+ return d.FromString(idStr) +} + +// MarshalJSON converts the receiver into a slice of bytes representing a DocElementID in string form. +// This function is also used when marshalling to YAML +func (d DocElementID) MarshalJSON() ([]byte, error) { + if err := d.Validate(); err != nil { + return nil, err + } + + return json.Marshal(d.String()) +} + +// MakeDocElementID takes strings for the DocumentRef- and SPDXRef- identifiers (these prefixes will be stripped if present), +// and returns a DocElementID. +// An empty string should be used for the DocumentRef- portion if it is referring to the present document. func MakeDocElementID(docRef string, eltRef string) DocElementID { + docRef = strings.Replace(docRef, "DocumentRef-", "", 1) + eltRef = strings.Replace(eltRef, "SPDXRef-", "", 1) + return DocElementID{ DocumentRefID: docRef, ElementRefID: ElementID(eltRef), @@ -110,24 +189,3 @@ func MakeDocElementID(docRef string, eltRef string) DocElementID { func MakeDocElementSpecial(specialID string) DocElementID { return DocElementID{SpecialID: specialID} } - -// RenderElementID takes an ElementID and returns the string equivalent, -// with the SPDXRef- prefix reinserted. -func RenderElementID(eID ElementID) string { - return "SPDXRef-" + string(eID) -} - -// RenderDocElementID takes a DocElementID and returns the string equivalent, -// with the SPDXRef- prefix (and, if applicable, the DocumentRef- prefix) -// reinserted. If a SpecialID is present, it will be rendered verbatim and -// DocumentRefID and ElementRefID will be ignored. -func RenderDocElementID(deID DocElementID) string { - if deID.SpecialID != "" { - return deID.SpecialID - } - prefix := "" - if deID.DocumentRefID != "" { - prefix = "DocumentRef-" + deID.DocumentRefID + ":" - } - return prefix + "SPDXRef-" + string(deID.ElementRefID) -} diff --git a/spdx/identifier_test.go b/spdx/identifier_test.go new file mode 100644 index 00000000..1c9d0059 --- /dev/null +++ b/spdx/identifier_test.go @@ -0,0 +1,38 @@ +package spdx + +import ( + "encoding/json" + "testing" +) + +func TestMakeDocElementID(t *testing.T) { + // without DocRef + docElementID := MakeDocElementID("", "Package") + if docElementID.String() != "SPDXRef-Package" { + t.Errorf("expected 'SPDXRef-Package', got %s", docElementID) + return + } + + // with DocRef + docElementID = MakeDocElementID("OtherDoc", "Package") + if docElementID.String() != "DocumentRef-OtherDoc:SPDXRef-Package" { + t.Errorf("expected 'DocumentRef-OtherDoc:SPDXRef-Package', got %s", docElementID) + return + } +} + +func TestDocElementID_UnmarshalJSON(t *testing.T) { + rawJSON := json.RawMessage("\"DocumentRef-some-doc\"") + docElementID := DocElementID{} + + err := json.Unmarshal(rawJSON, &docElementID) + if err != nil { + t.Errorf(err.Error()) + return + } + + if docElementID.DocumentRefID != "some-doc" { + t.Errorf("Bad!") + return + } +} \ No newline at end of file diff --git a/spdx/package.go b/spdx/package.go index e6c45223..885c5da3 100644 --- a/spdx/package.go +++ b/spdx/package.go @@ -15,22 +15,29 @@ type Supplier struct { SupplierType string } -// UnmarshalJSON takes a supplier in the typical one-line format and parses it into a Supplier struct. -// This function is also used when unmarshalling YAML -func (s *Supplier) UnmarshalJSON(data []byte) error { - // the value is just a string presented as a slice of bytes - supplierStr := string(data) - supplierStr = strings.Trim(supplierStr, "\"") +// Validate verifies that all the required fields are present. 
+// Returns an error if the object is invalid. +func (s Supplier) Validate() error { + // SupplierType is allowed to be empty if Supplier is "NOASSERTION" + if s.Supplier == "" || (s.SupplierType == "" && s.Supplier != "NOASSERTION") { + return fmt.Errorf("invalid Supplier, missing fields. %+v", s) - if supplierStr == "NOASSERTION" { - s.Supplier = supplierStr + return nil +} + +// FromString parses a string into a Supplier. +// These strings take the form: "<SupplierType>: <Supplier>" +func (s *Supplier) FromString(value string) error { + if value == "NOASSERTION" { + s.Supplier = value return nil } - supplierFields := strings.SplitN(supplierStr, ": ", 2) + supplierFields := strings.SplitN(value, ": ", 2) if len(supplierFields) != 2 { - return fmt.Errorf("failed to parse Supplier '%s'", supplierStr) + return fmt.Errorf("failed to parse Supplier '%s'", value) } s.SupplierType = supplierFields[0] @@ -39,16 +46,33 @@ func (s *Supplier) UnmarshalJSON(data []byte) error { return nil } +// String converts the Supplier to a string in the form "<SupplierType>: <Supplier>" +func (s Supplier) String() string { + if s.Supplier == "NOASSERTION" { + return s.Supplier + } + + return fmt.Sprintf("%s: %s", s.SupplierType, s.Supplier) +} + +// UnmarshalJSON takes a supplier in the typical one-line format and parses it into a Supplier struct. +// This function is also used when unmarshalling YAML +func (s *Supplier) UnmarshalJSON(data []byte) error { + // the value is just a string presented as a slice of bytes + supplierStr := string(data) + supplierStr = strings.Trim(supplierStr, "\"") + + return s.FromString(supplierStr) +} + // MarshalJSON converts the receiver into a slice of bytes representing a Supplier in string form. // This function is also used when marshalling to YAML func (s Supplier) MarshalJSON() ([]byte, error) { - if s.Supplier == "NOASSERTION" { - return json.Marshal(s.Supplier) - } else if s.SupplierType != "" && s.Supplier != "" { - return json.Marshal(fmt.Sprintf("%s: %s", s.SupplierType, s.Supplier)) + if err := s.Validate(); err != nil { + return nil, err } - return []byte{}, fmt.Errorf("failed to marshal invalid Supplier: %+v", s) + return json.Marshal(s.String()) } type Originator struct { @@ -58,40 +82,64 @@ type Originator struct { OriginatorType string } -// UnmarshalJSON takes an originator in the typical one-line format and parses it into an Originator struct. -// This function is also used when unmarshalling YAML -func (o *Originator) UnmarshalJSON(data []byte) error { - // the value is just a string presented as a slice of bytes - originatorStr := string(data) - originatorStr = strings.Trim(originatorStr, "\"") +// Validate verifies that all the required fields are present. +// Returns an error if the object is invalid. +func (o Originator) Validate() error { + // Originator is allowed to be empty if Originator is "NOASSERTION" + if o.Originator == "" || (o.OriginatorType == "" && o.Originator != "NOASSERTION") { + return fmt.Errorf("invalid Originator, missing fields. %+v", o) + } + + return nil +} - if originatorStr == "NOASSERTION" { - o.Originator = originatorStr +// FromString parses a string into an Originator.
+// These strings take the form: "<OriginatorType>: <Originator>" +func (o *Originator) FromString(value string) error { + if value == "NOASSERTION" { + o.Originator = value return nil } - originatorFields := strings.SplitN(originatorStr, ": ", 2) + fields := strings.SplitN(value, ": ", 2) - if len(originatorFields) != 2 { - return fmt.Errorf("failed to parse Originator '%s'", originatorStr) + if len(fields) != 2 { + return fmt.Errorf("failed to parse Originator '%s'", value) } - o.OriginatorType = originatorFields[0] - o.Originator = originatorFields[1] + o.OriginatorType = fields[0] + o.Originator = fields[1] return nil } +// String converts the Originator to a string in the form "<OriginatorType>: <Originator>" +func (o Originator) String() string { + if o.Originator == "NOASSERTION" { + return o.Originator + } + + return fmt.Sprintf("%s: %s", o.OriginatorType, o.Originator) +} + +// UnmarshalJSON takes an originator in the typical one-line format and parses it into an Originator struct. +// This function is also used when unmarshalling YAML +func (o *Originator) UnmarshalJSON(data []byte) error { + // the value is just a string presented as a slice of bytes + originatorStr := string(data) + originatorStr = strings.Trim(originatorStr, "\"") + + return o.FromString(originatorStr) +} + // MarshalJSON converts the receiver into a slice of bytes representing an Originator in string form. // This function is also used when marshalling to YAML func (o Originator) MarshalJSON() ([]byte, error) { - if o.Originator == "NOASSERTION" { - return json.Marshal(o.Originator) - } else if o.Originator != "" { - return json.Marshal(fmt.Sprintf("%s: %s", o.OriginatorType, o.Originator)) + if err := o.Validate(); err != nil { + return nil, err } - return []byte{}, nil + return json.Marshal(o.String()) } type PackageVerificationCode struct { @@ -138,9 +186,7 @@ type Package2_1 struct { // 3.8: FilesAnalyzed // Cardinality: optional, one; default value is "true" if omitted - FilesAnalyzed bool `json:"filesAnalyzed,omitempty"` - // NOT PART OF SPEC: did FilesAnalyzed tag appear? - IsFilesAnalyzedTagPresent bool `json:"-"` + FilesAnalyzed *bool `json:"filesAnalyzed,omitempty"` // 3.9: Package Verification Code PackageVerificationCode PackageVerificationCode `json:"packageVerificationCode"` @@ -258,9 +304,7 @@ type Package2_2 struct { // 3.8: FilesAnalyzed // Cardinality: optional, one; default value is "true" if omitted - FilesAnalyzed bool `json:"filesAnalyzed,omitempty"` - // NOT PART OF SPEC: did FilesAnalyzed tag appear? - IsFilesAnalyzedTagPresent bool + FilesAnalyzed *bool `json:"filesAnalyzed,omitempty"` // 3.9: Package Verification Code PackageVerificationCode PackageVerificationCode `json:"packageVerificationCode"` @@ -328,6 +372,44 @@ type Package2_2 struct { Annotations []Annotation2_2 `json:"annotations"` } +// UnmarshalJSON unmarshalls a Package2_1 as usual, but performs some additional processing around the FilesAnalyzed field. +// Per the spec, this field can be omitted from a package, which should be interpreted as "true".
+// This function is also used when unmarshalling YAML +func (p *Package2_1) UnmarshalJSON(data []byte) error { + // define a type alias to get default unmarshalling behavior + type package2_1_copy *Package2_1 + if err := json.Unmarshal(data, (package2_1_copy)(p)); err != nil { + return err + } + + // do additional processing + if p.FilesAnalyzed == nil { + truthy := true + p.FilesAnalyzed = &truthy + } + + return nil +} + +// UnmarshalJSON unmarshalls a Package2_2 as usual, but performs some additional processing around the FilesAnalyzed field. +// Per the spec, this field can be omitted from a package, which should be interpreted as "true". +// This function is also used when unmarshalling YAML +func (p *Package2_2) UnmarshalJSON(data []byte) error { + // define a type alias to get default unmarshalling behavior + type package2_2_copy *Package2_2 + if err := json.Unmarshal(data, (package2_2_copy)(p)); err != nil { + return err + } + + // do additional processing + if p.FilesAnalyzed == nil { + truthy := true + p.FilesAnalyzed = &truthy + } + + return nil +} + // PackageExternalReference2_2 is an External Reference to additional info // about a Package, as defined in section 3.21 in version 2.2 of the spec. type PackageExternalReference2_2 struct { diff --git a/spdx/snippet.go b/spdx/snippet.go index 6bffb8c8..8bd2a4d9 100644 --- a/spdx/snippet.go +++ b/spdx/snippet.go @@ -2,6 +2,13 @@ package spdx +import ( + "errors" + "fmt" + "strconv" + "strings" +) + type SnippetRangePointer struct { // 5.3: Snippet Byte Range: [start byte]:[end byte] // Cardinality: mandatory, one @@ -19,6 +26,50 @@ type SnippetRange struct { EndPointer SnippetRangePointer `json:"endPointer"` } +func (s SnippetRange) Validate() error { + if s.StartPointer.Offset == 0 && s.StartPointer.LineNumber == 0 && + s.EndPointer.Offset == 0 && s.EndPointer.LineNumber == 0 { + return errors.New("no range info present in SnippetRange") + } + + return nil +} + +func (s SnippetRange) String() string { + if s.EndPointer.Offset != 0 { + return fmt.Sprintf("%d:%d", s.StartPointer.Offset, s.EndPointer.Offset) + } + + return fmt.Sprintf("%d:%d", s.StartPointer.LineNumber, s.EndPointer.LineNumber) +} + +func (s *SnippetRange) FromString(value string, isByteRange bool) error { + strValues := strings.Split(value, ":") + if len(strValues) != 2 { + return fmt.Errorf("invalid SnippetRange: %s", value) + } + + values := make([]int, 2) + for ii, value := range strValues { + valueInt, err := strconv.ParseInt(value, 10, 64) + if err != nil { + return fmt.Errorf("couldn't parse integer from SnippetRange value '%s': %v", value, err.Error()) + } + + values[ii] = int(valueInt) + } + + if isByteRange { + s.StartPointer.Offset = values[0] + s.EndPointer.Offset = values[1] + } else { + s.StartPointer.LineNumber = values[0] + s.EndPointer.LineNumber = values[1] + } + + return nil +} + // Snippet2_1 is a Snippet section of an SPDX Document for version 2.1 of the spec. 
type Snippet2_1 struct { diff --git a/spreadsheet/common/annotations_columns.go b/spreadsheet/common/annotations_columns.go new file mode 100644 index 00000000..eed2b880 --- /dev/null +++ b/spreadsheet/common/annotations_columns.go @@ -0,0 +1,9 @@ +package common + +const ( + AnnotationsSPDXIdentifier = "SPDX Identifier being Annotated" + AnnotationsComment = "Annotation Comment" + AnnotationsDate = "Annotation Date" + AnnotationsAnnotator = "Annotator" + AnnotationsType = "Annotation Type" +) diff --git a/spreadsheet/common/document_info_columns.go b/spreadsheet/common/document_info_columns.go new file mode 100644 index 00000000..dcae061d --- /dev/null +++ b/spreadsheet/common/document_info_columns.go @@ -0,0 +1,15 @@ +package common + +const ( + DocumentInfoSPDXVersion = "SPDX Version" + DocumentInfoDataLicense = "Data License" + DocumentInfoSPDXIdentifier = "SPDX Identifier" + DocumentInfoLicenseListVersion = "License List Version" + DocumentInfoDocumentName = "Document Name" + DocumentInfoDocumentNamespace = "Document Namespace" + DocumentInfoExternalDocumentReferences = "External Document References" + DocumentInfoDocumentComment = "Document Comment" + DocumentInfoCreator = "Creator" + DocumentInfoCreated = "Created" + DocumentInfoCreatorComment = "Creator Comment" +) diff --git a/spreadsheet/common/external_refs_columns.go b/spreadsheet/common/external_refs_columns.go new file mode 100644 index 00000000..4759faa7 --- /dev/null +++ b/spreadsheet/common/external_refs_columns.go @@ -0,0 +1,9 @@ +package common + +const ( + ExternalRefPackageID = "Package ID" + ExternalRefCategory = "Category" + ExternalRefType = "Type" + ExternalRefLocator = "Locator" + ExternalRefComment = "Comment" +) diff --git a/spreadsheet/common/extracted_license_info_columns.go b/spreadsheet/common/extracted_license_info_columns.go new file mode 100644 index 00000000..52b1b5f8 --- /dev/null +++ b/spreadsheet/common/extracted_license_info_columns.go @@ -0,0 +1,9 @@ +package common + +const ( + LicenseInfoIdentifier = "Identifier" + LicenseInfoExtractedText = "Extracted Text" + LicenseInfoLicenseName = "License Name" + LicenseInfoCrossReferenceURLs = "Cross Reference URLs" + LicenseInfoComment = "Comment" +) diff --git a/spreadsheet/common/package_info_columns.go b/spreadsheet/common/package_info_columns.go new file mode 100644 index 00000000..bce1b444 --- /dev/null +++ b/spreadsheet/common/package_info_columns.go @@ -0,0 +1,26 @@ +package common + +const ( + PackageName = "Package Name" + PackageSPDXIdentifier = "SPDX Identifier" + PackageVersion = "Package Version" + PackageFileName = "Package FileName" + PackageSupplier = "Package Supplier" + PackageOriginator = "Package Originator" + PackageHomePage = "Home Page" + PackageDownloadLocation = "Package Download Location" + PackageChecksum = "Package Checksum" + PackageVerificationCode = "Package Verification Code" + PackageVerificationCodeExcludedFiles = "Verification Code Excluded Files" + PackageSourceInfo = "Source Info" + PackageLicenseDeclared = "License Declared" + PackageLicenseConcluded = "License Concluded" + PackageLicenseInfoFromFiles = "License Info From Files" + PackageLicenseComments = "License Comments" + PackageCopyrightText = "Package Copyright Text" + PackageSummary = "Summary" + PackageDescription = "Description" + PackageAttributionText = "Attribution Text" + PackageFilesAnalyzed = "Files Analyzed" + PackageComments = "Comments" +) diff --git a/spreadsheet/common/per_file_info_columns.go b/spreadsheet/common/per_file_info_columns.go new file 
mode 100644 index 00000000..d624cebe --- /dev/null +++ b/spreadsheet/common/per_file_info_columns.go @@ -0,0 +1,21 @@ +package common + +const ( + FileInfoFileName = "File Name" + FileInfoSPDXIdentifier = "SPDX Identifier" + FileInfoPackageIdentifier = "Package Identifier" + FileInfoFileTypes = "File Type(s)" + FileInfoFileChecksums = "File Checksum(s)" + FileInfoLicenseConcluded = "License Concluded" + FileInfoLicenseInfoInFile = "License Info in File" + FileInfoLicenseComments = "License Comments" + FileInfoFileCopyrightText = "File Copyright Text" + FileInfoNoticeText = "Notice Text" + FileInfoArtifactOfProject = "Artifact of Project" + FileInfoArtifactOfHomepage = "Artifact of Homepage" + FileInfoArtifactOfURL = "Artifact of URL" + FileInfoContributors = "Contributors" + FileInfoFileComment = "File Comment" + FileInfoFileDependencies = "File Dependencies" + FileInfoAttributionText = "Attribution Text" +) diff --git a/spreadsheet/common/position_to_axis.go b/spreadsheet/common/position_to_axis.go new file mode 100644 index 00000000..55bd8e42 --- /dev/null +++ b/spreadsheet/common/position_to_axis.go @@ -0,0 +1,11 @@ +package common + +import "fmt" + +// PositionToAxis takes a column string and a row integer and combines them into an "axis" +// to be used with the Excelize module. +// An "axis" is the word Excelize uses to describe a coordinate position within a spreadsheet, +// e.g. "A1", "B14", etc. +func PositionToAxis(column string, row int) string { + return fmt.Sprintf("%s%d", column, row) +} diff --git a/spreadsheet/common/relationships_columns.go b/spreadsheet/common/relationships_columns.go new file mode 100644 index 00000000..f98db9dd --- /dev/null +++ b/spreadsheet/common/relationships_columns.go @@ -0,0 +1,8 @@ +package common + +const ( + RelationshipsRefA = "SPDX Identifier A" + RelationshipsRelationship = "Relationship" + RelationshipsRefB = "SPDX Identifier B" + RelationshipsComment = "Relationship Comment" +) diff --git a/spreadsheet/common/sheet_names.go b/spreadsheet/common/sheet_names.go new file mode 100644 index 00000000..1f5c98cf --- /dev/null +++ b/spreadsheet/common/sheet_names.go @@ -0,0 +1,12 @@ +package common + +const ( + SheetNameDocumentInfo = "Document Info" + SheetNamePackageInfo = "Package Info" + SheetNameExtractedLicenseInfo = "Extracted License Info" + SheetNameFileInfo = "Per File Info" + SheetNameRelationships = "Relationships" + SheetNameAnnotations = "Annotations" + SheetNameSnippets = "Snippets" + SheetNameExternalRefs = "External Refs" +) diff --git a/spreadsheet/common/snippets_columns.go b/spreadsheet/common/snippets_columns.go new file mode 100644 index 00000000..df77a8fb --- /dev/null +++ b/spreadsheet/common/snippets_columns.go @@ -0,0 +1,14 @@ +package common + +const ( + SnippetsID = "ID" + SnippetsName = "Name" + SnippetsFromFileID = "From File ID" + SnippetsByteRange = "Byte Range" + SnippetsLineRange = "Line Range" + SnippetsLicenseConcluded = "License Concluded" + SnippetsLicenseInfoInSnippet = "License Info in Snippet" + SnippetsLicenseComments = "License Comments" + SnippetsCopyrightText = "Snippet Copyright Text" + SnippetsComment = "Comment" +) diff --git a/spreadsheet/parse/annotations.go b/spreadsheet/parse/annotations.go new file mode 100644 index 00000000..c81c240e --- /dev/null +++ b/spreadsheet/parse/annotations.go @@ -0,0 +1,88 @@ +package parse + +import ( + "fmt" + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" +) + +func ProcessAnnotationsRows(rows [][]string, doc
*spdx.Document2_2) error { + // the first row is column headers, keep track of which order they appear in + columnsByIndex := make(map[int]string) + for index, header := range rows[0] { + columnsByIndex[index] = header + } + + for rowNum, row := range rows[1:] { + // set rowNum to the correct value, Go slices are zero-indexed (+1), and we started iterating on the second element (+1) + rowNum = rowNum + 2 + + newAnnotation := spdx.Annotation2_2{} + + for columnIndex, value := range row { + if value == "" { + continue + } + + switch columnsByIndex[columnIndex] { + case common.AnnotationsSPDXIdentifier: + id := spdx.DocElementID{} + err := id.FromString(value) + if err != nil { + return fmt.Errorf("invalid %s in row %d: %v", common.AnnotationsSPDXIdentifier, rowNum, err.Error()) + } + + newAnnotation.AnnotationSPDXIdentifier = id + case common.AnnotationsComment: + newAnnotation.AnnotationComment = value + case common.AnnotationsDate: + newAnnotation.AnnotationDate = value + case common.AnnotationsAnnotator: + annotator := spdx.Annotator{} + err := annotator.FromString(value) + if err != nil { + return fmt.Errorf("invalid %s in row %d: %v", common.AnnotationsAnnotator, rowNum, err.Error()) + } + + newAnnotation.Annotator = annotator + case common.AnnotationsType: + newAnnotation.AnnotationType = value + } + } + + // TODO: validate? + + // an annotation can be at the Document level, File level, or Package level + if newAnnotation.AnnotationSPDXIdentifier.DocumentRefID == "" && newAnnotation.AnnotationSPDXIdentifier.ElementRefID != doc.SPDXIdentifier { + var found bool + for ii, pkg := range doc.Packages { + if newAnnotation.AnnotationSPDXIdentifier.ElementRefID == pkg.PackageSPDXIdentifier { + // package level + found = true + doc.Packages[ii].Annotations = append(doc.Packages[ii].Annotations, newAnnotation) + break + } + } + + if !found { + for ii, file := range doc.Files { + if newAnnotation.AnnotationSPDXIdentifier.ElementRefID == file.FileSPDXIdentifier { + // file level + found = true + doc.Files[ii].Annotations = append(doc.Files[ii].Annotations, newAnnotation) + break + } + } + } + + if !found { + return fmt.Errorf("annotation SPDX Identifier from row %d not found in document: %s", rowNum, newAnnotation.AnnotationSPDXIdentifier) + } + } else { + // document level + doc.Annotations = append(doc.Annotations, &newAnnotation) + } + } + + return nil +} diff --git a/spreadsheet/parse/document_info.go b/spreadsheet/parse/document_info.go new file mode 100644 index 00000000..646607a3 --- /dev/null +++ b/spreadsheet/parse/document_info.go @@ -0,0 +1,70 @@ +package parse + +import ( + "fmt" + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" +) + +func ProcessDocumentInfoRows(rows [][]string, doc *spdx.Document2_2) error { + // the first row is column headers, keep track of which order they appear in + columnsByIndex := make(map[int]string) + for index, header := range rows[0] { + columnsByIndex[index] = header + } + + for rowNum, row := range rows[1:] { + // set rowNum to the correct value, Go slices are zero-indexed (+1), and we started iterating on the second element (+1) + rowNum = rowNum + 2 + for columnIndex, value := range row { + if value == "" { + continue + } + + switch columnsByIndex[columnIndex] { + case common.DocumentInfoSPDXVersion: + doc.SPDXVersion = value + case common.DocumentInfoDataLicense: + doc.DataLicense = value + case common.DocumentInfoSPDXIdentifier: + var id spdx.DocElementID + err := id.FromString(value) + if err != nil { + 
return fmt.Errorf("invalid SPDX Identifier in row %d: %v", rowNum, err.Error()) + } + + doc.SPDXIdentifier = id.ElementRefID + case common.DocumentInfoLicenseListVersion: + doc.CreationInfo.LicenseListVersion = value + case common.DocumentInfoDocumentName: + doc.DocumentName = value + case common.DocumentInfoDocumentNamespace: + doc.DocumentNamespace = value + case common.DocumentInfoDocumentComment: + doc.DocumentComment = value + case common.DocumentInfoExternalDocumentReferences: + externalDocRef := spdx.ExternalDocumentRef2_2{} + err := externalDocRef.FromString(value) + if err != nil { + return fmt.Errorf("invalid External Document Ref in row %d: %v", rowNum, err.Error()) + } + + doc.ExternalDocumentReferences = append(doc.ExternalDocumentReferences, externalDocRef) + case common.DocumentInfoCreated: + doc.CreationInfo.Created = value + case common.DocumentInfoCreatorComment: + doc.CreationInfo.CreatorComment = value + case common.DocumentInfoCreator: + creator := spdx.Creator{} + err := creator.FromString(value) + if err != nil { + return fmt.Errorf("invalid Creator in row %d: %v", rowNum, err.Error()) + } + + doc.CreationInfo.Creators = append(doc.CreationInfo.Creators, creator) + } + } + } + + return nil +} diff --git a/spreadsheet/parse/extracted_license_info.go b/spreadsheet/parse/extracted_license_info.go new file mode 100644 index 00000000..18be90e4 --- /dev/null +++ b/spreadsheet/parse/extracted_license_info.go @@ -0,0 +1,43 @@ +package parse + +import ( + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" + "strings" +) + +func ProcessExtractedLicenseInfoRows(rows [][]string, doc *spdx.Document2_2) error { + // the first row is column headers, keep track of which order they appear in + columnsByIndex := make(map[int]string) + for index, header := range rows[0] { + columnsByIndex[index] = header + } + + for _, row := range rows[1:] { + newLicense := spdx.OtherLicense2_2{} + + for columnIndex, value := range row { + if value == "" { + continue + } + + switch columnsByIndex[columnIndex] { + case common.LicenseInfoIdentifier: + newLicense.LicenseIdentifier = value + case common.LicenseInfoExtractedText: + newLicense.ExtractedText = value + case common.LicenseInfoLicenseName: + newLicense.LicenseName = value + case common.LicenseInfoCrossReferenceURLs: + newLicense.LicenseCrossReferences = strings.Split(value, ", ") + case common.LicenseInfoComment: + newLicense.LicenseComment = value + } + } + + // TODO: validate? 
+ doc.OtherLicenses = append(doc.OtherLicenses, &newLicense) + } + + return nil +} diff --git a/spreadsheet/parse/package_external_refs.go b/spreadsheet/parse/package_external_refs.go new file mode 100644 index 00000000..1cdb2385 --- /dev/null +++ b/spreadsheet/parse/package_external_refs.go @@ -0,0 +1,68 @@ +package parse + +import ( + "fmt" + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" +) + +func ProcessPackageExternalRefsRows(rows [][]string, doc *spdx.Document2_2) error { + // the first row is column headers, keep track of which order they appear in + columnsByIndex := make(map[int]string) + for index, header := range rows[0] { + columnsByIndex[index] = header + } + + for rowNum, row := range rows[1:] { + // set rowNum to the correct value, Go slices are zero-indexed (+1), and we started iterating on the second element (+1) + rowNum = rowNum + 2 + // each external ref is related to a package, make sure we figure out which package + var packageSPDXID spdx.ElementID + newExternalRef := spdx.PackageExternalReference2_2{} + + for columnIndex, value := range row { + if value == "" { + continue + } + + switch columnsByIndex[columnIndex] { + case common.ExternalRefPackageID: + id := spdx.DocElementID{} + err := id.FromString(value) + if err != nil { + return fmt.Errorf("invalid Package SPDX Identifier for External Ref in row %d: %v", rowNum, err.Error()) + } + + packageSPDXID = id.ElementRefID + case common.ExternalRefCategory: + newExternalRef.Category = value + case common.ExternalRefType: + newExternalRef.RefType = value + case common.ExternalRefLocator: + newExternalRef.Locator = value + case common.ExternalRefComment: + newExternalRef.ExternalRefComment = value + } + } + + if packageSPDXID == "" { + return fmt.Errorf("no SPDX ID given for package external ref in row %d", rowNum) + } + + // find the package this external ref is related to + var packageFound bool + for ii, pkg := range doc.Packages { + if pkg.PackageSPDXIdentifier == packageSPDXID { + packageFound = true + doc.Packages[ii].PackageExternalReferences = append(doc.Packages[ii].PackageExternalReferences, &newExternalRef) + break + } + } + + if !packageFound { + return fmt.Errorf("package external ref assigned to non-existent package %s in row %d", packageSPDXID, rowNum) + } + } + + return nil +} diff --git a/spreadsheet/parse/package_info.go b/spreadsheet/parse/package_info.go new file mode 100644 index 00000000..54188ce3 --- /dev/null +++ b/spreadsheet/parse/package_info.go @@ -0,0 +1,128 @@ +package parse + +import ( + "encoding/csv" + "fmt" + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" + "strconv" + "strings" +) + +func ProcessPackageInfoRows(rows [][]string, doc *spdx.Document2_2) error { + // the first row is column headers, keep track of which order they appear in + columnsByIndex := make(map[int]string) + for index, header := range rows[0] { + columnsByIndex[index] = header + } + + for rowNum, row := range rows[1:] { + // set rowNum to the correct value, Go slices are zero-indexed (+1), and we started iterating on the second element (+1) + rowNum = rowNum + 2 + var truthy = true + newPackage := spdx.Package2_2{FilesAnalyzed: &truthy} + + for columnIndex, value := range row { + if value == "" { + continue + } + + switch columnsByIndex[columnIndex] { + case common.PackageName: + newPackage.PackageName = value + case common.PackageSPDXIdentifier: + id := spdx.DocElementID{} + err := id.FromString(value) + if err != nil { + return 
fmt.Errorf("invalid Package SPDX Identifier in row %d: %v", rowNum, err.Error()) + } + + newPackage.PackageSPDXIdentifier = id.ElementRefID + case common.PackageVersion: + newPackage.PackageVersion = value + case common.PackageFileName: + newPackage.PackageFileName = value + case common.PackageSupplier: + supplier := spdx.Supplier{} + err := supplier.FromString(value) + if err != nil { + return fmt.Errorf("invalid Package Supplier in row %d: %v", rowNum, err.Error()) + } + + newPackage.PackageSupplier = &supplier + case common.PackageOriginator: + originator := spdx.Originator{} + err := originator.FromString(value) + if err != nil { + return fmt.Errorf("invalid Package Originator in row %d: %v", rowNum, err.Error()) + } + + newPackage.PackageOriginator = &originator + case common.PackageHomePage: + newPackage.PackageHomePage = value + case common.PackageDownloadLocation: + newPackage.PackageDownloadLocation = value + case common.PackageChecksum: + checksums := strings.Split(value, "\n") + for _, checksumStr := range checksums { + checksum := spdx.Checksum{} + err := checksum.FromString(checksumStr) + if err != nil { + return fmt.Errorf("invalid Package Checksum in row %d: %v", rowNum, err.Error()) + } + + newPackage.PackageChecksums = append(newPackage.PackageChecksums, checksum) + } + case common.PackageVerificationCode: + newPackage.PackageVerificationCode.Value = value + case common.PackageVerificationCodeExcludedFiles: + excludedFiles := strings.Split(value, "\n") + newPackage.PackageVerificationCode.ExcludedFiles = append(newPackage.PackageVerificationCode.ExcludedFiles, excludedFiles...) + case common.PackageSourceInfo: + newPackage.PackageSourceInfo = value + case common.PackageLicenseDeclared: + newPackage.PackageLicenseDeclared = value + case common.PackageLicenseConcluded: + newPackage.PackageLicenseConcluded = value + case common.PackageLicenseInfoFromFiles: + files := strings.Split(value, ",") + newPackage.PackageLicenseInfoFromFiles = append(newPackage.PackageLicenseInfoFromFiles, files...) + case common.PackageLicenseComments: + newPackage.PackageLicenseComments = value + case common.PackageCopyrightText: + newPackage.PackageCopyrightText = value + case common.PackageSummary: + newPackage.PackageSummary = value + case common.PackageDescription: + newPackage.PackageDescription = value + case common.PackageAttributionText: + attributionTexts, err := csv.NewReader(strings.NewReader(value)).Read() + if err != nil { + return fmt.Errorf("invalid Package Attribution Text in row %d: %s", rowNum, err.Error()) + } + newPackage.PackageAttributionTexts = attributionTexts + case common.PackageFilesAnalyzed: + filesAnalyzed, err := strconv.ParseBool(value) + if err != nil { + // sometimes the excelize library gives funny values for booleans + if value == "0x160da28" { + filesAnalyzed = false + } else if value == "0x1512300" { + filesAnalyzed = true + } else { + return fmt.Errorf("invalid boolean '%s' for Files Analyzed in row %d (should be 'true' or 'false')", value, rowNum) + } + } + + newPackage.FilesAnalyzed = &filesAnalyzed + case common.PackageComments: + newPackage.PackageComment = value + } + } + + // TODO: validate? 
+ doc.Packages = append(doc.Packages, &newPackage) + } + + return nil +} diff --git a/spreadsheet/parse/per_file_info.go b/spreadsheet/parse/per_file_info.go new file mode 100644 index 00000000..a8b9161c --- /dev/null +++ b/spreadsheet/parse/per_file_info.go @@ -0,0 +1,110 @@ +package parse + +import ( + "encoding/csv" + "fmt" + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" + "strings" +) + +func ProcessPerFileInfoRows(rows [][]string, doc *spdx.Document2_2) error { + // the first row is column headers, keep track of which order they appear in + columnsByIndex := make(map[int]string) + for index, header := range rows[0] { + columnsByIndex[index] = header + } + + for rowNum, row := range rows[1:] { + // set rowNum to the correct value, Go slices are zero-indexed (+1), and we started iterating on the second element (+1) + rowNum = rowNum + 2 + newFile := spdx.File2_2{} + var associatedPackageSPDXID spdx.ElementID + + for columnIndex, value := range row { + if value == "" { + continue + } + + switch columnsByIndex[columnIndex] { + case common.FileInfoFileName: + newFile.FileName = value + case common.FileInfoSPDXIdentifier: + var id spdx.DocElementID + err := id.FromString(value) + if err != nil { + return fmt.Errorf("invalid SPDX Identifier in row %d: %v", rowNum, err.Error()) + } + + newFile.FileSPDXIdentifier = id.ElementRefID + case common.FileInfoPackageIdentifier: + // in spreadsheet formats, file<->package relationships are dictated by this column. + // if there is no value in this column, the file is not associated with a particular package + var id spdx.DocElementID + err := id.FromString(value) + if err != nil { + return fmt.Errorf("invalid Package SPDX Identifier in row %d: %v", rowNum, err.Error()) + } + + associatedPackageSPDXID = id.ElementRefID + case common.FileInfoFileTypes: + newFile.FileTypes = strings.Split(value, ", ") + case common.FileInfoFileChecksums: + checksums := strings.Split(value, "\n") + for _, checksumStr := range checksums { + checksum := spdx.Checksum{} + err := checksum.FromString(checksumStr) + if err != nil { + return fmt.Errorf("invalid File Checksum in row %d: %v", rowNum, err.Error()) + } + + newFile.Checksums = append(newFile.Checksums, checksum) + } + case common.FileInfoLicenseConcluded: + newFile.LicenseConcluded = value + case common.FileInfoLicenseInfoInFile: + newFile.LicenseInfoInFiles = strings.Split(value, ", ") + case common.FileInfoLicenseComments: + newFile.LicenseComments = value + case common.FileInfoFileCopyrightText: + newFile.FileCopyrightText = value + case common.FileInfoNoticeText: + newFile.FileNotice = value + case common.FileInfoArtifactOfProject: + // ignored + case common.FileInfoArtifactOfHomepage: + // ignored + case common.FileInfoArtifactOfURL: + // ignored + case common.FileInfoContributors: + contributors, err := csv.NewReader(strings.NewReader(value)).Read() + if err != nil { + return fmt.Errorf("invalid File Contributors in row %d: %s", rowNum, err.Error()) + } + newFile.FileContributors = contributors + case common.FileInfoFileComment: + newFile.FileComment = value + case common.FileInfoFileDependencies: + // ignored + case common.FileInfoAttributionText: + newFile.FileAttributionTexts = strings.Split(value, ", ") + } + } + + // TODO: validate? 
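// Descriptive note, based on the column handling above and the association logic just
// below: in the spreadsheet format, file-to-package links are carried by the
// "Package Identifier" column rather than by Relationships. A row whose
// "Package Identifier" cell holds "SPDXRef-Package" attaches that file (for example
// "SPDXRef-CommonsLangSrc" from the example document) to the package whose
// PackageSPDXIdentifier is "Package", while a row with an empty "Package Identifier"
// cell is kept at the document level in doc.Files.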
+ + // add this file to the associated package, if it is associated with a package + if associatedPackageSPDXID != "" { + for ii, pkg := range doc.Packages { + if pkg.PackageSPDXIdentifier == associatedPackageSPDXID { + doc.Packages[ii].Files = append(doc.Packages[ii].Files, &newFile) + break + } + } + } else { + doc.Files = append(doc.Files, &newFile) + } + } + + return nil +} diff --git a/spreadsheet/parse/relationships.go b/spreadsheet/parse/relationships.go new file mode 100644 index 00000000..7a2d3c02 --- /dev/null +++ b/spreadsheet/parse/relationships.go @@ -0,0 +1,55 @@ +package parse + +import ( + "fmt" + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" +) + +func ProcessRelationshipsRows(rows [][]string, doc *spdx.Document2_2) error { + // the first row is column headers, keep track of which order they appear in + columnsByIndex := make(map[int]string) + for index, header := range rows[0] { + columnsByIndex[index] = header + } + + for rowNum, row := range rows[1:] { + // set rowNum to the correct value, Go slices are zero-indexed (+1), and we started iterating on the second element (+1) + rowNum = rowNum + 2 + newRelationship := spdx.Relationship2_2{} + + for columnIndex, value := range row { + if value == "" { + continue + } + + switch columnsByIndex[columnIndex] { + case common.RelationshipsRefA: + id := spdx.DocElementID{} + err := id.FromString(value) + if err != nil { + return fmt.Errorf("invalid %s in row %d: %v", common.RelationshipsRefA, rowNum, err.Error()) + } + + newRelationship.RefA = id + case common.RelationshipsRelationship: + newRelationship.Relationship = value + case common.RelationshipsRefB: + id := spdx.DocElementID{} + err := id.FromString(value) + if err != nil { + return fmt.Errorf("invalid %s in row %d: %v", common.RelationshipsRefB, rowNum, err.Error()) + } + + newRelationship.RefB = id + case common.RelationshipsComment: + newRelationship.RelationshipComment = value + } + } + + // TODO: validate? 
+ doc.Relationships = append(doc.Relationships, &newRelationship) + } + + return nil +} diff --git a/spreadsheet/parse/snippets.go b/spreadsheet/parse/snippets.go new file mode 100644 index 00000000..5129a887 --- /dev/null +++ b/spreadsheet/parse/snippets.go @@ -0,0 +1,80 @@ +package parse + +import ( + "fmt" + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" + "strings" +) + +func ProcessSnippetsRows(rows [][]string, doc *spdx.Document2_2) error { + // the first row is column headers, keep track of which order they appear in + columnsByIndex := make(map[int]string) + for index, header := range rows[0] { + columnsByIndex[index] = header + } + + for rowNum, row := range rows[1:] { + // set rowNum to the correct value, Go slices are zero-indexed (+1), and we started iterating on the second element (+1) + rowNum = rowNum + 2 + newSnippet := spdx.Snippet2_2{} + + for columnIndex, value := range row { + if value == "" { + continue + } + + switch columnsByIndex[columnIndex] { + case common.SnippetsID: + id := spdx.DocElementID{} + err := id.FromString(value) + if err != nil { + return fmt.Errorf("invalid %s in row %d: %v", common.SnippetsID, rowNum, err.Error()) + } + + newSnippet.SnippetSPDXIdentifier = id.ElementRefID + case common.SnippetsName: + newSnippet.SnippetName = value + case common.SnippetsFromFileID: + id := spdx.DocElementID{} + err := id.FromString(value) + if err != nil { + return fmt.Errorf("invalid %s in row %d: %v", common.SnippetsFromFileID, rowNum, err.Error()) + } + + newSnippet.SnippetFromFileSPDXIdentifier = id.ElementRefID + case common.SnippetsByteRange: + snippetRange := spdx.SnippetRange{} + err := snippetRange.FromString(value, true) + if err != nil { + return fmt.Errorf("invalid %s in row %d: %v", common.SnippetsByteRange, rowNum, err.Error()) + } + + newSnippet.Ranges = append(newSnippet.Ranges, snippetRange) + case common.SnippetsLineRange: + snippetRange := spdx.SnippetRange{} + err := snippetRange.FromString(value, false) + if err != nil { + return fmt.Errorf("invalid %s in row %d: %v", common.SnippetsLineRange, rowNum, err.Error()) + } + + newSnippet.Ranges = append(newSnippet.Ranges, snippetRange) + case common.SnippetsLicenseConcluded: + newSnippet.SnippetLicenseConcluded = value + case common.SnippetsLicenseInfoInSnippet: + newSnippet.LicenseInfoInSnippet = strings.Split(value, ", ") + case common.SnippetsLicenseComments: + newSnippet.SnippetLicenseComments = value + case common.SnippetsCopyrightText: + newSnippet.SnippetCopyrightText = value + case common.SnippetsComment: + newSnippet.SnippetComment = value + } + } + + // TODO: validate? + doc.Snippets = append(doc.Snippets, newSnippet) + } + + return nil +} diff --git a/spreadsheet/parser.go b/spreadsheet/parser.go new file mode 100644 index 00000000..55fd4c56 --- /dev/null +++ b/spreadsheet/parser.go @@ -0,0 +1,68 @@ +// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later + +package spdx_xls + +import ( + "errors" + "fmt" + "github.com/spdx/tools-golang/spdx" + "github.com/xuri/excelize/v2" + "io" +) + +// Load2_2 takes in an io.Reader and returns an SPDX document. 
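// Illustrative usage sketch from a consuming package, mirroring the loaders for the
// other formats in this repository; the function name is hypothetical and the path is
// the example workbook referenced by the tests in this diff:
//
//	import (
//		"os"
//
//		"github.com/spdx/tools-golang/spdx"
//		spdx_xls "github.com/spdx/tools-golang/spreadsheet"
//	)
//
//	func loadExample() (*spdx.Document2_2, error) {
//		f, err := os.Open("examples/sample-docs/xls/SPDXSpreadsheetExample-v2.2.xlsx")
//		if err != nil {
//			return nil, err
//		}
//		defer f.Close()
//
//		return spdx_xls.Load2_2(f)
//	}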
+func Load2_2(content io.Reader) (*spdx.Document2_2, error) { + workbook, err := excelize.OpenReader(content) + if err != nil { + return nil, err + } + + doc, err := parseWorkbook(workbook) + if err != nil { + return nil, err + } + + return doc, nil +} + +func parseWorkbook(workbook *excelize.File) (*spdx.Document2_2, error) { + doc := spdx.Document2_2{ + // ensure this pointer is not nil + CreationInfo: &spdx.CreationInfo2_2{}, + } + + for _, sheetHandlingInfo := range sheetHandlers { + rows, err := workbook.GetRows(sheetHandlingInfo.SheetName) + if err != nil { + // if the sheet doesn't exist and is required, that's a problem + if errors.As(err, &excelize.ErrSheetNotExist{}) { + if sheetHandlingInfo.SheetIsRequired { + return nil, fmt.Errorf("sheet '%s' is required but is not present", sheetHandlingInfo.SheetName) + } else { + // if it is not required, skip it + continue + } + } else { + // some other error happened + return nil, err + } + } + + // the first row is column headers, and the next row would contain actual data. + // if there are less than 2 rows present, there is no actual data in the sheet. + if len(rows) < 2 { + if sheetHandlingInfo.SheetIsRequired { + return nil, fmt.Errorf("sheet '%s' is required but contains no data", sheetHandlingInfo.SheetName) + } + + continue + } + + err = sheetHandlingInfo.ParserFunc(rows, &doc) + if err != nil { + return nil, fmt.Errorf("failed to parse sheet '%s': %w", sheetHandlingInfo.SheetName, err) + } + } + + return &doc, nil +} diff --git a/spreadsheet/sheet_handling.go b/spreadsheet/sheet_handling.go new file mode 100644 index 00000000..dbff80d3 --- /dev/null +++ b/spreadsheet/sheet_handling.go @@ -0,0 +1,100 @@ +package spdx_xls + +import ( + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" + "github.com/spdx/tools-golang/spreadsheet/parse" + "github.com/spdx/tools-golang/spreadsheet/write" + "github.com/xuri/excelize/v2" +) + +// sheetParserFunc is a function that takes in the data from a sheet as a slice of rows and iterates through them to +// fill in information in the given spdx.Document2_2. +// Returns an error if any occurred. +type sheetParserFunc func(rows [][]string, doc *spdx.Document2_2) error + +// sheetWriterFunc is a function that takes in a spdx.Document2_2 and a spreadsheet as a *excelize.File and iterates +// through particular section of the Document spdx.Document2_2 in order to write out data to the spreadsheet. +// Returns an error if any occurred. +type sheetWriterFunc func(doc *spdx.Document2_2, spreadsheet *excelize.File) error + +// sheetHandlingInformation defines info that is needed for parsing individual sheets in a workbook. +type sheetHandlingInformation struct { + // SheetName is the name of the sheet + SheetName string + + // HeadersByColumn is a map of header names to which column the header should go in. + // This is used only when writing/exporting a spreadsheet. + // During parsing/imports, the header positions are parsed dynamically. + HeadersByColumn map[string]string + + // ParserFunc is the function that should be used to parse a particular sheet + ParserFunc sheetParserFunc + + // WriterFunc is the function that should be used to write a particular sheet + WriterFunc sheetWriterFunc + + // SheetIsRequired denotes whether the sheet is required to be present in the workbook, or if it is optional (false) + SheetIsRequired bool +} + +// sheetHandlers contains handling information for each sheet in the workbook. 
+// The order of this slice determines the order in which the sheets are processed. +var sheetHandlers = []sheetHandlingInformation{ + { + SheetName: common.SheetNameDocumentInfo, + HeadersByColumn: write.DocumentInfoHeadersByColumn, + ParserFunc: parse.ProcessDocumentInfoRows, + WriterFunc: write.WriteDocumentInfoRows, + SheetIsRequired: true, + }, + { + SheetName: common.SheetNamePackageInfo, + HeadersByColumn: write.PackageInfoHeadersByColumn, + ParserFunc: parse.ProcessPackageInfoRows, + WriterFunc: write.WritePackageInfoRows, + SheetIsRequired: false, + }, + { + SheetName: common.SheetNameExternalRefs, + HeadersByColumn: write.ExternalRefsHeadersByColumn, + ParserFunc: parse.ProcessPackageExternalRefsRows, + WriterFunc: write.WriteExternalRefsRows, + SheetIsRequired: false, + }, + { + SheetName: common.SheetNameExtractedLicenseInfo, + HeadersByColumn: write.ExtractedLicenseInfoHeadersByColumn, + ParserFunc: parse.ProcessExtractedLicenseInfoRows, + WriterFunc: write.WriteExtractedLicenseInfoRows, + SheetIsRequired: false, + }, + { + SheetName: common.SheetNameFileInfo, + HeadersByColumn: write.FileInfoHeadersByColumn, + ParserFunc: parse.ProcessPerFileInfoRows, + WriterFunc: write.WriteFileInfoRows, + SheetIsRequired: false, + }, + { + SheetName: common.SheetNameRelationships, + HeadersByColumn: write.RelationshipsHeadersByColumn, + ParserFunc: parse.ProcessRelationshipsRows, + WriterFunc: write.WriteRelationshipsRows, + SheetIsRequired: false, + }, + { + SheetName: common.SheetNameAnnotations, + HeadersByColumn: write.AnnotationsHeadersByColumn, + ParserFunc: parse.ProcessAnnotationsRows, + WriterFunc: write.WriteAnnotationsRows, + SheetIsRequired: false, + }, + { + SheetName: common.SheetNameSnippets, + HeadersByColumn: write.SnippetsHeadersByColumn, + ParserFunc: parse.ProcessSnippetsRows, + WriterFunc: write.WriteSnippetsRows, + SheetIsRequired: false, + }, +} diff --git a/spreadsheet/spreadsheet_test.go b/spreadsheet/spreadsheet_test.go new file mode 100644 index 00000000..b656a25e --- /dev/null +++ b/spreadsheet/spreadsheet_test.go @@ -0,0 +1,454 @@ +// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later + +package spdx_xls + +import ( + "bytes" + "fmt" + "github.com/google/go-cmp/cmp" + "os" + "testing" + + "github.com/spdx/tools-golang/spdx" +) + +func TestLoad2_2(t *testing.T) { + file, err := os.Open("../examples/sample-docs/xls/SPDXSpreadsheetExample-v2.2.xlsx") + if err != nil { + panic(fmt.Errorf("error opening File: %s", err)) + } + + got, err := Load2_2(file) + if err != nil { + t.Errorf("xls.parser.Load2_2() error = %v", err) + return + } + + // get a copy of the handwritten struct so we don't mutate it by accident + handwrittenExample := want + + if !cmp.Equal(handwrittenExample, got) { + t.Errorf("Got incorrect struct after parsing XLSX example") + return + } +} + +func TestWrite2_2(t *testing.T) { + w := &bytes.Buffer{} + // get a copy of the handwritten struct so we don't mutate it by accident + handwrittenExample := want + if err := Save2_2(&handwrittenExample, w); err != nil { + t.Errorf("Save2_2() error = %v", err.Error()) + return + } + + // we should be able to parse what the writer wrote, and it should be identical to the original handwritten struct + parsedDoc, err := Load2_2(bytes.NewReader(w.Bytes())) + if err != nil { + t.Errorf("failed to parse written document: %v", err.Error()) + return + } + + if !cmp.Equal(handwrittenExample, parsedDoc) { + t.Errorf("Got incorrect struct after writing and re-parsing XLSX example") + return + } +} + +var truthy =
true +var falsy = false + +// want is handwritten translation of the official example xls SPDX v2.2 document into a Go struct. +// We expect that the result of parsing the official document should be this value. +// We expect that the result of writing this struct should match the official example document. +var want = spdx.Document2_2{ + DataLicense: "CC0-1.0", + SPDXVersion: "SPDX-2.2", + SPDXIdentifier: "DOCUMENT", + DocumentName: "SPDX-Tools-v2.0", + DocumentNamespace: "http://spdx.org/spdxdocs/spdx-example-444504E0-4F89-41D3-9A0C-0305E82C3301", + CreationInfo: &spdx.CreationInfo2_2{ + LicenseListVersion: "3.9", + Creators: []spdx.Creator{ + {CreatorType: "Tool", Creator: "LicenseFind-1.0"}, + {CreatorType: "Organization", Creator: "ExampleCodeInspect ()"}, + {CreatorType: "Person", Creator: "Jane Doe ()"}, + }, + Created: "1/29/10 18:30", + CreatorComment: "This package has been shipped in source and binary form.\nThe binaries were created with gcc 4.5.1 and expect to link to\ncompatible system run time libraries.", + }, + DocumentComment: "This document was created using SPDX 2.0 using licenses from the web site.", + ExternalDocumentReferences: []spdx.ExternalDocumentRef2_2{ + { + DocumentRefID: spdx.MakeDocElementID("spdx-tool-1.2", ""), + URI: "http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301", + Checksum: spdx.Checksum{ + Algorithm: spdx.SHA1, + Value: "d6a770ba38583ed4bb4525bd96e50461655d2759", + }, + }, + }, + OtherLicenses: []*spdx.OtherLicense2_2{ + { + LicenseIdentifier: "LicenseRef-1", + ExtractedText: "/*\n * (c) Copyright 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP\n * All rights reserved.\n *\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions\n * are met:\n * 1. Redistributions of source code must retain the above copyright\n * notice, this list of conditions and the following disclaimer.\n * 2. Redistributions in binary form must reproduce the above copyright\n * notice, this list of conditions and the following disclaimer in the\n * documentation and/or other materials provided with the distribution.\n * 3. 
The name of the author may not be used to endorse or promote products\n * derived from this software without specific prior written permission.\n *\n * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR\n * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES\n * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.\n * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,\n * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT\n * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF\n * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n*/", + }, + { + LicenseIdentifier: "LicenseRef-2", + ExtractedText: "This package includes the GRDDL parser developed by Hewlett Packard under the following license:\n� Copyright 2007 Hewlett-Packard Development Company, LP\n\nRedistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: \n\nRedistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. \nRedistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. \nThe name of the author may not be used to endorse or promote products derived from this software without specific prior written permission. \nTHIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.", + }, + { + LicenseIdentifier: "LicenseRef-4", + ExtractedText: "/*\n * (c) Copyright 2009 University of Bristol\n * All rights reserved.\n *\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions\n * are met:\n * 1. Redistributions of source code must retain the above copyright\n * notice, this list of conditions and the following disclaimer.\n * 2. Redistributions in binary form must reproduce the above copyright\n * notice, this list of conditions and the following disclaimer in the\n * documentation and/or other materials provided with the distribution.\n * 3. 
The name of the author may not be used to endorse or promote products\n * derived from this software without specific prior written permission.\n *\n * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR\n * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES\n * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.\n * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,\n * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT\n * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF\n * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n*/", + }, + { + LicenseIdentifier: "LicenseRef-Beerware-4.2", + ExtractedText: "\"THE BEER-WARE LICENSE\" (Revision 42):\nphk@FreeBSD.ORG wrote this file. As long as you retain this notice you\ncan do whatever you want with this stuff. If we meet some day, and you think this stuff is worth it, you can buy me a beer in return Poul-Henning Kamp", + LicenseComment: "The beerware license has a couple of other standard variants.", + LicenseName: "Beer-Ware License (Version 42)", + LicenseCrossReferences: []string{"http://people.freebsd.org/~phk/"}, + }, + { + LicenseIdentifier: "LicenseRef-3", + ExtractedText: "The CyberNeko Software License, Version 1.0\n\n \n(C) Copyright 2002-2005, Andy Clark. All rights reserved.\n \nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions\nare met:\n\n1. Redistributions of source code must retain the above copyright\n notice, this list of conditions and the following disclaimer. \n\n2. Redistributions in binary form must reproduce the above copyright\n notice, this list of conditions and the following disclaimer in\n the documentation and/or other materials provided with the\n distribution.\n\n3. The end-user documentation included with the redistribution,\n if any, must include the following acknowledgment: \n \"This product includes software developed by Andy Clark.\"\n Alternately, this acknowledgment may appear in the software itself,\n if and wherever such third-party acknowledgments normally appear.\n\n4. The names \"CyberNeko\" and \"NekoHTML\" must not be used to endorse\n or promote products derived from this software without prior \n written permission. For written permission, please contact \n andyc@cyberneko.net.\n\n5. Products derived from this software may not be called \"CyberNeko\",\n nor may \"CyberNeko\" appear in their name, without prior written\n permission of the author.\n\nTHIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED\nWARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES\nOF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\nDISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR OR OTHER CONTRIBUTORS\nBE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, \nOR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT \nOF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR \nBUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, \nWHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE \nOR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, \nEVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.", + LicenseName: "CyberNeko License", + LicenseCrossReferences: []string{ + "http://people.apache.org/~andyc/neko/LICENSE", + "http://justasample.url.com", + }, + LicenseComment: "This is tye CyperNeko License", + }, + }, + Annotations: []*spdx.Annotation2_2{ + { + Annotator: spdx.Annotator{ + Annotator: "Jane Doe ()", + AnnotatorType: "Person", + }, + AnnotationDate: "2010-01-29T18:30:22Z", + AnnotationType: "OTHER", + AnnotationComment: "Document level annotation", + AnnotationSPDXIdentifier: spdx.MakeDocElementID("", "DOCUMENT"), + }, + { + Annotator: spdx.Annotator{ + Annotator: "Joe Reviewer", + AnnotatorType: "Person", + }, + AnnotationDate: "2010-02-10T00:00:00Z", + AnnotationType: "REVIEW", + AnnotationComment: "This is just an example. Some of the non-standard licenses look like they are actually BSD 3 clause licenses", + AnnotationSPDXIdentifier: spdx.MakeDocElementID("", "DOCUMENT"), + }, + { + Annotator: spdx.Annotator{ + Annotator: "Suzanne Reviewer", + AnnotatorType: "Person", + }, + AnnotationDate: "2011-03-13T00:00:00Z", + AnnotationType: "REVIEW", + AnnotationComment: "Another example reviewer.", + AnnotationSPDXIdentifier: spdx.MakeDocElementID("", "DOCUMENT"), + }, + }, + Packages: []*spdx.Package2_2{ + { + PackageSPDXIdentifier: "fromDoap-1", + PackageCopyrightText: "NOASSERTION", + PackageDownloadLocation: "NOASSERTION", + FilesAnalyzed: &falsy, + PackageHomePage: "http://commons.apache.org/proper/commons-lang/", + PackageLicenseConcluded: "NOASSERTION", + PackageLicenseDeclared: "NOASSERTION", + PackageName: "Apache Commons Lang", + }, + { + PackageName: "glibc", + PackageSPDXIdentifier: "Package", + PackageVersion: "2.11.1", + PackageFileName: "glibc-2.11.1.tar.gz", + PackageSupplier: &spdx.Supplier{ + Supplier: "Jane Doe (jane.doe@example.com)", + SupplierType: "Person", + }, + PackageOriginator: &spdx.Originator{ + Originator: "ExampleCodeInspect (contact@example.com)", + OriginatorType: "Organization", + }, + PackageDownloadLocation: "http://ftp.gnu.org/gnu/glibc/glibc-ports-2.15.tar.gz", + FilesAnalyzed: &truthy, + PackageVerificationCode: spdx.PackageVerificationCode{ + Value: "d6a770ba38583ed4bb4525bd96e50461655d2758", + ExcludedFiles: []string{"./package.spdx"}, + }, + PackageChecksums: []spdx.Checksum{ + { + Algorithm: "MD5", + Value: "624c1abb3664f4b35547e7c73864ad24", + }, + { + Algorithm: "SHA1", + Value: "85ed0817af83a24ad8da68c2b5094de69833983c", + }, + { + Algorithm: "SHA256", + Value: "11b6d3ee554eedf79299905a98f9b9a04e498210b59f15094c916c91d150efcd", + }, + }, + PackageHomePage: "http://ftp.gnu.org/gnu/glibc", + PackageSourceInfo: "uses glibc-2_11-branch from git://sourceware.org/git/glibc.git.", + PackageLicenseConcluded: "(LGPL-2.0-only OR LicenseRef-3)", + PackageLicenseInfoFromFiles: []string{ + "GPL-2.0-only", + "LicenseRef-2", + "LicenseRef-1", + }, + PackageLicenseDeclared: "(LGPL-2.0-only AND LicenseRef-3)", + PackageLicenseComments: "The license for this project changed with the release of version x.y. 
The version of the project included here post-dates the license change.", + PackageCopyrightText: "Copyright 2008-2010 John Smith", + PackageSummary: "GNU C library.", + PackageDescription: "The GNU C Library defines functions that are specified by the ISO C standard, as well as additional features specific to POSIX and other derivatives of the Unix operating system, and extensions specific to GNU systems.", + PackageComment: "", + PackageExternalReferences: []*spdx.PackageExternalReference2_2{ + { + Category: "SECURITY", + RefType: "cpe23Type", + Locator: "cpe:2.3:a:pivotal_software:spring_framework:4.1.0:*:*:*:*:*:*:*", + }, + { + Category: "OTHER", + RefType: "LocationRef-acmeforge", + Locator: "acmecorp/acmenator/4.1.3-alpha", + ExternalRefComment: "This is the external ref for Acme", + }, + }, + PackageAttributionTexts: []string{ + "The GNU C Library is free software. See the file COPYING.LIB for copying conditions, and LICENSES for notices about a few contributions that require these additional notices to be distributed. License copyright years may be listed using range notation, e.g., 1996-2015, indicating that every year in the range, inclusive, is a copyrightable year that would otherwise be listed individually.", + }, + Files: []*spdx.File2_2{ + { + FileSPDXIdentifier: "CommonsLangSrc", + Checksums: []spdx.Checksum{ + { + Algorithm: "SHA1", + Value: "c2b4e1c67a2d28fced849ee1bb76e7391b93f125", + }, + }, + FileComment: "This file is used by Jena", + FileCopyrightText: "Copyright 2001-2011 The Apache Software Foundation", + FileContributors: []string{"Apache Software Foundation"}, + FileName: "./lib-source/commons-lang3-3.1-sources.jar", + FileTypes: []string{"ARCHIVE"}, + LicenseConcluded: "Apache-2.0", + LicenseInfoInFiles: []string{"Apache-2.0"}, + FileNotice: "Apache Commons Lang\nCopyright 2001-2011 The Apache Software Foundation\n\nThis product includes software developed by\nThe Apache Software Foundation (http://www.apache.org/).\n\nThis product includes software from the Spring Framework,\nunder the Apache License 2.0 (see: StringUtils.containsWhitespace())", + }, + { + FileSPDXIdentifier: "JenaLib", + Checksums: []spdx.Checksum{ + { + Algorithm: "SHA1", + Value: "3ab4e1c67a2d28fced849ee1bb76e7391b93f125", + }, + }, + FileComment: "This file belongs to Jena", + FileCopyrightText: "(c) Copyright 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP", + FileContributors: []string{"Apache Software Foundation", "Hewlett Packard Inc."}, + FileName: "./lib-source/jena-2.6.3-sources.jar", + FileTypes: []string{"ARCHIVE"}, + LicenseComments: "This license is used by Jena", + LicenseConcluded: "LicenseRef-1", + LicenseInfoInFiles: []string{"LicenseRef-1"}, + }, + { + FileName: "./src/org/spdx/parser/DOAPProject.java", + FileSPDXIdentifier: "DoapSource", + FileTypes: []string{ + "SOURCE", + }, + Checksums: []spdx.Checksum{ + { + Algorithm: "SHA1", + Value: "2fd4e1c67a2d28fced849ee1bb76e7391b93eb12", + }, + }, + LicenseConcluded: "Apache-2.0", + LicenseInfoInFiles: []string{ + "Apache-2.0", + }, + FileCopyrightText: "Copyright 2010, 2011 Source Auditor Inc.", + FileContributors: []string{ + "Protecode Inc.", + "SPDX Technical Team Members", + "Open Logic Inc.", + "Source Auditor Inc.", + "Black Duck Software In.c", + }, + }, + }, + Annotations: []spdx.Annotation2_2{ + { + Annotator: spdx.Annotator{ + Annotator: "Package Commenter", + AnnotatorType: "Person", + }, + AnnotationDate: "2011-01-29T18:30:22Z", + AnnotationType: "OTHER", + 
AnnotationComment: "Package level annotation", + AnnotationSPDXIdentifier: spdx.MakeDocElementID("", "Package"), + }, + }, + }, + { + PackageName: "Jena", + PackageSPDXIdentifier: "fromDoap-0", + PackageCopyrightText: "NOASSERTION", + PackageDownloadLocation: "https://search.maven.org/remotecontent?filepath=org/apache/jena/apache-jena/3.12.0/apache-jena-3.12.0.tar.gz", + PackageExternalReferences: []*spdx.PackageExternalReference2_2{ + { + Category: "PACKAGE_MANAGER", + RefType: "purl", + Locator: "pkg:maven/org.apache.jena/apache-jena@3.12.0", + }, + }, + FilesAnalyzed: &falsy, + PackageHomePage: "http://www.openjena.org/", + PackageLicenseConcluded: "NOASSERTION", + PackageLicenseDeclared: "NOASSERTION", + PackageVersion: "3.12.0", + }, + { + PackageSPDXIdentifier: "Saxon", + PackageChecksums: []spdx.Checksum{ + { + Algorithm: "SHA1", + Value: "85ed0817af83a24ad8da68c2b5094de69833983c", + }, + }, + PackageCopyrightText: "Copyright Saxonica Ltd", + PackageDescription: "The Saxon package is a collection of tools for processing XML documents.", + PackageDownloadLocation: "https://sourceforge.net/projects/saxon/files/Saxon-B/8.8.0.7/saxonb8-8-0-7j.zip/download", + FilesAnalyzed: &falsy, + PackageHomePage: "http://saxon.sourceforge.net/", + PackageLicenseComments: "Other versions available for a commercial license", + PackageLicenseConcluded: "MPL-1.0", + PackageLicenseDeclared: "MPL-1.0", + PackageName: "Saxon", + PackageFileName: "saxonB-8.8.zip", + PackageVersion: "8.8", + }, + }, + Files: []*spdx.File2_2{ + { + FileSPDXIdentifier: "File", + Annotations: []spdx.Annotation2_2{ + { + Annotator: spdx.Annotator{ + Annotator: "File Commenter", + AnnotatorType: "Person", + }, + AnnotationDate: "2011-01-29T18:30:22Z", + AnnotationType: "OTHER", + AnnotationComment: "File level annotation", + AnnotationSPDXIdentifier: spdx.MakeDocElementID("", "File"), + }, + }, + Checksums: []spdx.Checksum{ + { + Algorithm: "SHA1", + Value: "d6a770ba38583ed4bb4525bd96e50461655d2758", + }, + { + Algorithm: "MD5", + Value: "624c1abb3664f4b35547e7c73864ad24", + }, + }, + FileComment: "The concluded license was taken from the package level that the file was included in.\nThis information was found in the COPYING.txt file in the xyz directory.", + FileCopyrightText: "Copyright 2008-2010 John Smith", + FileContributors: []string{"The Regents of the University of California", "Modified by Paul Mundt lethal@linux-sh.org", "IBM Corporation"}, + FileName: "./package/foo.c", + FileTypes: []string{"SOURCE"}, + LicenseComments: "The concluded license was taken from the package level that the file was included in.", + LicenseConcluded: "(LGPL-2.0-only OR LicenseRef-2)", + LicenseInfoInFiles: []string{"GPL-2.0-only", "LicenseRef-2"}, + FileNotice: "Copyright (c) 2001 Aaron Lehmann aaroni@vitelus.com\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the �Software�), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: \nThe above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED �AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR 
PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.", + }, + }, + Snippets: []spdx.Snippet2_2{ + { + SnippetSPDXIdentifier: "Snippet", + SnippetFromFileSPDXIdentifier: "DoapSource", + Ranges: []spdx.SnippetRange{ + { + StartPointer: spdx.SnippetRangePointer{ + Offset: 310, + }, + EndPointer: spdx.SnippetRangePointer{ + Offset: 420, + }, + }, + { + StartPointer: spdx.SnippetRangePointer{ + LineNumber: 5, + }, + EndPointer: spdx.SnippetRangePointer{ + LineNumber: 23, + }, + }, + }, + SnippetLicenseConcluded: "GPL-2.0-only", + LicenseInfoInSnippet: []string{"GPL-2.0-only"}, + SnippetLicenseComments: "The concluded license was taken from package xyz, from which the snippet was copied into the current file. The concluded license information was found in the COPYING.txt file in package xyz.", + SnippetCopyrightText: "Copyright 2008-2010 John Smith", + SnippetComment: "This snippet was identified as significant and highlighted in this Apache-2.0 file, when a commercial scanner identified it as being derived from file foo.c in package xyz which is licensed under GPL-2.0.", + SnippetName: "from linux kernel", + }, + }, + Relationships: []*spdx.Relationship2_2{ + { + RefA: spdx.MakeDocElementID("", "DOCUMENT"), + RefB: spdx.MakeDocElementID("", "Package"), + Relationship: "CONTAINS", + }, + { + RefA: spdx.MakeDocElementID("", "DOCUMENT"), + RefB: spdx.MakeDocElementID("spdx-tool-1.2", "ToolsElement"), + Relationship: "COPY_OF", + }, + { + RefA: spdx.MakeDocElementID("", "DOCUMENT"), + RefB: spdx.MakeDocElementID("", "File"), + Relationship: "DESCRIBES", + }, + { + RefA: spdx.MakeDocElementID("", "DOCUMENT"), + RefB: spdx.MakeDocElementID("", "Package"), + Relationship: "DESCRIBES", + }, + { + RefA: spdx.MakeDocElementID("", "Package"), + RefB: spdx.MakeDocElementID("", "JenaLib"), + Relationship: "CONTAINS", + }, + { + RefA: spdx.MakeDocElementID("", "Package"), + RefB: spdx.MakeDocElementID("", "Saxon"), + Relationship: "DYNAMIC_LINK", + }, + { + RefA: spdx.MakeDocElementID("", "CommonsLangSrc"), + RefB: spdx.MakeDocElementSpecial("NOASSERTION"), + Relationship: "GENERATED_FROM", + }, + { + RefA: spdx.MakeDocElementID("", "JenaLib"), + RefB: spdx.MakeDocElementID("", "Package"), + Relationship: "CONTAINS", + }, + { + RefA: spdx.MakeDocElementID("", "File"), + RefB: spdx.MakeDocElementID("", "fromDoap-0"), + Relationship: "GENERATED_FROM", + }, + }, +} diff --git a/spreadsheet/write/annotations.go b/spreadsheet/write/annotations.go new file mode 100644 index 00000000..bb466baa --- /dev/null +++ b/spreadsheet/write/annotations.go @@ -0,0 +1,94 @@ +package write + +import ( + "fmt" + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" + "github.com/xuri/excelize/v2" +) + +var AnnotationsHeadersByColumn = map[string]string{ + "A": common.AnnotationsSPDXIdentifier, + "B": common.AnnotationsComment, + "C": common.AnnotationsDate, + "D": common.AnnotationsAnnotator, + "E": common.AnnotationsType, +} + +func WriteAnnotationsRows(doc *spdx.Document2_2, spreadsheet *excelize.File) error { + rowNum := 2 + + // annotations can be document-level, package-level, or file-level + + // document-level + for _, annotation := range doc.Annotations { + err := processAnnotation(annotation, spdx.MakeDocElementID("", string(doc.SPDXIdentifier)), 
spreadsheet, rowNum) + if err != nil { + return fmt.Errorf("failed to process document-level annotation: %s", err.Error()) + } + + rowNum += 1 + } + + // package-level + for _, pkg := range doc.Packages { + for _, annotation := range pkg.Annotations { + err := processAnnotation(&annotation, spdx.MakeDocElementID("", string(pkg.PackageSPDXIdentifier)), spreadsheet, rowNum) + if err != nil { + return fmt.Errorf("failed to process package-level annotation: %s", err.Error()) + } + + rowNum += 1 + } + } + + // file-level + for _, file := range doc.Files { + for _, annotation := range file.Annotations { + err := processAnnotation(&annotation, spdx.MakeDocElementID("", string(file.FileSPDXIdentifier)), spreadsheet, rowNum) + if err != nil { + return fmt.Errorf("failed to process file-level annotation: %s", err.Error()) + } + + rowNum += 1 + } + } + + return nil +} + +func processAnnotation(annotation *spdx.Annotation2_2, spdxID spdx.DocElementID, spreadsheet *excelize.File, rowNum int) error { + for column, valueType := range AnnotationsHeadersByColumn { + axis := common.PositionToAxis(column, rowNum) + + // set `value` to the value to be written to the spreadsheet cell + var value interface{} + // if there was a problem determining `value`, set err to something non-nil and processing will be aborted + var err error + + switch valueType { + case common.AnnotationsSPDXIdentifier: + value = spdxID + case common.AnnotationsComment: + value = annotation.AnnotationComment + case common.AnnotationsDate: + value = annotation.AnnotationDate + case common.AnnotationsAnnotator: + err = annotation.Annotator.Validate() + value = annotation.Annotator.String() + case common.AnnotationsType: + value = annotation.AnnotationType + } + + if err != nil { + return fmt.Errorf("failed to translate %s for row %d: %s", valueType, rowNum, err.Error()) + } + + err = spreadsheet.SetCellValue(common.SheetNameAnnotations, axis, value) + if err != nil { + return fmt.Errorf("failed to set cell %s to %+v: %s", axis, value, err.Error()) + } + } + + return nil +} diff --git a/spreadsheet/write/document_info.go b/spreadsheet/write/document_info.go new file mode 100644 index 00000000..c807ed0f --- /dev/null +++ b/spreadsheet/write/document_info.go @@ -0,0 +1,110 @@ +package write + +import ( + "fmt" + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" + "github.com/xuri/excelize/v2" +) + +var DocumentInfoHeadersByColumn = map[string]string{ + "A": common.DocumentInfoSPDXVersion, + "B": common.DocumentInfoDataLicense, + "C": common.DocumentInfoSPDXIdentifier, + "D": common.DocumentInfoLicenseListVersion, + "E": common.DocumentInfoDocumentName, + "F": common.DocumentInfoDocumentNamespace, + "G": common.DocumentInfoExternalDocumentReferences, + "H": common.DocumentInfoDocumentComment, + "I": common.DocumentInfoCreator, + "J": common.DocumentInfoCreated, + "K": common.DocumentInfoCreatorComment, +} + +func WriteDocumentInfoRows(doc *spdx.Document2_2, spreadsheet *excelize.File) error { + if doc.CreationInfo == nil { + return fmt.Errorf("document is missing CreationInfo") + } + + // some data in this sheet gets split across rows, instead of being split up by newlines or commas. + // the two columns where this happens are Creators and External Document Refs. 
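Every sheet writer in this change set addresses cells through common.PositionToAxis(column, rowNum), which is defined elsewhere in the patch and not shown here. As a rough sketch of the assumed behavior only, it should join a column letter and a 1-based row number into an Excel-style cell reference, much like excelize's own JoinCellName helper:

package main

import (
	"fmt"

	"github.com/xuri/excelize/v2"
)

func main() {
	// Assumed equivalent of common.PositionToAxis("D", 2): combine a column
	// letter and a 1-based row number into a cell reference such as "D2".
	axis, err := excelize.JoinCellName("D", 2)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(axis) // prints: D2
}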
+ // figure out how many rows we're going to need + numCreators := len(doc.CreationInfo.Creators) + numExternalDocRefs := len(doc.ExternalDocumentReferences) + rowsNeeded := 1 + if numCreators > numExternalDocRefs { + rowsNeeded = numCreators + } else if numExternalDocRefs > 1 { + rowsNeeded = numExternalDocRefs + } + + for rowNum := 2; rowNum-2 < rowsNeeded; rowNum++ { + for column, valueType := range DocumentInfoHeadersByColumn { + // only certain columns are used past the first data row + if rowNum > 2 && valueType != common.DocumentInfoCreator && valueType != common.DocumentInfoExternalDocumentReferences { + continue + } + + axis := common.PositionToAxis(column, rowNum) + + // set `value` to the value to be written to the spreadsheet cell + var value interface{} + // if there was a problem determining `value`, set err to something non-nil and processing will be aborted + var err error + + switch valueType { + case common.DocumentInfoSPDXVersion: + value = doc.SPDXVersion + case common.DocumentInfoDataLicense: + value = doc.DataLicense + case common.DocumentInfoSPDXIdentifier: + value = doc.SPDXIdentifier + case common.DocumentInfoLicenseListVersion: + value = doc.CreationInfo.LicenseListVersion + case common.DocumentInfoDocumentName: + value = doc.DocumentName + case common.DocumentInfoDocumentNamespace: + value = doc.DocumentNamespace + case common.DocumentInfoExternalDocumentReferences: + if rowNum-2 > numExternalDocRefs-1 { + continue + } + + ref := doc.ExternalDocumentReferences[rowNum-2] + if err = ref.Validate(); err != nil { + break + } + + value = ref.String() + case common.DocumentInfoDocumentComment: + value = doc.DocumentComment + case common.DocumentInfoCreator: + if rowNum-2 > numCreators-1 { + continue + } + + creator := doc.CreationInfo.Creators[rowNum-2] + if err = creator.Validate(); err != nil { + break + } + + value = creator.String() + case common.DocumentInfoCreated: + value = doc.CreationInfo.Created + case common.DocumentInfoCreatorComment: + value = doc.CreationInfo.CreatorComment + } + + if err != nil { + return fmt.Errorf("failed to translate %s for row %d: %s", valueType, rowNum, err.Error()) + } + + err = spreadsheet.SetCellValue(common.SheetNameDocumentInfo, axis, value) + if err != nil { + return fmt.Errorf("failed to set cell %s to %+v: %s", axis, value, err.Error()) + } + } + } + + return nil +} diff --git a/spreadsheet/write/extracted_license_info.go b/spreadsheet/write/extracted_license_info.go new file mode 100644 index 00000000..c0202e45 --- /dev/null +++ b/spreadsheet/write/extracted_license_info.go @@ -0,0 +1,57 @@ +package write + +import ( + "fmt" + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" + "github.com/xuri/excelize/v2" + "strings" +) + +var ExtractedLicenseInfoHeadersByColumn = map[string]string{ + "A": common.LicenseInfoIdentifier, + "B": common.LicenseInfoExtractedText, + "C": common.LicenseInfoLicenseName, + "D": common.LicenseInfoCrossReferenceURLs, + "E": common.LicenseInfoComment, +} + +func WriteExtractedLicenseInfoRows(doc *spdx.Document2_2, spreadsheet *excelize.File) error { + for ii, license := range doc.OtherLicenses { + // get correct row number. 
first row is headers (+1) and Go slices are zero-indexed (+1) + rowNum := ii + 2 + + for column, valueType := range ExtractedLicenseInfoHeadersByColumn { + axis := common.PositionToAxis(column, rowNum) + + // set `value` to the value to be written to the spreadsheet cell + var value interface{} + // if there was a problem determining `value`, set err to something non-nil and processing will be aborted + var err error + + switch valueType { + case common.LicenseInfoIdentifier: + value = license.LicenseIdentifier + case common.LicenseInfoExtractedText: + value = license.ExtractedText + case common.LicenseInfoLicenseName: + value = license.LicenseName + case common.LicenseInfoCrossReferenceURLs: + value = strings.Join(license.LicenseCrossReferences, ", ") + case common.LicenseInfoComment: + value = license.LicenseComment + } + + if err != nil { + return fmt.Errorf("failed to translate %s for row %d: %s", valueType, rowNum, err.Error()) + } + + err = spreadsheet.SetCellValue(common.SheetNameExtractedLicenseInfo, axis, value) + if err != nil { + return fmt.Errorf("failed to set cell %s to %+v: %s", axis, value, err.Error()) + } + } + } + + return nil +} diff --git a/spreadsheet/write/package_external_refs.go b/spreadsheet/write/package_external_refs.go new file mode 100644 index 00000000..0fe94835 --- /dev/null +++ b/spreadsheet/write/package_external_refs.go @@ -0,0 +1,59 @@ +package write + +import ( + "fmt" + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" + "github.com/xuri/excelize/v2" +) + +var ExternalRefsHeadersByColumn = map[string]string{ + "A": common.ExternalRefPackageID, + "B": common.ExternalRefCategory, + "C": common.ExternalRefType, + "D": common.ExternalRefLocator, + "E": common.ExternalRefComment, +} + +func WriteExternalRefsRows(doc *spdx.Document2_2, spreadsheet *excelize.File) error { + rowNum := 2 + + for _, pkg := range doc.Packages { + for _, externalRef := range pkg.PackageExternalReferences { + for column, valueType := range ExternalRefsHeadersByColumn { + axis := common.PositionToAxis(column, rowNum) + + // set `value` to the value to be written to the spreadsheet cell + var value interface{} + // if there was a problem determining `value`, set err to something non-nil and processing will be aborted + var err error + + switch valueType { + case common.ExternalRefPackageID: + value = pkg.PackageSPDXIdentifier + case common.ExternalRefCategory: + value = externalRef.Category + case common.ExternalRefType: + value = externalRef.RefType + case common.ExternalRefLocator: + value = externalRef.Locator + case common.ExternalRefComment: + value = externalRef.ExternalRefComment + } + + if err != nil { + return fmt.Errorf("failed to translate %s for row %d: %s", valueType, rowNum, err.Error()) + } + + err = spreadsheet.SetCellValue(common.SheetNameExternalRefs, axis, value) + if err != nil { + return fmt.Errorf("failed to set cell %s to %+v: %s", axis, value, err.Error()) + } + } + + rowNum += 1 + } + } + + return nil +} diff --git a/spreadsheet/write/package_info.go b/spreadsheet/write/package_info.go new file mode 100644 index 00000000..a541ea50 --- /dev/null +++ b/spreadsheet/write/package_info.go @@ -0,0 +1,132 @@ +package write + +import ( + "fmt" + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" + "github.com/xuri/excelize/v2" + "strings" +) + +var PackageInfoHeadersByColumn = map[string]string{ + "A": common.PackageName, + "B": common.PackageSPDXIdentifier, + "C": common.PackageVersion, + 
"D": common.PackageFileName, + "E": common.PackageSupplier, + "F": common.PackageOriginator, + "G": common.PackageHomePage, + "H": common.PackageDownloadLocation, + "I": common.PackageChecksum, + "J": common.PackageVerificationCode, + "K": common.PackageVerificationCodeExcludedFiles, + "L": common.PackageSourceInfo, + "M": common.PackageLicenseDeclared, + "N": common.PackageLicenseConcluded, + "O": common.PackageLicenseInfoFromFiles, + "P": common.PackageLicenseComments, + "Q": common.PackageCopyrightText, + "R": common.PackageSummary, + "S": common.PackageDescription, + "T": common.PackageAttributionText, + "U": common.PackageFilesAnalyzed, + "V": common.PackageComments, +} + +func WritePackageInfoRows(doc *spdx.Document2_2, spreadsheet *excelize.File) error { + for ii, pkg := range doc.Packages { + // get correct row number. first row is headers (+1) and Go slices are zero-indexed (+1) + rowNum := ii + 2 + + for column, valueType := range PackageInfoHeadersByColumn { + axis := common.PositionToAxis(column, rowNum) + + // set `value` to the value to be written to the spreadsheet cell + var value interface{} + // if there was a problem determining `value`, set err to something non-nil and processing will be aborted + var err error + + switch valueType { + case common.PackageName: + value = pkg.PackageName + case common.PackageSPDXIdentifier: + value = pkg.PackageSPDXIdentifier + case common.PackageVersion: + value = pkg.PackageVersion + case common.PackageFileName: + value = pkg.PackageFileName + case common.PackageSupplier: + if pkg.PackageSupplier == nil { + continue + } + + err = pkg.PackageSupplier.Validate() + value = pkg.PackageSupplier.String() + case common.PackageOriginator: + if pkg.PackageOriginator == nil { + continue + } + + err = pkg.PackageOriginator.Validate() + value = pkg.PackageOriginator.String() + case common.PackageHomePage: + value = pkg.PackageHomePage + case common.PackageDownloadLocation: + value = pkg.PackageDownloadLocation + case common.PackageChecksum: + checksums := make([]string, 0, len(pkg.PackageChecksums)) + for _, checksum := range pkg.PackageChecksums { + if err = checksum.Validate(); err != nil { + break + } + + checksums = append(checksums, checksum.String()) + } + + value = strings.Join(checksums, "\n") + case common.PackageVerificationCode: + value = pkg.PackageVerificationCode.Value + case common.PackageVerificationCodeExcludedFiles: + value = strings.Join(pkg.PackageVerificationCode.ExcludedFiles, "\n") + case common.PackageSourceInfo: + value = pkg.PackageSourceInfo + case common.PackageLicenseDeclared: + value = pkg.PackageLicenseDeclared + case common.PackageLicenseConcluded: + value = pkg.PackageLicenseConcluded + case common.PackageLicenseInfoFromFiles: + value = strings.Join(pkg.PackageLicenseInfoFromFiles, ",") + case common.PackageLicenseComments: + value = pkg.PackageLicenseComments + case common.PackageCopyrightText: + value = pkg.PackageCopyrightText + case common.PackageSummary: + value = pkg.PackageSummary + case common.PackageDescription: + value = pkg.PackageDescription + case common.PackageAttributionText: + texts := make([]string, 0, len(pkg.PackageAttributionTexts)) + for _, text := range pkg.PackageAttributionTexts { + // these get wrapped in quotes + texts = append(texts, fmt.Sprintf("\"%s\"", text)) + } + value = strings.Join(texts, "\n") + case common.PackageFilesAnalyzed: + value = pkg.FilesAnalyzed + case common.PackageComments: + value = pkg.PackageComment + } + + if err != nil { + return fmt.Errorf("failed to translate %s 
for row %d: %s", valueType, rowNum, err.Error()) + } + + err = spreadsheet.SetCellValue(common.SheetNamePackageInfo, axis, value) + if err != nil { + return fmt.Errorf("failed to set cell %s to %+v: %s", axis, value, err.Error()) + } + } + } + + return nil +} diff --git a/spreadsheet/write/per_file_info.go b/spreadsheet/write/per_file_info.go new file mode 100644 index 00000000..3c5f2829 --- /dev/null +++ b/spreadsheet/write/per_file_info.go @@ -0,0 +1,141 @@ +package write + +import ( + "fmt" + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" + "github.com/xuri/excelize/v2" + "strings" +) + +var FileInfoHeadersByColumn = map[string]string{ + "A": common.FileInfoFileName, + "B": common.FileInfoSPDXIdentifier, + "C": common.FileInfoPackageIdentifier, + "D": common.FileInfoFileTypes, + "E": common.FileInfoFileChecksums, + "F": common.FileInfoLicenseConcluded, + "G": common.FileInfoLicenseInfoInFile, + "H": common.FileInfoLicenseComments, + "I": common.FileInfoFileCopyrightText, + "J": common.FileInfoNoticeText, + "K": common.FileInfoArtifactOfProject, + "L": common.FileInfoArtifactOfHomepage, + "M": common.FileInfoArtifactOfURL, + "N": common.FileInfoContributors, + "O": common.FileInfoFileComment, + "P": common.FileInfoFileDependencies, + "Q": common.FileInfoAttributionText, +} + +func WriteFileInfoRows(doc *spdx.Document2_2, spreadsheet *excelize.File) error { + rowNum := 2 + + // files can appear at the document level, or the package level + + // document-level + for _, file := range doc.Files { + err := processFileInfo(file, "", spreadsheet, rowNum) + if err != nil { + return fmt.Errorf("failed to process document-level file info: %s", err.Error()) + } + + rowNum += 1 + } + + // package-level + for _, pkg := range doc.Packages { + for _, file := range pkg.Files { + err := processFileInfo(file, pkg.PackageSPDXIdentifier, spreadsheet, rowNum) + if err != nil { + return fmt.Errorf("failed to process package-level file info: %s", err.Error()) + } + + rowNum += 1 + } + } + + return nil +} + +func processFileInfo(file *spdx.File2_2, packageIdentifier spdx.ElementID, spreadsheet *excelize.File, rowNum int) error { + for column, valueType := range FileInfoHeadersByColumn { + axis := common.PositionToAxis(column, rowNum) + + // set `value` to the value to be written to the spreadsheet cell + var value interface{} + // if there was a problem determining `value`, set err to something non-nil and processing will be aborted + var err error + + switch valueType { + case common.FileInfoFileName: + value = file.FileName + case common.FileInfoSPDXIdentifier: + value = file.FileSPDXIdentifier + case common.FileInfoPackageIdentifier: + // a file can optionally be associated with a package + value = "" + if packageIdentifier != "" { + value = packageIdentifier + } + case common.FileInfoFileTypes: + value = strings.Join(file.FileTypes, "\n") + case common.FileInfoFileChecksums: + checksums := make([]string, 0, len(file.Checksums)) + for _, checksum := range file.Checksums { + if err = checksum.Validate(); err != nil { + break + } + + checksums = append(checksums, checksum.String()) + } + + value = strings.Join(checksums, "\n") + case common.FileInfoLicenseConcluded: + value = file.LicenseConcluded + case common.FileInfoLicenseInfoInFile: + value = strings.Join(file.LicenseInfoInFiles, ", ") + case common.FileInfoLicenseComments: + value = file.LicenseComments + case common.FileInfoFileCopyrightText: + value = file.FileCopyrightText + case common.FileInfoNoticeText: + 
value = file.FileNotice + case common.FileInfoArtifactOfProject: + // ignored + case common.FileInfoArtifactOfHomepage: + // ignored + case common.FileInfoArtifactOfURL: + // ignored + case common.FileInfoContributors: + contributors := make([]string, 0, len(file.FileContributors)) + for _, contributor := range file.FileContributors { + // these get wrapped in quotes + contributors = append(contributors, fmt.Sprintf("\"%s\"", contributor)) + } + value = strings.Join(contributors, ",") + case common.FileInfoFileComment: + value = file.FileComment + case common.FileInfoFileDependencies: + // ignored + case common.FileInfoAttributionText: + texts := make([]string, 0, len(file.FileAttributionTexts)) + for _, text := range file.FileAttributionTexts { + // these get wrapped in quotes + texts = append(texts, fmt.Sprintf("\"%s\"", text)) + } + value = strings.Join(texts, "\n") + } + + if err != nil { + return fmt.Errorf("failed to translate %s for row %d: %s", valueType, rowNum, err.Error()) + } + + err = spreadsheet.SetCellValue(common.SheetNameFileInfo, axis, value) + if err != nil { + return fmt.Errorf("failed to set cell %s to %+v: %s", axis, value, err.Error()) + } + } + + return nil +} diff --git a/spreadsheet/write/relationships.go b/spreadsheet/write/relationships.go new file mode 100644 index 00000000..66792465 --- /dev/null +++ b/spreadsheet/write/relationships.go @@ -0,0 +1,53 @@ +package write + +import ( + "fmt" + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" + "github.com/xuri/excelize/v2" +) + +var RelationshipsHeadersByColumn = map[string]string{ + "A": common.RelationshipsRefA, + "B": common.RelationshipsRelationship, + "C": common.RelationshipsRefB, + "D": common.RelationshipsComment, +} + +func WriteRelationshipsRows(doc *spdx.Document2_2, spreadsheet *excelize.File) error { + for ii, relationship := range doc.Relationships { + // get correct row number. 
first row is headers (+1) and Go slices are zero-indexed (+1) + rowNum := ii + 2 + + for column, valueType := range RelationshipsHeadersByColumn { + axis := common.PositionToAxis(column, rowNum) + + // set `value` to the value to be written to the spreadsheet cell + var value interface{} + // if there was a problem determining `value`, set err to something non-nil and processing will be aborted + var err error + + switch valueType { + case common.RelationshipsRefA: + value = relationship.RefA + case common.RelationshipsRelationship: + value = relationship.Relationship + case common.RelationshipsRefB: + value = relationship.RefB + case common.RelationshipsComment: + value = relationship.RelationshipComment + } + + if err != nil { + return fmt.Errorf("failed to translate %s for row %d: %s", valueType, rowNum, err.Error()) + } + + err = spreadsheet.SetCellValue(common.SheetNameRelationships, axis, value) + if err != nil { + return fmt.Errorf("failed to set cell %s to %+v: %s", axis, value, err.Error()) + } + } + } + + return nil +} diff --git a/spreadsheet/write/snippets.go b/spreadsheet/write/snippets.go new file mode 100644 index 00000000..f280a89c --- /dev/null +++ b/spreadsheet/write/snippets.go @@ -0,0 +1,86 @@ +package write + +import ( + "fmt" + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" + "github.com/xuri/excelize/v2" + "strings" +) + +var SnippetsHeadersByColumn = map[string]string{ + "A": common.SnippetsID, + "B": common.SnippetsName, + "C": common.SnippetsFromFileID, + "D": common.SnippetsByteRange, + "E": common.SnippetsLineRange, + "F": common.SnippetsLicenseConcluded, + "G": common.SnippetsLicenseInfoInSnippet, + "H": common.SnippetsLicenseComments, + "I": common.SnippetsCopyrightText, + "J": common.SnippetsComment, +} + +func WriteSnippetsRows(doc *spdx.Document2_2, spreadsheet *excelize.File) error { + for ii, snippet := range doc.Snippets { + // get correct row number. 
first row is headers (+1) and Go slices are zero-indexed (+1) + rowNum := ii + 2 + + for column, valueType := range SnippetsHeadersByColumn { + axis := common.PositionToAxis(column, rowNum) + + // set `value` to the value to be written to the spreadsheet cell + var value interface{} + // if there was a problem determining `value`, set err to something non-nil and processing will be aborted + var err error + + switch valueType { + case common.SnippetsID: + value = snippet.SnippetSPDXIdentifier + case common.SnippetsName: + value = snippet.SnippetName + case common.SnippetsFromFileID: + value = snippet.SnippetFromFileSPDXIdentifier + case common.SnippetsByteRange: + // find a byte range, if there is one + value = "" + for _, snippetRange := range snippet.Ranges { + if snippetRange.EndPointer.Offset != 0 { + value = snippetRange.String() + break + } + } + case common.SnippetsLineRange: + // find a line range, if there is one + value = "" + for _, snippetRange := range snippet.Ranges { + if snippetRange.EndPointer.LineNumber != 0 { + value = snippetRange.String() + break + } + } + case common.SnippetsLicenseConcluded: + value = snippet.SnippetLicenseConcluded + case common.SnippetsLicenseInfoInSnippet: + value = strings.Join(snippet.LicenseInfoInSnippet, ", ") + case common.SnippetsLicenseComments: + value = snippet.SnippetLicenseComments + case common.SnippetsCopyrightText: + value = snippet.SnippetCopyrightText + case common.SnippetsComment: + value = snippet.SnippetComment + } + + if err != nil { + return fmt.Errorf("failed to translate %s for row %d: %s", valueType, rowNum, err.Error()) + } + + err = spreadsheet.SetCellValue(common.SheetNameSnippets, axis, value) + if err != nil { + return fmt.Errorf("failed to set cell %s to %+v: %s", axis, value, err.Error()) + } + } + } + + return nil +} diff --git a/spreadsheet/writer.go b/spreadsheet/writer.go new file mode 100644 index 00000000..91263d58 --- /dev/null +++ b/spreadsheet/writer.go @@ -0,0 +1,45 @@ +// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later + +package spdx_xls + +import ( + "fmt" + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" + "github.com/xuri/excelize/v2" + "io" +) + +// Save2_2 takes an SPDX Document (version 2.2) and an io.Writer, and writes the document to the writer as an XLSX file. 
+func Save2_2(doc *spdx.Document2_2, w io.Writer) error { + spreadsheet := excelize.NewFile() + + for _, sheetHandlingInfo := range sheetHandlers { + spreadsheet.NewSheet(sheetHandlingInfo.SheetName) + + err := writeHeaders(spreadsheet, sheetHandlingInfo.SheetName, sheetHandlingInfo.HeadersByColumn) + if err != nil { + return fmt.Errorf("failed to write headers for sheet %s: %s", sheetHandlingInfo.SheetName, err.Error()) + } + + err = sheetHandlingInfo.WriterFunc(doc, spreadsheet) + if err != nil { + return fmt.Errorf("failed to write data for sheet %s: %s", sheetHandlingInfo.SheetName, err.Error()) + } + } + + err := spreadsheet.Write(w) + if err != nil { + return err + } + + return nil +} + +func writeHeaders(spreadsheet *excelize.File, sheetName string, headersByColumn map[string]string) error { + for column, header := range headersByColumn { + err := spreadsheet.SetCellValue(sheetName, common.PositionToAxis(column, 1), header) + if err != nil { + return err + } + } + + return nil +} diff --git a/tvloader/parser2v1/parse_creation_info.go b/tvloader/parser2v1/parse_creation_info.go index df16008b..f6b9a9e0 100644 --- a/tvloader/parser2v1/parse_creation_info.go +++ b/tvloader/parser2v1/parse_creation_info.go @@ -54,10 +54,8 @@ func (parser *tvParser2_1) parsePairFromCreationInfo2_1(tag string, value string return fmt.Errorf("file with FileName %s does not have SPDX identifier", parser.file.FileName) } parser.st = psPackage2_1 - parser.pkg = &spdx.Package2_1{ - FilesAnalyzed: true, - IsFilesAnalyzedTagPresent: false, - } + truthy := true + parser.pkg = &spdx.Package2_1{FilesAnalyzed: &truthy} return parser.parsePairFromPackage2_1(tag, value) // tag for going on to _unpackaged_ file section case "FileName": @@ -102,7 +100,7 @@ func (parser *tvParser2_1) parsePairFromCreationInfo2_1(tag string, value string // ===== Helper functions ===== -func extractExternalDocumentReference(value string) (string, string, string, string, error) { +func extractExternalDocumentReference(value string) (spdx.DocElementID, string, string, string, error) { sp := strings.Split(value, " ") // remove any that are just whitespace keepSp := []string{} @@ -113,42 +111,33 @@ func extractExternalDocumentReference(value string) (string, string, string, str } } - var documentRefID, uri, alg, checksum string + var documentRefID spdx.DocElementID + var uri, alg, checksum string // now, should have 4 items (or 3, if Alg and Checksum were joined) // and should be able to map them if len(keepSp) == 4 { - documentRefID = keepSp[0] + documentRefID = spdx.MakeDocElementID(keepSp[0], "") uri = keepSp[1] alg = keepSp[2] // check that colon is present for alg, and remove it if !strings.HasSuffix(alg, ":") { - return "", "", "", "", fmt.Errorf("algorithm does not end with colon") + return documentRefID, "", "", "", fmt.Errorf("algorithm does not end with colon") } alg = strings.TrimSuffix(alg, ":") checksum = keepSp[3] } else if len(keepSp) == 3 { - documentRefID = keepSp[0] + documentRefID = spdx.MakeDocElementID(keepSp[0], "") uri = keepSp[1] // split on colon into alg and checksum parts := strings.SplitN(keepSp[2], ":", 2) if len(parts) != 2 { - return "", "", "", "", fmt.Errorf("missing colon separator between algorithm and checksum") + return documentRefID, "", "", "", fmt.Errorf("missing colon separator between algorithm and checksum") } alg = parts[0] checksum = parts[1] } else { - return "", "", "", "", fmt.Errorf("expected 4 elements, got %d", len(keepSp)) - } - - // additionally, we should be able to parse the first element as a - // DocumentRef- ID string, and we should remove that prefix - if !strings.HasPrefix(documentRefID, "DocumentRef-") { -
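For orientation, a minimal sketch of how the new spreadsheet writer might be driven from application code; the input and output file names are illustrative only, and error handling is kept deliberately simple:

package main

import (
	"fmt"
	"os"

	spdx_xls "github.com/spdx/tools-golang/spreadsheet"
	"github.com/spdx/tools-golang/tvloader"
)

func main() {
	// load an SPDX 2.2 document from a tag-value file (hypothetical path)
	in, err := os.Open("sbom.spdx")
	if err != nil {
		fmt.Println(err)
		return
	}
	defer in.Close()

	doc, err := tvloader.Load2_2(in)
	if err != nil {
		fmt.Println(err)
		return
	}

	// write the same document back out as an XLSX workbook (hypothetical path)
	out, err := os.Create("sbom.xlsx")
	if err != nil {
		fmt.Println(err)
		return
	}
	defer out.Close()

	if err := spdx_xls.Save2_2(doc, out); err != nil {
		fmt.Println(err)
	}
}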
return "", "", "", "", fmt.Errorf("expected first element to have DocumentRef- prefix") - } - documentRefID = strings.TrimPrefix(documentRefID, "DocumentRef-") - if documentRefID == "" { - return "", "", "", "", fmt.Errorf("document identifier has nothing after prefix") + return documentRefID, "", "", "", fmt.Errorf("expected 4 elements, got %d", len(keepSp)) } return documentRefID, uri, alg, checksum, nil diff --git a/tvloader/parser2v1/parse_creation_info_test.go b/tvloader/parser2v1/parse_creation_info_test.go index 83058dd8..bc25fb1e 100644 --- a/tvloader/parser2v1/parse_creation_info_test.go +++ b/tvloader/parser2v1/parse_creation_info_test.go @@ -31,12 +31,9 @@ func TestParser2_1CIMovesToPackageAfterParsingPackageNameTag(t *testing.T) { t.Errorf("expected package name %s, got %s", pkgName, parser.pkg.PackageName) } // and the package should default to true for FilesAnalyzed - if parser.pkg.FilesAnalyzed != true { + if *parser.pkg.FilesAnalyzed != true { t.Errorf("expected FilesAnalyzed to default to true, got false") } - if parser.pkg.IsFilesAnalyzedTagPresent != false { - t.Errorf("expected IsFilesAnalyzedTagPresent to default to false, got true") - } // and the package should NOT be in the SPDX Document's map of packages, // because it doesn't have an SPDX identifier yet if len(parser.doc.Packages) != 0 { @@ -360,7 +357,7 @@ func TestParser2_1CICreatesAnnotation(t *testing.T) { func TestCanExtractExternalDocumentReference(t *testing.T) { refstring := "DocumentRef-spdx-tool-1.2 http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301 SHA1:d6a770ba38583ed4bb4525bd96e50461655d2759" - wantDocumentRefID := "spdx-tool-1.2" + wantDocumentRefID := "DocumentRef-spdx-tool-1.2" wantURI := "http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301" wantAlg := "SHA1" wantChecksum := "d6a770ba38583ed4bb4525bd96e50461655d2759" @@ -369,7 +366,7 @@ func TestCanExtractExternalDocumentReference(t *testing.T) { if err != nil { t.Errorf("got non-nil error: %v", err) } - if wantDocumentRefID != gotDocumentRefID { + if wantDocumentRefID != gotDocumentRefID.String() { t.Errorf("wanted document ref ID %s, got %s", wantDocumentRefID, gotDocumentRefID) } if wantURI != gotURI { @@ -385,7 +382,7 @@ func TestCanExtractExternalDocumentReference(t *testing.T) { func TestCanExtractExternalDocumentReferenceWithExtraWhitespace(t *testing.T) { refstring := " DocumentRef-spdx-tool-1.2 \t http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301 \t SHA1: \t d6a770ba38583ed4bb4525bd96e50461655d2759" - wantDocumentRefID := "spdx-tool-1.2" + wantDocumentRefID := "DocumentRef-spdx-tool-1.2" wantURI := "http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301" wantAlg := "SHA1" wantChecksum := "d6a770ba38583ed4bb4525bd96e50461655d2759" @@ -394,7 +391,7 @@ func TestCanExtractExternalDocumentReferenceWithExtraWhitespace(t *testing.T) { if err != nil { t.Errorf("got non-nil error: %v", err) } - if wantDocumentRefID != gotDocumentRefID { + if wantDocumentRefID != gotDocumentRefID.String() { t.Errorf("wanted document ref ID %s, got %s", wantDocumentRefID, gotDocumentRefID) } if wantURI != gotURI { @@ -416,7 +413,6 @@ func TestFailsExternalDocumentReferenceWithInvalidFormats(t *testing.T) { "DocumentRef-spdx-tool-1.2 http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301", "DocumentRef-spdx-tool-1.2 http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301 d6a770ba38583ed4bb4525bd96e50461655d2759", 
"DocumentRef-spdx-tool-1.2", - "spdx-tool-1.2 http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301 SHA1:d6a770ba38583ed4bb4525bd96e50461655d2759", } for _, refstring := range invalidRefs { _, _, _, _, err := extractExternalDocumentReference(refstring) diff --git a/tvloader/parser2v1/parse_file.go b/tvloader/parser2v1/parse_file.go index 81768bb6..979894e3 100644 --- a/tvloader/parser2v1/parse_file.go +++ b/tvloader/parser2v1/parse_file.go @@ -69,13 +69,13 @@ func (parser *tvParser2_1) parsePairFromFile2_1(tag string, value string) error if parser.file.Checksums == nil { parser.file.Checksums = []spdx.Checksum{} } - switch spdx.ChecksumAlgorithm(subkey) { - case spdx.SHA1, spdx.SHA256, spdx.MD5: - algorithm := spdx.ChecksumAlgorithm(subkey) - parser.file.Checksums = append(parser.file.Checksums, spdx.Checksum{Algorithm: algorithm, Value: subvalue}) - default: - return fmt.Errorf("got unknown checksum type %s", subkey) + + algorithm := spdx.ChecksumAlgorithm(subkey) + err = algorithm.Validate() + if err != nil { + return err } + parser.file.Checksums = append(parser.file.Checksums, spdx.Checksum{Algorithm: algorithm, Value: subvalue}) case "LicenseConcluded": parser.file.LicenseConcluded = value case "LicenseInfoInFile": diff --git a/tvloader/parser2v1/parse_file_test.go b/tvloader/parser2v1/parse_file_test.go index 375f9677..f359e2ea 100644 --- a/tvloader/parser2v1/parse_file_test.go +++ b/tvloader/parser2v1/parse_file_test.go @@ -178,12 +178,9 @@ func TestParser2_1FileStartsNewPackageAfterParsingPackageNameTag(t *testing.T) { t.Errorf("expected package name %s, got %s", p2Name, parser.pkg.PackageName) } // and the package should default to true for FilesAnalyzed - if parser.pkg.FilesAnalyzed != true { + if *parser.pkg.FilesAnalyzed != true { t.Errorf("expected FilesAnalyzed to default to true, got false") } - if parser.pkg.IsFilesAnalyzedTagPresent != false { - t.Errorf("expected IsFilesAnalyzedTagPresent to default to false, got true") - } // and the new Package should have no files if len(parser.pkg.Files) != 0 { t.Errorf("Expected no files in pkg.Files, got %d", len(parser.pkg.Files)) diff --git a/tvloader/parser2v1/parse_package.go b/tvloader/parser2v1/parse_package.go index 22fc1ed2..cf1d6abb 100644 --- a/tvloader/parser2v1/parse_package.go +++ b/tvloader/parser2v1/parse_package.go @@ -24,10 +24,8 @@ func (parser *tvParser2_1) parsePairFromPackage2_1(tag string, value string) err if parser.pkg != nil && parser.pkg.PackageSPDXIdentifier == nullSpdxElementId2_1 { return fmt.Errorf("package with PackageName %s does not have SPDX identifier", parser.pkg.PackageName) } - parser.pkg = &spdx.Package2_1{ - FilesAnalyzed: true, - IsFilesAnalyzedTagPresent: false, - } + truthy := true + parser.pkg = &spdx.Package2_1{FilesAnalyzed: &truthy} } parser.pkg.PackageName = value // tag for going on to file section @@ -89,11 +87,9 @@ func (parser *tvParser2_1) parsePairFromPackage2_1(tag string, value string) err case "PackageDownloadLocation": parser.pkg.PackageDownloadLocation = value case "FilesAnalyzed": - parser.pkg.IsFilesAnalyzedTagPresent = true if value == "false" { - parser.pkg.FilesAnalyzed = false - } else if value == "true" { - parser.pkg.FilesAnalyzed = true + falsy := false + parser.pkg.FilesAnalyzed = &falsy } case "PackageVerificationCode": parser.pkg.PackageVerificationCode = extractCodeAndExcludes(value) @@ -105,13 +101,13 @@ func (parser *tvParser2_1) parsePairFromPackage2_1(tag string, value string) err if parser.pkg.PackageChecksums == nil { 
parser.pkg.PackageChecksums = []spdx.Checksum{} } - switch spdx.ChecksumAlgorithm(subkey) { - case spdx.SHA1, spdx.SHA256, spdx.MD5: - algorithm := spdx.ChecksumAlgorithm(subkey) - parser.pkg.PackageChecksums = append(parser.pkg.PackageChecksums, spdx.Checksum{Algorithm: algorithm, Value: subvalue}) - default: - return fmt.Errorf("got unknown checksum type %s", subkey) + + algorithm := spdx.ChecksumAlgorithm(subkey) + err = algorithm.Validate() + if err != nil { + return err } + parser.pkg.PackageChecksums = append(parser.pkg.PackageChecksums, spdx.Checksum{Algorithm: algorithm, Value: subvalue}) case "PackageHomePage": parser.pkg.PackageHomePage = value case "PackageSourceInfo": diff --git a/tvloader/parser2v1/parse_package_test.go b/tvloader/parser2v1/parse_package_test.go index 734fc913..b7882ded 100644 --- a/tvloader/parser2v1/parse_package_test.go +++ b/tvloader/parser2v1/parse_package_test.go @@ -43,12 +43,9 @@ func TestParser2_1PackageStartsNewPackageAfterParsingPackageNameTag(t *testing.T t.Errorf("expected package name %s, got %s", pkgName, parser.pkg.PackageName) } // and the package should default to true for FilesAnalyzed - if parser.pkg.FilesAnalyzed != true { + if *parser.pkg.FilesAnalyzed != true { t.Errorf("expected FilesAnalyzed to default to true, got false") } - if parser.pkg.IsFilesAnalyzedTagPresent != false { - t.Errorf("expected IsFilesAnalyzedTagPresent to default to false, got true") - } // and the Document's Packages should still be of size 1 and have pkgOld only if parser.doc.Packages[0] != pkgOld { t.Errorf("Expected package %v, got %v", pkgOld, parser.doc.Packages[0]) @@ -90,12 +87,9 @@ func TestParser2_1PackageStartsNewPackageAfterParsingPackageNameTagWhileInUnpack t.Errorf("expected package name %s, got %s", pkgName, parser.pkg.PackageName) } // and the package should default to true for FilesAnalyzed - if parser.pkg.FilesAnalyzed != true { + if *parser.pkg.FilesAnalyzed != true { t.Errorf("expected FilesAnalyzed to default to true, got false") } - if parser.pkg.IsFilesAnalyzedTagPresent != false { - t.Errorf("expected IsFilesAnalyzedTagPresent to default to false, got true") - } // and the Document's Packages should be of size 0, because the prior was // unpackaged files and this one won't be added until an SPDXID is seen if len(parser.doc.Packages) != 0 { @@ -238,10 +232,11 @@ func TestParser2_1PackageStaysAfterParsingAnnotationTags(t *testing.T) { // ===== Package data section tests ===== func TestParser2_1CanParsePackageTags(t *testing.T) { + truthy := true parser := tvParser2_1{ doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, - pkg: &spdx.Package2_1{}, + pkg: &spdx.Package2_1{FilesAnalyzed: &truthy}, } // should not yet be in Packages map, b/c no SPDX identifier @@ -317,12 +312,9 @@ func TestParser2_1CanParsePackageTags(t *testing.T) { if err != nil { t.Errorf("expected nil error, got %v", err) } - if parser.pkg.FilesAnalyzed != false { + if *parser.pkg.FilesAnalyzed != false { t.Errorf("got %v for FilesAnalyzed", parser.pkg.FilesAnalyzed) } - if parser.pkg.IsFilesAnalyzedTagPresent != true { - t.Errorf("got %v for IsFilesAnalyzedTagPresent", parser.pkg.IsFilesAnalyzedTagPresent) - } // Package Verification Code // SKIP -- separate tests for "excludes", or not, below @@ -810,10 +802,11 @@ func TestParser2_1PackageUnknownTagFails(t *testing.T) { } func TestParser2_1FailsIfInvalidSPDXIDInPackageSection(t *testing.T) { + truthy := true parser := tvParser2_1{ doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, 
- pkg: &spdx.Package2_1{}, + pkg: &spdx.Package2_1{FilesAnalyzed: &truthy}, } // start with Package Name @@ -829,10 +822,11 @@ func TestParser2_1FailsIfInvalidSPDXIDInPackageSection(t *testing.T) { } func TestParser2_1FailsIfInvalidPackageSupplierFormat(t *testing.T) { + truthy := true parser := tvParser2_1{ doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, - pkg: &spdx.Package2_1{}, + pkg: &spdx.Package2_1{FilesAnalyzed: &truthy}, } // start with Package Name @@ -848,10 +842,11 @@ func TestParser2_1FailsIfInvalidPackageSupplierFormat(t *testing.T) { } func TestParser2_1FailsIfUnknownPackageSupplierType(t *testing.T) { + truthy := true parser := tvParser2_1{ doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, - pkg: &spdx.Package2_1{}, + pkg: &spdx.Package2_1{FilesAnalyzed: &truthy}, } // start with Package Name @@ -867,10 +862,11 @@ func TestParser2_1FailsIfUnknownPackageSupplierType(t *testing.T) { } func TestParser2_1FailsIfInvalidPackageOriginatorFormat(t *testing.T) { + truthy := true parser := tvParser2_1{ doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, - pkg: &spdx.Package2_1{}, + pkg: &spdx.Package2_1{FilesAnalyzed: &truthy}, } // start with Package Name @@ -886,10 +882,11 @@ func TestParser2_1FailsIfInvalidPackageOriginatorFormat(t *testing.T) { } func TestParser2_1FailsIfUnknownPackageOriginatorType(t *testing.T) { + truthy := true parser := tvParser2_1{ doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, - pkg: &spdx.Package2_1{}, + pkg: &spdx.Package2_1{FilesAnalyzed: &truthy}, } // start with Package Name @@ -905,10 +902,11 @@ func TestParser2_1FailsIfUnknownPackageOriginatorType(t *testing.T) { } func TestParser2_1SetsFilesAnalyzedTagsCorrectly(t *testing.T) { + truthy := true parser := tvParser2_1{ doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, - pkg: &spdx.Package2_1{}, + pkg: &spdx.Package2_1{FilesAnalyzed: &truthy}, } // start with Package Name @@ -921,19 +919,17 @@ func TestParser2_1SetsFilesAnalyzedTagsCorrectly(t *testing.T) { if err != nil { t.Errorf("expected nil error, got %v", err) } - if parser.pkg.FilesAnalyzed != true { + if *parser.pkg.FilesAnalyzed != true { t.Errorf("expected %v, got %v", true, parser.pkg.FilesAnalyzed) } - if parser.pkg.IsFilesAnalyzedTagPresent != true { - t.Errorf("expected %v, got %v", true, parser.pkg.IsFilesAnalyzedTagPresent) - } } func TestParser2_1FailsIfInvalidPackageChecksumFormat(t *testing.T) { + truthy := true parser := tvParser2_1{ doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, - pkg: &spdx.Package2_1{}, + pkg: &spdx.Package2_1{FilesAnalyzed: &truthy}, } // start with Package Name @@ -949,10 +945,11 @@ func TestParser2_1FailsIfInvalidPackageChecksumFormat(t *testing.T) { } func TestParser2_1FailsIfInvalidPackageChecksumType(t *testing.T) { + truthy := true parser := tvParser2_1{ doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, - pkg: &spdx.Package2_1{}, + pkg: &spdx.Package2_1{FilesAnalyzed: &truthy}, } // start with Package Name @@ -968,10 +965,11 @@ func TestParser2_1FailsIfInvalidPackageChecksumType(t *testing.T) { } func TestParser2_1FailsIfInvalidExternalRefFormat(t *testing.T) { + truthy := true parser := tvParser2_1{ doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, - pkg: &spdx.Package2_1{}, + pkg: &spdx.Package2_1{FilesAnalyzed: &truthy}, } // start with Package Name @@ -987,10 +985,11 @@ func 
TestParser2_1FailsIfInvalidExternalRefFormat(t *testing.T) { } func TestParser2_1FailsIfExternalRefCommentBeforeExternalRef(t *testing.T) { + truthy := true parser := tvParser2_1{ doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, - pkg: &spdx.Package2_1{}, + pkg: &spdx.Package2_1{FilesAnalyzed: &truthy}, } // start with Package Name diff --git a/tvloader/parser2v1/parse_snippet_test.go b/tvloader/parser2v1/parse_snippet_test.go index ea747f48..e60affb5 100644 --- a/tvloader/parser2v1/parse_snippet_test.go +++ b/tvloader/parser2v1/parse_snippet_test.go @@ -102,12 +102,9 @@ func TestParser2_1SnippetStartsNewPackageAfterParsingPackageNameTag(t *testing.T t.Errorf("expected package name %s, got %s", p2Name, parser.pkg.PackageName) } // and the package should default to true for FilesAnalyzed - if parser.pkg.FilesAnalyzed != true { + if *parser.pkg.FilesAnalyzed != true { t.Errorf("expected FilesAnalyzed to default to true, got false") } - if parser.pkg.IsFilesAnalyzedTagPresent != false { - t.Errorf("expected IsFilesAnalyzedTagPresent to default to false, got true") - } // and the Document's Packages should still be of size 1 b/c no SPDX // identifier has been seen yet if len(parser.doc.Packages) != 1 { diff --git a/tvloader/parser2v2/parse_creation_info.go b/tvloader/parser2v2/parse_creation_info.go index f8406fc5..e99d555d 100644 --- a/tvloader/parser2v2/parse_creation_info.go +++ b/tvloader/parser2v2/parse_creation_info.go @@ -54,9 +54,9 @@ func (parser *tvParser2_2) parsePairFromCreationInfo2_2(tag string, value string return fmt.Errorf("file with FileName %s does not have SPDX identifier", parser.file.FileName) } parser.st = psPackage2_2 + truthy := true parser.pkg = &spdx.Package2_2{ - FilesAnalyzed: true, - IsFilesAnalyzedTagPresent: false, + FilesAnalyzed: &truthy, } return parser.parsePairFromPackage2_2(tag, value) // tag for going on to _unpackaged_ file section @@ -102,7 +102,7 @@ func (parser *tvParser2_2) parsePairFromCreationInfo2_2(tag string, value string // ===== Helper functions ===== -func extractExternalDocumentReference(value string) (string, string, string, string, error) { +func extractExternalDocumentReference(value string) (spdx.DocElementID, string, string, string, error) { sp := strings.Split(value, " ") // remove any that are just whitespace keepSp := []string{} @@ -113,42 +113,33 @@ func extractExternalDocumentReference(value string) (string, string, string, str } } - var documentRefID, uri, alg, checksum string + var documentRefID spdx.DocElementID + var uri, alg, checksum string // now, should have 4 items (or 3, if Alg and Checksum were joined) // and should be able to map them if len(keepSp) == 4 { - documentRefID = keepSp[0] + documentRefID = spdx.MakeDocElementID(keepSp[0], "") uri = keepSp[1] alg = keepSp[2] // check that colon is present for alg, and remove it if !strings.HasSuffix(alg, ":") { - return "", "", "", "", fmt.Errorf("algorithm does not end with colon") + return documentRefID, "", "", "", fmt.Errorf("algorithm does not end with colon") } alg = strings.TrimSuffix(alg, ":") checksum = keepSp[3] } else if len(keepSp) == 3 { - documentRefID = keepSp[0] + documentRefID = spdx.MakeDocElementID(keepSp[0], "") uri = keepSp[1] // split on colon into alg and checksum parts := strings.SplitN(keepSp[2], ":", 2) if len(parts) != 2 { - return "", "", "", "", fmt.Errorf("missing colon separator between algorithm and checksum") + return documentRefID, "", "", "", fmt.Errorf("missing colon separator between algorithm and checksum") } alg 
= parts[0] checksum = parts[1] } else { - return "", "", "", "", fmt.Errorf("expected 4 elements, got %d", len(keepSp)) - } - - // additionally, we should be able to parse the first element as a - // DocumentRef- ID string, and we should remove that prefix - if !strings.HasPrefix(documentRefID, "DocumentRef-") { - return "", "", "", "", fmt.Errorf("expected first element to have DocumentRef- prefix") - } - documentRefID = strings.TrimPrefix(documentRefID, "DocumentRef-") - if documentRefID == "" { - return "", "", "", "", fmt.Errorf("document identifier has nothing after prefix") + return documentRefID, "", "", "", fmt.Errorf("expected 4 elements, got %d", len(keepSp)) } return documentRefID, uri, alg, checksum, nil diff --git a/tvloader/parser2v2/parse_creation_info_test.go b/tvloader/parser2v2/parse_creation_info_test.go index 71213460..9c552962 100644 --- a/tvloader/parser2v2/parse_creation_info_test.go +++ b/tvloader/parser2v2/parse_creation_info_test.go @@ -31,12 +31,9 @@ func TestParser2_2CIMovesToPackageAfterParsingPackageNameTag(t *testing.T) { t.Errorf("expected package name %s, got %s", pkgName, parser.pkg.PackageName) } // and the package should default to true for FilesAnalyzed - if parser.pkg.FilesAnalyzed != true { + if *parser.pkg.FilesAnalyzed != true { t.Errorf("expected FilesAnalyzed to default to true, got false") } - if parser.pkg.IsFilesAnalyzedTagPresent != false { - t.Errorf("expected IsFilesAnalyzedTagPresent to default to false, got true") - } // and the package should NOT be in the SPDX Document's map of packages, // because it doesn't have an SPDX identifier yet if len(parser.doc.Packages) != 0 { @@ -360,7 +357,7 @@ func TestParser2_2CICreatesAnnotation(t *testing.T) { func TestCanExtractExternalDocumentReference(t *testing.T) { refstring := "DocumentRef-spdx-tool-1.2 http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301 SHA1:d6a770ba38583ed4bb4525bd96e50461655d2759" - wantDocumentRefID := "spdx-tool-1.2" + wantDocumentRefID := "DocumentRef-spdx-tool-1.2" wantURI := "http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301" wantAlg := "SHA1" wantChecksum := "d6a770ba38583ed4bb4525bd96e50461655d2759" @@ -369,7 +366,7 @@ func TestCanExtractExternalDocumentReference(t *testing.T) { if err != nil { t.Errorf("got non-nil error: %v", err) } - if wantDocumentRefID != gotDocumentRefID { + if wantDocumentRefID != gotDocumentRefID.String() { t.Errorf("wanted document ref ID %s, got %s", wantDocumentRefID, gotDocumentRefID) } if wantURI != gotURI { @@ -385,7 +382,7 @@ func TestCanExtractExternalDocumentReference(t *testing.T) { func TestCanExtractExternalDocumentReferenceWithExtraWhitespace(t *testing.T) { refstring := " DocumentRef-spdx-tool-1.2 \t http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301 \t SHA1: \t d6a770ba38583ed4bb4525bd96e50461655d2759" - wantDocumentRefID := "spdx-tool-1.2" + wantDocumentRefID := "DocumentRef-spdx-tool-1.2" wantURI := "http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301" wantAlg := "SHA1" wantChecksum := "d6a770ba38583ed4bb4525bd96e50461655d2759" @@ -394,7 +391,7 @@ func TestCanExtractExternalDocumentReferenceWithExtraWhitespace(t *testing.T) { if err != nil { t.Errorf("got non-nil error: %v", err) } - if wantDocumentRefID != gotDocumentRefID { + if wantDocumentRefID != gotDocumentRefID.String() { t.Errorf("wanted document ref ID %s, got %s", wantDocumentRefID, gotDocumentRefID) } if wantURI != gotURI { @@ -416,7 +413,6 @@ func 
TestFailsExternalDocumentReferenceWithInvalidFormats(t *testing.T) { "DocumentRef-spdx-tool-1.2 http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301", "DocumentRef-spdx-tool-1.2 http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301 d6a770ba38583ed4bb4525bd96e50461655d2759", "DocumentRef-spdx-tool-1.2", - "spdx-tool-1.2 http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301 SHA1:d6a770ba38583ed4bb4525bd96e50461655d2759", } for _, refstring := range invalidRefs { _, _, _, _, err := extractExternalDocumentReference(refstring) diff --git a/tvloader/parser2v2/parse_file.go b/tvloader/parser2v2/parse_file.go index e564147a..73bff82a 100644 --- a/tvloader/parser2v2/parse_file.go +++ b/tvloader/parser2v2/parse_file.go @@ -69,13 +69,13 @@ func (parser *tvParser2_2) parsePairFromFile2_2(tag string, value string) error if parser.file.Checksums == nil { parser.file.Checksums = []spdx.Checksum{} } - switch spdx.ChecksumAlgorithm(subkey) { - case spdx.SHA1, spdx.SHA256, spdx.MD5: - algorithm := spdx.ChecksumAlgorithm(subkey) - parser.file.Checksums = append(parser.file.Checksums, spdx.Checksum{Algorithm: algorithm, Value: subvalue}) - default: - return fmt.Errorf("got unknown checksum type %s", subkey) + + algorithm := spdx.ChecksumAlgorithm(subkey) + err = algorithm.Validate() + if err != nil { + return err } + parser.file.Checksums = append(parser.file.Checksums, spdx.Checksum{Algorithm: algorithm, Value: subvalue}) case "LicenseConcluded": parser.file.LicenseConcluded = value case "LicenseInfoInFile": diff --git a/tvloader/parser2v2/parse_file_test.go b/tvloader/parser2v2/parse_file_test.go index 30f9f5e7..cdd998aa 100644 --- a/tvloader/parser2v2/parse_file_test.go +++ b/tvloader/parser2v2/parse_file_test.go @@ -7,6 +7,8 @@ import ( "github.com/spdx/tools-golang/spdx" ) +var truthy = true + // ===== Parser file section state change tests ===== func TestParser2_2FileStartsNewFileAfterParsingFileNameTag(t *testing.T) { // create the first file @@ -15,7 +17,7 @@ func TestParser2_2FileStartsNewFileAfterParsingFileNameTag(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psFile2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", FilesAnalyzed: &truthy, Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: fileOldName, FileSPDXIdentifier: "f1"}, } fileOld := parser.file @@ -178,12 +180,9 @@ func TestParser2_2FileStartsNewPackageAfterParsingPackageNameTag(t *testing.T) { t.Errorf("expected package name %s, got %s", p2Name, parser.pkg.PackageName) } // and the package should default to true for FilesAnalyzed - if parser.pkg.FilesAnalyzed != true { + if *parser.pkg.FilesAnalyzed != true { t.Errorf("expected FilesAnalyzed to default to true, got false") } - if parser.pkg.IsFilesAnalyzedTagPresent != false { - t.Errorf("expected IsFilesAnalyzedTagPresent to default to false, got true") - } // and the new Package should have no files if len(parser.pkg.Files) != 0 { t.Errorf("Expected no files in pkg.Files, got %d", len(parser.pkg.Files)) @@ -219,7 +218,7 @@ func TestParser2_2FileMovesToSnippetAfterParsingSnippetSPDXIDTag(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psFile2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, + pkg:
&spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", FilesAnalyzed: &truthy, Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } parser.doc.Packages = append(parser.doc.Packages, parser.pkg) @@ -244,7 +243,7 @@ func TestParser2_2FileMovesToOtherLicenseAfterParsingLicenseIDTag(t *testing.T) parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psFile2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", FilesAnalyzed: &truthy, Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } parser.doc.Packages = append(parser.doc.Packages, parser.pkg) @@ -263,7 +262,7 @@ func TestParser2_2FileMovesToReviewAfterParsingReviewerTag(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psFile2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", FilesAnalyzed: &truthy, Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } parser.doc.Packages = append(parser.doc.Packages, parser.pkg) @@ -282,7 +281,7 @@ func TestParser2_2FileStaysAfterParsingRelationshipTags(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psFile2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", FilesAnalyzed: &truthy, Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } parser.doc.Packages = append(parser.doc.Packages, parser.pkg) @@ -311,7 +310,7 @@ func TestParser2_2FileStaysAfterParsingAnnotationTags(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psFile2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", FilesAnalyzed: &truthy, Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } parser.doc.Packages = append(parser.doc.Packages, parser.pkg) @@ -705,7 +704,7 @@ func TestParser2_2FileCreatesRelationshipInDocument(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psFile2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", FilesAnalyzed: &truthy, Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } parser.doc.Packages = append(parser.doc.Packages, parser.pkg) @@ -727,7 +726,7 @@ func TestParser2_2FileCreatesAnnotationInDocument(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psFile2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", FilesAnalyzed: &truthy, Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } parser.doc.Packages = append(parser.doc.Packages, parser.pkg) @@ -749,7 +748,7 @@ 
func TestParser2_2FileUnknownTagFails(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psFile2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", FilesAnalyzed: &truthy, Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } parser.doc.Packages = append(parser.doc.Packages, parser.pkg) @@ -765,7 +764,7 @@ func TestFileAOPPointerChangesAfterTags(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psFile2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", FilesAnalyzed: &truthy, Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } parser.doc.Packages = append(parser.doc.Packages, parser.pkg) diff --git a/tvloader/parser2v2/parse_other_license_test.go b/tvloader/parser2v2/parse_other_license_test.go index e0607ee6..3270f612 100644 --- a/tvloader/parser2v2/parse_other_license_test.go +++ b/tvloader/parser2v2/parse_other_license_test.go @@ -16,7 +16,7 @@ func TestParser2_2OLStartsNewOtherLicenseAfterParsingLicenseIDTag(t *testing.T) parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psOtherLicense2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", FilesAnalyzed: &truthy, Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_2{ LicenseIdentifier: olid1, @@ -92,7 +92,7 @@ func TestParser2_2OLMovesToReviewAfterParsingReviewerTag(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psOtherLicense2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", FilesAnalyzed: &truthy, Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } parser.doc.Packages = append(parser.doc.Packages, parser.pkg) @@ -112,7 +112,7 @@ func TestParser2_2OtherLicenseStaysAfterParsingRelationshipTags(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psOtherLicense2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", FilesAnalyzed: &truthy, Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_2{ LicenseIdentifier: "LicenseRef-whatever", @@ -154,7 +154,7 @@ func TestParser2_2OtherLicenseStaysAfterParsingAnnotationTags(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psOtherLicense2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", FilesAnalyzed: &truthy, Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_2{ LicenseIdentifier: "LicenseRef-whatever", @@ -218,7 
+218,7 @@ func TestParser2_2OLFailsAfterParsingOtherSectionTags(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psOtherLicense2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", FilesAnalyzed: &truthy, Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_2{ LicenseIdentifier: "LicenseRef-Lic11", @@ -249,7 +249,7 @@ func TestParser2_2CanParseOtherLicenseTags(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psOtherLicense2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", FilesAnalyzed: &truthy, Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } parser.doc.Packages = append(parser.doc.Packages, parser.pkg) @@ -325,7 +325,7 @@ func TestParser2_2OLUnknownTagFails(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psOtherLicense2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", FilesAnalyzed: &truthy, Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } parser.doc.Packages = append(parser.doc.Packages, parser.pkg) diff --git a/tvloader/parser2v2/parse_package.go b/tvloader/parser2v2/parse_package.go index 4d6caf9d..c4cdda0f 100644 --- a/tvloader/parser2v2/parse_package.go +++ b/tvloader/parser2v2/parse_package.go @@ -24,9 +24,9 @@ func (parser *tvParser2_2) parsePairFromPackage2_2(tag string, value string) err if parser.pkg != nil && parser.pkg.PackageSPDXIdentifier == nullSpdxElementId2_2 { return fmt.Errorf("package with PackageName %s does not have SPDX identifier", parser.pkg.PackageName) } + truthy := true parser.pkg = &spdx.Package2_2{ - FilesAnalyzed: true, - IsFilesAnalyzedTagPresent: false, + FilesAnalyzed: &truthy, } } parser.pkg.PackageName = value @@ -93,11 +93,9 @@ func (parser *tvParser2_2) parsePairFromPackage2_2(tag string, value string) err case "PackageDownloadLocation": parser.pkg.PackageDownloadLocation = value case "FilesAnalyzed": - parser.pkg.IsFilesAnalyzedTagPresent = true if value == "false" { - parser.pkg.FilesAnalyzed = false - } else if value == "true" { - parser.pkg.FilesAnalyzed = true + falsy := false + parser.pkg.FilesAnalyzed = &falsy } case "PackageVerificationCode": parser.pkg.PackageVerificationCode = extractCodeAndExcludes(value) @@ -109,13 +107,13 @@ func (parser *tvParser2_2) parsePairFromPackage2_2(tag string, value string) err if parser.pkg.PackageChecksums == nil { parser.pkg.PackageChecksums = []spdx.Checksum{} } - switch spdx.ChecksumAlgorithm(subkey) { - case spdx.SHA1, spdx.SHA256, spdx.MD5: - algorithm := spdx.ChecksumAlgorithm(subkey) - parser.pkg.PackageChecksums = append(parser.pkg.PackageChecksums, spdx.Checksum{Algorithm: algorithm, Value: subvalue}) - default: - return fmt.Errorf("got unknown checksum type %s", subkey) + + algorithm := spdx.ChecksumAlgorithm(subkey) + err = algorithm.Validate() + if err != nil { + return err } + parser.pkg.PackageChecksums = append(parser.pkg.PackageChecksums, spdx.Checksum{Algorithm: algorithm, Value: subvalue}) 
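Aside (illustrative sketch, not part of this patch): the parse_package.go hunk above replaces the hard-coded SHA1/SHA256/MD5 switch with the spdx.ChecksumAlgorithm.Validate call introduced alongside this change. A minimal standalone sketch of that pattern, assuming only the types and method already used in the hunk; the helper name appendChecksum is invented for the example.

package main

import (
	"fmt"

	"github.com/spdx/tools-golang/spdx"
)

// appendChecksum mirrors the new parser behavior: convert the tag subkey to a
// ChecksumAlgorithm, let Validate reject unknown algorithms, then append.
func appendChecksum(checksums []spdx.Checksum, subkey, subvalue string) ([]spdx.Checksum, error) {
	algorithm := spdx.ChecksumAlgorithm(subkey)
	if err := algorithm.Validate(); err != nil {
		return nil, err
	}
	return append(checksums, spdx.Checksum{Algorithm: algorithm, Value: subvalue}), nil
}

func main() {
	cs, err := appendChecksum(nil, "SHA256", "11b6d3ee554eedf79299905a98f9b9a04e498210b59f15094c916c91d150efcd")
	fmt.Println(cs, err)
}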
case "PackageHomePage": parser.pkg.PackageHomePage = value case "PackageSourceInfo": diff --git a/tvloader/parser2v2/parse_package_test.go b/tvloader/parser2v2/parse_package_test.go index 6b58d0f9..0312553d 100644 --- a/tvloader/parser2v2/parse_package_test.go +++ b/tvloader/parser2v2/parse_package_test.go @@ -50,12 +50,9 @@ func TestParser2_2PackageStartsNewPackageAfterParsingPackageNameTag(t *testing.T t.Errorf("expected package name %s, got %s", pkgName, parser.pkg.PackageName) } // and the package should default to true for FilesAnalyzed - if parser.pkg.FilesAnalyzed != true { + if !*parser.pkg.FilesAnalyzed { t.Errorf("expected FilesAnalyzed to default to true, got false") } - if parser.pkg.IsFilesAnalyzedTagPresent != false { - t.Errorf("expected IsFilesAnalyzedTagPresent to default to false, got true") - } // and the Document's Packages should still be of size 1 and have pkgOld only if parser.doc.Packages[0] != pkgOld { t.Errorf("Expected package %v, got %v", pkgOld, parser.doc.Packages[0]) @@ -97,12 +94,9 @@ func TestParser2_2PackageStartsNewPackageAfterParsingPackageNameTagWhileInUnpack t.Errorf("expected package name %s, got %s", pkgName, parser.pkg.PackageName) } // and the package should default to true for FilesAnalyzed - if parser.pkg.FilesAnalyzed != true { + if !*parser.pkg.FilesAnalyzed { t.Errorf("expected FilesAnalyzed to default to true, got false") } - if parser.pkg.IsFilesAnalyzedTagPresent != false { - t.Errorf("expected IsFilesAnalyzedTagPresent to default to false, got true") - } // and the Document's Packages should be of size 0, because the prior was // unpackaged files and this one won't be added until an SPDXID is seen if len(parser.doc.Packages) != 0 { @@ -248,7 +242,7 @@ func TestParser2_2CanParsePackageTags(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, - pkg: &spdx.Package2_2{}, + pkg: &spdx.Package2_2{FilesAnalyzed: &truthy}, } // should not yet be in Packages map, b/c no SPDX identifier @@ -324,12 +318,9 @@ func TestParser2_2CanParsePackageTags(t *testing.T) { if err != nil { t.Errorf("expected nil error, got %v", err) } - if parser.pkg.FilesAnalyzed != false { + if *parser.pkg.FilesAnalyzed { t.Errorf("got %v for FilesAnalyzed", parser.pkg.FilesAnalyzed) } - if parser.pkg.IsFilesAnalyzedTagPresent != true { - t.Errorf("got %v for IsFilesAnalyzedTagPresent", parser.pkg.IsFilesAnalyzedTagPresent) - } // Package Verification Code // SKIP -- separate tests for "excludes", or not, below @@ -846,7 +837,7 @@ func TestParser2_2FailsIfInvalidSPDXIDInPackageSection(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, - pkg: &spdx.Package2_2{}, + pkg: &spdx.Package2_2{FilesAnalyzed: &truthy}, } // start with Package Name @@ -865,7 +856,7 @@ func TestParser2_2FailsIfInvalidPackageSupplierFormat(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, - pkg: &spdx.Package2_2{}, + pkg: &spdx.Package2_2{FilesAnalyzed: &truthy}, } // start with Package Name @@ -884,7 +875,7 @@ func TestParser2_2FailsIfUnknownPackageSupplierType(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, - pkg: &spdx.Package2_2{}, + pkg: &spdx.Package2_2{FilesAnalyzed: &truthy}, } // start with Package Name @@ -903,7 +894,7 @@ func TestParser2_2FailsIfInvalidPackageOriginatorFormat(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: 
[]*spdx.Package2_2{}}, st: psPackage2_2, - pkg: &spdx.Package2_2{}, + pkg: &spdx.Package2_2{FilesAnalyzed: &truthy}, } // start with Package Name @@ -922,7 +913,7 @@ func TestParser2_2FailsIfUnknownPackageOriginatorType(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, - pkg: &spdx.Package2_2{}, + pkg: &spdx.Package2_2{FilesAnalyzed: &truthy}, } // start with Package Name @@ -941,7 +932,7 @@ func TestParser2_2SetsFilesAnalyzedTagsCorrectly(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, - pkg: &spdx.Package2_2{}, + pkg: &spdx.Package2_2{FilesAnalyzed: &truthy}, } // start with Package Name @@ -954,19 +945,16 @@ func TestParser2_2SetsFilesAnalyzedTagsCorrectly(t *testing.T) { if err != nil { t.Errorf("expected nil error, got %v", err) } - if parser.pkg.FilesAnalyzed != true { + if !*parser.pkg.FilesAnalyzed { t.Errorf("expected %v, got %v", true, parser.pkg.FilesAnalyzed) } - if parser.pkg.IsFilesAnalyzedTagPresent != true { - t.Errorf("expected %v, got %v", true, parser.pkg.IsFilesAnalyzedTagPresent) - } } func TestParser2_2FailsIfInvalidPackageChecksumFormat(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, - pkg: &spdx.Package2_2{}, + pkg: &spdx.Package2_2{FilesAnalyzed: &truthy}, } // start with Package Name @@ -985,7 +973,7 @@ func TestParser2_2FailsIfInvalidPackageChecksumType(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, - pkg: &spdx.Package2_2{}, + pkg: &spdx.Package2_2{FilesAnalyzed: &truthy}, } // start with Package Name @@ -1004,7 +992,7 @@ func TestParser2_2FailsIfInvalidExternalRefFormat(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, - pkg: &spdx.Package2_2{}, + pkg: &spdx.Package2_2{FilesAnalyzed: &truthy}, } // start with Package Name @@ -1023,7 +1011,7 @@ func TestParser2_2FailsIfExternalRefCommentBeforeExternalRef(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, - pkg: &spdx.Package2_2{}, + pkg: &spdx.Package2_2{FilesAnalyzed: &truthy}, } // start with Package Name diff --git a/tvloader/parser2v2/parse_review_test.go b/tvloader/parser2v2/parse_review_test.go index de73ede0..03da0352 100644 --- a/tvloader/parser2v2/parse_review_test.go +++ b/tvloader/parser2v2/parse_review_test.go @@ -14,7 +14,7 @@ func TestParser2_2ReviewStartsNewReviewAfterParsingReviewerTag(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psReview2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", FilesAnalyzed: &truthy, Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_2{ LicenseIdentifier: "LicenseRef-Lic11", @@ -84,7 +84,7 @@ func TestParser2_2ReviewStaysAfterParsingRelationshipTags(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psReview2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", FilesAnalyzed: &truthy, Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", 
FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_2{ LicenseIdentifier: "LicenseRef-Lic11", @@ -131,7 +131,7 @@ func TestParser2_2ReviewStaysAfterParsingAnnotationTags(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psReview2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", FilesAnalyzed: &truthy, Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_2{ LicenseIdentifier: "LicenseRef-Lic11", @@ -200,7 +200,7 @@ func TestParser2_2ReviewFailsAfterParsingOtherSectionTags(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psReview2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", FilesAnalyzed: &truthy, Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_2{ LicenseIdentifier: "LicenseRef-Lic11", @@ -237,7 +237,7 @@ func TestParser2_2CanParseReviewTags(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psReview2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", FilesAnalyzed: &truthy, Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_2{ LicenseIdentifier: "LicenseRef-Lic11", @@ -276,7 +276,7 @@ func TestParser2_2CanParseReviewerPersonTag(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psReview2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", FilesAnalyzed: &truthy, Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_2{ LicenseIdentifier: "LicenseRef-Lic11", @@ -306,7 +306,7 @@ func TestParser2_2CanParseReviewerOrganizationTag(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psReview2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", FilesAnalyzed: &truthy, Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_2{ LicenseIdentifier: "LicenseRef-Lic11", @@ -336,7 +336,7 @@ func TestParser2_2CanParseReviewerToolTag(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psReview2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", FilesAnalyzed: &truthy, Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_2{ LicenseIdentifier: "LicenseRef-Lic11", @@ -394,7 +394,7 @@ func TestParser2_2ReviewUnknownTagFails(t *testing.T) { parser := tvParser2_2{ doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: 
psReview2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", FilesAnalyzed: &truthy, Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_2{ LicenseIdentifier: "LicenseRef-Lic11", diff --git a/tvloader/parser2v2/parse_snippet_test.go b/tvloader/parser2v2/parse_snippet_test.go index 545595af..5501665f 100644 --- a/tvloader/parser2v2/parse_snippet_test.go +++ b/tvloader/parser2v2/parse_snippet_test.go @@ -102,12 +102,9 @@ func TestParser2_2SnippetStartsNewPackageAfterParsingPackageNameTag(t *testing.T t.Errorf("expected package name %s, got %s", p2Name, parser.pkg.PackageName) } // and the package should default to true for FilesAnalyzed - if parser.pkg.FilesAnalyzed != true { + if *parser.pkg.FilesAnalyzed != true { t.Errorf("expected FilesAnalyzed to default to true, got false") } - if parser.pkg.IsFilesAnalyzedTagPresent != false { - t.Errorf("expected IsFilesAnalyzedTagPresent to default to false, got true") - } // and the Document's Packages should still be of size 1 b/c no SPDX // identifier has been seen yet if len(parser.doc.Packages) != 1 { diff --git a/tvsaver/saver2v1/save_annotation.go b/tvsaver/saver2v1/save_annotation.go index f7d79538..36ab47f6 100644 --- a/tvsaver/saver2v1/save_annotation.go +++ b/tvsaver/saver2v1/save_annotation.go @@ -19,7 +19,7 @@ func renderAnnotation2_1(ann *spdx.Annotation2_1, w io.Writer) error { if ann.AnnotationType != "" { fmt.Fprintf(w, "AnnotationType: %s\n", ann.AnnotationType) } - annIDStr := spdx.RenderDocElementID(ann.AnnotationSPDXIdentifier) + annIDStr := ann.AnnotationSPDXIdentifier.String() if annIDStr != "SPDXRef-" { fmt.Fprintf(w, "SPDXREF: %s\n", annIDStr) } diff --git a/tvsaver/saver2v1/save_document.go b/tvsaver/saver2v1/save_document.go index ea17db25..d23aeea2 100644 --- a/tvsaver/saver2v1/save_document.go +++ b/tvsaver/saver2v1/save_document.go @@ -28,7 +28,7 @@ func RenderDocument2_1(doc *spdx.Document2_1, w io.Writer) error { fmt.Fprintf(w, "DataLicense: %s\n", doc.DataLicense) } if doc.SPDXIdentifier != "" { - fmt.Fprintf(w, "SPDXID: %s\n", spdx.RenderElementID(doc.SPDXIdentifier)) + fmt.Fprintf(w, "SPDXID: %s\n", doc.SPDXIdentifier) } if doc.DocumentName != "" { fmt.Fprintf(w, "DocumentName: %s\n", doc.DocumentName) @@ -38,10 +38,10 @@ func RenderDocument2_1(doc *spdx.Document2_1, w io.Writer) error { } // print EDRs in order sorted by identifier sort.Slice(doc.ExternalDocumentReferences, func(i, j int) bool { - return doc.ExternalDocumentReferences[i].DocumentRefID < doc.ExternalDocumentReferences[j].DocumentRefID + return doc.ExternalDocumentReferences[i].DocumentRefID.DocumentRefID < doc.ExternalDocumentReferences[j].DocumentRefID.DocumentRefID }) for _, edr := range doc.ExternalDocumentReferences { - fmt.Fprintf(w, "ExternalDocumentRef: DocumentRef-%s %s %s:%s\n", + fmt.Fprintf(w, "ExternalDocumentRef: %s %s %s:%s\n", edr.DocumentRefID, edr.URI, edr.Checksum.Algorithm, edr.Checksum.Value) } if doc.DocumentComment != "" { diff --git a/tvsaver/saver2v1/save_document_test.go b/tvsaver/saver2v1/save_document_test.go index b1865647..f4c52936 100644 --- a/tvsaver/saver2v1/save_document_test.go +++ b/tvsaver/saver2v1/save_document_test.go @@ -87,14 +87,14 @@ func TestSaver2_1DocumentSavesText(t *testing.T) { FileCopyrightText: "Copyright (c) Jane Doe LLC", } + truthy := true pkgWith := &spdx.Package2_1{ - PackageName: "p1", - 
PackageSPDXIdentifier: spdx.ElementID("p1"), - PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", - FilesAnalyzed: true, - IsFilesAnalyzedTagPresent: true, - PackageVerificationCode: spdx.PackageVerificationCode{Value: "0123456789abcdef0123456789abcdef01234567"}, - PackageLicenseConcluded: "GPL-2.0-or-later AND BSD-3-Clause AND WTFPL", + PackageName: "p1", + PackageSPDXIdentifier: spdx.ElementID("p1"), + PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", + FilesAnalyzed: &truthy, + PackageVerificationCode: spdx.PackageVerificationCode{Value: "0123456789abcdef0123456789abcdef01234567"}, + PackageLicenseConcluded: "GPL-2.0-or-later AND BSD-3-Clause AND WTFPL", PackageLicenseInfoFromFiles: []string{ "Apache-2.0", "GPL-2.0-or-later", diff --git a/tvsaver/saver2v1/save_file.go b/tvsaver/saver2v1/save_file.go index c1311220..7ed9fa44 100644 --- a/tvsaver/saver2v1/save_file.go +++ b/tvsaver/saver2v1/save_file.go @@ -15,7 +15,7 @@ func renderFile2_1(f *spdx.File2_1, w io.Writer) error { fmt.Fprintf(w, "FileName: %s\n", f.FileName) } if f.FileSPDXIdentifier != "" { - fmt.Fprintf(w, "SPDXID: %s\n", spdx.RenderElementID(f.FileSPDXIdentifier)) + fmt.Fprintf(w, "SPDXID: %s\n", f.FileSPDXIdentifier) } for _, s := range f.FileTypes { fmt.Fprintf(w, "FileType: %s\n", s) diff --git a/tvsaver/saver2v1/save_package.go b/tvsaver/saver2v1/save_package.go index 24a468c0..ee9d3291 100644 --- a/tvsaver/saver2v1/save_package.go +++ b/tvsaver/saver2v1/save_package.go @@ -16,7 +16,7 @@ func renderPackage2_1(pkg *spdx.Package2_1, w io.Writer) error { fmt.Fprintf(w, "PackageName: %s\n", pkg.PackageName) } if pkg.PackageSPDXIdentifier != "" { - fmt.Fprintf(w, "SPDXID: %s\n", spdx.RenderElementID(pkg.PackageSPDXIdentifier)) + fmt.Fprintf(w, "SPDXID: %s\n", pkg.PackageSPDXIdentifier) } if pkg.PackageVersion != "" { fmt.Fprintf(w, "PackageVersion: %s\n", pkg.PackageVersion) @@ -41,14 +41,12 @@ func renderPackage2_1(pkg *spdx.Package2_1, w io.Writer) error { if pkg.PackageDownloadLocation != "" { fmt.Fprintf(w, "PackageDownloadLocation: %s\n", pkg.PackageDownloadLocation) } - if pkg.FilesAnalyzed == true { - if pkg.IsFilesAnalyzedTagPresent == true { - fmt.Fprintf(w, "FilesAnalyzed: true\n") - } - } else { + if pkg.FilesAnalyzed != nil && !*pkg.FilesAnalyzed { fmt.Fprintf(w, "FilesAnalyzed: false\n") + } else { + fmt.Fprintf(w, "FilesAnalyzed: true\n") } - if pkg.PackageVerificationCode.Value != "" && pkg.FilesAnalyzed == true { + if pkg.PackageVerificationCode.Value != "" { if len(pkg.PackageVerificationCode.ExcludedFiles) == 0 { fmt.Fprintf(w, "PackageVerificationCode: %s\n", pkg.PackageVerificationCode.Value) } else { @@ -69,10 +67,8 @@ func renderPackage2_1(pkg *spdx.Package2_1, w io.Writer) error { if pkg.PackageLicenseConcluded != "" { fmt.Fprintf(w, "PackageLicenseConcluded: %s\n", pkg.PackageLicenseConcluded) } - if pkg.FilesAnalyzed == true { - for _, s := range pkg.PackageLicenseInfoFromFiles { - fmt.Fprintf(w, "PackageLicenseInfoFromFiles: %s\n", s) - } + for _, s := range pkg.PackageLicenseInfoFromFiles { + fmt.Fprintf(w, "PackageLicenseInfoFromFiles: %s\n", s) } if pkg.PackageLicenseDeclared != "" { fmt.Fprintf(w, "PackageLicenseDeclared: %s\n", pkg.PackageLicenseDeclared) diff --git a/tvsaver/saver2v1/save_package_test.go b/tvsaver/saver2v1/save_package_test.go index 0f1541ca..fcad12bb 100644 --- a/tvsaver/saver2v1/save_package_test.go +++ b/tvsaver/saver2v1/save_package_test.go @@ -40,16 +40,16 @@ multi-line external ref comment`, // no ExternalRefComment for this 
one } + truthy := true pkg := &spdx.Package2_1{ - PackageName: "p1", - PackageSPDXIdentifier: spdx.ElementID("p1"), - PackageVersion: "0.1.0", - PackageFileName: "p1-0.1.0-master.tar.gz", - PackageSupplier: &spdx.Supplier{SupplierType: "Organization", Supplier: "John Doe, Inc."}, - PackageOriginator: &spdx.Originator{Originator: "John Doe", OriginatorType: "Person"}, - PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", - FilesAnalyzed: true, - IsFilesAnalyzedTagPresent: true, + PackageName: "p1", + PackageSPDXIdentifier: spdx.ElementID("p1"), + PackageVersion: "0.1.0", + PackageFileName: "p1-0.1.0-master.tar.gz", + PackageSupplier: &spdx.Supplier{SupplierType: "Organization", Supplier: "John Doe, Inc."}, + PackageOriginator: &spdx.Originator{Originator: "John Doe", OriginatorType: "Person"}, + PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", + FilesAnalyzed: &truthy, PackageVerificationCode: spdx.PackageVerificationCode{ Value: "0123456789abcdef0123456789abcdef01234567", ExcludedFiles: []string{"p1-0.1.0.spdx"}, @@ -144,16 +144,14 @@ func TestSaver2_1PackageSavesTextCombo2(t *testing.T) { // PackageVerificationCodeExcludedFile is empty pkg := &spdx.Package2_1{ - PackageName: "p1", - PackageSPDXIdentifier: spdx.ElementID("p1"), - PackageVersion: "0.1.0", - PackageFileName: "p1-0.1.0-master.tar.gz", - PackageSupplier: &spdx.Supplier{Supplier: "NOASSERTION"}, - PackageOriginator: &spdx.Originator{OriginatorType: "Organization", Originator: "John Doe, Inc."}, - PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", - FilesAnalyzed: true, - IsFilesAnalyzedTagPresent: false, - PackageVerificationCode: spdx.PackageVerificationCode{Value: "0123456789abcdef0123456789abcdef01234567"}, + PackageName: "p1", + PackageSPDXIdentifier: spdx.ElementID("p1"), + PackageVersion: "0.1.0", + PackageFileName: "p1-0.1.0-master.tar.gz", + PackageSupplier: &spdx.Supplier{Supplier: "NOASSERTION"}, + PackageOriginator: &spdx.Originator{OriginatorType: "Organization", Originator: "John Doe, Inc."}, + PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", + PackageVerificationCode: spdx.PackageVerificationCode{Value: "0123456789abcdef0123456789abcdef01234567"}, PackageChecksums: []spdx.Checksum{ { Algorithm: spdx.SHA1, @@ -192,6 +190,7 @@ PackageFileName: p1-0.1.0-master.tar.gz PackageSupplier: NOASSERTION PackageOriginator: Organization: John Doe, Inc. 
PackageDownloadLocation: http://example.com/p1/p1-0.1.0-master.tar.gz +FilesAnalyzed: true PackageVerificationCode: 0123456789abcdef0123456789abcdef01234567 PackageChecksum: SHA1: 85ed0817af83a24ad8da68c2b5094de69833983c PackageChecksum: SHA256: 11b6d3ee554eedf79299905a98f9b9a04e498210b59f15094c916c91d150efcd @@ -231,16 +230,16 @@ func TestSaver2_1PackageSavesTextCombo3(t *testing.T) { // FilesAnalyzed false, IsFilesAnalyzedTagPresent true // PackageVerificationCodeExcludedFile is empty + falsy := false pkg := &spdx.Package2_1{ - PackageName: "p1", - PackageSPDXIdentifier: spdx.ElementID("p1"), - PackageVersion: "0.1.0", - PackageFileName: "p1-0.1.0-master.tar.gz", - PackageSupplier: &spdx.Supplier{Supplier: "John Doe", SupplierType: "Person"}, - PackageOriginator: &spdx.Originator{Originator: "NOASSERTION"}, - PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", - FilesAnalyzed: false, - IsFilesAnalyzedTagPresent: true, + PackageName: "p1", + PackageSPDXIdentifier: spdx.ElementID("p1"), + PackageVersion: "0.1.0", + PackageFileName: "p1-0.1.0-master.tar.gz", + PackageSupplier: &spdx.Supplier{Supplier: "John Doe", SupplierType: "Person"}, + PackageOriginator: &spdx.Originator{Originator: "NOASSERTION"}, + PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", + FilesAnalyzed: &falsy, // NOTE that verification code MUST be omitted from output // since FilesAnalyzed is false PackageVerificationCode: spdx.PackageVerificationCode{Value: "0123456789abcdef0123456789abcdef01234567"}, @@ -285,12 +284,16 @@ PackageSupplier: Person: John Doe PackageOriginator: NOASSERTION PackageDownloadLocation: http://example.com/p1/p1-0.1.0-master.tar.gz FilesAnalyzed: false +PackageVerificationCode: 0123456789abcdef0123456789abcdef01234567 PackageChecksum: SHA1: 85ed0817af83a24ad8da68c2b5094de69833983c PackageChecksum: SHA256: 11b6d3ee554eedf79299905a98f9b9a04e498210b59f15094c916c91d150efcd PackageChecksum: MD5: 624c1abb3664f4b35547e7c73864ad24 PackageHomePage: http://example.com/p1 PackageSourceInfo: this is a source comment PackageLicenseConcluded: GPL-2.0-or-later +PackageLicenseInfoFromFiles: Apache-1.1 +PackageLicenseInfoFromFiles: Apache-2.0 +PackageLicenseInfoFromFiles: GPL-2.0-or-later PackageLicenseDeclared: Apache-2.0 OR GPL-2.0-or-later PackageLicenseComments: this is a license comment(s) PackageCopyrightText: Copyright (c) John Doe, Inc. 
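Aside (illustrative sketch, not part of this patch): the saver tests above all apply the same migration of FilesAnalyzed from bool to *bool. A minimal usage sketch under that assumption, using only Package2_1 fields that appear in these hunks; leaving the pointer nil is rendered the same as true by the updated saver.

package main

import (
	"fmt"

	"github.com/spdx/tools-golang/spdx"
)

func main() {
	// With a *bool field, callers take the address of a named local.
	falsy := false
	pkg := &spdx.Package2_1{
		PackageName:             "p1",
		PackageSPDXIdentifier:   spdx.ElementID("p1"),
		PackageDownloadLocation: "NOASSERTION",
		FilesAnalyzed:           &falsy, // omit the field entirely to get the default of true
	}

	// Mirrors the saver's decision: false only when the pointer is set and false.
	filesAnalyzed := !(pkg.FilesAnalyzed != nil && !*pkg.FilesAnalyzed)
	fmt.Printf("FilesAnalyzed: %v\n", filesAnalyzed)
}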
@@ -315,17 +318,13 @@ PackageComment: this is a comment comment } func TestSaver2_1PackageSaveOmitsOptionalFieldsIfEmpty(t *testing.T) { + falsy := false pkg := &spdx.Package2_1{ - PackageName: "p1", - PackageSPDXIdentifier: spdx.ElementID("p1"), - PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", - FilesAnalyzed: false, - IsFilesAnalyzedTagPresent: true, - // NOTE that verification code MUST be omitted from output, - // even if present in model, since FilesAnalyzed is false + PackageName: "p1", + PackageSPDXIdentifier: spdx.ElementID("p1"), + PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", + FilesAnalyzed: &falsy, PackageLicenseConcluded: "GPL-2.0-or-later", - // NOTE that license info from files MUST be omitted from output - // even if present in model, since FilesAnalyzed is false PackageLicenseInfoFromFiles: []string{ "Apache-1.1", "Apache-2.0", @@ -341,6 +340,9 @@ SPDXID: SPDXRef-p1 PackageDownloadLocation: http://example.com/p1/p1-0.1.0-master.tar.gz FilesAnalyzed: false PackageLicenseConcluded: GPL-2.0-or-later +PackageLicenseInfoFromFiles: Apache-1.1 +PackageLicenseInfoFromFiles: Apache-2.0 +PackageLicenseInfoFromFiles: GPL-2.0-or-later PackageLicenseDeclared: Apache-2.0 OR GPL-2.0-or-later PackageCopyrightText: Copyright (c) John Doe, Inc. @@ -389,17 +391,13 @@ func TestSaver2_1PackageSavesFilesIfPresent(t *testing.T) { FileCopyrightText: "Copyright (c) John Doe", } + falsy := false pkg := &spdx.Package2_1{ - PackageName: "p1", - PackageSPDXIdentifier: spdx.ElementID("p1"), - PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", - FilesAnalyzed: false, - IsFilesAnalyzedTagPresent: true, - // NOTE that verification code MUST be omitted from output, - // even if present in model, since FilesAnalyzed is false + PackageName: "p1", + PackageSPDXIdentifier: spdx.ElementID("p1"), + PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", + FilesAnalyzed: &falsy, PackageLicenseConcluded: "GPL-2.0-or-later", - // NOTE that license info from files MUST be omitted from output - // even if present in model, since FilesAnalyzed is false PackageLicenseInfoFromFiles: []string{ "Apache-1.1", "Apache-2.0", @@ -419,6 +417,9 @@ SPDXID: SPDXRef-p1 PackageDownloadLocation: http://example.com/p1/p1-0.1.0-master.tar.gz FilesAnalyzed: false PackageLicenseConcluded: GPL-2.0-or-later +PackageLicenseInfoFromFiles: Apache-1.1 +PackageLicenseInfoFromFiles: Apache-2.0 +PackageLicenseInfoFromFiles: GPL-2.0-or-later PackageLicenseDeclared: Apache-2.0 OR GPL-2.0-or-later PackageCopyrightText: Copyright (c) John Doe, Inc. 
@@ -453,13 +454,13 @@ FileCopyrightText: Copyright (c) John Doe } func TestSaver2_1PackageWrapsCopyrightMultiLine(t *testing.T) { + falsy := false pkg := &spdx.Package2_1{ - PackageName: "p1", - PackageSPDXIdentifier: spdx.ElementID("p1"), - PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", - FilesAnalyzed: false, - IsFilesAnalyzedTagPresent: true, - PackageLicenseConcluded: "GPL-2.0-or-later", + PackageName: "p1", + PackageSPDXIdentifier: spdx.ElementID("p1"), + PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", + FilesAnalyzed: &falsy, + PackageLicenseConcluded: "GPL-2.0-or-later", PackageLicenseInfoFromFiles: []string{ "Apache-1.1", "Apache-2.0", @@ -476,6 +477,9 @@ SPDXID: SPDXRef-p1 PackageDownloadLocation: http://example.com/p1/p1-0.1.0-master.tar.gz FilesAnalyzed: false PackageLicenseConcluded: GPL-2.0-or-later +PackageLicenseInfoFromFiles: Apache-1.1 +PackageLicenseInfoFromFiles: Apache-2.0 +PackageLicenseInfoFromFiles: GPL-2.0-or-later PackageLicenseDeclared: Apache-2.0 OR GPL-2.0-or-later PackageCopyrightText: Copyright (c) John Doe, Inc. Copyright Jane Doe diff --git a/tvsaver/saver2v1/save_relationship.go b/tvsaver/saver2v1/save_relationship.go index aea48bc3..356820a0 100644 --- a/tvsaver/saver2v1/save_relationship.go +++ b/tvsaver/saver2v1/save_relationship.go @@ -10,8 +10,8 @@ import ( ) func renderRelationship2_1(rln *spdx.Relationship2_1, w io.Writer) error { - rlnAStr := spdx.RenderDocElementID(rln.RefA) - rlnBStr := spdx.RenderDocElementID(rln.RefB) + rlnAStr := rln.RefA.String() + rlnBStr := rln.RefB.String() if rlnAStr != "SPDXRef-" && rlnBStr != "SPDXRef-" && rln.Relationship != "" { fmt.Fprintf(w, "Relationship: %s %s %s\n", rlnAStr, rln.Relationship, rlnBStr) } diff --git a/tvsaver/saver2v1/save_snippet.go b/tvsaver/saver2v1/save_snippet.go index 13995489..60684797 100644 --- a/tvsaver/saver2v1/save_snippet.go +++ b/tvsaver/saver2v1/save_snippet.go @@ -11,9 +11,9 @@ import ( func renderSnippet2_1(sn *spdx.Snippet2_1, w io.Writer) error { if sn.SnippetSPDXIdentifier != "" { - fmt.Fprintf(w, "SnippetSPDXID: %s\n", spdx.RenderElementID(sn.SnippetSPDXIdentifier)) + fmt.Fprintf(w, "SnippetSPDXID: %s\n", sn.SnippetSPDXIdentifier) } - snFromFileIDStr := spdx.RenderElementID(sn.SnippetFromFileSPDXIdentifier) + snFromFileIDStr := sn.SnippetFromFileSPDXIdentifier.String() if snFromFileIDStr != "" { fmt.Fprintf(w, "SnippetFromFileSPDXID: %s\n", snFromFileIDStr) } diff --git a/tvsaver/saver2v2/save_annotation.go b/tvsaver/saver2v2/save_annotation.go index ddfe483a..281d77f1 100644 --- a/tvsaver/saver2v2/save_annotation.go +++ b/tvsaver/saver2v2/save_annotation.go @@ -19,7 +19,7 @@ func renderAnnotation2_2(ann *spdx.Annotation2_2, w io.Writer) error { if ann.AnnotationType != "" { fmt.Fprintf(w, "AnnotationType: %s\n", ann.AnnotationType) } - annIDStr := spdx.RenderDocElementID(ann.AnnotationSPDXIdentifier) + annIDStr := ann.AnnotationSPDXIdentifier.String() if annIDStr != "SPDXRef-" { fmt.Fprintf(w, "SPDXREF: %s\n", annIDStr) } diff --git a/tvsaver/saver2v2/save_document.go b/tvsaver/saver2v2/save_document.go index 04b482da..2dd55166 100644 --- a/tvsaver/saver2v2/save_document.go +++ b/tvsaver/saver2v2/save_document.go @@ -28,7 +28,7 @@ func RenderDocument2_2(doc *spdx.Document2_2, w io.Writer) error { fmt.Fprintf(w, "DataLicense: %s\n", doc.DataLicense) } if doc.SPDXIdentifier != "" { - fmt.Fprintf(w, "SPDXID: %s\n", spdx.RenderElementID(doc.SPDXIdentifier)) + fmt.Fprintf(w, "SPDXID: %s\n", doc.SPDXIdentifier) } if doc.DocumentName 
!= "" { fmt.Fprintf(w, "DocumentName: %s\n", doc.DocumentName) @@ -38,10 +38,10 @@ func RenderDocument2_2(doc *spdx.Document2_2, w io.Writer) error { } // print EDRs in order sorted by identifier sort.Slice(doc.ExternalDocumentReferences, func(i, j int) bool { - return doc.ExternalDocumentReferences[i].DocumentRefID < doc.ExternalDocumentReferences[j].DocumentRefID + return doc.ExternalDocumentReferences[i].DocumentRefID.DocumentRefID < doc.ExternalDocumentReferences[j].DocumentRefID.DocumentRefID }) for _, edr := range doc.ExternalDocumentReferences { - fmt.Fprintf(w, "ExternalDocumentRef: DocumentRef-%s %s %s:%s\n", + fmt.Fprintf(w, "ExternalDocumentRef: %s %s %s:%s\n", edr.DocumentRefID, edr.URI, edr.Checksum.Algorithm, edr.Checksum.Value) } if doc.DocumentComment != "" { diff --git a/tvsaver/saver2v2/save_document_test.go b/tvsaver/saver2v2/save_document_test.go index 552cdab5..240fa2f2 100644 --- a/tvsaver/saver2v2/save_document_test.go +++ b/tvsaver/saver2v2/save_document_test.go @@ -87,14 +87,14 @@ func TestSaver2_2DocumentSavesText(t *testing.T) { FileCopyrightText: "Copyright (c) Jane Doe LLC", } + truthy := true pkgWith := &spdx.Package2_2{ - PackageName: "p1", - PackageSPDXIdentifier: spdx.ElementID("p1"), - PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", - FilesAnalyzed: true, - IsFilesAnalyzedTagPresent: true, - PackageVerificationCode: spdx.PackageVerificationCode{Value: "0123456789abcdef0123456789abcdef01234567"}, - PackageLicenseConcluded: "GPL-2.0-or-later AND BSD-3-Clause AND WTFPL", + PackageName: "p1", + PackageSPDXIdentifier: spdx.ElementID("p1"), + PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", + FilesAnalyzed: &truthy, + PackageVerificationCode: spdx.PackageVerificationCode{Value: "0123456789abcdef0123456789abcdef01234567"}, + PackageLicenseConcluded: "GPL-2.0-or-later AND BSD-3-Clause AND WTFPL", PackageLicenseInfoFromFiles: []string{ "Apache-2.0", "GPL-2.0-or-later", diff --git a/tvsaver/saver2v2/save_file.go b/tvsaver/saver2v2/save_file.go index f1684efb..b3bd8e1f 100644 --- a/tvsaver/saver2v2/save_file.go +++ b/tvsaver/saver2v2/save_file.go @@ -15,7 +15,7 @@ func renderFile2_2(f *spdx.File2_2, w io.Writer) error { fmt.Fprintf(w, "FileName: %s\n", f.FileName) } if f.FileSPDXIdentifier != "" { - fmt.Fprintf(w, "SPDXID: %s\n", spdx.RenderElementID(f.FileSPDXIdentifier)) + fmt.Fprintf(w, "SPDXID: %s\n", f.FileSPDXIdentifier) } for _, s := range f.FileTypes { fmt.Fprintf(w, "FileType: %s\n", s) diff --git a/tvsaver/saver2v2/save_package.go b/tvsaver/saver2v2/save_package.go index 6d21a6d2..d854acbb 100644 --- a/tvsaver/saver2v2/save_package.go +++ b/tvsaver/saver2v2/save_package.go @@ -16,7 +16,7 @@ func renderPackage2_2(pkg *spdx.Package2_2, w io.Writer) error { fmt.Fprintf(w, "PackageName: %s\n", pkg.PackageName) } if pkg.PackageSPDXIdentifier != "" { - fmt.Fprintf(w, "SPDXID: %s\n", spdx.RenderElementID(pkg.PackageSPDXIdentifier)) + fmt.Fprintf(w, "SPDXID: %s\n", pkg.PackageSPDXIdentifier) } if pkg.PackageVersion != "" { fmt.Fprintf(w, "PackageVersion: %s\n", pkg.PackageVersion) @@ -41,14 +41,12 @@ func renderPackage2_2(pkg *spdx.Package2_2, w io.Writer) error { if pkg.PackageDownloadLocation != "" { fmt.Fprintf(w, "PackageDownloadLocation: %s\n", pkg.PackageDownloadLocation) } - if pkg.FilesAnalyzed == true { - if pkg.IsFilesAnalyzedTagPresent == true { - fmt.Fprintf(w, "FilesAnalyzed: true\n") - } - } else { + if pkg.FilesAnalyzed != nil && !*pkg.FilesAnalyzed { fmt.Fprintf(w, "FilesAnalyzed: false\n") + } else { 
+ fmt.Fprintf(w, "FilesAnalyzed: true\n") } - if pkg.PackageVerificationCode.Value != "" && pkg.FilesAnalyzed == true { + if pkg.PackageVerificationCode.Value != "" { if len(pkg.PackageVerificationCode.ExcludedFiles) == 0 { fmt.Fprintf(w, "PackageVerificationCode: %s\n", pkg.PackageVerificationCode.Value) } else { @@ -69,10 +67,8 @@ func renderPackage2_2(pkg *spdx.Package2_2, w io.Writer) error { if pkg.PackageLicenseConcluded != "" { fmt.Fprintf(w, "PackageLicenseConcluded: %s\n", pkg.PackageLicenseConcluded) } - if pkg.FilesAnalyzed == true { - for _, s := range pkg.PackageLicenseInfoFromFiles { - fmt.Fprintf(w, "PackageLicenseInfoFromFiles: %s\n", s) - } + for _, s := range pkg.PackageLicenseInfoFromFiles { + fmt.Fprintf(w, "PackageLicenseInfoFromFiles: %s\n", s) } if pkg.PackageLicenseDeclared != "" { fmt.Fprintf(w, "PackageLicenseDeclared: %s\n", pkg.PackageLicenseDeclared) diff --git a/tvsaver/saver2v2/save_package_test.go b/tvsaver/saver2v2/save_package_test.go index f9960f0a..2850661e 100644 --- a/tvsaver/saver2v2/save_package_test.go +++ b/tvsaver/saver2v2/save_package_test.go @@ -9,6 +9,9 @@ import ( "github.com/spdx/tools-golang/spdx" ) +var truthy = true +var falsy = false + // ===== Package section Saver tests ===== func TestSaver2_2PackageSavesTextCombo1(t *testing.T) { // include package external refs @@ -49,15 +52,14 @@ multi-line external ref comment`, } pkg := &spdx.Package2_2{ - PackageName: "p1", - PackageSPDXIdentifier: spdx.ElementID("p1"), - PackageVersion: "0.1.0", - PackageFileName: "p1-0.1.0-master.tar.gz", - PackageSupplier: &spdx.Supplier{SupplierType: "Organization", Supplier: "John Doe, Inc."}, - PackageOriginator: &spdx.Originator{Originator: "John Doe", OriginatorType: "Person"}, - PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", - FilesAnalyzed: true, - IsFilesAnalyzedTagPresent: true, + PackageName: "p1", + PackageSPDXIdentifier: spdx.ElementID("p1"), + PackageVersion: "0.1.0", + PackageFileName: "p1-0.1.0-master.tar.gz", + PackageSupplier: &spdx.Supplier{SupplierType: "Organization", Supplier: "John Doe, Inc."}, + PackageOriginator: &spdx.Originator{Originator: "John Doe", OriginatorType: "Person"}, + PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", + FilesAnalyzed: &truthy, PackageVerificationCode: spdx.PackageVerificationCode{ Value: "0123456789abcdef0123456789abcdef01234567", ExcludedFiles: []string{"p1-0.1.0.spdx"}, @@ -156,16 +158,15 @@ func TestSaver2_2PackageSavesTextCombo2(t *testing.T) { // PackageVerificationCodeExcludedFile is empty pkg := &spdx.Package2_2{ - PackageName: "p1", - PackageSPDXIdentifier: spdx.ElementID("p1"), - PackageVersion: "0.1.0", - PackageFileName: "p1-0.1.0-master.tar.gz", - PackageSupplier: &spdx.Supplier{Supplier: "NOASSERTION"}, - PackageOriginator: &spdx.Originator{OriginatorType: "Organization", Originator: "John Doe, Inc."}, - PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", - FilesAnalyzed: true, - IsFilesAnalyzedTagPresent: false, - PackageVerificationCode: spdx.PackageVerificationCode{Value: "0123456789abcdef0123456789abcdef01234567"}, + PackageName: "p1", + PackageSPDXIdentifier: spdx.ElementID("p1"), + PackageVersion: "0.1.0", + PackageFileName: "p1-0.1.0-master.tar.gz", + PackageSupplier: &spdx.Supplier{Supplier: "NOASSERTION"}, + PackageOriginator: &spdx.Originator{OriginatorType: "Organization", Originator: "John Doe, Inc."}, + PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", + FilesAnalyzed: &truthy, + 
PackageVerificationCode: spdx.PackageVerificationCode{Value: "0123456789abcdef0123456789abcdef01234567"}, PackageChecksums: []spdx.Checksum{ { Algorithm: spdx.SHA1, @@ -205,6 +206,7 @@ PackageFileName: p1-0.1.0-master.tar.gz PackageSupplier: NOASSERTION PackageOriginator: Organization: John Doe, Inc. PackageDownloadLocation: http://example.com/p1/p1-0.1.0-master.tar.gz +FilesAnalyzed: true PackageVerificationCode: 0123456789abcdef0123456789abcdef01234567 PackageChecksum: SHA1: 85ed0817af83a24ad8da68c2b5094de69833983c PackageChecksum: SHA256: 11b6d3ee554eedf79299905a98f9b9a04e498210b59f15094c916c91d150efcd @@ -247,15 +249,14 @@ func TestSaver2_2PackageSavesTextCombo3(t *testing.T) { // three PackageAttributionTexts, one with multi-line text pkg := &spdx.Package2_2{ - PackageName: "p1", - PackageSPDXIdentifier: spdx.ElementID("p1"), - PackageVersion: "0.1.0", - PackageFileName: "p1-0.1.0-master.tar.gz", - PackageSupplier: &spdx.Supplier{Supplier: "John Doe", SupplierType: "Person"}, - PackageOriginator: &spdx.Originator{Originator: "NOASSERTION"}, - PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", - FilesAnalyzed: false, - IsFilesAnalyzedTagPresent: true, + PackageName: "p1", + PackageSPDXIdentifier: spdx.ElementID("p1"), + PackageVersion: "0.1.0", + PackageFileName: "p1-0.1.0-master.tar.gz", + PackageSupplier: &spdx.Supplier{Supplier: "John Doe", SupplierType: "Person"}, + PackageOriginator: &spdx.Originator{Originator: "NOASSERTION"}, + PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", + FilesAnalyzed: &falsy, // NOTE that verification code MUST be omitted from output // since FilesAnalyzed is false PackageVerificationCode: spdx.PackageVerificationCode{Value: "0123456789abcdef0123456789abcdef01234567"}, @@ -306,12 +307,16 @@ PackageSupplier: Person: John Doe PackageOriginator: NOASSERTION PackageDownloadLocation: http://example.com/p1/p1-0.1.0-master.tar.gz FilesAnalyzed: false +PackageVerificationCode: 0123456789abcdef0123456789abcdef01234567 PackageChecksum: SHA1: 85ed0817af83a24ad8da68c2b5094de69833983c PackageChecksum: SHA256: 11b6d3ee554eedf79299905a98f9b9a04e498210b59f15094c916c91d150efcd PackageChecksum: MD5: 624c1abb3664f4b35547e7c73864ad24 PackageHomePage: http://example.com/p1 PackageSourceInfo: this is a source comment PackageLicenseConcluded: GPL-2.0-or-later +PackageLicenseInfoFromFiles: Apache-1.1 +PackageLicenseInfoFromFiles: Apache-2.0 +PackageLicenseInfoFromFiles: GPL-2.0-or-later PackageLicenseDeclared: Apache-2.0 OR GPL-2.0-or-later PackageLicenseComments: this is a license comment(s) PackageCopyrightText: Copyright (c) John Doe, Inc. 
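Aside (illustrative sketch, not part of this patch): the renderPackage2_2 hunk shown earlier implies a simple truth table for the FilesAnalyzed pointer, which the expected test outputs above rely on. A standalone sketch of that decision; the helper renderedFilesAnalyzed is invented for the example.

package main

import "fmt"

// renderedFilesAnalyzed matches the updated saver logic: emit "false" only
// when the pointer is non-nil and false, otherwise emit "true".
func renderedFilesAnalyzed(fa *bool) string {
	if fa != nil && !*fa {
		return "FilesAnalyzed: false"
	}
	return "FilesAnalyzed: true"
}

func main() {
	t, f := true, false
	for _, fa := range []*bool{nil, &t, &f} {
		fmt.Println(renderedFilesAnalyzed(fa))
	}
}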
@@ -341,11 +346,10 @@ which goes across two lines func TestSaver2_2PackageSaveOmitsOptionalFieldsIfEmpty(t *testing.T) { pkg := &spdx.Package2_2{ - PackageName: "p1", - PackageSPDXIdentifier: spdx.ElementID("p1"), - PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", - FilesAnalyzed: false, - IsFilesAnalyzedTagPresent: true, + PackageName: "p1", + PackageSPDXIdentifier: spdx.ElementID("p1"), + PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", + FilesAnalyzed: &falsy, // NOTE that verification code MUST be omitted from output, // even if present in model, since FilesAnalyzed is false PackageLicenseConcluded: "GPL-2.0-or-later", @@ -366,6 +370,9 @@ SPDXID: SPDXRef-p1 PackageDownloadLocation: http://example.com/p1/p1-0.1.0-master.tar.gz FilesAnalyzed: false PackageLicenseConcluded: GPL-2.0-or-later +PackageLicenseInfoFromFiles: Apache-1.1 +PackageLicenseInfoFromFiles: Apache-2.0 +PackageLicenseInfoFromFiles: GPL-2.0-or-later PackageLicenseDeclared: Apache-2.0 OR GPL-2.0-or-later PackageCopyrightText: Copyright (c) John Doe, Inc. @@ -415,11 +422,10 @@ func TestSaver2_2PackageSavesFilesIfPresent(t *testing.T) { } pkg := &spdx.Package2_2{ - PackageName: "p1", - PackageSPDXIdentifier: spdx.ElementID("p1"), - PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", - FilesAnalyzed: false, - IsFilesAnalyzedTagPresent: true, + PackageName: "p1", + PackageSPDXIdentifier: spdx.ElementID("p1"), + PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", + FilesAnalyzed: &falsy, // NOTE that verification code MUST be omitted from output, // even if present in model, since FilesAnalyzed is false PackageLicenseConcluded: "GPL-2.0-or-later", @@ -444,6 +450,9 @@ SPDXID: SPDXRef-p1 PackageDownloadLocation: http://example.com/p1/p1-0.1.0-master.tar.gz FilesAnalyzed: false PackageLicenseConcluded: GPL-2.0-or-later +PackageLicenseInfoFromFiles: Apache-1.1 +PackageLicenseInfoFromFiles: Apache-2.0 +PackageLicenseInfoFromFiles: GPL-2.0-or-later PackageLicenseDeclared: Apache-2.0 OR GPL-2.0-or-later PackageCopyrightText: Copyright (c) John Doe, Inc. @@ -479,12 +488,11 @@ FileCopyrightText: Copyright (c) John Doe func TestSaver2_2PackageWrapsMultiLine(t *testing.T) { pkg := &spdx.Package2_2{ - PackageName: "p1", - PackageSPDXIdentifier: spdx.ElementID("p1"), - PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", - FilesAnalyzed: false, - IsFilesAnalyzedTagPresent: true, - PackageLicenseConcluded: "GPL-2.0-or-later", + PackageName: "p1", + PackageSPDXIdentifier: spdx.ElementID("p1"), + PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", + FilesAnalyzed: &falsy, + PackageLicenseConcluded: "GPL-2.0-or-later", PackageLicenseInfoFromFiles: []string{ "Apache-1.1", "Apache-2.0", @@ -501,6 +509,9 @@ SPDXID: SPDXRef-p1 PackageDownloadLocation: http://example.com/p1/p1-0.1.0-master.tar.gz FilesAnalyzed: false PackageLicenseConcluded: GPL-2.0-or-later +PackageLicenseInfoFromFiles: Apache-1.1 +PackageLicenseInfoFromFiles: Apache-2.0 +PackageLicenseInfoFromFiles: GPL-2.0-or-later PackageLicenseDeclared: Apache-2.0 OR GPL-2.0-or-later PackageCopyrightText: Copyright (c) John Doe, Inc. 
Copyright Jane Doe diff --git a/tvsaver/saver2v2/save_relationship.go b/tvsaver/saver2v2/save_relationship.go index 4bd12ddb..26a11ba1 100644 --- a/tvsaver/saver2v2/save_relationship.go +++ b/tvsaver/saver2v2/save_relationship.go @@ -10,8 +10,8 @@ import ( ) func renderRelationship2_2(rln *spdx.Relationship2_2, w io.Writer) error { - rlnAStr := spdx.RenderDocElementID(rln.RefA) - rlnBStr := spdx.RenderDocElementID(rln.RefB) + rlnAStr := rln.RefA.String() + rlnBStr := rln.RefB.String() if rlnAStr != "SPDXRef-" && rlnBStr != "SPDXRef-" && rln.Relationship != "" { fmt.Fprintf(w, "Relationship: %s %s %s\n", rlnAStr, rln.Relationship, rlnBStr) } diff --git a/tvsaver/saver2v2/save_snippet.go b/tvsaver/saver2v2/save_snippet.go index 4f740982..ae26c576 100644 --- a/tvsaver/saver2v2/save_snippet.go +++ b/tvsaver/saver2v2/save_snippet.go @@ -11,9 +11,9 @@ import ( func renderSnippet2_2(sn *spdx.Snippet2_2, w io.Writer) error { if sn.SnippetSPDXIdentifier != "" { - fmt.Fprintf(w, "SnippetSPDXID: %s\n", spdx.RenderElementID(sn.SnippetSPDXIdentifier)) + fmt.Fprintf(w, "SnippetSPDXID: %s\n", sn.SnippetSPDXIdentifier) } - snFromFileIDStr := spdx.RenderElementID(sn.SnippetFromFileSPDXIdentifier) + snFromFileIDStr := sn.SnippetFromFileSPDXIdentifier.String() if snFromFileIDStr != "" { fmt.Fprintf(w, "SnippetFromFileSPDXID: %s\n", snFromFileIDStr) } diff --git a/yaml/parser.go b/yaml/parser.go new file mode 100644 index 00000000..ca852ddf --- /dev/null +++ b/yaml/parser.go @@ -0,0 +1,29 @@ +// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later + +package spdx_yaml + +import ( + "bytes" + "io" + "sigs.k8s.io/yaml" + + "github.com/spdx/tools-golang/spdx" +) + +// Load2_2 takes in an io.Reader and returns an SPDX document. +func Load2_2(content io.Reader) (*spdx.Document2_2, error) { + // convert io.Reader to a slice of bytes and call the parser + buf := new(bytes.Buffer) + _, err := buf.ReadFrom(content) + if err != nil { + return nil, err + } + + var doc spdx.Document2_2 + err = yaml.Unmarshal(buf.Bytes(), &doc) + if err != nil { + return nil, err + } + + return &doc, nil +} diff --git a/yaml/writer.go b/yaml/writer.go new file mode 100644 index 00000000..edd47936 --- /dev/null +++ b/yaml/writer.go @@ -0,0 +1,25 @@ +// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later + +package spdx_yaml + +import ( + "io" + "sigs.k8s.io/yaml" + + "github.com/spdx/tools-golang/spdx" +) + +// Save2_2 takes an SPDX Document (version 2.2) and an io.Writer, and writes the document to the writer in YAML format. 
+func Save2_2(doc *spdx.Document2_2, w io.Writer) error {
+	buf, err := yaml.Marshal(doc)
+	if err != nil {
+		return err
+	}
+
+	_, err = w.Write(buf)
+	if err != nil {
+		return err
+	}
+
+	return nil
+}
diff --git a/yaml/yaml_test.go b/yaml/yaml_test.go
new file mode 100644
index 00000000..0ccb6cdb
--- /dev/null
+++ b/yaml/yaml_test.go
@@ -0,0 +1,452 @@
+// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later
+
+package spdx_yaml
+
+import (
+	"bytes"
+	"fmt"
+	"github.com/google/go-cmp/cmp"
+	"os"
+	"testing"
+
+	"github.com/spdx/tools-golang/spdx"
+)
+
+func TestLoad2_2(t *testing.T) {
+	file, err := os.Open("../examples/sample-docs/yaml/SPDXYAMLExample-2.2.spdx.yaml")
+	if err != nil {
+		panic(fmt.Errorf("error opening File: %s", err))
+	}
+
+	got, err := Load2_2(file)
+	if err != nil {
+		t.Errorf("yaml.parser.Load2_2() error = %v", err)
+		return
+	}
+
+	// get a copy of the handwritten struct so we don't mutate it on accident
+	handwrittenExample := want
+
+	if !cmp.Equal(handwrittenExample, got) {
+		t.Errorf("got incorrect struct after parsing YAML example")
+		return
+	}
+}
+
+func TestWrite2_2(t *testing.T) {
+	w := &bytes.Buffer{}
+	// get a copy of the handwritten struct so we don't mutate it on accident
+	handwrittenExample := want
+	if err := Save2_2(&handwrittenExample, w); err != nil {
+		t.Errorf("Save2_2() error = %v", err.Error())
+		return
+	}
+
+	// we should be able to parse what the writer wrote, and it should be identical to the original handwritten struct
+	parsedDoc, err := Load2_2(bytes.NewReader(w.Bytes()))
+	if err != nil {
+		t.Errorf("failed to parse written document: %v", err.Error())
+		return
+	}
+
+	if !cmp.Equal(handwrittenExample, parsedDoc) {
+		t.Errorf("got incorrect struct after writing and re-parsing YAML example")
+		return
+	}
+}
+
+var truthy = true
+var falsy = false
+
+// want is a handwritten translation of the official example YAML SPDX v2.2 document into a Go struct.
+// We expect that the result of parsing the official document should be this value.
+// We expect that the result of writing this struct should match the official example document.
+var want = spdx.Document2_2{ + DataLicense: "CC0-1.0", + SPDXVersion: "SPDX-2.2", + SPDXIdentifier: "DOCUMENT", + DocumentName: "SPDX-Tools-v2.0", + DocumentNamespace: "http://spdx.org/spdxdocs/spdx-example-444504E0-4F89-41D3-9A0C-0305E82C3301", + CreationInfo: &spdx.CreationInfo2_2{ + LicenseListVersion: "3.9", + Creators: []spdx.Creator{ + {CreatorType: "Tool", Creator: "LicenseFind-1.0"}, + {CreatorType: "Organization", Creator: "ExampleCodeInspect ()"}, + {CreatorType: "Person", Creator: "Jane Doe ()"}, + }, + Created: "2010-01-29T18:30:22Z", + CreatorComment: "This package has been shipped in source and binary form.\nThe binaries were created with gcc 4.5.1 and expect to link to\ncompatible system run time libraries.", + }, + DocumentComment: "This document was created using SPDX 2.0 using licenses from the web site.", + ExternalDocumentReferences: []spdx.ExternalDocumentRef2_2{ + { + DocumentRefID: spdx.MakeDocElementID("spdx-tool-1.2", ""), + URI: "http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301", + Checksum: spdx.Checksum{ + Algorithm: spdx.SHA1, + Value: "d6a770ba38583ed4bb4525bd96e50461655d2759", + }, + }, + }, + OtherLicenses: []*spdx.OtherLicense2_2{ + { + LicenseIdentifier: "LicenseRef-1", + ExtractedText: "/*\n * (c) Copyright 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP\n * All rights reserved.\n *\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions\n * are met:\n * 1. Redistributions of source code must retain the above copyright\n * notice, this list of conditions and the following disclaimer.\n * 2. Redistributions in binary form must reproduce the above copyright\n * notice, this list of conditions and the following disclaimer in the\n * documentation and/or other materials provided with the distribution.\n * 3. The name of the author may not be used to endorse or promote products\n * derived from this software without specific prior written permission.\n *\n * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR\n * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES\n * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.\n * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,\n * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT\n * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF\n * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n*/", + }, + { + LicenseIdentifier: "LicenseRef-2", + ExtractedText: "This package includes the GRDDL parser developed by Hewlett Packard under the following license:\n� Copyright 2007 Hewlett-Packard Development Company, LP\n\nRedistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: \n\nRedistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. \nRedistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 
\nThe name of the author may not be used to endorse or promote products derived from this software without specific prior written permission. \nTHIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.", + }, + { + LicenseIdentifier: "LicenseRef-4", + ExtractedText: "/*\n * (c) Copyright 2009 University of Bristol\n * All rights reserved.\n *\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions\n * are met:\n * 1. Redistributions of source code must retain the above copyright\n * notice, this list of conditions and the following disclaimer.\n * 2. Redistributions in binary form must reproduce the above copyright\n * notice, this list of conditions and the following disclaimer in the\n * documentation and/or other materials provided with the distribution.\n * 3. The name of the author may not be used to endorse or promote products\n * derived from this software without specific prior written permission.\n *\n * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR\n * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES\n * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.\n * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,\n * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT\n * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF\n * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n*/", + }, + { + LicenseIdentifier: "LicenseRef-Beerware-4.2", + ExtractedText: "\"THE BEER-WARE LICENSE\" (Revision 42):\nphk@FreeBSD.ORG wrote this file. As long as you retain this notice you\ncan do whatever you want with this stuff. If we meet some day, and you think this stuff is worth it, you can buy me a beer in return Poul-Henning Kamp", + LicenseComment: "The beerware license has a couple of other standard variants.", + LicenseName: "Beer-Ware License (Version 42)", + LicenseCrossReferences: []string{"http://people.freebsd.org/~phk/"}, + }, + { + LicenseIdentifier: "LicenseRef-3", + ExtractedText: "The CyberNeko Software License, Version 1.0\n\n \n(C) Copyright 2002-2005, Andy Clark. All rights reserved.\n \nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions\nare met:\n\n1. Redistributions of source code must retain the above copyright\n notice, this list of conditions and the following disclaimer. \n\n2. 
Redistributions in binary form must reproduce the above copyright\n notice, this list of conditions and the following disclaimer in\n the documentation and/or other materials provided with the\n distribution.\n\n3. The end-user documentation included with the redistribution,\n if any, must include the following acknowledgment: \n \"This product includes software developed by Andy Clark.\"\n Alternately, this acknowledgment may appear in the software itself,\n if and wherever such third-party acknowledgments normally appear.\n\n4. The names \"CyberNeko\" and \"NekoHTML\" must not be used to endorse\n or promote products derived from this software without prior \n written permission. For written permission, please contact \n andyc@cyberneko.net.\n\n5. Products derived from this software may not be called \"CyberNeko\",\n nor may \"CyberNeko\" appear in their name, without prior written\n permission of the author.\n\nTHIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED\nWARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES\nOF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\nDISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR OTHER CONTRIBUTORS\nBE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, \nOR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT \nOF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR \nBUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, \nWHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE \nOR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, \nEVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.", + LicenseName: "CyberNeko License", + LicenseCrossReferences: []string{ + "http://people.apache.org/~andyc/neko/LICENSE", + "http://justasample.url.com", + }, + LicenseComment: "This is tye CyperNeko License", + }, + }, + Annotations: []*spdx.Annotation2_2{ + { + Annotator: spdx.Annotator{ + Annotator: "Jane Doe ()", + AnnotatorType: "Person", + }, + AnnotationDate: "2010-01-29T18:30:22Z", + AnnotationType: "OTHER", + AnnotationComment: "Document level annotation", + }, + { + Annotator: spdx.Annotator{ + Annotator: "Joe Reviewer", + AnnotatorType: "Person", + }, + AnnotationDate: "2010-02-10T00:00:00Z", + AnnotationType: "REVIEW", + AnnotationComment: "This is just an example. 
Some of the non-standard licenses look like they are actually BSD 3 clause licenses", + }, + { + Annotator: spdx.Annotator{ + Annotator: "Suzanne Reviewer", + AnnotatorType: "Person", + }, + AnnotationDate: "2011-03-13T00:00:00Z", + AnnotationType: "REVIEW", + AnnotationComment: "Another example reviewer.", + }, + }, + Packages: []*spdx.Package2_2{ + { + PackageName: "glibc", + PackageSPDXIdentifier: "Package", + PackageVersion: "2.11.1", + PackageFileName: "glibc-2.11.1.tar.gz", + PackageSupplier: &spdx.Supplier{ + Supplier: "Jane Doe (jane.doe@example.com)", + SupplierType: "Person", + }, + PackageOriginator: &spdx.Originator{ + Originator: "ExampleCodeInspect (contact@example.com)", + OriginatorType: "Organization", + }, + PackageDownloadLocation: "http://ftp.gnu.org/gnu/glibc/glibc-ports-2.15.tar.gz", + FilesAnalyzed: &truthy, + PackageVerificationCode: spdx.PackageVerificationCode{ + Value: "d6a770ba38583ed4bb4525bd96e50461655d2758", + ExcludedFiles: []string{"./package.spdx"}, + }, + PackageChecksums: []spdx.Checksum{ + { + Algorithm: "MD5", + Value: "624c1abb3664f4b35547e7c73864ad24", + }, + { + Algorithm: "SHA1", + Value: "85ed0817af83a24ad8da68c2b5094de69833983c", + }, + { + Algorithm: "SHA256", + Value: "11b6d3ee554eedf79299905a98f9b9a04e498210b59f15094c916c91d150efcd", + }, + }, + PackageHomePage: "http://ftp.gnu.org/gnu/glibc", + PackageSourceInfo: "uses glibc-2_11-branch from git://sourceware.org/git/glibc.git.", + PackageLicenseConcluded: "(LGPL-2.0-only OR LicenseRef-3)", + PackageLicenseInfoFromFiles: []string{ + "GPL-2.0-only", + "LicenseRef-2", + "LicenseRef-1", + }, + PackageLicenseDeclared: "(LGPL-2.0-only AND LicenseRef-3)", + PackageLicenseComments: "The license for this project changed with the release of version x.y. The version of the project included here post-dates the license change.", + PackageCopyrightText: "Copyright 2008-2010 John Smith", + PackageSummary: "GNU C library.", + PackageDescription: "The GNU C Library defines functions that are specified by the ISO C standard, as well as additional features specific to POSIX and other derivatives of the Unix operating system, and extensions specific to GNU systems.", + PackageComment: "", + PackageExternalReferences: []*spdx.PackageExternalReference2_2{ + { + Category: "SECURITY", + RefType: "cpe23Type", + Locator: "cpe:2.3:a:pivotal_software:spring_framework:4.1.0:*:*:*:*:*:*:*", + }, + { + Category: "OTHER", + RefType: "http://spdx.org/spdxdocs/spdx-example-444504E0-4F89-41D3-9A0C-0305E82C3301#LocationRef-acmeforge", + Locator: "acmecorp/acmenator/4.1.3-alpha", + ExternalRefComment: "This is the external ref for Acme", + }, + }, + PackageAttributionTexts: []string{ + "The GNU C Library is free software. See the file COPYING.LIB for copying conditions, and LICENSES for notices about a few contributions that require these additional notices to be distributed. 
License copyright years may be listed using range notation, e.g., 1996-2015, indicating that every year in the range, inclusive, is a copyrightable year that would otherwise be listed individually.", + }, + Files: nil, + Annotations: []spdx.Annotation2_2{ + { + Annotator: spdx.Annotator{ + Annotator: "Package Commenter", + AnnotatorType: "Person", + }, + AnnotationDate: "2011-01-29T18:30:22Z", + AnnotationType: "OTHER", + AnnotationComment: "Package level annotation", + }, + }, + }, + { + PackageSPDXIdentifier: "fromDoap-1", + PackageCopyrightText: "NOASSERTION", + PackageDownloadLocation: "NOASSERTION", + FilesAnalyzed: &falsy, + PackageHomePage: "http://commons.apache.org/proper/commons-lang/", + PackageLicenseConcluded: "NOASSERTION", + PackageLicenseDeclared: "NOASSERTION", + PackageName: "Apache Commons Lang", + }, + { + PackageName: "Jena", + PackageSPDXIdentifier: "fromDoap-0", + PackageCopyrightText: "NOASSERTION", + PackageDownloadLocation: "https://search.maven.org/remotecontent?filepath=org/apache/jena/apache-jena/3.12.0/apache-jena-3.12.0.tar.gz", + PackageExternalReferences: []*spdx.PackageExternalReference2_2{ + { + Category: "PACKAGE_MANAGER", + RefType: "purl", + Locator: "pkg:maven/org.apache.jena/apache-jena@3.12.0", + }, + }, + FilesAnalyzed: &falsy, + PackageHomePage: "http://www.openjena.org/", + PackageLicenseConcluded: "NOASSERTION", + PackageLicenseDeclared: "NOASSERTION", + PackageVersion: "3.12.0", + }, + { + PackageSPDXIdentifier: "Saxon", + PackageChecksums: []spdx.Checksum{ + { + Algorithm: "SHA1", + Value: "85ed0817af83a24ad8da68c2b5094de69833983c", + }, + }, + PackageCopyrightText: "Copyright Saxonica Ltd", + PackageDescription: "The Saxon package is a collection of tools for processing XML documents.", + PackageDownloadLocation: "https://sourceforge.net/projects/saxon/files/Saxon-B/8.8.0.7/saxonb8-8-0-7j.zip/download", + FilesAnalyzed: &falsy, + PackageHomePage: "http://saxon.sourceforge.net/", + PackageLicenseComments: "Other versions available for a commercial license", + PackageLicenseConcluded: "MPL-1.0", + PackageLicenseDeclared: "MPL-1.0", + PackageName: "Saxon", + PackageFileName: "saxonB-8.8.zip", + PackageVersion: "8.8", + }, + }, + Files: []*spdx.File2_2{ + { + FileName: "./src/org/spdx/parser/DOAPProject.java", + FileSPDXIdentifier: "DoapSource", + FileTypes: []string{ + "SOURCE", + }, + Checksums: []spdx.Checksum{ + { + Algorithm: "SHA1", + Value: "2fd4e1c67a2d28fced849ee1bb76e7391b93eb12", + }, + }, + LicenseConcluded: "Apache-2.0", + LicenseInfoInFiles: []string{ + "Apache-2.0", + }, + FileCopyrightText: "Copyright 2010, 2011 Source Auditor Inc.", + FileContributors: []string{ + "Protecode Inc.", + "SPDX Technical Team Members", + "Open Logic Inc.", + "Source Auditor Inc.", + "Black Duck Software In.c", + }, + }, + { + FileSPDXIdentifier: "CommonsLangSrc", + Checksums: []spdx.Checksum{ + { + Algorithm: "SHA1", + Value: "c2b4e1c67a2d28fced849ee1bb76e7391b93f125", + }, + }, + FileComment: "This file is used by Jena", + FileCopyrightText: "Copyright 2001-2011 The Apache Software Foundation", + FileContributors: []string{"Apache Software Foundation"}, + FileName: "./lib-source/commons-lang3-3.1-sources.jar", + FileTypes: []string{"ARCHIVE"}, + LicenseConcluded: "Apache-2.0", + LicenseInfoInFiles: []string{"Apache-2.0"}, + FileNotice: "Apache Commons Lang\nCopyright 2001-2011 The Apache Software Foundation\n\nThis product includes software developed by\nThe Apache Software Foundation (http://www.apache.org/).\n\nThis product includes software from 
the Spring Framework,\nunder the Apache License 2.0 (see: StringUtils.containsWhitespace())", + }, + { + FileSPDXIdentifier: "JenaLib", + Checksums: []spdx.Checksum{ + { + Algorithm: "SHA1", + Value: "3ab4e1c67a2d28fced849ee1bb76e7391b93f125", + }, + }, + FileComment: "This file belongs to Jena", + FileCopyrightText: "(c) Copyright 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP", + FileContributors: []string{"Apache Software Foundation", "Hewlett Packard Inc."}, + FileName: "./lib-source/jena-2.6.3-sources.jar", + FileTypes: []string{"ARCHIVE"}, + LicenseComments: "This license is used by Jena", + LicenseConcluded: "LicenseRef-1", + LicenseInfoInFiles: []string{"LicenseRef-1"}, + }, + { + FileSPDXIdentifier: "File", + Annotations: []spdx.Annotation2_2{ + { + Annotator: spdx.Annotator{ + Annotator: "File Commenter", + AnnotatorType: "Person", + }, + AnnotationDate: "2011-01-29T18:30:22Z", + AnnotationType: "OTHER", + AnnotationComment: "File level annotation", + }, + }, + Checksums: []spdx.Checksum{ + { + Algorithm: "SHA1", + Value: "d6a770ba38583ed4bb4525bd96e50461655d2758", + }, + { + Algorithm: "MD5", + Value: "624c1abb3664f4b35547e7c73864ad24", + }, + }, + FileComment: "The concluded license was taken from the package level that the file was included in.\nThis information was found in the COPYING.txt file in the xyz directory.", + FileCopyrightText: "Copyright 2008-2010 John Smith", + FileContributors: []string{"The Regents of the University of California", "Modified by Paul Mundt lethal@linux-sh.org", "IBM Corporation"}, + FileName: "./package/foo.c", + FileTypes: []string{"SOURCE"}, + LicenseComments: "The concluded license was taken from the package level that the file was included in.", + LicenseConcluded: "(LGPL-2.0-only OR LicenseRef-2)", + LicenseInfoInFiles: []string{"GPL-2.0-only", "LicenseRef-2"}, + FileNotice: "Copyright (c) 2001 Aaron Lehmann aaroni@vitelus.com\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: \nThe above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED “AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.", + }, + }, + Snippets: []spdx.Snippet2_2{ + { + SnippetSPDXIdentifier: "Snippet", + SnippetFromFileSPDXIdentifier: "DoapSource", + Ranges: []spdx.SnippetRange{ + { + StartPointer: spdx.SnippetRangePointer{ + Offset: 310, + FileSPDXIdentifier: "DoapSource", + }, + EndPointer: spdx.SnippetRangePointer{ + Offset: 420, + FileSPDXIdentifier: "DoapSource", + }, + }, + { + StartPointer: spdx.SnippetRangePointer{ + LineNumber: 5, + FileSPDXIdentifier: "DoapSource", + }, + EndPointer: spdx.SnippetRangePointer{ + LineNumber: 23, + FileSPDXIdentifier: "DoapSource", + }, + }, + }, + SnippetLicenseConcluded: "GPL-2.0-only", + LicenseInfoInSnippet: []string{"GPL-2.0-only"}, + SnippetLicenseComments: "The concluded license was taken from package xyz, from which the snippet was copied into the current file. The concluded license information was found in the COPYING.txt file in package xyz.", + SnippetCopyrightText: "Copyright 2008-2010 John Smith", + SnippetComment: "This snippet was identified as significant and highlighted in this Apache-2.0 file, when a commercial scanner identified it as being derived from file foo.c in package xyz which is licensed under GPL-2.0.", + SnippetName: "from linux kernel", + }, + }, + Relationships: []*spdx.Relationship2_2{ + { + RefA: spdx.MakeDocElementID("", "DOCUMENT"), + RefB: spdx.MakeDocElementID("", "Package"), + Relationship: "CONTAINS", + }, + { + RefA: spdx.MakeDocElementID("", "DOCUMENT"), + RefB: spdx.MakeDocElementID("spdx-tool-1.2", "ToolsElement"), + Relationship: "COPY_OF", + }, + { + RefA: spdx.MakeDocElementID("", "DOCUMENT"), + RefB: spdx.MakeDocElementID("", "File"), + Relationship: "DESCRIBES", + }, + { + RefA: spdx.MakeDocElementID("", "DOCUMENT"), + RefB: spdx.MakeDocElementID("", "Package"), + Relationship: "DESCRIBES", + }, + { + RefA: spdx.MakeDocElementID("", "Package"), + RefB: spdx.MakeDocElementID("", "JenaLib"), + Relationship: "CONTAINS", + }, + { + RefA: spdx.MakeDocElementID("", "Package"), + RefB: spdx.MakeDocElementID("", "Saxon"), + Relationship: "DYNAMIC_LINK", + }, + { + RefA: spdx.MakeDocElementID("", "CommonsLangSrc"), + RefB: spdx.MakeDocElementSpecial("NOASSERTION"), + Relationship: "GENERATED_FROM", + }, + { + RefA: spdx.MakeDocElementID("", "JenaLib"), + RefB: spdx.MakeDocElementID("", "Package"), + Relationship: "CONTAINS", + }, + { + RefA: spdx.MakeDocElementID("", "File"), + RefB: spdx.MakeDocElementID("", "fromDoap-0"), + Relationship: "GENERATED_FROM", + }, + }, +}
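
Editor's sketch (not part of the diff above): a fixture like `want` is normally exercised by parsing a sample document and comparing the result field-by-field. One way to do that uses github.com/google/go-cmp, which go.mod already requires. The sketch assumes it sits in the same test file as `want`, with "testing", the spdx package, and "github.com/google/go-cmp/cmp" imported; it does not show the yaml package's actual loader, since that function's name does not appear in this diff. The test name below is hypothetical.

func TestWantComparesWithGoCmp(t *testing.T) {
	// Copy the fixture and flip one pointer-typed field in a copied package,
	// to show that cmp.Diff follows pointers such as FilesAnalyzed (*bool).
	other := want
	other.Packages = append([]*spdx.Package2_2{}, want.Packages...)
	pkgCopy := *want.Packages[0]
	notAnalyzed := false
	pkgCopy.FilesAnalyzed = &notAnalyzed
	other.Packages[0] = &pkgCopy

	// cmp.Diff reports every mismatched field in a readable form; an empty
	// string means the two documents are deeply equal.
	if diff := cmp.Diff(want, other); diff == "" {
		t.Error("expected cmp.Diff to report the changed FilesAnalyzed value")
	}
}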