From e752b0136fcabb27866b8aeeb40791ed31006ad9 Mon Sep 17 00:00:00 2001 From: Aaron Prindle Date: Sun, 26 Nov 2023 07:42:48 +0000 Subject: [PATCH] feat: update chain's controllers to use v1 Tekton APIs natively while converting to v1beta1 to keep formats backwards compatible --- .github/workflows/kind-e2e.yaml | 1 - cmd/controller/main.go | 2 +- examples/kaniko/gcp/kaniko.yaml | 2 +- examples/kaniko/gcp/taskrun.yaml | 2 +- examples/kaniko/kaniko.yaml | 2 +- examples/kaniko/taskrun.yaml | 2 +- .../pipelineruns/pipeline-output-image.yaml | 2 +- .../releases/v0.3.0-build-chains-taskrun.yaml | 2 +- examples/taskruns/task-output-image.yaml | 2 +- go.mod | 3 +- go.sum | 2 + pkg/artifacts/signable.go | 123 ++-- pkg/artifacts/signable_test.go | 214 +++--- pkg/artifacts/structured.go | 9 +- pkg/chains/annotations_test.go | 46 +- pkg/chains/formats/all/all.go | 1 + pkg/chains/formats/format.go | 2 + pkg/chains/formats/slsa/attest/attest.go | 96 ++- pkg/chains/formats/slsa/extract/extract.go | 90 ++- .../formats/slsa/extract/extract_test.go | 64 +- .../formats/slsa/extract/v1beta1/extract.go | 192 ++++++ .../slsa/extract/v1beta1/extract_test.go | 365 ++++++++++ .../slsa/internal/artifact/v1beta1/append.go | 128 ++++ .../internal/artifact/v1beta1/append_test.go | 320 +++++++++ .../slsa/internal/material/material.go | 46 +- .../slsa/internal/material/material_test.go | 566 +++++++-------- .../internal/material/v1beta1/material.go | 328 +++++++++ .../material/v1beta1/material_test.go | 624 +++++++++++++++++ .../slsa/testdata/pipelinerun-childrefs.json | 10 +- .../formats/slsa/testdata/pipelinerun1.json | 10 +- .../pipelinerun_structured_results.json | 10 +- .../testdata/taskrun-multiple-subjects.json | 2 +- .../formats/slsa/testdata/taskrun1.json | 2 +- .../formats/slsa/testdata/taskrun2.json | 2 +- .../v1beta1/pipelinerun-childrefs.json | 129 ++++ .../slsa/testdata/v1beta1/pipelinerun1.json | 306 +++++++++ .../pipelinerun_structured_results.json | 264 +++++++ 
.../v1beta1/taskrun-multiple-subjects.json | 56 ++ .../slsa/testdata/v1beta1/taskrun1.json | 136 ++++ .../slsa/testdata/v1beta1/taskrun2.json | 105 +++ .../slsa/testdata/v2alpha3/pipelinerun1.json | 312 +++++++++ .../pipelinerun_structured_results.json | 270 ++++++++ .../v2alpha3/taskrun-multiple-subjects.json | 56 ++ .../slsa/testdata/v2alpha3/taskrun1.json | 141 ++++ .../slsa/testdata/v2alpha3/taskrun2.json | 106 +++ pkg/chains/formats/slsa/v1/intotoite6.go | 29 +- pkg/chains/formats/slsa/v1/intotoite6_test.go | 28 +- .../slsa/v1/pipelinerun/pipelinerun.go | 18 +- .../slsa/v1/pipelinerun/provenance_test.go | 22 +- .../formats/slsa/v1/taskrun/buildconfig.go | 6 +- .../slsa/v1/taskrun/buildconfig_test.go | 4 +- .../slsa/v1/taskrun/provenance_test.go | 40 +- pkg/chains/formats/slsa/v1/taskrun/taskrun.go | 12 +- pkg/chains/formats/slsa/v2alpha1/README.md | 6 +- pkg/chains/formats/slsa/v2alpha1/slsav2.go | 12 +- .../formats/slsa/v2alpha1/slsav2_test.go | 18 +- .../formats/slsa/v2alpha1/taskrun/taskrun.go | 14 +- .../slsa/v2alpha1/taskrun/taskrun_test.go | 28 +- .../external_parameters.go | 13 +- .../external_parameters_test.go | 29 +- .../internal_parameters.go | 9 +- .../internal_parameters_test.go | 8 +- .../internal/pipelinerun/pipelinerun.go | 8 +- .../internal/pipelinerun/pipelinerun_test.go | 20 +- .../resolved_dependencies.go | 30 +- .../resolved_dependencies_test.go | 20 +- .../slsa/v2alpha2/internal/taskrun/taskrun.go | 8 +- .../v2alpha2/internal/taskrun/taskrun_test.go | 20 +- pkg/chains/formats/slsa/v2alpha2/slsav2.go | 29 +- .../formats/slsa/v2alpha2/slsav2_test.go | 22 +- .../internal/build_types/build_types.go | 22 + .../external_parameters.go | 59 ++ .../external_parameters_test.go | 136 ++++ .../internal_parameters.go | 41 ++ .../internal_parameters_test.go | 60 ++ .../internal/pipelinerun/pipelinerun.go | 137 ++++ .../internal/pipelinerun/pipelinerun_test.go | 360 ++++++++++ .../resolved_dependencies.go | 295 ++++++++ .../resolved_dependencies_test.go 
| 647 ++++++++++++++++++ .../slsa/v2alpha3/internal/taskrun/taskrun.go | 133 ++++ .../v2alpha3/internal/taskrun/taskrun_test.go | 332 +++++++++ pkg/chains/formats/slsa/v2alpha3/slsav2.go | 70 ++ .../formats/slsa/v2alpha3/slsav2_test.go | 503 ++++++++++++++ pkg/chains/objects/objects.go | 530 ++++++++++++-- pkg/chains/objects/objects_test.go | 190 ++--- pkg/chains/rekor_test.go | 10 +- pkg/chains/signing_test.go | 14 +- pkg/chains/storage/docdb/docdb_test.go | 6 +- pkg/chains/storage/gcs/gcs.go | 40 +- pkg/chains/storage/gcs/gcs_test.go | 18 +- pkg/chains/storage/grafeas/grafeas_test.go | 62 +- pkg/chains/storage/oci/oci_test.go | 22 +- pkg/chains/storage/pubsub/pubsub_test.go | 12 +- pkg/chains/storage/tekton/tekton_test.go | 22 +- pkg/chains/verifier.go | 8 +- pkg/config/config.go | 4 +- pkg/internal/objectloader/objectloader.go | 33 +- pkg/reconciler/pipelinerun/controller.go | 70 +- pkg/reconciler/pipelinerun/pipelinerun.go | 35 +- .../pipelinerun/pipelinerun_test.go | 171 ++--- pkg/reconciler/taskrun/controller.go | 55 +- pkg/reconciler/taskrun/taskrun.go | 16 +- pkg/reconciler/taskrun/taskrun_test.go | 37 +- pkg/test/tekton/tekton.go | 54 +- test/clients.go | 2 +- test/e2e_test.go | 93 ++- test/examples_test.go | 132 ++-- test/kaniko.go | 60 +- test/test_utils.go | 28 +- .../slsa/v2alpha2/pipeline-output-image.json | 4 +- .../slsa/v2alpha3/pipeline-output-image.json | 126 ++++ .../slsa/v2alpha3/task-output-image.json | 74 ++ test/testdata/type-hinting/taskrun.json | 33 + .../pipeline/v1beta1/pipelinerun/fake/fake.go | 40 -- .../v1beta1/pipelinerun/pipelinerun.go | 52 -- .../pipeline/v1beta1/taskrun/fake/fake.go | 40 -- .../{v1beta1 => v1}/pipelinerun/controller.go | 2 +- .../{v1beta1 => v1}/pipelinerun/reconciler.go | 56 +- .../{v1beta1 => v1}/pipelinerun/state.go | 4 +- .../pipeline/v1/taskrun/controller.go | 170 +++++ .../pipeline/v1/taskrun/reconciler.go | 432 ++++++++++++ .../reconciler/pipeline/v1/taskrun/state.go | 97 +++ vendor/modules.txt | 8 +- 123 
files changed, 9728 insertions(+), 1535 deletions(-) create mode 100644 pkg/chains/formats/slsa/extract/v1beta1/extract.go create mode 100644 pkg/chains/formats/slsa/extract/v1beta1/extract_test.go create mode 100644 pkg/chains/formats/slsa/internal/artifact/v1beta1/append.go create mode 100644 pkg/chains/formats/slsa/internal/artifact/v1beta1/append_test.go create mode 100644 pkg/chains/formats/slsa/internal/material/v1beta1/material.go create mode 100644 pkg/chains/formats/slsa/internal/material/v1beta1/material_test.go create mode 100644 pkg/chains/formats/slsa/testdata/v1beta1/pipelinerun-childrefs.json create mode 100644 pkg/chains/formats/slsa/testdata/v1beta1/pipelinerun1.json create mode 100644 pkg/chains/formats/slsa/testdata/v1beta1/pipelinerun_structured_results.json create mode 100644 pkg/chains/formats/slsa/testdata/v1beta1/taskrun-multiple-subjects.json create mode 100644 pkg/chains/formats/slsa/testdata/v1beta1/taskrun1.json create mode 100644 pkg/chains/formats/slsa/testdata/v1beta1/taskrun2.json create mode 100644 pkg/chains/formats/slsa/testdata/v2alpha3/pipelinerun1.json create mode 100644 pkg/chains/formats/slsa/testdata/v2alpha3/pipelinerun_structured_results.json create mode 100644 pkg/chains/formats/slsa/testdata/v2alpha3/taskrun-multiple-subjects.json create mode 100644 pkg/chains/formats/slsa/testdata/v2alpha3/taskrun1.json create mode 100644 pkg/chains/formats/slsa/testdata/v2alpha3/taskrun2.json create mode 100644 pkg/chains/formats/slsa/v2alpha3/internal/build_types/build_types.go create mode 100644 pkg/chains/formats/slsa/v2alpha3/internal/external_parameters/external_parameters.go create mode 100644 pkg/chains/formats/slsa/v2alpha3/internal/external_parameters/external_parameters_test.go create mode 100644 pkg/chains/formats/slsa/v2alpha3/internal/internal_parameters/internal_parameters.go create mode 100644 pkg/chains/formats/slsa/v2alpha3/internal/internal_parameters/internal_parameters_test.go create mode 100644 
pkg/chains/formats/slsa/v2alpha3/internal/pipelinerun/pipelinerun.go create mode 100644 pkg/chains/formats/slsa/v2alpha3/internal/pipelinerun/pipelinerun_test.go create mode 100644 pkg/chains/formats/slsa/v2alpha3/internal/resolved_dependencies/resolved_dependencies.go create mode 100644 pkg/chains/formats/slsa/v2alpha3/internal/resolved_dependencies/resolved_dependencies_test.go create mode 100644 pkg/chains/formats/slsa/v2alpha3/internal/taskrun/taskrun.go create mode 100644 pkg/chains/formats/slsa/v2alpha3/internal/taskrun/taskrun_test.go create mode 100644 pkg/chains/formats/slsa/v2alpha3/slsav2.go create mode 100644 pkg/chains/formats/slsa/v2alpha3/slsav2_test.go create mode 100644 test/testdata/slsa/v2alpha3/pipeline-output-image.json create mode 100644 test/testdata/slsa/v2alpha3/task-output-image.json create mode 100644 test/testdata/type-hinting/taskrun.json delete mode 100644 vendor/github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/pipelinerun/fake/fake.go delete mode 100644 vendor/github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/pipelinerun/pipelinerun.go delete mode 100644 vendor/github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/taskrun/fake/fake.go rename vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/{v1beta1 => v1}/pipelinerun/controller.go (99%) rename vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/{v1beta1 => v1}/pipelinerun/reconciler.go (86%) rename vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/{v1beta1 => v1}/pipelinerun/state.go (94%) create mode 100644 vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/taskrun/controller.go create mode 100644 vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/taskrun/reconciler.go create mode 100644 
vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/taskrun/state.go diff --git a/.github/workflows/kind-e2e.yaml b/.github/workflows/kind-e2e.yaml index 60bf099f4a..67f5202fe1 100644 --- a/.github/workflows/kind-e2e.yaml +++ b/.github/workflows/kind-e2e.yaml @@ -32,7 +32,6 @@ jobs: fail-fast: false # Keep running if one leg fails. matrix: pipelines-release: - - v0.41.3 # LTS - v0.44.4 # LTS - v0.47.3 # LTS - v0.50.1 # LTS diff --git a/cmd/controller/main.go b/cmd/controller/main.go index d8541a617f..30a046809f 100644 --- a/cmd/controller/main.go +++ b/cmd/controller/main.go @@ -41,5 +41,5 @@ func main() { flag.Parse() ctx := injection.WithNamespaceScope(signals.NewContext(), *namespace) - sharedmain.MainWithContext(ctx, "watcher", taskrun.NewController, pipelinerun.NewController) + sharedmain.MainWithContext(ctx, "watcher", taskrun.NewControllerV1, pipelinerun.NewControllerV1) } diff --git a/examples/kaniko/gcp/kaniko.yaml b/examples/kaniko/gcp/kaniko.yaml index 6e62432557..3e4486ee3d 100644 --- a/examples/kaniko/gcp/kaniko.yaml +++ b/examples/kaniko/gcp/kaniko.yaml @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -apiVersion: tekton.dev/v1beta1 +apiVersion: tekton.dev/v1 kind: Task metadata: name: kaniko-gcp diff --git a/examples/kaniko/gcp/taskrun.yaml b/examples/kaniko/gcp/taskrun.yaml index de8e0e6393..4017995af6 100644 --- a/examples/kaniko/gcp/taskrun.yaml +++ b/examples/kaniko/gcp/taskrun.yaml @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-apiVersion: tekton.dev/v1beta1 +apiVersion: tekton.dev/v1 kind: TaskRun metadata: name: kaniko-gcp diff --git a/examples/kaniko/kaniko.yaml b/examples/kaniko/kaniko.yaml index ad5dc752ef..2606deb58a 100644 --- a/examples/kaniko/kaniko.yaml +++ b/examples/kaniko/kaniko.yaml @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -apiVersion: tekton.dev/v1beta1 +apiVersion: tekton.dev/v1 kind: Task metadata: name: kaniko-chains diff --git a/examples/kaniko/taskrun.yaml b/examples/kaniko/taskrun.yaml index d2785d36f0..cfc8cbc791 100644 --- a/examples/kaniko/taskrun.yaml +++ b/examples/kaniko/taskrun.yaml @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -apiVersion: tekton.dev/v1beta1 +apiVersion: tekton.dev/v1 kind: TaskRun metadata: name: kaniko-run diff --git a/examples/pipelineruns/pipeline-output-image.yaml b/examples/pipelineruns/pipeline-output-image.yaml index a1d8761adb..b0e2efe573 100644 --- a/examples/pipelineruns/pipeline-output-image.yaml +++ b/examples/pipelineruns/pipeline-output-image.yaml @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -apiVersion: tekton.dev/v1beta1 +apiVersion: tekton.dev/v1 kind: PipelineRun metadata: generateName: image-pipelinerun diff --git a/examples/releases/v0.3.0-build-chains-taskrun.yaml b/examples/releases/v0.3.0-build-chains-taskrun.yaml index f80076094b..db9b95bc9a 100644 --- a/examples/releases/v0.3.0-build-chains-taskrun.yaml +++ b/examples/releases/v0.3.0-build-chains-taskrun.yaml @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-apiVersion: tekton.dev/v1beta1 +apiVersion: tekton.dev/v1 kind: TaskRun metadata: generateName: build-chains- diff --git a/examples/taskruns/task-output-image.yaml b/examples/taskruns/task-output-image.yaml index 8e780970de..5b1e6c1aea 100644 --- a/examples/taskruns/task-output-image.yaml +++ b/examples/taskruns/task-output-image.yaml @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -apiVersion: tekton.dev/v1beta1 +apiVersion: tekton.dev/v1 kind: TaskRun metadata: name: build-push-run-output-image-test diff --git a/go.mod b/go.mod index 0ac6412208..bdcfe06877 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module github.com/tektoncd/chains -go 1.20 +go 1.21 require ( cloud.google.com/go/compute/metadata v0.2.3 @@ -268,6 +268,7 @@ require ( github.com/jmespath/go-jmespath v0.4.0 // indirect github.com/josharian/intern v1.0.0 // indirect github.com/json-iterator/go v1.1.12 // indirect + github.com/jstemmer/go-junit-report v1.0.0 // indirect github.com/julz/importas v0.1.0 // indirect github.com/kelseyhightower/envconfig v1.4.0 // indirect github.com/kevinburke/ssh_config v1.2.0 // indirect diff --git a/go.sum b/go.sum index 91609ea8a1..59ded6b328 100644 --- a/go.sum +++ b/go.sum @@ -834,6 +834,8 @@ github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnr github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/jstemmer/go-junit-report v1.0.0 h1:8X1gzZpR+nVQLAht+L/foqOeX2l9DTZoaIPbEQHxsds= +github.com/jstemmer/go-junit-report v1.0.0/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= 
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= diff --git a/pkg/artifacts/signable.go b/pkg/artifacts/signable.go index 1ac9492f99..d4ac6f4c8f 100644 --- a/pkg/artifacts/signable.go +++ b/pkg/artifacts/signable.go @@ -17,6 +17,7 @@ import ( "context" _ "crypto/sha256" // Recommended by go-digest. _ "crypto/sha512" // Recommended by go-digest. + "encoding/json" "fmt" "regexp" "strings" @@ -24,9 +25,11 @@ import ( "github.com/google/go-containerregistry/pkg/name" "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common" "github.com/opencontainers/go-digest" + "github.com/opentracing/opentracing-go/log" "github.com/tektoncd/chains/internal/backport" "github.com/tektoncd/chains/pkg/chains/objects" "github.com/tektoncd/chains/pkg/config" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" "k8s.io/apimachinery/pkg/util/sets" "knative.dev/pkg/logging" @@ -65,12 +68,12 @@ type TaskRunArtifact struct{} var _ Signable = &TaskRunArtifact{} func (ta *TaskRunArtifact) ShortKey(obj interface{}) string { - tro := obj.(*objects.TaskRunObject) + tro := obj.(*objects.TaskRunObjectV1) return "taskrun-" + string(tro.UID) } func (ta *TaskRunArtifact) FullKey(obj interface{}) string { - tro := obj.(*objects.TaskRunObject) + tro := obj.(*objects.TaskRunObjectV1) gvk := tro.GetGroupVersionKind() return fmt.Sprintf("%s-%s-%s-%s", gvk.Group, gvk.Version, gvk.Kind, tro.UID) } @@ -104,12 +107,12 @@ type PipelineRunArtifact struct{} var _ Signable = &PipelineRunArtifact{} func (pa *PipelineRunArtifact) ShortKey(obj interface{}) string { - pro := obj.(*objects.PipelineRunObject) + pro := obj.(*objects.PipelineRunObjectV1) return "pipelinerun-" + string(pro.UID) } func (pa *PipelineRunArtifact) FullKey(obj interface{}) string { - pro := obj.(*objects.PipelineRunObject) + pro := 
obj.(*objects.PipelineRunObjectV1) gvk := pro.GetGroupVersionKind() return fmt.Sprintf("%s-%s-%s-%s", gvk.Group, gvk.Version, gvk.Kind, pro.UID) } @@ -149,40 +152,56 @@ type image struct { } func (oa *OCIArtifact) ExtractObjects(ctx context.Context, obj objects.TektonObject) []interface{} { - log := logging.FromContext(ctx) objs := []interface{}{} - - // TODO: Not applicable to PipelineRuns, should look into a better way to separate this out - if tr, ok := obj.GetObject().(*v1beta1.TaskRun); ok { - imageResourceNames := map[string]*image{} - if tr.Status.TaskSpec != nil && tr.Status.TaskSpec.Resources != nil { - for _, output := range tr.Status.TaskSpec.Resources.Outputs { - if output.Type == backport.PipelineResourceTypeImage { - imageResourceNames[output.Name] = &image{} - } + if trV1, ok := obj.GetObject().(*v1.TaskRun); ok { + var resources v1beta1.TaskResources //nolint:staticcheck + shouldReplace := false + if serializedResources, ok := trV1.Annotations["tekton.dev/v1beta1-status-taskrunstatusfields-taskspec-resources"]; ok { + if err := json.Unmarshal([]byte(serializedResources), &resources); err == nil { + shouldReplace = true } } - - for _, rr := range tr.Status.ResourcesResult { - img, ok := imageResourceNames[rr.ResourceName] - if !ok { - continue - } - // We have a result for an image! 
- if rr.Key == "url" { - img.url = rr.Value - } else if rr.Key == "digest" { - img.digest = rr.Value + var results []v1beta1.RunResult //nolint:staticcheck + if serializedResources, ok := trV1.Annotations["tekton.dev/v1beta1ResourcesResult"]; ok { + if err := json.Unmarshal([]byte(serializedResources), &results); err == nil { + shouldReplace = shouldReplace && true } } + trV1Beta1 := &v1beta1.TaskRun{} //nolint:staticcheck + if err := trV1Beta1.ConvertFrom(ctx, trV1); err == nil { + if shouldReplace { + trV1Beta1.Status.TaskSpec.Resources = &resources //nolint:staticcheck + trV1Beta1.Status.ResourcesResult = results //nolint:staticcheck + } + imageResourceNames := map[string]*image{} + if trV1Beta1.Status.TaskSpec != nil && trV1Beta1.Status.TaskSpec.Resources != nil { //nolint:staticcheck + for _, output := range trV1Beta1.Status.TaskSpec.Resources.Outputs { //nolint:staticcheck + if output.Type == backport.PipelineResourceTypeImage { + imageResourceNames[output.Name] = &image{} + } + } + } + for _, rr := range trV1Beta1.Status.ResourcesResult { + img, ok := imageResourceNames[rr.ResourceName] + if !ok { + continue + } + // We have a result for an image! 
+ if rr.Key == "url" { + img.url = rr.Value + } else if rr.Key == "digest" { + img.digest = rr.Value + } + } - for _, image := range imageResourceNames { - dgst, err := name.NewDigest(fmt.Sprintf("%s@%s", image.url, image.digest)) - if err != nil { - log.Error(err) - continue + for _, image := range imageResourceNames { + dgst, err := name.NewDigest(fmt.Sprintf("%s@%s", image.url, image.digest)) + if err != nil { + log.Error(err) + continue + } + objs = append(objs, dgst) } - objs = append(objs, dgst) } } @@ -197,6 +216,7 @@ func ExtractOCIImagesFromResults(ctx context.Context, obj objects.TektonObject) logger := logging.FromContext(ctx) objs := []interface{}{} + logger.Infof("aprindle-10 - here") extractor := structuredSignableExtractor{ uriSuffix: "IMAGE_URL", digestSuffix: "IMAGE_DIGEST", @@ -205,19 +225,19 @@ func ExtractOCIImagesFromResults(ctx context.Context, obj objects.TektonObject) for _, s := range extractor.extract(ctx, obj) { dgst, err := name.NewDigest(fmt.Sprintf("%s@%s", s.URI, s.Digest)) if err != nil { + logger.Infof("aprindle-11 - here") logger.Errorf("error getting digest: %v", err) continue } - objs = append(objs, dgst) } // look for a comma separated list of images for _, key := range obj.GetResults() { - if key.Name != "IMAGES" { + if key.GetName() != "IMAGES" { continue } - imgs := strings.FieldsFunc(key.Value.StringVal, split) + imgs := strings.FieldsFunc(key.GetStringValue(), split) for _, img := range imgs { trimmed := strings.TrimSpace(img) @@ -226,12 +246,14 @@ func ExtractOCIImagesFromResults(ctx context.Context, obj objects.TektonObject) } dgst, err := name.NewDigest(trimmed) if err != nil { + logger.Infof("aprindle-12 - here") logger.Errorf("error getting digest for img %s: %v", trimmed, err) continue } objs = append(objs, dgst) } } + logger.Infof("aprindle-13 - here") return objs } @@ -291,43 +313,36 @@ func ExtractStructuredTargetFromResults(ctx context.Context, obj objects.TektonO } // TODO(#592): support structured results using 
Run - results := []objects.Result{} for _, res := range obj.GetResults() { - results = append(results, objects.Result{ - Name: res.Name, - Value: res.Value, - }) - } - for _, res := range results { - if strings.HasSuffix(res.Name, categoryMarker) { + if strings.HasSuffix(res.GetName(), categoryMarker) { valid, err := isStructuredResult(res, categoryMarker) if err != nil { logger.Debugf("ExtractStructuredTargetFromResults: %v", err) } if valid { - logger.Debugf("Extracted Structured data from Result %s, %s", res.Value.ObjectVal["uri"], res.Value.ObjectVal["digest"]) - objs = append(objs, &StructuredSignable{URI: res.Value.ObjectVal["uri"], Digest: res.Value.ObjectVal["digest"]}) + logger.Debugf("Extracted Structured data from Result %v", res) + objs = append(objs, &StructuredSignable{URI: res.GetObjectValue("uri"), Digest: res.GetObjectValue("digest")}) } } } return objs } -func isStructuredResult(res objects.Result, categoryMarker string) (bool, error) { - if !strings.HasSuffix(res.Name, categoryMarker) { +func isStructuredResult(res objects.GenericResult, categoryMarker string) (bool, error) { + if !strings.HasSuffix(res.GetName(), categoryMarker) { return false, nil } - if res.Value.ObjectVal == nil { - return false, fmt.Errorf("%s should be an object: %v", res.Name, res.Value.ObjectVal) + if res.ObjectValueIsNil() { + return false, fmt.Errorf("%s should be an object: %v", res.GetName(), res) } - if res.Value.ObjectVal["uri"] == "" { - return false, fmt.Errorf("%s should have uri field: %v", res.Name, res.Value.ObjectVal) + if res.GetObjectValue("uri") == "" { + return false, fmt.Errorf("%s should have uri field: %v", res.GetName(), res) } - if res.Value.ObjectVal["digest"] == "" { - return false, fmt.Errorf("%s should have digest field: %v", res.Name, res.Value.ObjectVal) + if res.GetObjectValue("digest") == "" { + return false, fmt.Errorf("%s should have digest field: %v", res.GetName(), res) } - if _, _, err := ParseDigest(res.Value.ObjectVal["digest"]); err 
!= nil { - return false, fmt.Errorf("error getting digest %s: %v", res.Value.ObjectVal["digest"], err) + if _, _, err := ParseDigest(res.GetObjectValue("digest")); err != nil { + return false, fmt.Errorf("error getting digest %s: %v", res.GetObjectValue("digest"), err) } return true, nil } diff --git a/pkg/artifacts/signable_test.go b/pkg/artifacts/signable_test.go index b3181020c3..884adc4107 100644 --- a/pkg/artifacts/signable_test.go +++ b/pkg/artifacts/signable_test.go @@ -14,6 +14,7 @@ limitations under the License. package artifacts import ( + "encoding/json" "fmt" "sort" "testing" @@ -23,6 +24,7 @@ import ( "github.com/google/go-containerregistry/pkg/name" "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common" "github.com/tektoncd/chains/pkg/chains/objects" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" logtesting "knative.dev/pkg/logging/testing" @@ -52,7 +54,7 @@ func TestOCIArtifact_ExtractObjects(t *testing.T) { }{ { name: "one image", - obj: objects.NewTaskRunObject(&v1beta1.TaskRun{ + obj: objects.NewTaskRunObjectV1Beta1(&v1beta1.TaskRun{ //nolint:staticcheck TypeMeta: metav1.TypeMeta{ Kind: "TaskRun", }, @@ -71,10 +73,10 @@ func TestOCIArtifact_ExtractObjects(t *testing.T) { }, }, TaskSpec: &v1beta1.TaskSpec{ - Resources: &v1beta1.TaskResources{ - Outputs: []v1beta1.TaskResource{ + Resources: &v1beta1.TaskResources{ //nolint:staticcheck + Outputs: []v1beta1.TaskResource{ //nolint:staticcheck { - ResourceDeclaration: v1beta1.ResourceDeclaration{ + ResourceDeclaration: v1beta1.ResourceDeclaration{ //nolint:staticcheck Name: "my-image", Type: "image", }, @@ -89,7 +91,7 @@ func TestOCIArtifact_ExtractObjects(t *testing.T) { }, { name: "two images", - obj: objects.NewTaskRunObject(&v1beta1.TaskRun{ + obj: objects.NewTaskRunObjectV1Beta1(&v1beta1.TaskRun{ //nolint:staticcheck TypeMeta: metav1.TypeMeta{ Kind: "TaskRun", }, @@ -118,16 
+120,16 @@ func TestOCIArtifact_ExtractObjects(t *testing.T) { }, }, TaskSpec: &v1beta1.TaskSpec{ - Resources: &v1beta1.TaskResources{ - Outputs: []v1beta1.TaskResource{ + Resources: &v1beta1.TaskResources{ //nolint:staticcheck + Outputs: []v1beta1.TaskResource{ //nolint:staticcheck { - ResourceDeclaration: v1beta1.ResourceDeclaration{ + ResourceDeclaration: v1beta1.ResourceDeclaration{ //nolint:staticcheck Name: "my-image1", Type: "image", }, }, { - ResourceDeclaration: v1beta1.ResourceDeclaration{ + ResourceDeclaration: v1beta1.ResourceDeclaration{ //nolint:staticcheck Name: "my-image2", Type: "image", }, @@ -145,7 +147,7 @@ func TestOCIArtifact_ExtractObjects(t *testing.T) { }, { name: "resource and result", - obj: objects.NewTaskRunObject(&v1beta1.TaskRun{ + obj: objects.NewTaskRunObjectV1Beta1(&v1beta1.TaskRun{ //nolint:staticcheck TypeMeta: metav1.TypeMeta{ Kind: "TaskRun", }, @@ -182,10 +184,10 @@ func TestOCIArtifact_ExtractObjects(t *testing.T) { Name: "IMAGE_DIGEST", }, }, - Resources: &v1beta1.TaskResources{ - Outputs: []v1beta1.TaskResource{ + Resources: &v1beta1.TaskResources{ //nolint:staticcheck + Outputs: []v1beta1.TaskResource{ //nolint:staticcheck { - ResourceDeclaration: v1beta1.ResourceDeclaration{ + ResourceDeclaration: v1beta1.ResourceDeclaration{ //nolint:staticcheck Name: "my-image", Type: "image", }, @@ -202,7 +204,7 @@ func TestOCIArtifact_ExtractObjects(t *testing.T) { }, { name: "extra", - obj: objects.NewTaskRunObject(&v1beta1.TaskRun{ + obj: objects.NewTaskRunObjectV1Beta1(&v1beta1.TaskRun{ //nolint:staticcheck TypeMeta: metav1.TypeMeta{ Kind: "TaskRun", }, @@ -241,10 +243,10 @@ func TestOCIArtifact_ExtractObjects(t *testing.T) { }, }, TaskSpec: &v1beta1.TaskSpec{ - Resources: &v1beta1.TaskResources{ - Outputs: []v1beta1.TaskResource{ + Resources: &v1beta1.TaskResources{ //nolint:staticcheck + Outputs: []v1beta1.TaskResource{ //nolint:staticcheck { - ResourceDeclaration: v1beta1.ResourceDeclaration{ + ResourceDeclaration: 
v1beta1.ResourceDeclaration{ //nolint:staticcheck Name: "my-image", Type: "image", }, @@ -256,15 +258,16 @@ func TestOCIArtifact_ExtractObjects(t *testing.T) { }, }), want: []interface{}{createDigest(t, "gcr.io/foo/bar@sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b5")}, - }, { + }, + { name: "images", - obj: objects.NewTaskRunObject(&v1beta1.TaskRun{ - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - TaskRunResults: []v1beta1.TaskRunResult{ + obj: objects.NewTaskRunObjectV1(&v1.TaskRun{ + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Results: []v1.TaskRunResult{ { Name: "IMAGES", - Value: *v1beta1.NewStructuredValues(fmt.Sprintf(" \n \tgcr.io/foo/bar@%s\n,gcr.io/baz/bar@%s", digest1, digest2)), + Value: *v1.NewStructuredValues(fmt.Sprintf(" \n \tgcr.io/foo/bar@%s\n,gcr.io/baz/bar@%s", digest1, digest2)), }, }, }, @@ -276,13 +279,13 @@ func TestOCIArtifact_ExtractObjects(t *testing.T) { }, }, { name: "images-newline", - obj: objects.NewTaskRunObject(&v1beta1.TaskRun{ - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - TaskRunResults: []v1beta1.TaskRunResult{ + obj: objects.NewTaskRunObjectV1(&v1.TaskRun{ + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Results: []v1.TaskRunResult{ { Name: "IMAGES", - Value: *v1beta1.NewStructuredValues(fmt.Sprintf("gcr.io/foo/bar@%s\ngcr.io/baz/bar@%s\n\n", digest1, digest2)), + Value: *v1.NewStructuredValues(fmt.Sprintf("gcr.io/foo/bar@%s\ngcr.io/baz/bar@%s\n\n", digest1, digest2)), }, }, }, @@ -298,6 +301,19 @@ func TestOCIArtifact_ExtractObjects(t *testing.T) { t.Run(tt.name, func(t *testing.T) { ctx := logtesting.TestContextWithLogger(t) oa := &OCIArtifact{} + if trV1Beta1, ok := tt.obj.GetObject().(*v1beta1.TaskRun); ok { //nolint:staticcheck + trV1 := &v1.TaskRun{} + if err := trV1Beta1.ConvertTo(ctx, trV1); err == nil { + if trV1Beta1.Status.TaskRunStatusFields.TaskSpec != nil 
&& trV1Beta1.Status.TaskRunStatusFields.TaskSpec.Resources != nil { //nolint:staticcheck + jsonData, err := json.Marshal(trV1Beta1.Status.TaskRunStatusFields.TaskSpec.Resources) //nolint:staticcheck + if err != nil { + t.Errorf("Error serializing to JSON: %v", err) + } + trV1.Annotations["tekton.dev/v1beta1-status-taskrunstatusfields-taskspec-resources"] = string(jsonData) + } + tt.obj = objects.NewTaskRunObjectV1(trV1) + } + } got := oa.ExtractObjects(ctx, tt.obj) sort.Slice(got, func(i, j int) bool { a := got[i].(name.Digest) @@ -312,25 +328,25 @@ func TestOCIArtifact_ExtractObjects(t *testing.T) { } func TestExtractOCIImagesFromResults(t *testing.T) { - tr := &v1beta1.TaskRun{ - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - TaskRunResults: []v1beta1.TaskRunResult{ - {Name: "img1_IMAGE_URL", Value: *v1beta1.NewStructuredValues("img1")}, - {Name: "img1_IMAGE_DIGEST", Value: *v1beta1.NewStructuredValues(digest1)}, - {Name: "img2_IMAGE_URL", Value: *v1beta1.NewStructuredValues("img2")}, - {Name: "img2_IMAGE_DIGEST", Value: *v1beta1.NewStructuredValues(digest2)}, - {Name: "IMAGE_URL", Value: *v1beta1.NewStructuredValues("img3")}, - {Name: "IMAGE_DIGEST", Value: *v1beta1.NewStructuredValues(digest1)}, - {Name: "img4_IMAGE_URL", Value: *v1beta1.NewStructuredValues("img4")}, - {Name: "img5_IMAGE_DIGEST", Value: *v1beta1.NewStructuredValues("sha123:abc")}, - {Name: "empty_str_IMAGE_DIGEST", Value: *v1beta1.NewStructuredValues("")}, - {Name: "empty_str_IMAGE_URL", Value: *v1beta1.NewStructuredValues("")}, + tr := &v1.TaskRun{ + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Results: []v1.TaskRunResult{ + {Name: "img1_IMAGE_URL", Value: *v1.NewStructuredValues("img1")}, + {Name: "img1_IMAGE_DIGEST", Value: *v1.NewStructuredValues(digest1)}, + {Name: "img2_IMAGE_URL", Value: *v1.NewStructuredValues("img2")}, + {Name: "img2_IMAGE_DIGEST", Value: *v1.NewStructuredValues(digest2)}, + {Name: "IMAGE_URL", Value: 
*v1.NewStructuredValues("img3")}, + {Name: "IMAGE_DIGEST", Value: *v1.NewStructuredValues(digest1)}, + {Name: "img4_IMAGE_URL", Value: *v1.NewStructuredValues("img4")}, + {Name: "img5_IMAGE_DIGEST", Value: *v1.NewStructuredValues("sha123:abc")}, + {Name: "empty_str_IMAGE_DIGEST", Value: *v1.NewStructuredValues("")}, + {Name: "empty_str_IMAGE_URL", Value: *v1.NewStructuredValues("")}, }, }, }, } - obj := objects.NewTaskRunObject(tr) + obj := objects.NewTaskRunObjectV1(tr) want := []interface{}{ createDigest(t, fmt.Sprintf("img1@%s", digest1)), createDigest(t, fmt.Sprintf("img2@%s", digest2)), @@ -349,23 +365,23 @@ func TestExtractOCIImagesFromResults(t *testing.T) { } func TestExtractSignableTargetFromResults(t *testing.T) { - tr := &v1beta1.TaskRun{ - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - TaskRunResults: []v1beta1.TaskRunResult{ - {Name: "mvn1_ARTIFACT_URI", Value: *v1beta1.NewStructuredValues("projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/com.google.guava:guava:31.0-jre")}, - {Name: "mvn1_ARTIFACT_DIGEST", Value: *v1beta1.NewStructuredValues(digest1)}, - {Name: "mvn1_pom_ARTIFACT_URI", Value: *v1beta1.NewStructuredValues("com.google.guava:guava:31.0-jre.pom")}, - {Name: "mvn1_pom_ARTIFACT_DIGEST", Value: *v1beta1.NewStructuredValues(digest2)}, - {Name: "mvn1_src_ARTIFACT_URI", Value: *v1beta1.NewStructuredValues("com.google.guava:guava:31.0-jre-sources.jar")}, - {Name: "mvn1_src_ARTIFACT_DIGEST", Value: *v1beta1.NewStructuredValues(digest3)}, - {Name: "mvn2_ARTIFACT_URI", Value: *v1beta1.NewStructuredValues("projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/a.b.c:d:1.0-jre")}, - {Name: "mvn2_ARTIFACT_DIGEST", Value: *v1beta1.NewStructuredValues(digest4)}, - {Name: "ARTIFACT_URI", Value: *v1beta1.NewStructuredValues("projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/empty_prefix")}, - {Name: "ARTIFACT_DIGEST", Value: 
*v1beta1.NewStructuredValues(digest1)}, - {Name: "miss_target_name_ARTIFACT_DIGEST", Value: *v1beta1.NewStructuredValues(digest1)}, - {Name: "wrong_digest_format_ARTIFACT_URI", Value: *v1beta1.NewStructuredValues("projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/wrong_digest_format")}, - {Name: "wrong_digest_format_ARTIFACT_DIGEST", Value: *v1beta1.NewStructuredValues("abc")}, + tr := &v1.TaskRun{ + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Results: []v1.TaskRunResult{ + {Name: "mvn1_ARTIFACT_URI", Value: *v1.NewStructuredValues("projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/com.google.guava:guava:31.0-jre")}, + {Name: "mvn1_ARTIFACT_DIGEST", Value: *v1.NewStructuredValues(digest1)}, + {Name: "mvn1_pom_ARTIFACT_URI", Value: *v1.NewStructuredValues("com.google.guava:guava:31.0-jre.pom")}, + {Name: "mvn1_pom_ARTIFACT_DIGEST", Value: *v1.NewStructuredValues(digest2)}, + {Name: "mvn1_src_ARTIFACT_URI", Value: *v1.NewStructuredValues("com.google.guava:guava:31.0-jre-sources.jar")}, + {Name: "mvn1_src_ARTIFACT_DIGEST", Value: *v1.NewStructuredValues(digest3)}, + {Name: "mvn2_ARTIFACT_URI", Value: *v1.NewStructuredValues("projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/a.b.c:d:1.0-jre")}, + {Name: "mvn2_ARTIFACT_DIGEST", Value: *v1.NewStructuredValues(digest4)}, + {Name: "ARTIFACT_URI", Value: *v1.NewStructuredValues("projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/empty_prefix")}, + {Name: "ARTIFACT_DIGEST", Value: *v1.NewStructuredValues(digest1)}, + {Name: "miss_target_name_ARTIFACT_DIGEST", Value: *v1.NewStructuredValues(digest1)}, + {Name: "wrong_digest_format_ARTIFACT_URI", Value: *v1.NewStructuredValues("projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/wrong_digest_format")}, + {Name: "wrong_digest_format_ARTIFACT_DIGEST", Value: *v1.NewStructuredValues("abc")}, }, }, }, @@ -378,7 
+394,7 @@ func TestExtractSignableTargetFromResults(t *testing.T) { {URI: "projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/empty_prefix", Digest: digest1}, } ctx := logtesting.TestContextWithLogger(t) - got := ExtractSignableTargetFromResults(ctx, objects.NewTaskRunObject(tr)) + got := ExtractSignableTargetFromResults(ctx, objects.NewTaskRunObjectV1(tr)) sort.Slice(got, func(i, j int) bool { return got[i].URI < got[j].URI }) @@ -391,13 +407,13 @@ func TestExtractSignableTargetFromResults(t *testing.T) { } func TestExtractStructuredTargetFromResults(t *testing.T) { - tr := &v1beta1.TaskRun{ - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - TaskRunResults: []v1beta1.TaskRunResult{ + tr := &v1.TaskRun{ + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Results: []v1.TaskRunResult{ { Name: "mvn1_pkg" + "_" + ArtifactsOutputsResultName, - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": "projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/com.google.guava:guava:31.0-jre", "digest": digest1, "signable_type": "", @@ -405,7 +421,7 @@ func TestExtractStructuredTargetFromResults(t *testing.T) { }, { Name: "mvn1_pom_sha512" + "_" + ArtifactsOutputsResultName, - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": "com.google.guava:guava:31.0-jre.pom", "digest": digest2, "signable_type": "", @@ -413,56 +429,56 @@ func TestExtractStructuredTargetFromResults(t *testing.T) { }, { Name: "img1_input" + "_" + ArtifactsInputsResultName, - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": "gcr.io/foo/bar", "digest": digest3, }), }, { Name: "img2_input_sha1" + "_" + ArtifactsInputsResultName, - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": "gcr.io/foo/bar", "digest": digest_sha1, }), }, { 
Name: "img2_input_incorrect_sha1" + "_" + ArtifactsInputsResultName, - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": "gcr.io/foo/bar", "digest": digest_incorrect_sha1, }), }, { Name: "img3_input_sha384" + "_" + ArtifactsInputsResultName, - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": "gcr.io/foo/bar", "digest": digest_sha384, }), }, { Name: "img3_input_incorrect_sha384" + "_" + ArtifactsInputsResultName, - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": "gcr.io/foo/bar", "digest": digest_incorrect_sha384, }), }, { Name: "img4_input_sha512" + "_" + ArtifactsInputsResultName, - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": "gcr.io/foo/bar", "digest": digest_sha512, }), }, { Name: "img4_input_incorrect_sha512" + "_" + ArtifactsInputsResultName, - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": "gcr.io/foo/bar", "digest": digest_incorrect_sha512, }), }, { Name: "img2_input_no_digest" + "_" + ArtifactsInputsResultName, - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": "gcr.io/foo/foo", "digest": "", }), @@ -479,7 +495,7 @@ func TestExtractStructuredTargetFromResults(t *testing.T) { {URI: "gcr.io/foo/bar", Digest: digest_sha512}, } ctx := logtesting.TestContextWithLogger(t) - gotInputs := ExtractStructuredTargetFromResults(ctx, objects.NewTaskRunObject(tr), ArtifactsInputsResultName) + gotInputs := ExtractStructuredTargetFromResults(ctx, objects.NewTaskRunObjectV1(tr), ArtifactsInputsResultName) if diff := cmp.Diff(gotInputs, wantInputs, cmpopts.SortSlices(func(x, y *StructuredSignable) bool { return x.Digest < y.Digest })); diff != "" { t.Errorf("Inputs are not as expected: %v", diff) } @@ -488,7 +504,7 @@ func TestExtractStructuredTargetFromResults(t *testing.T) { {URI: 
"projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/com.google.guava:guava:31.0-jre", Digest: digest1}, {URI: "com.google.guava:guava:31.0-jre.pom", Digest: digest2}, } - gotOutputs := ExtractStructuredTargetFromResults(ctx, objects.NewTaskRunObject(tr), ArtifactsOutputsResultName) + gotOutputs := ExtractStructuredTargetFromResults(ctx, objects.NewTaskRunObjectV1(tr), ArtifactsOutputsResultName) opts := append(ignore, cmpopts.SortSlices(func(x, y *StructuredSignable) bool { return x.Digest < y.Digest })) if diff := cmp.Diff(gotOutputs, wantOutputs, opts...); diff != "" { t.Error(diff) @@ -496,27 +512,27 @@ func TestExtractStructuredTargetFromResults(t *testing.T) { } func TestRetrieveMaterialsFromStructuredResults(t *testing.T) { - tr := &v1beta1.TaskRun{ - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - TaskRunResults: []v1beta1.TaskRunResult{ + tr := &v1.TaskRun{ + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Results: []v1.TaskRunResult{ { Name: "img1_input" + "_" + ArtifactsInputsResultName, - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": OCIScheme + "gcr.io/foo/bar", "digest": "sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b7", }), }, { Name: "img2_input_no_digest" + "_" + ArtifactsInputsResultName, - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": OCIScheme + "gcr.io/foo/foo", "digest": "", }), }, { Name: "img2_input_invalid_digest" + "_" + ArtifactsInputsResultName, - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": OCIScheme + "gcr.io/foo/foo", "digest": "sha:123", }), @@ -532,7 +548,7 @@ func TestRetrieveMaterialsFromStructuredResults(t *testing.T) { }, } ctx := logtesting.TestContextWithLogger(t) - gotMaterials := RetrieveMaterialsFromStructuredResults(ctx, objects.NewTaskRunObject(tr), 
ArtifactsInputsResultName) + gotMaterials := RetrieveMaterialsFromStructuredResults(ctx, objects.NewTaskRunObjectV1(tr), ArtifactsInputsResultName) if diff := cmp.Diff(gotMaterials, wantMaterials, ignore...); diff != "" { t.Fatalf("Materials not the same %s", diff) @@ -542,7 +558,7 @@ func TestRetrieveMaterialsFromStructuredResults(t *testing.T) { func TestValidateResults(t *testing.T) { tests := []struct { name string - obj objects.Result + obj objects.ResultV1 categoryMarker string wantResult bool wantErr error @@ -550,9 +566,9 @@ func TestValidateResults(t *testing.T) { { name: "valid result", categoryMarker: ArtifactsOutputsResultName, - obj: objects.Result{ + obj: objects.ResultV1{ Name: "valid_result-ARTIFACT_OUTPUTS", - Value: v1beta1.ParamValue{ + Value: v1.ParamValue{ ObjectVal: map[string]string{ "uri": "gcr.io/foo/bar", "digest": digest3, @@ -565,37 +581,37 @@ func TestValidateResults(t *testing.T) { { name: "invalid result without digest field", categoryMarker: ArtifactsOutputsResultName, - obj: objects.Result{ + obj: objects.ResultV1{ Name: "missing_digest-ARTIFACT_OUTPUTS", - Value: v1beta1.ParamValue{ + Value: v1.ParamValue{ ObjectVal: map[string]string{ "uri": "gcr.io/foo/bar", }, }, }, wantResult: false, - wantErr: fmt.Errorf("missing_digest-ARTIFACT_OUTPUTS should have digest field: map[uri:gcr.io/foo/bar]"), + wantErr: fmt.Errorf("missing_digest-ARTIFACT_OUTPUTS should have digest field: {missing_digest-ARTIFACT_OUTPUTS { [] map[uri:gcr.io/foo/bar]}}"), }, { name: "invalid result without uri field", categoryMarker: ArtifactsOutputsResultName, - obj: objects.Result{ + obj: objects.ResultV1{ Name: "missing_digest-ARTIFACT_OUTPUTS", - Value: v1beta1.ParamValue{ + Value: v1.ParamValue{ ObjectVal: map[string]string{ "digest": digest3, }, }, }, wantResult: false, - wantErr: fmt.Errorf("missing_digest-ARTIFACT_OUTPUTS should have uri field: map[digest:sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b7]"), + wantErr: 
fmt.Errorf("missing_digest-ARTIFACT_OUTPUTS should have uri field: {missing_digest-ARTIFACT_OUTPUTS { [] map[digest:sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b7]}}"), }, { name: "invalid result wrong digest format", categoryMarker: ArtifactsOutputsResultName, - obj: objects.Result{ + obj: objects.ResultV1{ Name: "missing_digest-ARTIFACT_OUTPUTS", - Value: v1beta1.ParamValue{ + Value: v1.ParamValue{ ObjectVal: map[string]string{ "uri": "gcr.io/foo/bar", "digest": "", @@ -603,14 +619,14 @@ func TestValidateResults(t *testing.T) { }, }, wantResult: false, - wantErr: fmt.Errorf("missing_digest-ARTIFACT_OUTPUTS should have digest field: map[digest: uri:gcr.io/foo/bar]"), + wantErr: fmt.Errorf("missing_digest-ARTIFACT_OUTPUTS should have digest field: {missing_digest-ARTIFACT_OUTPUTS { [] map[digest: uri:gcr.io/foo/bar]}}"), }, { name: "invalid result wrong type hinting", categoryMarker: ArtifactsOutputsResultName, - obj: objects.Result{ + obj: objects.ResultV1{ Name: "missing_digest-ARTIFACTs_OUTPUTS", - Value: v1beta1.ParamValue{ + Value: v1.ParamValue{ ObjectVal: map[string]string{ "uri": "gcr.io/foo/bar", "digest": digest3, diff --git a/pkg/artifacts/structured.go b/pkg/artifacts/structured.go index 4e9efb1a9b..8b75fc0326 100644 --- a/pkg/artifacts/structured.go +++ b/pkg/artifacts/structured.go @@ -40,6 +40,7 @@ func (b *structuredSignableExtractor) extract(ctx context.Context, obj objects.T logger := logging.FromContext(ctx) partials := map[string]StructuredSignable{} + logger.Infof("aprindle-20 - here") suffixes := map[string]func(StructuredSignable, string) StructuredSignable{ b.uriSuffix: func(s StructuredSignable, value string) StructuredSignable { s.URI = value @@ -56,15 +57,15 @@ func (b *structuredSignableExtractor) extract(ctx context.Context, obj objects.T if suffix == "" { continue } - if !strings.HasSuffix(res.Name, suffix) { + if !strings.HasSuffix(res.GetName(), suffix) { continue } - value := 
strings.TrimSpace(res.Value.StringVal) + value := strings.TrimSpace(res.GetStringValue()) if value == "" { - logger.Debugf("error getting string value for %s", res.Name) + logger.Debugf("error getting string value for %s", res.GetName()) continue } - marker := strings.TrimSuffix(res.Name, suffix) + marker := strings.TrimSuffix(res.GetName(), suffix) if _, ok := partials[marker]; !ok { partials[marker] = StructuredSignable{} } diff --git a/pkg/chains/annotations_test.go b/pkg/chains/annotations_test.go index d8dd0ed717..e697b5752e 100644 --- a/pkg/chains/annotations_test.go +++ b/pkg/chains/annotations_test.go @@ -18,7 +18,7 @@ import ( "github.com/tektoncd/chains/pkg/chains/objects" "github.com/tektoncd/chains/pkg/test/tekton" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" fakepipelineclient "github.com/tektoncd/pipeline/pkg/client/injection/client/fake" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" rtesting "knative.dev/pkg/reconciler/testing" @@ -78,7 +78,7 @@ func TestReconciled(t *testing.T) { c := fakepipelineclient.Get(ctx) // Test TaskRun - taskRun := objects.NewTaskRunObject(&v1beta1.TaskRun{ + taskRun := objects.NewTaskRunObjectV1(&v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ Annotations: map[string]string{ ChainsAnnotation: tt.annotation, @@ -87,7 +87,7 @@ func TestReconciled(t *testing.T) { }) tekton.CreateObject(t, ctx, c, taskRun) - cachedTaskRun := objects.NewTaskRunObject(&v1beta1.TaskRun{ + cachedTaskRun := objects.NewTaskRunObjectV1(&v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ Annotations: map[string]string{ ChainsAnnotation: tt.latestAnnotation, @@ -101,7 +101,7 @@ func TestReconciled(t *testing.T) { } // Test PipelineRun - pipelineRun := objects.NewPipelineRunObject(&v1beta1.PipelineRun{ + pipelineRun := objects.NewPipelineRunObjectV1(&v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Annotations: map[string]string{ ChainsAnnotation: tt.annotation, @@ -110,7 +110,7 @@ func 
TestReconciled(t *testing.T) { }) tekton.CreateObject(t, ctx, c, pipelineRun) - cachedPipelineRun := objects.NewPipelineRunObject(&v1beta1.PipelineRun{ + cachedPipelineRun := objects.NewPipelineRunObjectV1(&v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Annotations: map[string]string{ ChainsAnnotation: tt.latestAnnotation, @@ -133,12 +133,12 @@ func TestMarkSigned(t *testing.T) { }{ { name: "mark taskrun", - object: objects.NewTaskRunObject(&v1beta1.TaskRun{ + object: objects.NewTaskRunObjectV1(&v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ Name: "my-taskrun", }, - Spec: v1beta1.TaskRunSpec{ - TaskRef: &v1beta1.TaskRef{ + Spec: v1.TaskRunSpec{ + TaskRef: &v1.TaskRef{ Name: "foo", }, }, @@ -146,12 +146,12 @@ func TestMarkSigned(t *testing.T) { }, { name: "mark pipelinerun", - object: objects.NewPipelineRunObject(&v1beta1.PipelineRun{ + object: objects.NewPipelineRunObjectV1(&v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Name: "my-pipelinerun", }, - Spec: v1beta1.PipelineRunSpec{ - PipelineRef: &v1beta1.PipelineRef{ + Spec: v1.PipelineRunSpec{ + PipelineRef: &v1.PipelineRef{ Name: "foo", }, }, @@ -212,13 +212,13 @@ func TestMarkFailed(t *testing.T) { }{ { name: "mark taskrun failed", - object: objects.NewTaskRunObject(&v1beta1.TaskRun{ + object: objects.NewTaskRunObjectV1(&v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ Name: "my-taskrun", Annotations: map[string]string{RetryAnnotation: "3"}, }, - Spec: v1beta1.TaskRunSpec{ - TaskRef: &v1beta1.TaskRef{ + Spec: v1.TaskRunSpec{ + TaskRef: &v1.TaskRef{ Name: "foo", }, }, @@ -226,13 +226,13 @@ func TestMarkFailed(t *testing.T) { }, { name: "mark pipelinerun failed", - object: objects.NewPipelineRunObject(&v1beta1.PipelineRun{ + object: objects.NewPipelineRunObjectV1(&v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Name: "my-pipelinerun", Annotations: map[string]string{RetryAnnotation: "3"}, }, - Spec: v1beta1.PipelineRunSpec{ - PipelineRef: &v1beta1.PipelineRef{ + Spec: v1.PipelineRunSpec{ + PipelineRef: &v1.PipelineRef{ Name: 
"foo", }, }, @@ -294,23 +294,23 @@ func TestRetryAvailble(t *testing.T) { for _, test := range tests { t.Run(test.description, func(t *testing.T) { // test taskrun - tr := &v1beta1.TaskRun{ + tr := &v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ Annotations: test.annotations, }, } - trObj := objects.NewTaskRunObject(tr) + trObj := objects.NewTaskRunObjectV1(tr) got := RetryAvailable(trObj) if got != test.expected { t.Fatalf("RetryAvailble() got %v expected %v", got, test.expected) } // test pipelinerun - pr := &v1beta1.PipelineRun{ + pr := &v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Annotations: test.annotations, }, } - prObj := objects.NewPipelineRunObject(pr) + prObj := objects.NewPipelineRunObjectV1(pr) got = RetryAvailable(prObj) if got != test.expected { t.Fatalf("RetryAvailble() got %v expected %v", got, test.expected) @@ -326,13 +326,13 @@ func TestAddRetry(t *testing.T) { }{ { name: "add retry to taskrun", - object: objects.NewTaskRunObject(&v1beta1.TaskRun{ + object: objects.NewTaskRunObjectV1(&v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{Name: "mytaskrun"}, }), }, { name: "add retry to pipelinerun", - object: objects.NewPipelineRunObject(&v1beta1.PipelineRun{ + object: objects.NewPipelineRunObjectV1(&v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{Name: "mypipelinerun"}, }), }, diff --git a/pkg/chains/formats/all/all.go b/pkg/chains/formats/all/all.go index 5488537044..2d15efc893 100644 --- a/pkg/chains/formats/all/all.go +++ b/pkg/chains/formats/all/all.go @@ -19,4 +19,5 @@ import ( _ "github.com/tektoncd/chains/pkg/chains/formats/slsa/v1" _ "github.com/tektoncd/chains/pkg/chains/formats/slsa/v2alpha1" _ "github.com/tektoncd/chains/pkg/chains/formats/slsa/v2alpha2" + _ "github.com/tektoncd/chains/pkg/chains/formats/slsa/v2alpha3" ) diff --git a/pkg/chains/formats/format.go b/pkg/chains/formats/format.go index 6c75a5866a..fccb396853 100644 --- a/pkg/chains/formats/format.go +++ b/pkg/chains/formats/format.go @@ -34,6 +34,7 @@ const ( PayloadTypeSlsav1 
config.PayloadType = "slsa/v1" PayloadTypeSlsav2alpha1 config.PayloadType = "slsa/v2alpha1" PayloadTypeSlsav2alpha2 config.PayloadType = "slsa/v2alpha2" + PayloadTypeSlsav2alpha3 config.PayloadType = "slsa/v2alpha3" ) var ( @@ -42,6 +43,7 @@ var ( PayloadTypeSlsav1: {}, PayloadTypeSlsav2alpha1: {}, PayloadTypeSlsav2alpha2: {}, + PayloadTypeSlsav2alpha3: {}, } payloaderMap = map[config.PayloadType]PayloaderInit{} ) diff --git a/pkg/chains/formats/slsa/attest/attest.go b/pkg/chains/formats/slsa/attest/attest.go index e0bc380d38..2c1ce89409 100644 --- a/pkg/chains/formats/slsa/attest/attest.go +++ b/pkg/chains/formats/slsa/attest/attest.go @@ -23,6 +23,7 @@ import ( slsa "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v0.2" "github.com/tektoncd/chains/pkg/artifacts" "github.com/tektoncd/chains/pkg/chains/objects" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" corev1 "k8s.io/api/core/v1" ) @@ -40,7 +41,7 @@ type StepAttestation struct { Annotations map[string]string `json:"annotations"` } -func Step(step *v1beta1.Step, stepState *v1beta1.StepState) StepAttestation { +func Step(step *v1.Step, stepState *v1.StepState) StepAttestation { attestation := StepAttestation{} entrypoint := strings.Join(step.Command, " ") @@ -58,16 +59,20 @@ func Step(step *v1beta1.Step, stepState *v1beta1.StepState) StepAttestation { return attestation } -func Invocation(obj objects.TektonObject, params []v1beta1.Param, paramSpecs []v1beta1.ParamSpec) slsa.ProvenanceInvocation { - var source *v1beta1.RefSource - if p := obj.GetProvenance(); p != nil { - source = p.RefSource - } +func Invocation(obj objects.TektonObject, params []v1.Param, paramSpecs []v1.ParamSpec) slsa.ProvenanceInvocation { i := slsa.ProvenanceInvocation{ - ConfigSource: convertConfigSource(source), + ConfigSource: slsa.ConfigSource{}, + } + p := obj.GetProvenance() + if !p.IsNil() && !p.RefSourceIsNil() { + i.ConfigSource = slsa.ConfigSource{ + URI: 
p.GetRefSourceURI(), + Digest: p.GetRefSourceDigest(), + EntryPoint: p.GetRefSourceEntrypoint(), + } } - iParams := make(map[string]v1beta1.ParamValue) + iParams := make(map[string]v1.ParamValue) // get implicit parameters from defaults for _, p := range paramSpecs { @@ -108,15 +113,76 @@ func Invocation(obj objects.TektonObject, params []v1beta1.Param, paramSpecs []v return i } -func convertConfigSource(source *v1beta1.RefSource) slsa.ConfigSource { - if source == nil { - return slsa.ConfigSource{} +func StepV1Beta1(step *v1beta1.Step, stepState *v1beta1.StepState) StepAttestation { + attestation := StepAttestation{} + + entrypoint := strings.Join(step.Command, " ") + if step.Script != "" { + entrypoint = step.Script + } + attestation.EntryPoint = entrypoint + attestation.Arguments = step.Args + + env := map[string]interface{}{} + env["image"] = artifacts.OCIScheme + strings.TrimPrefix(stepState.ImageID, "docker-pullable://") + env["container"] = stepState.Name + attestation.Environment = env + + return attestation +} + +func InvocationV1Beta1(obj objects.TektonObject, params []v1beta1.Param, paramSpecs []v1beta1.ParamSpec) slsa.ProvenanceInvocation { + i := slsa.ProvenanceInvocation{ + ConfigSource: slsa.ConfigSource{}, + } + p := obj.GetProvenance() + if !p.IsNil() && !p.RefSourceIsNil() { + i.ConfigSource = slsa.ConfigSource{ + URI: p.GetRefSourceURI(), + Digest: p.GetRefSourceDigest(), + EntryPoint: p.GetRefSourceEntrypoint(), + } } - return slsa.ConfigSource{ - URI: source.URI, - Digest: source.Digest, - EntryPoint: source.EntryPoint, + + iParams := make(map[string]v1beta1.ParamValue) + + // get implicit parameters from defaults + for _, p := range paramSpecs { + if p.Default != nil { + iParams[p.Name] = *p.Default + } } + + // get explicit parameters + for _, p := range params { + iParams[p.Name] = p.Value + } + + i.Parameters = iParams + environment := map[string]map[string]string{} + + annotations := map[string]string{} + for name, value := range 
obj.GetAnnotations() { + // Ignore annotations that are not relevant to provenance information + if name == corev1.LastAppliedConfigAnnotation || strings.HasPrefix(name, "chains.tekton.dev/") { + continue + } + annotations[name] = value + } + if len(annotations) > 0 { + environment["annotations"] = annotations + } + + labels := obj.GetLabels() + if len(labels) > 0 { + environment["labels"] = labels + } + + if len(environment) > 0 { + i.Environment = environment + } + + return i } // supports the SPDX format which is recommended by in-toto diff --git a/pkg/chains/formats/slsa/extract/extract.go b/pkg/chains/formats/slsa/extract/extract.go index 7a2d093c87..189037267a 100644 --- a/pkg/chains/formats/slsa/extract/extract.go +++ b/pkg/chains/formats/slsa/extract/extract.go @@ -18,6 +18,7 @@ package extract import ( "context" + "encoding/json" "fmt" "strings" @@ -26,9 +27,11 @@ import ( "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common" "github.com/tektoncd/chains/internal/backport" "github.com/tektoncd/chains/pkg/artifacts" + extractv1beta1 "github.com/tektoncd/chains/pkg/chains/formats/slsa/extract/v1beta1" "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/artifact" "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/slsaconfig" "github.com/tektoncd/chains/pkg/chains/objects" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" "knative.dev/pkg/logging" ) @@ -46,10 +49,14 @@ func SubjectDigests(ctx context.Context, obj objects.TektonObject, slsaconfig *s var subjects []intoto.Subject switch obj.GetObject().(type) { - case *v1beta1.PipelineRun: + case *v1.PipelineRun: subjects = subjectsFromPipelineRun(ctx, obj, slsaconfig) - case *v1beta1.TaskRun: + case *v1.TaskRun: subjects = subjectsFromTektonObject(ctx, obj) + case *v1beta1.PipelineRun: + subjects = extractv1beta1.SubjectsFromPipelineRunV1Beta1(ctx, obj, slsaconfig) + case *v1beta1.TaskRun: + subjects = 
extractv1beta1.SubjectsFromTektonObjectV1Beta1(ctx, obj) } return subjects @@ -67,7 +74,7 @@ func subjectsFromPipelineRun(ctx context.Context, obj objects.TektonObject, slsa // If deep inspection is enabled, collect subjects from child taskruns var result []intoto.Subject - pro := obj.(*objects.PipelineRunObject) + pro := obj.(*objects.PipelineRunObjectV1) pSpec := pro.Status.PipelineSpec if pSpec != nil { @@ -135,42 +142,57 @@ func subjectsFromTektonObject(ctx context.Context, obj objects.TektonObject) []i }) } - // Check if object is a Taskrun, if so search for images used in PipelineResources - // Otherwise object is a PipelineRun, where Pipelineresources are not relevant. - // PipelineResources have been deprecated so their support has been left out of - // the POC for TEP-84 - // More info: https://tekton.dev/docs/pipelines/resources/ - tr, ok := obj.GetObject().(*v1beta1.TaskRun) - if !ok || tr.Spec.Resources == nil { - return subjects - } + if trV1, ok := obj.GetObject().(*v1.TaskRun); ok { + serializedResources := trV1.Annotations["tekton.dev/v1beta1-spec-resources"] + var resources v1beta1.TaskRunResources //nolint:staticcheck + shouldReplace := false + if err := json.Unmarshal([]byte(serializedResources), &resources); err == nil { + shouldReplace = true - // go through resourcesResult - for _, output := range tr.Spec.Resources.Outputs { - name := output.Name - if output.PipelineResourceBinding.ResourceSpec == nil { - continue } - // similarly, we could do this for other pipeline resources or whatever thing replaces them - if output.PipelineResourceBinding.ResourceSpec.Type == backport.PipelineResourceTypeImage { - // get the url and digest, and save as a subject - var url, digest string - for _, s := range tr.Status.ResourcesResult { - if s.ResourceName == name { - if s.Key == "url" { - url = s.Value - } - if s.Key == "digest" { - digest = s.Value + trV1Beta1 := &v1beta1.TaskRun{} //nolint:staticcheck + if err := trV1Beta1.ConvertFrom(ctx, trV1); err == 
nil { + if shouldReplace { + trV1Beta1.Spec.Resources = &resources //nolint:staticcheck + } + + // Check if object is a Taskrun, if so search for images used in PipelineResources + // Otherwise object is a PipelineRun, where Pipelineresources are not relevant. + // PipelineResources have been deprecated so their support has been left out of + // the POC for TEP-84 + // More info: https://tekton.dev/docs/pipelines/resources/ + if !ok || trV1Beta1.Spec.Resources == nil { //nolint:staticcheck + return subjects + } + + // go through resourcesResult + for _, output := range trV1Beta1.Spec.Resources.Outputs { //nolint:staticcheck + name := output.Name + if output.PipelineResourceBinding.ResourceSpec == nil { + continue + } + // similarly, we could do this for other pipeline resources or whatever thing replaces them + if output.PipelineResourceBinding.ResourceSpec.Type == backport.PipelineResourceTypeImage { + // get the url and digest, and save as a subject + var url, digest string + for _, s := range trV1Beta1.Status.ResourcesResult { + if s.ResourceName == name { + if s.Key == "url" { + url = s.Value + } + if s.Key == "digest" { + digest = s.Value + } + } } + subjects = artifact.AppendSubjects(subjects, intoto.Subject{ + Name: url, + Digest: common.DigestSet{ + "sha256": strings.TrimPrefix(digest, "sha256:"), + }, + }) } } - subjects = artifact.AppendSubjects(subjects, intoto.Subject{ - Name: url, - Digest: common.DigestSet{ - "sha256": strings.TrimPrefix(digest, "sha256:"), - }, - }) } } diff --git a/pkg/chains/formats/slsa/extract/extract_test.go b/pkg/chains/formats/slsa/extract/extract_test.go index 9c7b164679..583a727170 100644 --- a/pkg/chains/formats/slsa/extract/extract_test.go +++ b/pkg/chains/formats/slsa/extract/extract_test.go @@ -28,7 +28,7 @@ import ( "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/compare" "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/slsaconfig" "github.com/tektoncd/chains/pkg/chains/objects" - 
"github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" logtesting "knative.dev/pkg/logging/testing" ) @@ -208,7 +208,7 @@ func TestPipelineRunObserveModeForSubjects(t *testing.T) { { name: "deep inspection enabled: pipelinerun and taskrun have duplicated results", pro: createProWithTaskRunResults( - createProWithPipelineResults(map[string]string{artifactURL1: "sha256:" + artifactDigest1}).(*objects.PipelineRunObject), + createProWithPipelineResults(map[string]string{artifactURL1: "sha256:" + artifactDigest1}).(*objects.PipelineRunObjectV1), []artifact{ {uri: artifactURL1, digest: "sha256:" + artifactDigest1}, }), @@ -228,7 +228,7 @@ func TestPipelineRunObserveModeForSubjects(t *testing.T) { { name: "deep inspection enabled: pipelinerun and taskrun have different results", pro: createProWithTaskRunResults( - createProWithPipelineResults(map[string]string{artifactURL1: "sha256:" + artifactDigest1}).(*objects.PipelineRunObject), + createProWithPipelineResults(map[string]string{artifactURL1: "sha256:" + artifactDigest1}).(*objects.PipelineRunObjectV1), []artifact{ {uri: artifactURL2, digest: "sha256:" + artifactDigest2}, }), @@ -272,21 +272,21 @@ func TestPipelineRunObserveModeForSubjects(t *testing.T) { } func createTaskRunObjectWithResults(results map[string]string) objects.TektonObject { - trResults := []v1beta1.TaskRunResult{} + trResults := []v1.TaskRunResult{} prefix := 0 for url, digest := range results { trResults = append(trResults, - v1beta1.TaskRunResult{Name: fmt.Sprintf("%v_IMAGE_DIGEST", prefix), Value: *v1beta1.NewStructuredValues(digest)}, - v1beta1.TaskRunResult{Name: fmt.Sprintf("%v_IMAGE_URL", prefix), Value: *v1beta1.NewStructuredValues(url)}, + v1.TaskRunResult{Name: fmt.Sprintf("%v_IMAGE_DIGEST", prefix), Value: *v1.NewStructuredValues(digest)}, + v1.TaskRunResult{Name: fmt.Sprintf("%v_IMAGE_URL", prefix), Value: *v1.NewStructuredValues(url)}, 
) prefix++ } - return objects.NewTaskRunObject( - &v1beta1.TaskRun{ - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - TaskRunResults: trResults, + return objects.NewTaskRunObjectV1( + &v1.TaskRun{ + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Results: trResults, }, }, }, @@ -294,21 +294,21 @@ func createTaskRunObjectWithResults(results map[string]string) objects.TektonObj } func createProWithPipelineResults(results map[string]string) objects.TektonObject { - prResults := []v1beta1.PipelineRunResult{} + prResults := []v1.PipelineRunResult{} prefix := 0 for url, digest := range results { prResults = append(prResults, - v1beta1.PipelineRunResult{Name: fmt.Sprintf("%v_IMAGE_DIGEST", prefix), Value: *v1beta1.NewStructuredValues(digest)}, - v1beta1.PipelineRunResult{Name: fmt.Sprintf("%v_IMAGE_URL", prefix), Value: *v1beta1.NewStructuredValues(url)}, + v1.PipelineRunResult{Name: fmt.Sprintf("%v_IMAGE_DIGEST", prefix), Value: *v1.NewStructuredValues(digest)}, + v1.PipelineRunResult{Name: fmt.Sprintf("%v_IMAGE_URL", prefix), Value: *v1.NewStructuredValues(url)}, ) prefix++ } - return objects.NewPipelineRunObject( - &v1beta1.PipelineRun{ - Status: v1beta1.PipelineRunStatus{ - PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ - PipelineResults: prResults, + return objects.NewPipelineRunObjectV1( + &v1.PipelineRun{ + Status: v1.PipelineRunStatus{ + PipelineRunStatusFields: v1.PipelineRunStatusFields{ + Results: prResults, }, }, }, @@ -323,19 +323,19 @@ type artifact struct { // create a child taskrun for each result // //nolint:all -func createProWithTaskRunResults(pro *objects.PipelineRunObject, results []artifact) objects.TektonObject { +func createProWithTaskRunResults(pro *objects.PipelineRunObjectV1, results []artifact) objects.TektonObject { if pro == nil { - pro = objects.NewPipelineRunObject(&v1beta1.PipelineRun{ - Status: v1beta1.PipelineRunStatus{ - PipelineRunStatusFields: 
v1beta1.PipelineRunStatusFields{ - PipelineSpec: &v1beta1.PipelineSpec{}, + pro = objects.NewPipelineRunObjectV1(&v1.PipelineRun{ + Status: v1.PipelineRunStatus{ + PipelineRunStatusFields: v1.PipelineRunStatusFields{ + PipelineSpec: &v1.PipelineSpec{}, }, }, }) } if pro.Status.PipelineSpec == nil { - pro.Status.PipelineSpec = &v1beta1.PipelineSpec{} + pro.Status.PipelineSpec = &v1.PipelineSpec{} } // create child taskruns with results and pipelinetask @@ -343,21 +343,21 @@ func createProWithTaskRunResults(pro *objects.PipelineRunObject, results []artif for _, r := range results { // simulate child taskruns pipelineTaskName := fmt.Sprintf("task-%d", prefix) - tr := &v1beta1.TaskRun{ + tr := &v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{Labels: map[string]string{objects.PipelineTaskLabel: pipelineTaskName}}, - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ CompletionTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, time.UTC)}, - TaskRunResults: []v1beta1.TaskRunResult{ - {Name: fmt.Sprintf("%v_IMAGE_DIGEST", prefix), Value: *v1beta1.NewStructuredValues(r.digest)}, - {Name: fmt.Sprintf("%v_IMAGE_URL", prefix), Value: *v1beta1.NewStructuredValues(r.uri)}, + Results: []v1.TaskRunResult{ + {Name: fmt.Sprintf("%v_IMAGE_DIGEST", prefix), Value: *v1.NewStructuredValues(r.digest)}, + {Name: fmt.Sprintf("%v_IMAGE_URL", prefix), Value: *v1.NewStructuredValues(r.uri)}, }, }, }, } pro.AppendTaskRun(tr) - pro.Status.PipelineSpec.Tasks = append(pro.Status.PipelineSpec.Tasks, v1beta1.PipelineTask{Name: pipelineTaskName}) + pro.Status.PipelineSpec.Tasks = append(pro.Status.PipelineSpec.Tasks, v1.PipelineTask{Name: pipelineTaskName}) prefix++ } diff --git a/pkg/chains/formats/slsa/extract/v1beta1/extract.go b/pkg/chains/formats/slsa/extract/v1beta1/extract.go new file mode 100644 index 0000000000..84de33eea6 --- /dev/null +++ 
b/pkg/chains/formats/slsa/extract/v1beta1/extract.go @@ -0,0 +1,192 @@ +/* +Copyright 2022 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package extractv1beta1 + +import ( + "context" + "fmt" + "strings" + + "github.com/google/go-containerregistry/pkg/name" + intoto "github.com/in-toto/in-toto-golang/in_toto" + "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common" + "github.com/tektoncd/chains/internal/backport" + "github.com/tektoncd/chains/pkg/artifacts" + "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/artifact" + "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/slsaconfig" + "github.com/tektoncd/chains/pkg/chains/objects" + "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + "knative.dev/pkg/logging" +) + +// SubjectDigests returns software artifacts produced from the TaskRun/PipelineRun object +// in the form of standard subject field of intoto statement. +// The type hinting fields expected in results help identify the generated software artifacts. +// Valid type hinting fields must: +// - have suffix `IMAGE_URL` & `IMAGE_DIGEST` or `ARTIFACT_URI` & `ARTIFACT_DIGEST` pair. +// - the `*_DIGEST` field must be in the format of "<algorithm>:<actual_sha>" where the algorithm must be "sha256" and actual sha must be valid per https://github.com/opencontainers/image-spec/blob/main/descriptor.md#sha-256. +// - the `*_URL` or `*_URI` fields cannot be empty.
+// +//nolint:all +func SubjectDigests(ctx context.Context, obj objects.TektonObject, slsaconfig *slsaconfig.SlsaConfig) []intoto.Subject { + var subjects []intoto.Subject + + switch obj.GetObject().(type) { + case *v1beta1.PipelineRun: + subjects = SubjectsFromPipelineRunV1Beta1(ctx, obj, slsaconfig) + case *v1beta1.TaskRun: + subjects = SubjectsFromTektonObjectV1Beta1(ctx, obj) + } + + return subjects +} + +func SubjectsFromPipelineRunV1Beta1(ctx context.Context, obj objects.TektonObject, slsaconfig *slsaconfig.SlsaConfig) []intoto.Subject { + prSubjects := SubjectsFromTektonObjectV1Beta1(ctx, obj) + + // If deep inspection is not enabled, just return subjects observed on the pipelinerun level + if !slsaconfig.DeepInspectionEnabled { + return prSubjects + } + + logger := logging.FromContext(ctx) + // If deep inspection is enabled, collect subjects from child taskruns + var result []intoto.Subject + + pro := obj.(*objects.PipelineRunObjectV1Beta1) + + pSpec := pro.Status.PipelineSpec + if pSpec != nil { + pipelineTasks := append(pSpec.Tasks, pSpec.Finally...) + for _, t := range pipelineTasks { + tr := pro.GetTaskRunFromTask(t.Name) + // Ignore Tasks that did not execute during the PipelineRun. + if tr == nil || tr.Status.CompletionTime == nil { + logger.Infof("taskrun status not found for task %s", t.Name) + continue + } + trSubjects := SubjectsFromTektonObjectV1Beta1(ctx, tr) + result = artifact.AppendSubjects(result, trSubjects...) + } + } + + // also add subjects observed from pipelinerun level with duplication removed + result = artifact.AppendSubjects(result, prSubjects...) 
+ + return result +} + +func SubjectsFromTektonObjectV1Beta1(ctx context.Context, obj objects.TektonObject) []intoto.Subject { + logger := logging.FromContext(ctx) + var subjects []intoto.Subject + + imgs := artifacts.ExtractOCIImagesFromResults(ctx, obj) + for _, i := range imgs { + if d, ok := i.(name.Digest); ok { + subjects = artifact.AppendSubjects(subjects, intoto.Subject{ + Name: d.Repository.Name(), + Digest: common.DigestSet{ + "sha256": strings.TrimPrefix(d.DigestStr(), "sha256:"), + }, + }) + } + } + + sts := artifacts.ExtractSignableTargetFromResults(ctx, obj) + for _, obj := range sts { + splits := strings.Split(obj.Digest, ":") + if len(splits) != 2 { + logger.Errorf("Digest %s should be in the format of: algorthm:abc", obj.Digest) + continue + } + subjects = artifact.AppendSubjects(subjects, intoto.Subject{ + Name: obj.URI, + Digest: common.DigestSet{ + splits[0]: splits[1], + }, + }) + } + + ssts := artifacts.ExtractStructuredTargetFromResults(ctx, obj, artifacts.ArtifactsOutputsResultName) + for _, s := range ssts { + splits := strings.Split(s.Digest, ":") + alg := splits[0] + digest := splits[1] + subjects = artifact.AppendSubjects(subjects, intoto.Subject{ + Name: s.URI, + Digest: common.DigestSet{ + alg: digest, + }, + }) + } + + // Check if object is a Taskrun, if so search for images used in PipelineResources + // Otherwise object is a PipelineRun, where Pipelineresources are not relevant. 
+ // PipelineResources have been deprecated so their support has been left out of + // the POC for TEP-84 + // More info: https://tekton.dev/docs/pipelines/resources/ + tr, ok := obj.GetObject().(*v1beta1.TaskRun) //nolint:staticcheck + if !ok || tr.Spec.Resources == nil { //nolint:staticcheck + return subjects + } + + // go through resourcesResult + for _, output := range tr.Spec.Resources.Outputs { //nolint:staticcheck + name := output.Name + if output.PipelineResourceBinding.ResourceSpec == nil { + continue + } + // similarly, we could do this for other pipeline resources or whatever thing replaces them + if output.PipelineResourceBinding.ResourceSpec.Type == backport.PipelineResourceTypeImage { + // get the url and digest, and save as a subject + var url, digest string + for _, s := range tr.Status.ResourcesResult { + if s.ResourceName == name { + if s.Key == "url" { + url = s.Value + } + if s.Key == "digest" { + digest = s.Value + } + } + } + subjects = artifact.AppendSubjects(subjects, intoto.Subject{ + Name: url, + Digest: common.DigestSet{ + "sha256": strings.TrimPrefix(digest, "sha256:"), + }, + }) + } + } + return subjects +} + +// RetrieveAllArtifactURIs returns all the URIs of the software artifacts produced from the run object. +// - It first extracts intoto subjects from run object results and converts the subjects +// to a slice of string URIs in the format of "NAME" + "@" + "ALGORITHM" + ":" + "DIGEST". +// - If no subjects could be extracted from results, then an empty slice is returned. 
+func RetrieveAllArtifactURIs(ctx context.Context, obj objects.TektonObject, deepInspectionEnabled bool) []string { + result := []string{} + subjects := SubjectDigests(ctx, obj, &slsaconfig.SlsaConfig{DeepInspectionEnabled: deepInspectionEnabled}) + + for _, s := range subjects { + for algo, digest := range s.Digest { + result = append(result, fmt.Sprintf("%s@%s:%s", s.Name, algo, digest)) + } + } + return result +} diff --git a/pkg/chains/formats/slsa/extract/v1beta1/extract_test.go b/pkg/chains/formats/slsa/extract/v1beta1/extract_test.go new file mode 100644 index 0000000000..fb89f67afc --- /dev/null +++ b/pkg/chains/formats/slsa/extract/v1beta1/extract_test.go @@ -0,0 +1,365 @@ +/* +Copyright 2022 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package extractv1beta1_test + +import ( + "fmt" + "testing" + "time" + + "github.com/google/go-cmp/cmp" + "github.com/google/go-cmp/cmp/cmpopts" + intoto "github.com/in-toto/in-toto-golang/in_toto" + extractv1beta1 "github.com/tektoncd/chains/pkg/chains/formats/slsa/extract/v1beta1" + "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/compare" + "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/slsaconfig" + "github.com/tektoncd/chains/pkg/chains/objects" + "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + logtesting "knative.dev/pkg/logging/testing" +) + +const ( + artifactURL1 = "gcr.io/test/kaniko-chains1" + artifactDigest1 = "a2e500bebfe16cf12fc56316ba72c645e1d29054541dc1ab6c286197434170a9" + artifactURL2 = "us-central1-maven.pkg.dev/test/java" + artifactDigest2 = "b2e500bebfe16cf12fc56316ba72c645e1d29054541dc1ab6c286197434170a9" +) + +func TestSubjectDigestsAndRetrieveAllArtifactURIs(t *testing.T) { + var tests = []struct { + name string + // a map of url:digest pairs for type hinting results + results map[string]string + wantSubjects []intoto.Subject + wantFullURLs []string + }{ + { + name: "valid type hinting result fields", + results: map[string]string{ + artifactURL1: "sha256:" + artifactDigest1, + artifactURL2: "sha256:" + artifactDigest2, + }, + wantSubjects: []intoto.Subject{ + { + Name: artifactURL1, + Digest: map[string]string{ + "sha256": artifactDigest1, + }, + }, + { + Name: artifactURL2, + Digest: map[string]string{ + "sha256": artifactDigest2, + }, + }, + }, + wantFullURLs: []string{ + fmt.Sprintf("%s@sha256:%s", artifactURL1, artifactDigest1), + fmt.Sprintf("%s@sha256:%s", artifactURL2, artifactDigest2), + }, + }, + { + name: "invalid/missing digest algorithm name", + results: map[string]string{ + artifactURL1: "sha1:" + artifactDigest1, + artifactURL2: artifactDigest2, + }, + wantSubjects: nil, + wantFullURLs: []string{}, + }, + { + name: "invalid digest sha", 
+ results: map[string]string{ + artifactURL1: "sha256:a123", + }, + wantSubjects: nil, + wantFullURLs: []string{}, + }, + { + name: "invalid url value", + results: map[string]string{ + "": "sha256:" + artifactDigest1, + }, + wantSubjects: nil, + wantFullURLs: []string{}, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + ctx := logtesting.TestContextWithLogger(t) + // test both taskrun object and pipelinerun object + runObjects := []objects.TektonObject{ + createTaskRunObjectV1Beta1WithResults(tc.results), + createProWithPipelineResults(tc.results), + } + for _, o := range runObjects { + gotSubjects := extractv1beta1.SubjectDigests(ctx, o, &slsaconfig.SlsaConfig{DeepInspectionEnabled: false}) + if diff := cmp.Diff(tc.wantSubjects, gotSubjects, compare.SubjectCompareOption()); diff != "" { + t.Errorf("Wrong subjects extracted, diff=%s", diff) + } + + gotURIs := extractv1beta1.RetrieveAllArtifactURIs(ctx, o, false) + if diff := cmp.Diff(tc.wantFullURLs, gotURIs, cmpopts.SortSlices(func(x, y string) bool { return x < y })); diff != "" { + t.Errorf("Wrong URIs extracted, diff=%s", diff) + } + } + + }) + } +} + +func TestPipelineRunObserveModeForSubjects(t *testing.T) { + var tests = []struct { + name string + pro objects.TektonObject + deepInspectionEnabled bool + wantSubjects []intoto.Subject + wantFullURLs []string + }{ + { + name: "deep inspection disabled", + pro: createProWithPipelineResults(map[string]string{artifactURL1: "sha256:" + artifactDigest1}), + deepInspectionEnabled: false, + wantSubjects: []intoto.Subject{ + { + Name: artifactURL1, + Digest: map[string]string{ + "sha256": artifactDigest1, + }, + }, + }, + wantFullURLs: []string{fmt.Sprintf("%s@sha256:%s", artifactURL1, artifactDigest1)}, + }, + { + name: "deep inspection enabled: no duplication", + pro: createProWithTaskRunResults(nil, []artifact{{uri: artifactURL2, digest: "sha256:" + artifactDigest2}}), + deepInspectionEnabled: true, + wantSubjects: []intoto.Subject{ + { 
+ Name: artifactURL2, + Digest: map[string]string{ + "sha256": artifactDigest2, + }, + }, + }, + wantFullURLs: []string{fmt.Sprintf("%s@sha256:%s", artifactURL2, artifactDigest2)}, + }, + { + name: "deep inspection enabled: 2 tasks have same uri with different sha256 digests", + pro: createProWithTaskRunResults(nil, []artifact{ + {uri: artifactURL2, digest: "sha256:" + artifactDigest1}, + {uri: artifactURL2, digest: "sha256:" + artifactDigest2}, + }), + deepInspectionEnabled: true, + wantSubjects: []intoto.Subject{ + { + Name: artifactURL2, + Digest: map[string]string{ + "sha256": artifactDigest2, + }, + }, + { + Name: artifactURL2, + Digest: map[string]string{ + "sha256": artifactDigest1, + }, + }, + }, + wantFullURLs: []string{ + fmt.Sprintf("%s@sha256:%s", artifactURL2, artifactDigest1), + fmt.Sprintf("%s@sha256:%s", artifactURL2, artifactDigest2), + }, + }, + { + name: "deep inspection enabled: 2 taskruns have same uri with same sha256 digests", + pro: createProWithTaskRunResults(nil, []artifact{ + {uri: artifactURL2, digest: "sha256:" + artifactDigest2}, + {uri: artifactURL2, digest: "sha256:" + artifactDigest2}, + }), + deepInspectionEnabled: true, + wantSubjects: []intoto.Subject{ + { + Name: artifactURL2, + Digest: map[string]string{ + "sha256": artifactDigest2, + }, + }, + }, + wantFullURLs: []string{ + fmt.Sprintf("%s@sha256:%s", artifactURL2, artifactDigest2), + }, + }, + { + name: "deep inspection enabled: pipelinerun and taskrun have duplicated results", + pro: createProWithTaskRunResults( + createProWithPipelineResults(map[string]string{artifactURL1: "sha256:" + artifactDigest1}).(*objects.PipelineRunObjectV1Beta1), + []artifact{ + {uri: artifactURL1, digest: "sha256:" + artifactDigest1}, + }), + deepInspectionEnabled: true, + wantSubjects: []intoto.Subject{ + { + Name: artifactURL1, + Digest: map[string]string{ + "sha256": artifactDigest1, + }, + }, + }, + wantFullURLs: []string{ + fmt.Sprintf("%s@sha256:%s", artifactURL1, artifactDigest1), + }, + }, 
+ { + name: "deep inspection enabled: pipelinerun and taskrun have different results", + pro: createProWithTaskRunResults( + createProWithPipelineResults(map[string]string{artifactURL1: "sha256:" + artifactDigest1}).(*objects.PipelineRunObjectV1Beta1), + []artifact{ + {uri: artifactURL2, digest: "sha256:" + artifactDigest2}, + }), + deepInspectionEnabled: true, + wantSubjects: []intoto.Subject{ + { + Name: artifactURL1, + Digest: map[string]string{ + "sha256": artifactDigest1, + }, + }, + { + Name: artifactURL2, + Digest: map[string]string{ + "sha256": artifactDigest2, + }, + }, + }, + wantFullURLs: []string{ + fmt.Sprintf("%s@sha256:%s", artifactURL1, artifactDigest1), + fmt.Sprintf("%s@sha256:%s", artifactURL2, artifactDigest2), + }, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + ctx := logtesting.TestContextWithLogger(t) + + gotSubjects := extractv1beta1.SubjectDigests(ctx, tc.pro, &slsaconfig.SlsaConfig{DeepInspectionEnabled: tc.deepInspectionEnabled}) + if diff := cmp.Diff(tc.wantSubjects, gotSubjects, compare.SubjectCompareOption()); diff != "" { + t.Errorf("Wrong subjects extracted, diff=%s, %s", diff, gotSubjects) + } + + gotURIs := extractv1beta1.RetrieveAllArtifactURIs(ctx, tc.pro, tc.deepInspectionEnabled) + if diff := cmp.Diff(tc.wantFullURLs, gotURIs, cmpopts.SortSlices(func(x, y string) bool { return x < y })); diff != "" { + t.Errorf("Wrong URIs extracted, diff=%s", diff) + } + }) + } +} + +func createTaskRunObjectV1Beta1WithResults(results map[string]string) objects.TektonObject { + trResults := []v1beta1.TaskRunResult{} + prefix := 0 + for url, digest := range results { + trResults = append(trResults, + v1beta1.TaskRunResult{Name: fmt.Sprintf("%v_IMAGE_DIGEST", prefix), Value: *v1beta1.NewStructuredValues(digest)}, + v1beta1.TaskRunResult{Name: fmt.Sprintf("%v_IMAGE_URL", prefix), Value: *v1beta1.NewStructuredValues(url)}, + ) + prefix++ + } + + return objects.NewTaskRunObjectV1Beta1( + &v1beta1.TaskRun{ 
//nolint:staticcheck + Status: v1beta1.TaskRunStatus{ + TaskRunStatusFields: v1beta1.TaskRunStatusFields{ + TaskRunResults: trResults, + }, + }, + }, + ) +} + +func createProWithPipelineResults(results map[string]string) objects.TektonObject { + prResults := []v1beta1.PipelineRunResult{} + prefix := 0 + for url, digest := range results { + prResults = append(prResults, + v1beta1.PipelineRunResult{Name: fmt.Sprintf("%v_IMAGE_DIGEST", prefix), Value: *v1beta1.NewStructuredValues(digest)}, + v1beta1.PipelineRunResult{Name: fmt.Sprintf("%v_IMAGE_URL", prefix), Value: *v1beta1.NewStructuredValues(url)}, + ) + prefix++ + } + + return objects.NewPipelineRunObjectV1Beta1( + &v1beta1.PipelineRun{ //nolint:staticcheck + Status: v1beta1.PipelineRunStatus{ + PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ + PipelineResults: prResults, + }, + }, + }, + ) +} + +type artifact struct { + uri string + digest string +} + +// create a child taskrun for each result +// +//nolint:all +func createProWithTaskRunResults(pro *objects.PipelineRunObjectV1Beta1, results []artifact) objects.TektonObject { + if pro == nil { + pro = objects.NewPipelineRunObjectV1Beta1(&v1beta1.PipelineRun{ //nolint:staticcheck + Status: v1beta1.PipelineRunStatus{ + PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ + PipelineSpec: &v1beta1.PipelineSpec{}, + }, + }, + }) + } + + if pro.Status.PipelineSpec == nil { + pro.Status.PipelineSpec = &v1beta1.PipelineSpec{} + } + + // create child taskruns with results and pipelinetask + prefix := 0 + for _, r := range results { + // simulate child taskruns + pipelineTaskName := fmt.Sprintf("task-%d", prefix) + tr := &v1beta1.TaskRun{ //nolint:staticcheck + ObjectMeta: metav1.ObjectMeta{Labels: map[string]string{objects.PipelineTaskLabel: pipelineTaskName}}, + Status: v1beta1.TaskRunStatus{ + TaskRunStatusFields: v1beta1.TaskRunStatusFields{ + CompletionTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, time.UTC)}, + 
TaskRunResults: []v1beta1.TaskRunResult{ + {Name: fmt.Sprintf("%v_IMAGE_DIGEST", prefix), Value: *v1beta1.NewStructuredValues(r.digest)}, + {Name: fmt.Sprintf("%v_IMAGE_URL", prefix), Value: *v1beta1.NewStructuredValues(r.uri)}, + }, + }, + }, + } + + pro.AppendTaskRun(tr) + pro.Status.PipelineSpec.Tasks = append(pro.Status.PipelineSpec.Tasks, v1beta1.PipelineTask{Name: pipelineTaskName}) + prefix++ + } + + return pro +} diff --git a/pkg/chains/formats/slsa/internal/artifact/v1beta1/append.go b/pkg/chains/formats/slsa/internal/artifact/v1beta1/append.go new file mode 100644 index 0000000000..ec6ece7424 --- /dev/null +++ b/pkg/chains/formats/slsa/internal/artifact/v1beta1/append.go @@ -0,0 +1,128 @@ +/* +Copyright 2023 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +package artifactv1beta1 + +import ( + intoto "github.com/in-toto/in-toto-golang/in_toto" + "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common" +) + +// AppendSubjects adds new subject(s) to the original slice. +// It merges the new item with an existing entry if they are duplicate instead of append. 
+func AppendSubjects(original []intoto.Subject, items ...intoto.Subject) []intoto.Subject { + var artifacts []artifact + for _, s := range original { + artifacts = append(artifacts, subjectToArtifact(s)) + } + + for _, s := range items { + artifacts = addArtifact(artifacts, subjectToArtifact(s)) + } + + var result []intoto.Subject + for _, a := range artifacts { + result = append(result, artifactToSubject(a)) + } + return result +} + +// AppendMaterials adds new material(s) to the original slice. +// It merges the new item with an existing entry if they are duplicate instead of append. +func AppendMaterials(original []common.ProvenanceMaterial, items ...common.ProvenanceMaterial) []common.ProvenanceMaterial { + var artifacts []artifact + for _, m := range original { + artifacts = append(artifacts, materialToArtifact(m)) + } + + for _, m := range items { + artifacts = addArtifact(artifacts, materialToArtifact(m)) + } + + var result []common.ProvenanceMaterial + for _, a := range artifacts { + result = append(result, artifactToMaterial(a)) + } + return result +} + +type artifact struct { + name string + digestSet map[string]string +} + +// addArtifact adds a new artifact item to the original slice. +func addArtifact(original []artifact, item artifact) []artifact { + + for i, a := range original { + // if there is an equivalent entry in the original slice, merge the + // artifact's DigestSet into the existing entry's DigestSet. + if artifactEqual(a, item) { + mergeMaps(original[i].digestSet, item.digestSet) + return original + } + } + + original = append(original, item) + return original +} + +// two artifacts are equal if and only if they have same name and have at least +// one common algorithm and hex value.
+func artifactEqual(x, y artifact) bool { + if x.name != y.name { + return false + } + for algo, hex := range x.digestSet { + if y.digestSet[algo] == hex { + return true + } + } + return false +} + +func mergeMaps(m1 map[string]string, m2 map[string]string) { + for k, v := range m2 { + m1[k] = v + } +} + +func subjectToArtifact(s intoto.Subject) artifact { + return artifact{ + name: s.Name, + digestSet: s.Digest, + } +} + +func artifactToSubject(a artifact) intoto.Subject { + return intoto.Subject{ + Name: a.name, + Digest: a.digestSet, + } +} + +func materialToArtifact(m common.ProvenanceMaterial) artifact { + return artifact{ + name: m.URI, + digestSet: m.Digest, + } +} + +func artifactToMaterial(a artifact) common.ProvenanceMaterial { + return common.ProvenanceMaterial{ + URI: a.name, + Digest: a.digestSet, + } +} diff --git a/pkg/chains/formats/slsa/internal/artifact/v1beta1/append_test.go b/pkg/chains/formats/slsa/internal/artifact/v1beta1/append_test.go new file mode 100644 index 0000000000..d335ee623e --- /dev/null +++ b/pkg/chains/formats/slsa/internal/artifact/v1beta1/append_test.go @@ -0,0 +1,320 @@ +/* +Copyright 2023 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +package artifactv1beta1 + +import ( + "testing" + + "github.com/google/go-cmp/cmp" + intoto "github.com/in-toto/in-toto-golang/in_toto" + "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common" +) + +func TestAppendSubjects(t *testing.T) { + tests := []struct { + name string + original []intoto.Subject + toAdd []intoto.Subject + want []intoto.Subject + }{{ + name: "add a completely new subject", + original: []intoto.Subject{ + { + Name: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "a", + }, + }, { + Name: "gcr.io/cloud-marketplace-containers/google/bazel", + Digest: common.DigestSet{ + "sha256": "b", + }, + }, + }, + toAdd: []intoto.Subject{ + { + Name: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/sidecar-git-init", + Digest: common.DigestSet{ + "sha256": "c", + }, + }, + }, + want: []intoto.Subject{ + { + Name: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "a", + }, + }, { + Name: "gcr.io/cloud-marketplace-containers/google/bazel", + Digest: common.DigestSet{ + "sha256": "b", + }, + }, { + Name: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/sidecar-git-init", + Digest: common.DigestSet{ + "sha256": "c", + }, + }, + }, + }, { + name: "add a subject with same uri and digest", + original: []intoto.Subject{ + { + Name: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "a", + }, + }, + }, + toAdd: []intoto.Subject{ + { + Name: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "a", + }, + }, + }, + want: []intoto.Subject{ + { + Name: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "a", + }, + }, + }, + }, { + name: "add a subject with same uri but different digest", + original: []intoto.Subject{ + { + Name: 
"gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "a", + }, + }, + }, + toAdd: []intoto.Subject{ + { + Name: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b", + }, + }, + }, + want: []intoto.Subject{ + { + Name: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "a", + }, + }, { + Name: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b", + }, + }, + }, + }, + { + name: "add a subject with same uri, one common digest and one different digest", + original: []intoto.Subject{ + { + Name: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "a", + "sha224": "b", + }, + }, + }, + toAdd: []intoto.Subject{ + { + Name: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "a", + "sha512": "c", + }, + }, + }, + want: []intoto.Subject{ + { + Name: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "a", + "sha224": "b", + "sha512": "c", + }, + }, + }, + }, + } + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + got := AppendSubjects(tc.original, tc.toAdd...) 
+ + if diff := cmp.Diff(tc.want, got); diff != "" { + t.Errorf("materials(): -want +got: %s", diff) + } + }) + } +} + +func TestAppendMaterials(t *testing.T) { + tests := []struct { + name string + original []common.ProvenanceMaterial + toAdd []common.ProvenanceMaterial + want []common.ProvenanceMaterial + }{{ + name: "add a completely new material", + original: []common.ProvenanceMaterial{ + { + URI: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "a", + }, + }, { + URI: "gcr.io/cloud-marketplace-containers/google/bazel", + Digest: common.DigestSet{ + "sha256": "b", + }, + }, + }, + toAdd: []common.ProvenanceMaterial{ + { + URI: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/sidecar-git-init", + Digest: common.DigestSet{ + "sha256": "c", + }, + }, + }, + want: []common.ProvenanceMaterial{ + { + URI: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "a", + }, + }, { + URI: "gcr.io/cloud-marketplace-containers/google/bazel", + Digest: common.DigestSet{ + "sha256": "b", + }, + }, { + URI: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/sidecar-git-init", + Digest: common.DigestSet{ + "sha256": "c", + }, + }, + }, + }, { + name: "add a material with same uri and digest", + original: []common.ProvenanceMaterial{ + { + URI: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "a", + }, + }, + }, + toAdd: []common.ProvenanceMaterial{ + { + URI: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "a", + }, + }, + }, + want: []common.ProvenanceMaterial{ + { + URI: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "a", + }, + }, + }, + }, { + name: "add a material with same uri but different digest", + original: []common.ProvenanceMaterial{ + { + URI: 
"gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "a", + }, + }, + }, + toAdd: []common.ProvenanceMaterial{ + { + URI: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b", + }, + }, + }, + want: []common.ProvenanceMaterial{ + { + URI: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "a", + }, + }, { + URI: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b", + }, + }, + }, + }, + { + name: "add a material with same uri, one common digest and one different digest", + original: []common.ProvenanceMaterial{ + { + URI: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "a", + "sha224": "b", + }, + }, + }, + toAdd: []common.ProvenanceMaterial{ + { + URI: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "a", + "sha512": "c", + }, + }, + }, + want: []common.ProvenanceMaterial{ + { + URI: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "a", + "sha224": "b", + "sha512": "c", + }, + }, + }, + }, + } + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + got := AppendMaterials(tc.original, tc.toAdd...) 
+ + if diff := cmp.Diff(tc.want, got); diff != "" { + t.Errorf("materials(): -want +got: %s", diff) + } + }) + } +} diff --git a/pkg/chains/formats/slsa/internal/material/material.go b/pkg/chains/formats/slsa/internal/material/material.go index 864dc14ab4..90355575ee 100644 --- a/pkg/chains/formats/slsa/internal/material/material.go +++ b/pkg/chains/formats/slsa/internal/material/material.go @@ -18,6 +18,7 @@ package material import ( "context" + "encoding/json" "fmt" "strings" @@ -28,6 +29,8 @@ import ( "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/artifact" "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/slsaconfig" "github.com/tektoncd/chains/pkg/chains/objects" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" + "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" "knative.dev/pkg/logging" ) @@ -37,7 +40,7 @@ const ( ) // TaskMaterials constructs `predicate.materials` section by collecting all the artifacts that influence a taskrun such as source code repo and step&sidecar base images. -func TaskMaterials(ctx context.Context, tro *objects.TaskRunObject) ([]common.ProvenanceMaterial, error) { +func TaskMaterials(ctx context.Context, tro *objects.TaskRunObjectV1) ([]common.ProvenanceMaterial, error) { var mats []common.ProvenanceMaterial // add step images @@ -56,13 +59,26 @@ func TaskMaterials(ctx context.Context, tro *objects.TaskRunObject) ([]common.Pr mats = artifact.AppendMaterials(mats, FromTaskParamsAndResults(ctx, tro)...) - // add task resources - mats = artifact.AppendMaterials(mats, FromTaskResources(ctx, tro)...) 
+ // convert to v1beta1 and add any task resources + serializedResources := tro.Annotations["tekton.dev/v1beta1-spec-resources"] + var resources v1beta1.TaskRunResources //nolint:staticcheck + shouldReplace := false + if err := json.Unmarshal([]byte(serializedResources), &resources); err == nil { + shouldReplace = true + + } + trV1Beta1 := &v1beta1.TaskRun{} //nolint:staticcheck + if err := trV1Beta1.ConvertFrom(ctx, tro.GetObject().(*v1.TaskRun)); err == nil { + if shouldReplace { + trV1Beta1.Spec.Resources = &resources //nolint:staticcheck + } + mats = artifact.AppendMaterials(mats, FromTaskResources(ctx, trV1Beta1)...) + } return mats, nil } -func PipelineMaterials(ctx context.Context, pro *objects.PipelineRunObject, slsaconfig *slsaconfig.SlsaConfig) ([]common.ProvenanceMaterial, error) { +func PipelineMaterials(ctx context.Context, pro *objects.PipelineRunObjectV1, slsaconfig *slsaconfig.SlsaConfig) ([]common.ProvenanceMaterial, error) { logger := logging.FromContext(ctx) var mats []common.ProvenanceMaterial if p := pro.Status.Provenance; p != nil && p.RefSource != nil { @@ -113,7 +129,7 @@ func PipelineMaterials(ctx context.Context, pro *objects.PipelineRunObject, slsa } // FromStepImages gets predicate.materials from step images -func FromStepImages(tro *objects.TaskRunObject) ([]common.ProvenanceMaterial, error) { +func FromStepImages(tro *objects.TaskRunObjectV1) ([]common.ProvenanceMaterial, error) { mats := []common.ProvenanceMaterial{} for _, image := range tro.GetStepImages() { m, err := fromImageID(image) @@ -126,7 +142,7 @@ func FromStepImages(tro *objects.TaskRunObject) ([]common.ProvenanceMaterial, er } // FromSidecarImages gets predicate.materials from sidecar images -func FromSidecarImages(tro *objects.TaskRunObject) ([]common.ProvenanceMaterial, error) { +func FromSidecarImages(tro *objects.TaskRunObjectV1) ([]common.ProvenanceMaterial, error) { mats := []common.ProvenanceMaterial{} for _, image := range tro.GetSidecarImages() { m, err := 
fromImageID(image) @@ -158,11 +174,11 @@ func fromImageID(imageID string) (common.ProvenanceMaterial, error) { } // FromTaskResourcesToMaterials gets materials from task resources. -func FromTaskResources(ctx context.Context, tro *objects.TaskRunObject) []common.ProvenanceMaterial { +func FromTaskResources(ctx context.Context, tr *v1beta1.TaskRun) []common.ProvenanceMaterial { //nolint:staticcheck mats := []common.ProvenanceMaterial{} - if tro.Spec.Resources != nil { //nolint:all //incompatible with pipelines v0.45 + if tr.Spec.Resources != nil { //nolint:all //incompatible with pipelines v0.45 // check for a Git PipelineResource - for _, input := range tro.Spec.Resources.Inputs { //nolint:all //incompatible with pipelines v0.45 + for _, input := range tr.Spec.Resources.Inputs { //nolint:all //incompatible with pipelines v0.45 if input.ResourceSpec == nil || input.ResourceSpec.Type != backport.PipelineResourceTypeGit { //nolint:all //incompatible with pipelines v0.45 continue } @@ -171,7 +187,7 @@ func FromTaskResources(ctx context.Context, tro *objects.TaskRunObject) []common Digest: common.DigestSet{}, } - for _, rr := range tro.Status.ResourcesResult { + for _, rr := range tr.Status.ResourcesResult { if rr.ResourceName != input.Name { continue } @@ -202,7 +218,7 @@ func FromTaskResources(ctx context.Context, tro *objects.TaskRunObject) []common // FromTaskParamsAndResults scans over the taskrun, taskspec params and taskrun results // and looks for unstructured type hinted names matching CHAINS-GIT_COMMIT and CHAINS-GIT_URL // to extract the commit and url value for input artifact materials. 
-func FromTaskParamsAndResults(ctx context.Context, tro *objects.TaskRunObject) []common.ProvenanceMaterial { +func FromTaskParamsAndResults(ctx context.Context, tro *objects.TaskRunObjectV1) []common.ProvenanceMaterial { var commit, url string // Scan for git params to use for materials if tro.Status.TaskSpec != nil { @@ -230,7 +246,7 @@ func FromTaskParamsAndResults(ctx context.Context, tro *objects.TaskRunObject) [ } } - for _, r := range tro.Status.TaskRunResults { + for _, r := range tro.Status.Results { if r.Name == attest.CommitParam { commit = r.Value.StringVal } @@ -257,7 +273,7 @@ func FromTaskParamsAndResults(ctx context.Context, tro *objects.TaskRunObject) [ } // FromPipelineParamsAndResults extracts type hinted params and results and adds the url and digest to materials. -func FromPipelineParamsAndResults(ctx context.Context, pro *objects.PipelineRunObject, slsaconfig *slsaconfig.SlsaConfig) []common.ProvenanceMaterial { +func FromPipelineParamsAndResults(ctx context.Context, pro *objects.PipelineRunObjectV1, slsaconfig *slsaconfig.SlsaConfig) []common.ProvenanceMaterial { mats := []common.ProvenanceMaterial{} sms := artifacts.RetrieveMaterialsFromStructuredResults(ctx, pro, artifacts.ArtifactsInputsResultName) mats = artifact.AppendMaterials(mats, sms...) @@ -308,8 +324,8 @@ func FromPipelineParamsAndResults(ctx context.Context, pro *objects.PipelineRunO } } - // search status.PipelineRunResults - for _, r := range pro.Status.PipelineResults { + // search status.Results + for _, r := range pro.Status.Results { if r.Name == attest.CommitParam { commit = r.Value.StringVal } diff --git a/pkg/chains/formats/slsa/internal/material/material_test.go b/pkg/chains/formats/slsa/internal/material/material_test.go index 2f86b45f37..2ce97f7e1d 100644 --- a/pkg/chains/formats/slsa/internal/material/material_test.go +++ b/pkg/chains/formats/slsa/internal/material/material_test.go @@ -17,6 +17,7 @@ limitations under the License. 
package material import ( + "encoding/json" "fmt" "reflect" "strings" @@ -31,16 +32,16 @@ import ( "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/slsaconfig" "github.com/tektoncd/chains/pkg/chains/objects" "github.com/tektoncd/chains/pkg/internal/objectloader" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" "github.com/tektoncd/pipeline/pkg/apis/resource/v1alpha1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" logtesting "knative.dev/pkg/logging/testing" - "sigs.k8s.io/yaml" ) const digest = "sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b7" -func createPro(path string) *objects.PipelineRunObject { +func createPro(path string) *objects.PipelineRunObjectV1 { var err error pr, err := objectloader.PipelineRunFromFile(path) if err != nil { @@ -54,34 +55,33 @@ func createPro(path string) *objects.PipelineRunObject { if err != nil { panic(err) } - p := objects.NewPipelineRunObject(pr) + p := objects.NewPipelineRunObjectV1(pr) p.AppendTaskRun(tr1) p.AppendTaskRun(tr2) return p } -func TestMaterialsWithTaskRunResults(t *testing.T) { - // make sure this works with Git resources - taskrun := `apiVersion: tekton.dev/v1beta1 -kind: TaskRun -spec: - taskSpec: - resources: - inputs: - - name: repo - type: git -status: - taskResults: - - name: CHAINS-GIT_COMMIT - value: 50c56a48cfb3a5a80fa36ed91c739bdac8381cbe - - name: CHAINS-GIT_URL - value: https://github.com/GoogleContainerTools/distroless` - - var taskRun *v1beta1.TaskRun - if err := yaml.Unmarshal([]byte(taskrun), &taskRun); err != nil { - t.Fatal(err) +func TestMaterialsWithResults(t *testing.T) { + taskRun := &v1.TaskRun{ + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Results: []v1.TaskRunResult{ + { + Name: "CHAINS-GIT_COMMIT", + Value: v1.ParamValue{ + StringVal: "50c56a48cfb3a5a80fa36ed91c739bdac8381cbe", + }, + }, + { + Name: "CHAINS-GIT_URL", + Value: v1.ParamValue{ + StringVal: 
"https://github.com/GoogleContainerTools/distroless", + }, + }, + }, + }, + }, } - want := []common.ProvenanceMaterial{ { URI: artifacts.GitSchemePrefix + "https://github.com/GoogleContainerTools/distroless.git", @@ -92,7 +92,7 @@ status: } ctx := logtesting.TestContextWithLogger(t) - got, err := TaskMaterials(ctx, objects.NewTaskRunObject(taskRun)) + got, err := TaskMaterials(ctx, objects.NewTaskRunObjectV1(taskRun)) if err != nil { t.Fatalf("Did not expect an error but got %v", err) } @@ -103,232 +103,259 @@ status: func TestTaskMaterials(t *testing.T) { tests := []struct { - name string - taskRun *v1beta1.TaskRun - want []common.ProvenanceMaterial - }{{ - name: "materials from pipeline resources", - taskRun: &v1beta1.TaskRun{ - Spec: v1beta1.TaskRunSpec{ - Resources: &v1beta1.TaskRunResources{ - Inputs: []v1beta1.TaskResourceBinding{ - { - PipelineResourceBinding: v1beta1.PipelineResourceBinding{ - Name: "nil-resource-spec", - }, - }, { - PipelineResourceBinding: v1beta1.PipelineResourceBinding{ - Name: "repo", - ResourceSpec: &v1alpha1.PipelineResourceSpec{ - Params: []v1alpha1.ResourceParam{ - {Name: "url", Value: "https://github.com/GoogleContainerTools/distroless"}, - {Name: "revision", Value: "my-revision"}, + name string + obj objects.TektonObject + want []common.ProvenanceMaterial + }{ + { + name: "materials from pipeline resources", + obj: objects.NewTaskRunObjectV1Beta1(&v1beta1.TaskRun{ //nolint:staticcheck + Spec: v1beta1.TaskRunSpec{ + Resources: &v1beta1.TaskRunResources{ //nolint:staticcheck + Inputs: []v1beta1.TaskResourceBinding{ //nolint:staticcheck + { + PipelineResourceBinding: v1beta1.PipelineResourceBinding{ //nolint:staticcheck + Name: "nil-resource-spec", + }, + }, { + PipelineResourceBinding: v1beta1.PipelineResourceBinding{ //nolint:staticcheck + Name: "repo", + ResourceSpec: &v1alpha1.PipelineResourceSpec{ //nolint:staticcheck + Params: []v1alpha1.ResourceParam{ //nolint:staticcheck + {Name: "url", Value: 
"https://github.com/GoogleContainerTools/distroless"}, + {Name: "revision", Value: "my-revision"}, + }, + Type: backport.PipelineResourceTypeGit, }, - Type: backport.PipelineResourceTypeGit, }, }, }, }, }, - }, - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - TaskRunResults: []v1beta1.TaskRunResult{ - { - Name: "img1_input" + "-" + artifacts.ArtifactsInputsResultName, - Value: *v1beta1.NewObject(map[string]string{ - "uri": "gcr.io/foo/bar", - "digest": digest, - }), + Status: v1beta1.TaskRunStatus{ + TaskRunStatusFields: v1beta1.TaskRunStatusFields{ + TaskRunResults: []v1beta1.TaskRunResult{ + { + Name: "img1_input" + "-" + artifacts.ArtifactsInputsResultName, + Value: *v1beta1.NewObject(map[string]string{ + "uri": "gcr.io/foo/bar", + "digest": digest, + }), + }, }, - }, - ResourcesResult: []v1beta1.PipelineResourceResult{ - { - ResourceName: "repo", - Key: "commit", - Value: "50c56a48cfb3a5a80fa36ed91c739bdac8381cbe", - }, { - ResourceName: "repo", - Key: "url", - Value: "https://github.com/GoogleContainerTools/distroless", + ResourcesResult: []v1beta1.PipelineResourceResult{ + { + ResourceName: "repo", + Key: "commit", + Value: "50c56a48cfb3a5a80fa36ed91c739bdac8381cbe", + }, { + ResourceName: "repo", + Key: "url", + Value: "https://github.com/GoogleContainerTools/distroless", + }, }, }, }, - }, - }, - want: []common.ProvenanceMaterial{ - { - URI: "gcr.io/foo/bar", - Digest: common.DigestSet{ - "sha256": strings.TrimPrefix(digest, "sha256:"), + }), + want: []common.ProvenanceMaterial{ + { + URI: "gcr.io/foo/bar", + Digest: common.DigestSet{ + "sha256": strings.TrimPrefix(digest, "sha256:"), + }, }, - }, - { - URI: artifacts.GitSchemePrefix + "https://github.com/GoogleContainerTools/distroless.git@my-revision", - Digest: common.DigestSet{ - "sha1": "50c56a48cfb3a5a80fa36ed91c739bdac8381cbe", + { + URI: artifacts.GitSchemePrefix + "https://github.com/GoogleContainerTools/distroless.git@my-revision", + Digest: common.DigestSet{ 
+ "sha1": "50c56a48cfb3a5a80fa36ed91c739bdac8381cbe", + }, }, }, }, - }, { - name: "materials from git results in task run spec", - taskRun: &v1beta1.TaskRun{ - Spec: v1beta1.TaskRunSpec{ - Params: []v1beta1.Param{{ - Name: "CHAINS-GIT_COMMIT", - Value: *v1beta1.NewStructuredValues("my-commit"), - }, { - Name: "CHAINS-GIT_URL", - Value: *v1beta1.NewStructuredValues("github.com/something"), - }}, - }, - }, - want: []common.ProvenanceMaterial{ - { - URI: artifacts.GitSchemePrefix + "github.com/something.git", - Digest: common.DigestSet{ - "sha1": "my-commit", + { + name: "materials from git results in task run spec", + obj: objects.NewTaskRunObjectV1(&v1.TaskRun{ + Spec: v1.TaskRunSpec{ + Params: []v1.Param{{ + Name: "CHAINS-GIT_COMMIT", + Value: *v1.NewStructuredValues("my-commit"), + }, { + Name: "CHAINS-GIT_URL", + Value: *v1.NewStructuredValues("github.com/something"), + }}, }, - }, - }, - }, { - name: "materials from git results in task spec", - taskRun: &v1beta1.TaskRun{ - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - TaskSpec: &v1beta1.TaskSpec{ - Params: []v1beta1.ParamSpec{{ - Name: "CHAINS-GIT_COMMIT", - Default: &v1beta1.ParamValue{ - StringVal: "my-commit", - }, - }, { - Name: "CHAINS-GIT_URL", - Default: &v1beta1.ParamValue{ - StringVal: "github.com/something", - }, - }}, + }), + want: []common.ProvenanceMaterial{ + { + URI: artifacts.GitSchemePrefix + "github.com/something.git", + Digest: common.DigestSet{ + "sha1": "my-commit", }, }, }, }, - want: []common.ProvenanceMaterial{ - { - URI: artifacts.GitSchemePrefix + "github.com/something.git", - Digest: common.DigestSet{ - "sha1": "my-commit", - }, - }, - }, - }, { - name: "materials from git results in task spec and taskrun spec", - taskRun: &v1beta1.TaskRun{ - Spec: v1beta1.TaskRunSpec{ - Params: []v1beta1.Param{{ - Name: "CHAINS-GIT_URL", - Value: v1beta1.ParamValue{ - StringVal: "github.com/something", + { + name: "materials from git results in task spec", + 
obj: objects.NewTaskRunObjectV1(&v1.TaskRun{ + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + TaskSpec: &v1.TaskSpec{ + Params: []v1.ParamSpec{{ + Name: "CHAINS-GIT_COMMIT", + Default: &v1.ParamValue{ + StringVal: "my-commit", + }, + }, { + Name: "CHAINS-GIT_URL", + Default: &v1.ParamValue{ + StringVal: "github.com/something", + }, + }}, + }, }, - }}, - }, - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - TaskSpec: &v1beta1.TaskSpec{ - Params: []v1beta1.ParamSpec{{ - Name: "CHAINS-GIT_URL", - }, { - Name: "CHAINS-GIT_COMMIT", - Default: &v1beta1.ParamValue{ - StringVal: "my-commit", - }, - }}, + }, + }), + want: []common.ProvenanceMaterial{ + { + URI: artifacts.GitSchemePrefix + "github.com/something.git", + Digest: common.DigestSet{ + "sha1": "my-commit", }, }, }, }, - want: []common.ProvenanceMaterial{{ - URI: "git+github.com/something.git", - Digest: common.DigestSet{ - "sha1": "my-commit", - }, - }}, - }, { - name: "materials from step images", - taskRun: &v1beta1.TaskRun{ - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - Steps: []v1beta1.StepState{{ - Name: "git-source-repo-jwqcl", - ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", - }, { - Name: "git-source-repo-repeat-again-jwqcl", - ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", - }, { - Name: "build", - ImageID: "gcr.io/cloud-marketplace-containers/google/bazel@sha256:010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + { + name: "materials from git results in task spec and taskrun spec", + obj: objects.NewTaskRunObjectV1(&v1.TaskRun{ + Spec: v1.TaskRunSpec{ + Params: []v1.Param{{ + Name: "CHAINS-GIT_URL", + Value: v1.ParamValue{ + StringVal: "github.com/something", + }, }}, }, - }, - }, - 
want: []common.ProvenanceMaterial{ - { - URI: artifacts.OCIScheme + "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", - Digest: common.DigestSet{ - "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + TaskSpec: &v1.TaskSpec{ + Params: []v1.ParamSpec{{ + Name: "CHAINS-GIT_URL", + }, { + Name: "CHAINS-GIT_COMMIT", + Default: &v1.ParamValue{ + StringVal: "my-commit", + }, + }}, + }, + }, }, - }, - { - URI: artifacts.OCIScheme + "gcr.io/cloud-marketplace-containers/google/bazel", + }), + want: []common.ProvenanceMaterial{{ + URI: "git+github.com/something.git", Digest: common.DigestSet{ - "sha256": "010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + "sha1": "my-commit", }, - }, + }}, }, - }, { - name: "materials from step and sidecar images", - taskRun: &v1beta1.TaskRun{ - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - Steps: []v1beta1.StepState{{ - Name: "git-source-repo-jwqcl", - ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", - }, { - Name: "git-source-repo-repeat-again-jwqcl", - ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", - }, { - Name: "build", - ImageID: "gcr.io/cloud-marketplace-containers/google/bazel@sha256:010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", - }}, - Sidecars: []v1beta1.SidecarState{{ - Name: "sidecar-jwqcl", - ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/sidecar-git-init@sha256:a1234f6e7a69617db57b685893256f978436277094c21d43b153994acd8a09567", - }}, + { + name: "materials from step images", + obj: objects.NewTaskRunObjectV1(&v1.TaskRun{ + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Steps: 
[]v1.StepState{{ + Name: "git-source-repo-jwqcl", + ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, { + Name: "git-source-repo-repeat-again-jwqcl", + ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, { + Name: "build", + ImageID: "gcr.io/cloud-marketplace-containers/google/bazel@sha256:010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + }}, + }, + }, + }), + want: []common.ProvenanceMaterial{ + { + URI: artifacts.OCIScheme + "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, + { + URI: artifacts.OCIScheme + "gcr.io/cloud-marketplace-containers/google/bazel", + Digest: common.DigestSet{ + "sha256": "010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + }, }, }, }, - want: []common.ProvenanceMaterial{ - { - URI: artifacts.OCIScheme + "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", - Digest: common.DigestSet{ - "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", - }, - }, { - URI: artifacts.OCIScheme + "gcr.io/cloud-marketplace-containers/google/bazel", - Digest: common.DigestSet{ - "sha256": "010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + { + name: "materials from step and sidecar images", + obj: objects.NewTaskRunObjectV1(&v1.TaskRun{ + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Steps: []v1.StepState{{ + Name: "git-source-repo-jwqcl", + ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, { + Name: "git-source-repo-repeat-again-jwqcl", + ImageID: 
"gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, { + Name: "build", + ImageID: "gcr.io/cloud-marketplace-containers/google/bazel@sha256:010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + }}, + Sidecars: []v1.SidecarState{{ + Name: "sidecar-jwqcl", + ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/sidecar-git-init@sha256:a1234f6e7a69617db57b685893256f978436277094c21d43b153994acd8a09567", + }}, + }, }, - }, { - URI: artifacts.OCIScheme + "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/sidecar-git-init", - Digest: common.DigestSet{ - "sha256": "a1234f6e7a69617db57b685893256f978436277094c21d43b153994acd8a09567", + }), + want: []common.ProvenanceMaterial{ + { + URI: artifacts.OCIScheme + "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, { + URI: artifacts.OCIScheme + "gcr.io/cloud-marketplace-containers/google/bazel", + Digest: common.DigestSet{ + "sha256": "010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + }, + }, { + URI: artifacts.OCIScheme + "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/sidecar-git-init", + Digest: common.DigestSet{ + "sha256": "a1234f6e7a69617db57b685893256f978436277094c21d43b153994acd8a09567", + }, }, }, }, - }} + } for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { ctx := logtesting.TestContextWithLogger(t) - mat, err := TaskMaterials(ctx, objects.NewTaskRunObject(tc.taskRun)) + // convert tekton object to taskrun objet + var input *objects.TaskRunObjectV1 + var err error + if obj, ok := tc.obj.(*objects.TaskRunObjectV1); ok { + input = obj + } + + if trV1Beta1, ok := tc.obj.GetObject().(*v1beta1.TaskRun); ok { //nolint:staticcheck + trV1 := &v1.TaskRun{} + if err := trV1Beta1.ConvertTo(ctx, trV1); err == nil { + if 
trV1Beta1.Spec.Resources != nil { //nolint:staticcheck + jsonData, err := json.Marshal(trV1Beta1.Spec.Resources) //nolint:staticcheck + if err != nil { + t.Errorf("Error serializing to JSON: %v", err) + } + trV1.Annotations["tekton.dev/v1beta1-spec-resources"] = string(jsonData) + } + input = objects.NewTaskRunObjectV1(trV1) + } + } + mat, err := TaskMaterials(ctx, input) if err != nil { t.Fatalf("Did not expect an error but got %v", err) } @@ -443,20 +470,20 @@ func TestFromImageID(t *testing.T) { func TestFromPipelineParamsAndResults(t *testing.T) { tests := []struct { name string - pipelineRunObject *objects.PipelineRunObject + pipelineRunObject *objects.PipelineRunObjectV1 enableDeepInspection bool want []common.ProvenanceMaterial }{{ name: "from results", - pipelineRunObject: objects.NewPipelineRunObject(&v1beta1.PipelineRun{ - Status: v1beta1.PipelineRunStatus{ - PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ - PipelineResults: []v1beta1.PipelineRunResult{{ + pipelineRunObject: objects.NewPipelineRunObjectV1(&v1.PipelineRun{ + Status: v1.PipelineRunStatus{ + PipelineRunStatusFields: v1.PipelineRunStatusFields{ + Results: []v1.PipelineRunResult{{ Name: "CHAINS-GIT_COMMIT", - Value: *v1beta1.NewStructuredValues("my-commit"), + Value: *v1.NewStructuredValues("my-commit"), }, { Name: "CHAINS-GIT_URL", - Value: *v1beta1.NewStructuredValues("github.com/something"), + Value: *v1.NewStructuredValues("github.com/something"), }}, }, }, @@ -469,18 +496,18 @@ func TestFromPipelineParamsAndResults(t *testing.T) { }}, }, { name: "from pipelinespec", - pipelineRunObject: objects.NewPipelineRunObject(&v1beta1.PipelineRun{ - Status: v1beta1.PipelineRunStatus{ - PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ - PipelineSpec: &v1beta1.PipelineSpec{ - Params: []v1beta1.ParamSpec{{ + pipelineRunObject: objects.NewPipelineRunObjectV1(&v1.PipelineRun{ + Status: v1.PipelineRunStatus{ + PipelineRunStatusFields: v1.PipelineRunStatusFields{ + PipelineSpec: 
&v1.PipelineSpec{ + Params: []v1.ParamSpec{{ Name: "CHAINS-GIT_COMMIT", - Default: &v1beta1.ParamValue{ + Default: &v1.ParamValue{ StringVal: "my-commit", }, }, { Name: "CHAINS-GIT_URL", - Default: &v1beta1.ParamValue{ + Default: &v1.ParamValue{ StringVal: "github.com/something", }, }}, @@ -496,16 +523,16 @@ func TestFromPipelineParamsAndResults(t *testing.T) { }}, }, { name: "from pipelineRunSpec", - pipelineRunObject: objects.NewPipelineRunObject(&v1beta1.PipelineRun{ - Spec: v1beta1.PipelineRunSpec{ - Params: []v1beta1.Param{{ + pipelineRunObject: objects.NewPipelineRunObjectV1(&v1.PipelineRun{ + Spec: v1.PipelineRunSpec{ + Params: []v1.Param{{ Name: "CHAINS-GIT_COMMIT", - Value: v1beta1.ParamValue{ + Value: v1.ParamValue{ StringVal: "my-commit", }, }, { Name: "CHAINS-GIT_URL", - Value: v1beta1.ParamValue{ + Value: v1.ParamValue{ StringVal: "github.com/something", }, }}, @@ -519,25 +546,25 @@ func TestFromPipelineParamsAndResults(t *testing.T) { }}, }, { name: "from completeChain", - pipelineRunObject: objects.NewPipelineRunObject(&v1beta1.PipelineRun{ - Spec: v1beta1.PipelineRunSpec{ - Params: []v1beta1.Param{{ + pipelineRunObject: objects.NewPipelineRunObjectV1(&v1.PipelineRun{ + Spec: v1.PipelineRunSpec{ + Params: []v1.Param{{ Name: "CHAINS-GIT_URL", - Value: v1beta1.ParamValue{ + Value: v1.ParamValue{ StringVal: "github.com/something", }, }}, }, - Status: v1beta1.PipelineRunStatus{ - PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ - PipelineSpec: &v1beta1.PipelineSpec{ - Params: []v1beta1.ParamSpec{{ + Status: v1.PipelineRunStatus{ + PipelineRunStatusFields: v1.PipelineRunStatusFields{ + PipelineSpec: &v1.PipelineSpec{ + Params: []v1.ParamSpec{{ Name: "CHAINS-GIT_URL", }}, }, - PipelineResults: []v1beta1.PipelineRunResult{{ + Results: []v1.PipelineRunResult{{ Name: "CHAINS-GIT_COMMIT", - Value: *v1beta1.NewStructuredValues("my-commit"), + Value: *v1.NewStructuredValues("my-commit"), }}, }, }, @@ -548,24 +575,25 @@ func 
TestFromPipelineParamsAndResults(t *testing.T) { "sha1": "my-commit", }, }}, - }, { - name: "deep inspection: pipelinerun param and task result", - pipelineRunObject: createProWithPipelineParamAndTaskResult(), - enableDeepInspection: true, - want: []common.ProvenanceMaterial{ - { - URI: "git+github.com/pipelinerun-param.git", - Digest: common.DigestSet{ - "sha1": "115734d92807a80158b4b7af605d768c647fdb3d", - }, - }, { - URI: "github.com/childtask-result", - Digest: common.DigestSet{ - "sha1": "225734d92807a80158b4b7af605d768c647fdb3d", + }, + { + name: "deep inspection: pipelinerun param and task result", + pipelineRunObject: createProWithPipelineParamAndTaskResult(), + enableDeepInspection: true, + want: []common.ProvenanceMaterial{ + { + URI: "git+github.com/pipelinerun-param.git", + Digest: common.DigestSet{ + "sha1": "115734d92807a80158b4b7af605d768c647fdb3d", + }, + }, { + URI: "github.com/childtask-result", + Digest: common.DigestSet{ + "sha1": "225734d92807a80158b4b7af605d768c647fdb3d", + }, }, }, }, - }, } for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { @@ -579,19 +607,19 @@ func TestFromPipelineParamsAndResults(t *testing.T) { } //nolint:all -func createProWithPipelineParamAndTaskResult() *objects.PipelineRunObject { - pro := objects.NewPipelineRunObject(&v1beta1.PipelineRun{ - Status: v1beta1.PipelineRunStatus{ - PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ - PipelineSpec: &v1beta1.PipelineSpec{ - Params: []v1beta1.ParamSpec{{ +func createProWithPipelineParamAndTaskResult() *objects.PipelineRunObjectV1 { + pro := objects.NewPipelineRunObjectV1(&v1.PipelineRun{ + Status: v1.PipelineRunStatus{ + PipelineRunStatusFields: v1.PipelineRunStatusFields{ + PipelineSpec: &v1.PipelineSpec{ + Params: []v1.ParamSpec{{ Name: "CHAINS-GIT_COMMIT", - Default: &v1beta1.ParamValue{ + Default: &v1.ParamValue{ StringVal: "115734d92807a80158b4b7af605d768c647fdb3d", }, }, { Name: "CHAINS-GIT_URL", - Default: &v1beta1.ParamValue{ + Default: 
&v1.ParamValue{ StringVal: "github.com/pipelinerun-param", }, }}, @@ -601,15 +629,15 @@ func createProWithPipelineParamAndTaskResult() *objects.PipelineRunObject { }) pipelineTaskName := "my-clone-task" - tr := &v1beta1.TaskRun{ + tr := &v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{Labels: map[string]string{objects.PipelineTaskLabel: pipelineTaskName}}, - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ CompletionTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, time.UTC)}, - TaskRunResults: []v1beta1.TaskRunResult{ + Results: []v1.TaskRunResult{ { Name: "ARTIFACT_INPUTS", - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": "github.com/childtask-result", "digest": "sha1:225734d92807a80158b4b7af605d768c647fdb3d", })}, @@ -619,6 +647,6 @@ func createProWithPipelineParamAndTaskResult() *objects.PipelineRunObject { } pro.AppendTaskRun(tr) - pro.Status.PipelineSpec.Tasks = []v1beta1.PipelineTask{{Name: pipelineTaskName}} + pro.Status.PipelineSpec.Tasks = []v1.PipelineTask{{Name: pipelineTaskName}} return pro } diff --git a/pkg/chains/formats/slsa/internal/material/v1beta1/material.go b/pkg/chains/formats/slsa/internal/material/v1beta1/material.go new file mode 100644 index 0000000000..00ed46ded3 --- /dev/null +++ b/pkg/chains/formats/slsa/internal/material/v1beta1/material.go @@ -0,0 +1,328 @@ +/* +Copyright 2023 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +*/ + +package materialv1beta1 + +import ( + "context" + "fmt" + "strings" + + "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common" + "github.com/tektoncd/chains/internal/backport" + "github.com/tektoncd/chains/pkg/artifacts" + "github.com/tektoncd/chains/pkg/chains/formats/slsa/attest" + "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/artifact" + "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/slsaconfig" + "github.com/tektoncd/chains/pkg/chains/objects" + "knative.dev/pkg/logging" +) + +const ( + uriSeparator = "@" + digestSeparator = ":" +) + +// TaskMaterials constructs `predicate.materials` section by collecting all the artifacts that influence a taskrun such as source code repo and step&sidecar base images. +func TaskMaterials(ctx context.Context, tro *objects.TaskRunObjectV1Beta1) ([]common.ProvenanceMaterial, error) { + var mats []common.ProvenanceMaterial + + // add step images + stepMaterials, err := FromStepImages(tro) + if err != nil { + return nil, err + } + mats = artifact.AppendMaterials(mats, stepMaterials...) + + // add sidecar images + sidecarMaterials, err := FromSidecarImages(tro) + if err != nil { + return nil, err + } + mats = artifact.AppendMaterials(mats, sidecarMaterials...) + + mats = artifact.AppendMaterials(mats, FromTaskParamsAndResults(ctx, tro)...) + + // add task resources + mats = artifact.AppendMaterials(mats, FromTaskResources(ctx, tro)...) 
+ + return mats, nil +} + +func PipelineMaterials(ctx context.Context, pro *objects.PipelineRunObjectV1Beta1, slsaconfig *slsaconfig.SlsaConfig) ([]common.ProvenanceMaterial, error) { + logger := logging.FromContext(ctx) + var mats []common.ProvenanceMaterial + if p := pro.Status.Provenance; p != nil && p.RefSource != nil { + m := common.ProvenanceMaterial{ + URI: p.RefSource.URI, + Digest: p.RefSource.Digest, + } + mats = artifact.AppendMaterials(mats, m) + } + pSpec := pro.Status.PipelineSpec + if pSpec != nil { + pipelineTasks := append(pSpec.Tasks, pSpec.Finally...) + for _, t := range pipelineTasks { + tr := pro.GetTaskRunFromTask(t.Name) + // Ignore Tasks that did not execute during the PipelineRun. + if tr == nil || tr.Status.CompletionTime == nil { + logger.Infof("taskrun status not found for task %s", t.Name) + continue + } + + stepMaterials, err := FromStepImages(tr) + if err != nil { + return mats, err + } + mats = artifact.AppendMaterials(mats, stepMaterials...) + + // add sidecar images + sidecarMaterials, err := FromSidecarImages(tr) + if err != nil { + return nil, err + } + mats = artifact.AppendMaterials(mats, sidecarMaterials...) + + // add remote task configsource information in materials + if tr.Status.Provenance != nil && tr.Status.Provenance.RefSource != nil { + m := common.ProvenanceMaterial{ + URI: tr.Status.Provenance.RefSource.URI, + Digest: tr.Status.Provenance.RefSource.Digest, + } + mats = artifact.AppendMaterials(mats, m) + } + } + } + + mats = artifact.AppendMaterials(mats, FromPipelineParamsAndResults(ctx, pro, slsaconfig)...) 
+ + return mats, nil +} + +// FromStepImages gets predicate.materials from step images +func FromStepImages(tro *objects.TaskRunObjectV1Beta1) ([]common.ProvenanceMaterial, error) { + mats := []common.ProvenanceMaterial{} + for _, image := range tro.GetStepImages() { + m, err := fromImageID(image) + if err != nil { + return nil, err + } + mats = artifact.AppendMaterials(mats, m) + } + return mats, nil +} + +// FromSidecarImages gets predicate.materials from sidecar images +func FromSidecarImages(tro *objects.TaskRunObjectV1Beta1) ([]common.ProvenanceMaterial, error) { + mats := []common.ProvenanceMaterial{} + for _, image := range tro.GetSidecarImages() { + m, err := fromImageID(image) + if err != nil { + return nil, err + } + mats = artifact.AppendMaterials(mats, m) + } + return mats, nil +} + +// fromImageID converts an imageId with format @sha256: and generates a provenance materials. +func fromImageID(imageID string) (common.ProvenanceMaterial, error) { + uriDigest := strings.Split(imageID, uriSeparator) + if len(uriDigest) != 2 { + return common.ProvenanceMaterial{}, fmt.Errorf("expected imageID %s to be separable by @", imageID) + } + digest := strings.Split(uriDigest[1], digestSeparator) + if len(digest) != 2 { + return common.ProvenanceMaterial{}, fmt.Errorf("expected imageID %s to be separable by @ and :", imageID) + } + uri := strings.TrimPrefix(uriDigest[0], "docker-pullable://") + m := common.ProvenanceMaterial{ + Digest: common.DigestSet{}, + } + m.URI = artifacts.OCIScheme + uri + m.Digest[digest[0]] = digest[1] + return m, nil +} + +// FromTaskResourcesToMaterials gets materials from task resources. 
+func FromTaskResources(ctx context.Context, tro *objects.TaskRunObjectV1Beta1) []common.ProvenanceMaterial { + mats := []common.ProvenanceMaterial{} + if tro.Spec.Resources != nil { //nolint:all //incompatible with pipelines v0.45 + // check for a Git PipelineResource + for _, input := range tro.Spec.Resources.Inputs { //nolint:all //incompatible with pipelines v0.45 + if input.ResourceSpec == nil || input.ResourceSpec.Type != backport.PipelineResourceTypeGit { //nolint:all //incompatible with pipelines v0.45 + continue + } + + m := common.ProvenanceMaterial{ + Digest: common.DigestSet{}, + } + + for _, rr := range tro.Status.ResourcesResult { + if rr.ResourceName != input.Name { + continue + } + if rr.Key == "url" { + m.URI = attest.SPDXGit(rr.Value, "") + } else if rr.Key == "commit" { + m.Digest["sha1"] = rr.Value + } + } + + var url string + var revision string + for _, param := range input.ResourceSpec.Params { + if param.Name == "url" { + url = param.Value + } + if param.Name == "revision" { + revision = param.Value + } + } + m.URI = attest.SPDXGit(url, revision) + mats = artifact.AppendMaterials(mats, m) + } + } + return mats +} + +// FromTaskParamsAndResults scans over the taskrun, taskspec params and taskrun results +// and looks for unstructured type hinted names matching CHAINS-GIT_COMMIT and CHAINS-GIT_URL +// to extract the commit and url value for input artifact materials. 
+func FromTaskParamsAndResults(ctx context.Context, tro *objects.TaskRunObjectV1Beta1) []common.ProvenanceMaterial { + var commit, url string + // Scan for git params to use for materials + if tro.Status.TaskSpec != nil { + for _, p := range tro.Status.TaskSpec.Params { + if p.Default == nil { + continue + } + if p.Name == attest.CommitParam { + commit = p.Default.StringVal + continue + } + if p.Name == attest.URLParam { + url = p.Default.StringVal + } + } + } + + for _, p := range tro.Spec.Params { + if p.Name == attest.CommitParam { + commit = p.Value.StringVal + continue + } + if p.Name == attest.URLParam { + url = p.Value.StringVal + } + } + + for _, r := range tro.Status.TaskRunResults { + if r.Name == attest.CommitParam { + commit = r.Value.StringVal + } + if r.Name == attest.URLParam { + url = r.Value.StringVal + } + } + + url = attest.SPDXGit(url, "") + + var mats []common.ProvenanceMaterial + if commit != "" && url != "" { + mats = artifact.AppendMaterials(mats, common.ProvenanceMaterial{ + URI: url, + // TODO. this could be sha256 as well. Fix in another PR. + Digest: map[string]string{"sha1": commit}, + }) + } + + sms := artifacts.RetrieveMaterialsFromStructuredResults(ctx, tro, artifacts.ArtifactsInputsResultName) + mats = artifact.AppendMaterials(mats, sms...) + + return mats +} + +// FromPipelineParamsAndResults extracts type hinted params and results and adds the url and digest to materials. +func FromPipelineParamsAndResults(ctx context.Context, pro *objects.PipelineRunObjectV1Beta1, slsaconfig *slsaconfig.SlsaConfig) []common.ProvenanceMaterial { + mats := []common.ProvenanceMaterial{} + sms := artifacts.RetrieveMaterialsFromStructuredResults(ctx, pro, artifacts.ArtifactsInputsResultName) + mats = artifact.AppendMaterials(mats, sms...) 
+ + var commit, url string + + pSpec := pro.Status.PipelineSpec + if pSpec != nil { + // search type hinting param/results from each individual taskruns + if slsaconfig.DeepInspectionEnabled { + logger := logging.FromContext(ctx) + pipelineTasks := append(pSpec.Tasks, pSpec.Finally...) + for _, t := range pipelineTasks { + tr := pro.GetTaskRunFromTask(t.Name) + // Ignore Tasks that did not execute during the PipelineRun. + if tr == nil || tr.Status.CompletionTime == nil { + logger.Infof("taskrun is not found or not completed for the task %s", t.Name) + continue + } + materialsFromTasks := FromTaskParamsAndResults(ctx, tr) + mats = artifact.AppendMaterials(mats, materialsFromTasks...) + } + } + + // search status.PipelineSpec.params + for _, p := range pSpec.Params { + if p.Default == nil { + continue + } + if p.Name == attest.CommitParam { + commit = p.Default.StringVal + continue + } + if p.Name == attest.URLParam { + url = p.Default.StringVal + } + } + } + + // search pipelineRunSpec.params + for _, p := range pro.Spec.Params { + if p.Name == attest.CommitParam { + commit = p.Value.StringVal + continue + } + if p.Name == attest.URLParam { + url = p.Value.StringVal + } + } + + // search status.PipelineRunResults + for _, r := range pro.Status.PipelineResults { + if r.Name == attest.CommitParam { + commit = r.Value.StringVal + } + if r.Name == attest.URLParam { + url = r.Value.StringVal + } + } + if len(commit) > 0 && len(url) > 0 { + url = attest.SPDXGit(url, "") + mats = artifact.AppendMaterials(mats, common.ProvenanceMaterial{ + URI: url, + Digest: map[string]string{"sha1": commit}, + }) + } + return mats +} diff --git a/pkg/chains/formats/slsa/internal/material/v1beta1/material_test.go b/pkg/chains/formats/slsa/internal/material/v1beta1/material_test.go new file mode 100644 index 0000000000..43476e1613 --- /dev/null +++ b/pkg/chains/formats/slsa/internal/material/v1beta1/material_test.go @@ -0,0 +1,624 @@ +/* +Copyright 2023 The Tekton Authors + +Licensed under 
the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package materialv1beta1 + +import ( + "fmt" + "reflect" + "strings" + "testing" + "time" + + "github.com/google/go-cmp/cmp" + "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common" + "github.com/tektoncd/chains/internal/backport" + "github.com/tektoncd/chains/pkg/artifacts" + "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/compare" + "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/slsaconfig" + "github.com/tektoncd/chains/pkg/chains/objects" + "github.com/tektoncd/chains/pkg/internal/objectloader" + "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + "github.com/tektoncd/pipeline/pkg/apis/resource/v1alpha1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + logtesting "knative.dev/pkg/logging/testing" + "sigs.k8s.io/yaml" +) + +const digest = "sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b7" + +func createPro(path string) *objects.PipelineRunObjectV1Beta1 { + var err error + pr, err := objectloader.PipelineRunV1Beta1FromFile(path) + if err != nil { + panic(err) + } + tr1, err := objectloader.TaskRunV1Beta1FromFile("../../../testdata/v1beta1/taskrun1.json") + if err != nil { + panic(err) + } + tr2, err := objectloader.TaskRunV1Beta1FromFile("../../../testdata/v1beta1/taskrun2.json") + if err != nil { + panic(err) + } + p := objects.NewPipelineRunObjectV1Beta1(pr) + p.AppendTaskRun(tr1) + p.AppendTaskRun(tr2) + return p +} + +func TestMaterialsWithTaskRunResults(t *testing.T) { + // make 
sure this works with Git resources + taskrun := `apiVersion: tekton.dev/v1beta1 +kind: TaskRun +spec: + taskSpec: + resources: + inputs: + - name: repo + type: git +status: + taskResults: + - name: CHAINS-GIT_COMMIT + value: 50c56a48cfb3a5a80fa36ed91c739bdac8381cbe + - name: CHAINS-GIT_URL + value: https://github.com/GoogleContainerTools/distroless` + + var taskRun *v1beta1.TaskRun //nolint:staticcheck + if err := yaml.Unmarshal([]byte(taskrun), &taskRun); err != nil { + t.Fatal(err) + } + + want := []common.ProvenanceMaterial{ + { + URI: artifacts.GitSchemePrefix + "https://github.com/GoogleContainerTools/distroless.git", + Digest: common.DigestSet{ + "sha1": "50c56a48cfb3a5a80fa36ed91c739bdac8381cbe", + }, + }, + } + + ctx := logtesting.TestContextWithLogger(t) + got, err := TaskMaterials(ctx, objects.NewTaskRunObjectV1Beta1(taskRun)) + if err != nil { + t.Fatalf("Did not expect an error but got %v", err) + } + if !reflect.DeepEqual(got, want) { + t.Fatalf("want %v got %v", want, got) + } +} + +func TestTaskMaterials(t *testing.T) { + tests := []struct { + name string + taskRun *v1beta1.TaskRun //nolint:staticcheck + want []common.ProvenanceMaterial + }{{ + name: "materials from pipeline resources", + taskRun: &v1beta1.TaskRun{ //nolint:staticcheck + Spec: v1beta1.TaskRunSpec{ + Resources: &v1beta1.TaskRunResources{ //nolint:staticcheck + Inputs: []v1beta1.TaskResourceBinding{ //nolint:staticcheck + { + PipelineResourceBinding: v1beta1.PipelineResourceBinding{ //nolint:staticcheck + Name: "nil-resource-spec", + }, + }, { + PipelineResourceBinding: v1beta1.PipelineResourceBinding{ //nolint:staticcheck + Name: "repo", + ResourceSpec: &v1alpha1.PipelineResourceSpec{ //nolint:staticcheck + Params: []v1alpha1.ResourceParam{ //nolint:staticcheck + {Name: "url", Value: "https://github.com/GoogleContainerTools/distroless"}, + {Name: "revision", Value: "my-revision"}, + }, + Type: backport.PipelineResourceTypeGit, + }, + }, + }, + }, + }, + }, + Status: 
v1beta1.TaskRunStatus{ + TaskRunStatusFields: v1beta1.TaskRunStatusFields{ + TaskRunResults: []v1beta1.TaskRunResult{ + { + Name: "img1_input" + "-" + artifacts.ArtifactsInputsResultName, + Value: *v1beta1.NewObject(map[string]string{ + "uri": "gcr.io/foo/bar", + "digest": digest, + }), + }, + }, + ResourcesResult: []v1beta1.PipelineResourceResult{ + { + ResourceName: "repo", + Key: "commit", + Value: "50c56a48cfb3a5a80fa36ed91c739bdac8381cbe", + }, { + ResourceName: "repo", + Key: "url", + Value: "https://github.com/GoogleContainerTools/distroless", + }, + }, + }, + }, + }, + want: []common.ProvenanceMaterial{ + { + URI: "gcr.io/foo/bar", + Digest: common.DigestSet{ + "sha256": strings.TrimPrefix(digest, "sha256:"), + }, + }, + { + URI: artifacts.GitSchemePrefix + "https://github.com/GoogleContainerTools/distroless.git@my-revision", + Digest: common.DigestSet{ + "sha1": "50c56a48cfb3a5a80fa36ed91c739bdac8381cbe", + }, + }, + }, + }, { + name: "materials from git results in task run spec", + taskRun: &v1beta1.TaskRun{ //nolint:staticcheck + Spec: v1beta1.TaskRunSpec{ + Params: []v1beta1.Param{{ + Name: "CHAINS-GIT_COMMIT", + Value: *v1beta1.NewStructuredValues("my-commit"), + }, { + Name: "CHAINS-GIT_URL", + Value: *v1beta1.NewStructuredValues("github.com/something"), + }}, + }, + }, + want: []common.ProvenanceMaterial{ + { + URI: artifacts.GitSchemePrefix + "github.com/something.git", + Digest: common.DigestSet{ + "sha1": "my-commit", + }, + }, + }, + }, { + name: "materials from git results in task spec", + taskRun: &v1beta1.TaskRun{ //nolint:staticcheck + Status: v1beta1.TaskRunStatus{ + TaskRunStatusFields: v1beta1.TaskRunStatusFields{ + TaskSpec: &v1beta1.TaskSpec{ + Params: []v1beta1.ParamSpec{{ + Name: "CHAINS-GIT_COMMIT", + Default: &v1beta1.ParamValue{ + StringVal: "my-commit", + }, + }, { + Name: "CHAINS-GIT_URL", + Default: &v1beta1.ParamValue{ + StringVal: "github.com/something", + }, + }}, + }, + }, + }, + }, + want: []common.ProvenanceMaterial{ + { + 
URI: artifacts.GitSchemePrefix + "github.com/something.git", + Digest: common.DigestSet{ + "sha1": "my-commit", + }, + }, + }, + }, { + name: "materials from git results in task spec and taskrun spec", + taskRun: &v1beta1.TaskRun{ //nolint:staticcheck + Spec: v1beta1.TaskRunSpec{ + Params: []v1beta1.Param{{ + Name: "CHAINS-GIT_URL", + Value: v1beta1.ParamValue{ + StringVal: "github.com/something", + }, + }}, + }, + Status: v1beta1.TaskRunStatus{ + TaskRunStatusFields: v1beta1.TaskRunStatusFields{ + TaskSpec: &v1beta1.TaskSpec{ + Params: []v1beta1.ParamSpec{{ + Name: "CHAINS-GIT_URL", + }, { + Name: "CHAINS-GIT_COMMIT", + Default: &v1beta1.ParamValue{ + StringVal: "my-commit", + }, + }}, + }, + }, + }, + }, + want: []common.ProvenanceMaterial{{ + URI: "git+github.com/something.git", + Digest: common.DigestSet{ + "sha1": "my-commit", + }, + }}, + }, { + name: "materials from step images", + taskRun: &v1beta1.TaskRun{ //nolint:staticcheck + Status: v1beta1.TaskRunStatus{ + TaskRunStatusFields: v1beta1.TaskRunStatusFields{ + Steps: []v1beta1.StepState{{ + Name: "git-source-repo-jwqcl", + ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, { + Name: "git-source-repo-repeat-again-jwqcl", + ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, { + Name: "build", + ImageID: "gcr.io/cloud-marketplace-containers/google/bazel@sha256:010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + }}, + }, + }, + }, + want: []common.ProvenanceMaterial{ + { + URI: artifacts.OCIScheme + "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, + { + URI: artifacts.OCIScheme + "gcr.io/cloud-marketplace-containers/google/bazel", + Digest: 
common.DigestSet{ + "sha256": "010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + }, + }, + }, + }, { + name: "materials from step and sidecar images", + taskRun: &v1beta1.TaskRun{ //nolint:staticcheck + Status: v1beta1.TaskRunStatus{ + TaskRunStatusFields: v1beta1.TaskRunStatusFields{ + Steps: []v1beta1.StepState{{ + Name: "git-source-repo-jwqcl", + ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, { + Name: "git-source-repo-repeat-again-jwqcl", + ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, { + Name: "build", + ImageID: "gcr.io/cloud-marketplace-containers/google/bazel@sha256:010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + }}, + Sidecars: []v1beta1.SidecarState{{ + Name: "sidecar-jwqcl", + ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/sidecar-git-init@sha256:a1234f6e7a69617db57b685893256f978436277094c21d43b153994acd8a09567", + }}, + }, + }, + }, + want: []common.ProvenanceMaterial{ + { + URI: artifacts.OCIScheme + "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, { + URI: artifacts.OCIScheme + "gcr.io/cloud-marketplace-containers/google/bazel", + Digest: common.DigestSet{ + "sha256": "010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + }, + }, { + URI: artifacts.OCIScheme + "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/sidecar-git-init", + Digest: common.DigestSet{ + "sha256": "a1234f6e7a69617db57b685893256f978436277094c21d43b153994acd8a09567", + }, + }, + }, + }} + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + ctx := logtesting.TestContextWithLogger(t) + mat, err := TaskMaterials(ctx, 
objects.NewTaskRunObjectV1Beta1(tc.taskRun)) + if err != nil { + t.Fatalf("Did not expect an error but got %v", err) + } + if diff := cmp.Diff(tc.want, mat); diff != "" { + t.Errorf("Materials(): -want +got: %s", diff) + } + }) + } +} + +func TestPipelineMaterials(t *testing.T) { + expected := []common.ProvenanceMaterial{ + {URI: "github.com/test", Digest: common.DigestSet{"sha1": "28b123"}}, + { + URI: artifacts.OCIScheme + "gcr.io/test1/test1", + Digest: common.DigestSet{"sha256": "d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6"}, + }, + {URI: "github.com/catalog", Digest: common.DigestSet{"sha1": "x123"}}, + { + URI: artifacts.OCIScheme + "gcr.io/test2/test2", + Digest: common.DigestSet{"sha256": "4d6dd704ef58cb214dd826519929e92a978a57cdee43693006139c0080fd6fac"}, + }, + { + URI: artifacts.OCIScheme + "gcr.io/test3/test3", + Digest: common.DigestSet{"sha256": "f1a8b8549c179f41e27ff3db0fe1a1793e4b109da46586501a8343637b1d0478"}, + }, + {URI: "github.com/test", Digest: common.DigestSet{"sha1": "ab123"}}, + {URI: "abc", Digest: common.DigestSet{"sha256": "827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7"}}, + {URI: artifacts.GitSchemePrefix + "https://git.test.com.git", Digest: common.DigestSet{"sha1": "abcd"}}, + } + ctx := logtesting.TestContextWithLogger(t) + got, err := PipelineMaterials(ctx, createPro("../../../testdata/v1beta1/pipelinerun1.json"), &slsaconfig.SlsaConfig{DeepInspectionEnabled: false}) + if err != nil { + t.Error(err) + } + if diff := cmp.Diff(expected, got, compare.MaterialsCompareOption()); diff != "" { + t.Errorf("Materials(): -want +got: %s", diff) + } +} + +func TestStructuredResultPipelineMaterials(t *testing.T) { + want := []common.ProvenanceMaterial{ + {URI: "github.com/test", Digest: common.DigestSet{"sha1": "28b123"}}, + { + URI: artifacts.OCIScheme + "gcr.io/test1/test1", + Digest: common.DigestSet{"sha256": "d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6"}, + }, + {URI: 
"github.com/catalog", Digest: common.DigestSet{"sha1": "x123"}}, + { + URI: artifacts.OCIScheme + "gcr.io/test2/test2", + Digest: common.DigestSet{"sha256": "4d6dd704ef58cb214dd826519929e92a978a57cdee43693006139c0080fd6fac"}, + }, + { + URI: artifacts.OCIScheme + "gcr.io/test3/test3", + Digest: common.DigestSet{"sha256": "f1a8b8549c179f41e27ff3db0fe1a1793e4b109da46586501a8343637b1d0478"}, + }, + {URI: "github.com/test", Digest: common.DigestSet{"sha1": "ab123"}}, + { + URI: "abcd", + Digest: common.DigestSet{ + "sha256": "827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7", + }, + }, + } + ctx := logtesting.TestContextWithLogger(t) + got, err := PipelineMaterials(ctx, createPro("../../../testdata/v1beta1/pipelinerun_structured_results.json"), &slsaconfig.SlsaConfig{DeepInspectionEnabled: false}) + if err != nil { + t.Errorf("error while extracting materials: %v", err) + } + if diff := cmp.Diff(want, got, compare.MaterialsCompareOption()); diff != "" { + t.Errorf("materials(): -want +got: %s", diff) + } +} + +func TestFromImageID(t *testing.T) { + tests := []struct { + name string + imageID string + want common.ProvenanceMaterial + wantError error + }{{ + name: "proper ImageID", + imageID: "gcr.io/cloud-marketplace-containers/google/bazel@sha256:010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + want: common.ProvenanceMaterial{ + URI: artifacts.OCIScheme + "gcr.io/cloud-marketplace-containers/google/bazel", + Digest: common.DigestSet{ + "sha256": "010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + }, + }, + }, { + name: "bad ImageID", + imageID: "badImageId", + want: common.ProvenanceMaterial{}, + wantError: fmt.Errorf("expected imageID badImageId to be separable by @"), + }} + for _, tc := range tests { + mat, err := fromImageID(tc.imageID) + if err != nil { + if err.Error() != tc.wantError.Error() { + t.Fatalf("Expected error %v but got %v", tc.wantError, err) + } + } + if tc.wantError == nil { + if diff := 
cmp.Diff(tc.want, mat); diff != "" { + t.Errorf("materials(): -want +got: %s", diff) + } + } + } +} + +//nolint:all +func TestFromPipelineParamsAndResults(t *testing.T) { + tests := []struct { + name string + pipelineRunObjectV1Beta1 *objects.PipelineRunObjectV1Beta1 + enableDeepInspection bool + want []common.ProvenanceMaterial + }{{ + name: "from results", + pipelineRunObjectV1Beta1: objects.NewPipelineRunObjectV1Beta1(&v1beta1.PipelineRun{ + Status: v1beta1.PipelineRunStatus{ + PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ + PipelineResults: []v1beta1.PipelineRunResult{{ + Name: "CHAINS-GIT_COMMIT", + Value: *v1beta1.NewStructuredValues("my-commit"), + }, { + Name: "CHAINS-GIT_URL", + Value: *v1beta1.NewStructuredValues("github.com/something"), + }}, + }, + }, + }), + want: []common.ProvenanceMaterial{{ + URI: "git+github.com/something.git", + Digest: common.DigestSet{ + "sha1": "my-commit", + }, + }}, + }, { + name: "from pipelinespec", + pipelineRunObjectV1Beta1: objects.NewPipelineRunObjectV1Beta1(&v1beta1.PipelineRun{ + Status: v1beta1.PipelineRunStatus{ + PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ + PipelineSpec: &v1beta1.PipelineSpec{ + Params: []v1beta1.ParamSpec{{ + Name: "CHAINS-GIT_COMMIT", + Default: &v1beta1.ParamValue{ + StringVal: "my-commit", + }, + }, { + Name: "CHAINS-GIT_URL", + Default: &v1beta1.ParamValue{ + StringVal: "github.com/something", + }, + }}, + }, + }, + }, + }), + want: []common.ProvenanceMaterial{{ + URI: "git+github.com/something.git", + Digest: common.DigestSet{ + "sha1": "my-commit", + }, + }}, + }, { + name: "from pipelineRunSpec", + pipelineRunObjectV1Beta1: objects.NewPipelineRunObjectV1Beta1(&v1beta1.PipelineRun{ + Spec: v1beta1.PipelineRunSpec{ + Params: []v1beta1.Param{{ + Name: "CHAINS-GIT_COMMIT", + Value: v1beta1.ParamValue{ + StringVal: "my-commit", + }, + }, { + Name: "CHAINS-GIT_URL", + Value: v1beta1.ParamValue{ + StringVal: "github.com/something", + }, + }}, + }, + }), + want: 
[]common.ProvenanceMaterial{{ + URI: "git+github.com/something.git", + Digest: common.DigestSet{ + "sha1": "my-commit", + }, + }}, + }, { + name: "from completeChain", + pipelineRunObjectV1Beta1: objects.NewPipelineRunObjectV1Beta1(&v1beta1.PipelineRun{ + Spec: v1beta1.PipelineRunSpec{ + Params: []v1beta1.Param{{ + Name: "CHAINS-GIT_URL", + Value: v1beta1.ParamValue{ + StringVal: "github.com/something", + }, + }}, + }, + Status: v1beta1.PipelineRunStatus{ + PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ + PipelineSpec: &v1beta1.PipelineSpec{ + Params: []v1beta1.ParamSpec{{ + Name: "CHAINS-GIT_URL", + }}, + }, + PipelineResults: []v1beta1.PipelineRunResult{{ + Name: "CHAINS-GIT_COMMIT", + Value: *v1beta1.NewStructuredValues("my-commit"), + }}, + }, + }, + }), + want: []common.ProvenanceMaterial{{ + URI: "git+github.com/something.git", + Digest: common.DigestSet{ + "sha1": "my-commit", + }, + }}, + }, { + name: "deep inspection: pipelinerun param and task result", + pipelineRunObjectV1Beta1: createProWithPipelineParamAndTaskResult(), + enableDeepInspection: true, + want: []common.ProvenanceMaterial{ + { + URI: "git+github.com/pipelinerun-param.git", + Digest: common.DigestSet{ + "sha1": "115734d92807a80158b4b7af605d768c647fdb3d", + }, + }, { + URI: "github.com/childtask-result", + Digest: common.DigestSet{ + "sha1": "225734d92807a80158b4b7af605d768c647fdb3d", + }, + }, + }, + }, + } + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + ctx := logtesting.TestContextWithLogger(t) + got := FromPipelineParamsAndResults(ctx, tc.pipelineRunObjectV1Beta1, &slsaconfig.SlsaConfig{DeepInspectionEnabled: tc.enableDeepInspection}) + if diff := cmp.Diff(tc.want, got, compare.MaterialsCompareOption()); diff != "" { + t.Errorf("FromPipelineParamsAndResults(): -want +got: %s", diff) + } + }) + } +} + +//nolint:all +func createProWithPipelineParamAndTaskResult() *objects.PipelineRunObjectV1Beta1 { + pro := 
objects.NewPipelineRunObjectV1Beta1(&v1beta1.PipelineRun{ + Status: v1beta1.PipelineRunStatus{ + PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ + PipelineSpec: &v1beta1.PipelineSpec{ + Params: []v1beta1.ParamSpec{{ + Name: "CHAINS-GIT_COMMIT", + Default: &v1beta1.ParamValue{ + StringVal: "115734d92807a80158b4b7af605d768c647fdb3d", + }, + }, { + Name: "CHAINS-GIT_URL", + Default: &v1beta1.ParamValue{ + StringVal: "github.com/pipelinerun-param", + }, + }}, + }, + }, + }, + }) + + pipelineTaskName := "my-clone-task" + tr := &v1beta1.TaskRun{ + ObjectMeta: metav1.ObjectMeta{Labels: map[string]string{objects.PipelineTaskLabel: pipelineTaskName}}, + Status: v1beta1.TaskRunStatus{ + TaskRunStatusFields: v1beta1.TaskRunStatusFields{ + CompletionTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, time.UTC)}, + TaskRunResults: []v1beta1.TaskRunResult{ + { + Name: "ARTIFACT_INPUTS", + Value: *v1beta1.NewObject(map[string]string{ + "uri": "github.com/childtask-result", + "digest": "sha1:225734d92807a80158b4b7af605d768c647fdb3d", + })}, + }, + }, + }, + } + + pro.AppendTaskRun(tr) + pro.Status.PipelineSpec.Tasks = []v1beta1.PipelineTask{{Name: pipelineTaskName}} + return pro +} diff --git a/pkg/chains/formats/slsa/testdata/pipelinerun-childrefs.json b/pkg/chains/formats/slsa/testdata/pipelinerun-childrefs.json index 32030fa0aa..7fc402a02e 100644 --- a/pkg/chains/formats/slsa/testdata/pipelinerun-childrefs.json +++ b/pkg/chains/formats/slsa/testdata/pipelinerun-childrefs.json @@ -9,7 +9,9 @@ "pipelineRef": { "name": "test-pipeline" }, - "serviceAccountName": "pipeline" + "taskRunTemplate": { + "serviceAccountName": "pipeline" + } }, "status": { "startTime": "2021-03-29T09:50:00Z", @@ -23,7 +25,7 @@ "type": "Succeeded" } ], - "pipelineResults": [ + "results": [ { "name": "CHAINS-GIT_COMMIT", "value": "abcd" @@ -113,13 +115,13 @@ }, "childReferences": [ { - "apiVersion": "tekton.dev/v1beta1", + "apiVersion": "tekton.dev/v1", "kind": "TaskRun", 
"name": "git-clone", "pipelineTaskName": "git-clone" }, { - "apiVersion": "tekton.dev/v1beta1", + "apiVersion": "tekton.dev/v1", "kind": "TaskRun", "name": "taskrun-build", "pipelineTaskName": "build" diff --git a/pkg/chains/formats/slsa/testdata/pipelinerun1.json b/pkg/chains/formats/slsa/testdata/pipelinerun1.json index 879e8b1d84..fda4c6ead3 100644 --- a/pkg/chains/formats/slsa/testdata/pipelinerun1.json +++ b/pkg/chains/formats/slsa/testdata/pipelinerun1.json @@ -9,7 +9,9 @@ "pipelineRef": { "name": "test-pipeline" }, - "serviceAccountName": "pipeline" + "taskRunTemplate": { + "serviceAccountName": "pipeline" + } }, "status": { "startTime": "2021-03-29T09:50:00Z", @@ -23,7 +25,7 @@ "type": "Succeeded" } ], - "pipelineResults": [ + "results": [ { "name": "CHAINS-GIT_COMMIT", "value": "abcd" @@ -155,7 +157,7 @@ } } ], - "taskResults": [ + "results": [ { "name": "commit", "value": "abcd" @@ -238,7 +240,7 @@ } } ], - "taskResults": [ + "results": [ { "name": "IMAGE_DIGEST", "value": "sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7" diff --git a/pkg/chains/formats/slsa/testdata/pipelinerun_structured_results.json b/pkg/chains/formats/slsa/testdata/pipelinerun_structured_results.json index 2e210c985a..909f821ec0 100644 --- a/pkg/chains/formats/slsa/testdata/pipelinerun_structured_results.json +++ b/pkg/chains/formats/slsa/testdata/pipelinerun_structured_results.json @@ -9,7 +9,9 @@ "pipelineRef": { "name": "test-pipeline" }, - "serviceAccountName": "pipeline" + "taskRunTemplate": { + "serviceAccountName": "pipeline" + } }, "status": { "startTime": "2021-03-29T09:50:00Z", @@ -23,7 +25,7 @@ "type": "Succeeded" } ], - "pipelineResults": [ + "results": [ { "name": "image-ARTIFACT_INPUTS", "value": { @@ -113,7 +115,7 @@ } } ], - "taskResults": [ + "results": [ { "name": "commit", "value": "abcd" @@ -196,7 +198,7 @@ } } ], - "taskResults": [ + "results": [ { "name": "IMAGE_DIGEST", "value": 
"sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7" diff --git a/pkg/chains/formats/slsa/testdata/taskrun-multiple-subjects.json b/pkg/chains/formats/slsa/testdata/taskrun-multiple-subjects.json index 32ddbc30e5..38ac438638 100644 --- a/pkg/chains/formats/slsa/testdata/taskrun-multiple-subjects.json +++ b/pkg/chains/formats/slsa/testdata/taskrun-multiple-subjects.json @@ -25,7 +25,7 @@ "imageID": "docker-pullable://gcr.io/test1/test1@sha256:d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6" } ], - "taskResults": [ + "results": [ { "name": "IMAGES", "value": "gcr.io/myimage1@sha256:d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6,gcr.io/myimage2@sha256:daa1a56e13c85cf164e7d9e595006649e3a04c47fe4a8261320e18a0bf3b0367" diff --git a/pkg/chains/formats/slsa/testdata/taskrun1.json b/pkg/chains/formats/slsa/testdata/taskrun1.json index a686452516..d42a4638e4 100644 --- a/pkg/chains/formats/slsa/testdata/taskrun1.json +++ b/pkg/chains/formats/slsa/testdata/taskrun1.json @@ -56,7 +56,7 @@ "imageID": "docker-pullable://gcr.io/test3/test3@sha256:f1a8b8549c179f41e27ff3db0fe1a1793e4b109da46586501a8343637b1d0478" } ], - "taskResults": [ + "results": [ { "name": "IMAGE_DIGEST", "value": "sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7" diff --git a/pkg/chains/formats/slsa/testdata/taskrun2.json b/pkg/chains/formats/slsa/testdata/taskrun2.json index 958e992057..63b3f984b7 100644 --- a/pkg/chains/formats/slsa/testdata/taskrun2.json +++ b/pkg/chains/formats/slsa/testdata/taskrun2.json @@ -42,7 +42,7 @@ "imageID": "docker-pullable://gcr.io/test1/test1@sha256:d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6" } ], - "taskResults": [ + "results": [ { "name": "some-uri_DIGEST", "value": "sha256:d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6" diff --git a/pkg/chains/formats/slsa/testdata/v1beta1/pipelinerun-childrefs.json 
b/pkg/chains/formats/slsa/testdata/v1beta1/pipelinerun-childrefs.json new file mode 100644 index 0000000000..32030fa0aa --- /dev/null +++ b/pkg/chains/formats/slsa/testdata/v1beta1/pipelinerun-childrefs.json @@ -0,0 +1,129 @@ +{ + "spec": { + "params": [ + { + "name": "IMAGE", + "value": "test.io/test/image" + } + ], + "pipelineRef": { + "name": "test-pipeline" + }, + "serviceAccountName": "pipeline" + }, + "status": { + "startTime": "2021-03-29T09:50:00Z", + "completionTime": "2021-03-29T09:50:15Z", + "conditions": [ + { + "lastTransitionTime": "2021-03-29T09:50:15Z", + "message": "Tasks Completed: 2 (Failed: 0, Cancelled 0), Skipped: 0", + "reason": "Succeeded", + "status": "True", + "type": "Succeeded" + } + ], + "pipelineResults": [ + { + "name": "CHAINS-GIT_COMMIT", + "value": "abcd" + }, + { + "name": "CHAINS-GIT_URL", + "value": "https://git.test.com" + }, + { + "name": "IMAGE_URL", + "value": "test.io/test/image" + }, + { + "name": "IMAGE_DIGEST", + "value": "sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7" + } + ], + "pipelineSpec": { + "params": [ + { + "description": "Image path on registry", + "name": "IMAGE", + "type": "string" + } + ], + "results": [ + { + "description": "", + "name": "CHAINS-GIT_COMMIT", + "value": "$(tasks.git-clone.results.commit)" + }, + { + "description": "", + "name": "CHAINS-GIT_URL", + "value": "$(tasks.git-clone.results.url)" + }, + { + "description": "", + "name": "IMAGE_URL", + "value": "$(tasks.build.results.IMAGE_URL)" + }, + { + "description": "", + "name": "IMAGE_DIGEST", + "value": "$(tasks.build.results.IMAGE_DIGEST)" + } + ], + "tasks": [ + { + "name": "git-clone", + "params": [ + { + "name": "url", + "value": "https://git.test.com" + }, + { + "name": "revision", + "value": "" + } + ], + "taskRef": { + "kind": "ClusterTask", + "name": "git-clone" + } + }, + { + "name": "build", + "params": [ + { + "name": "CHAINS-GIT_COMMIT", + "value": "$(tasks.git-clone.results.commit)" + }, + { + "name": 
"CHAINS-GIT_URL", + "value": "$(tasks.git-clone.results.url)" + } + ], + "runAfter": [ + "git-clone" + ], + "taskRef": { + "kind": "ClusterTask", + "name": "build" + } + } + ] + }, + "childReferences": [ + { + "apiVersion": "tekton.dev/v1beta1", + "kind": "TaskRun", + "name": "git-clone", + "pipelineTaskName": "git-clone" + }, + { + "apiVersion": "tekton.dev/v1beta1", + "kind": "TaskRun", + "name": "taskrun-build", + "pipelineTaskName": "build" + } + ] + } +} diff --git a/pkg/chains/formats/slsa/testdata/v1beta1/pipelinerun1.json b/pkg/chains/formats/slsa/testdata/v1beta1/pipelinerun1.json new file mode 100644 index 0000000000..879e8b1d84 --- /dev/null +++ b/pkg/chains/formats/slsa/testdata/v1beta1/pipelinerun1.json @@ -0,0 +1,306 @@ +{ + "spec": { + "params": [ + { + "name": "IMAGE", + "value": "test.io/test/image" + } + ], + "pipelineRef": { + "name": "test-pipeline" + }, + "serviceAccountName": "pipeline" + }, + "status": { + "startTime": "2021-03-29T09:50:00Z", + "completionTime": "2021-03-29T09:50:15Z", + "conditions": [ + { + "lastTransitionTime": "2021-03-29T09:50:15Z", + "message": "Tasks Completed: 2 (Failed: 0, Cancelled 0), Skipped: 0", + "reason": "Succeeded", + "status": "True", + "type": "Succeeded" + } + ], + "pipelineResults": [ + { + "name": "CHAINS-GIT_COMMIT", + "value": "abcd" + }, + { + "name": "CHAINS-GIT_URL", + "value": "https://git.test.com" + }, + { + "name": "IMAGE_URL", + "value": "test.io/test/image" + }, + { + "name": "IMAGE_DIGEST", + "value": "sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7" + }, + { + "name": "img-ARTIFACT_INPUTS", + "value": { + "uri": "abc","digest": "sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7" + } + }, + { + "name": "img2-ARTIFACT_OUTPUTS", + "value": { + "uri": "def","digest": "sha256:" + } + }, + { + "name": "img_no_uri-ARTIFACT_OUTPUTS", + "value": { + "digest": "sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7" + } + } + ], + 
"pipelineSpec": { + "params": [ + { + "description": "Image path on registry", + "name": "IMAGE", + "type": "string" + } + ], + "results": [ + { + "description": "", + "name": "CHAINS-GIT_COMMIT", + "value": "$(tasks.git-clone.results.commit)" + }, + { + "description": "", + "name": "CHAINS-GIT_URL", + "value": "$(tasks.git-clone.results.url)" + }, + { + "description": "", + "name": "IMAGE_URL", + "value": "$(tasks.build.results.IMAGE_URL)" + }, + { + "description": "", + "name": "IMAGE_DIGEST", + "value": "$(tasks.build.results.IMAGE_DIGEST)" + } + ], + "tasks": [ + { + "name": "git-clone", + "params": [ + { + "name": "url", + "value": "https://git.test.com" + }, + { + "name": "revision", + "value": "" + } + ], + "taskRef": { + "kind": "ClusterTask", + "name": "git-clone" + } + }, + { + "name": "build", + "params": [ + { + "name": "CHAINS-GIT_COMMIT", + "value": "$(tasks.git-clone.results.commit)" + }, + { + "name": "CHAINS-GIT_URL", + "value": "$(tasks.git-clone.results.url)" + } + ], + "taskRef": { + "kind": "ClusterTask", + "name": "build" + } + } + ] + }, + "taskRuns": { + "git-clone": { + "pipelineTaskName": "git-clone", + "status": { + "completionTime": "2021-03-29T09:50:15Z", + "conditions": [ + { + "lastTransitionTime": "2021-03-29T09:50:15Z", + "message": "All Steps have completed executing", + "reason": "Succeeded", + "status": "True", + "type": "Succeeded" + } + ], + "podName": "git-clone-pod", + "startTime": "2021-03-29T09:50:00Z", + "steps": [ + { + "container": "step-clone", + "imageID": "test.io/test/clone-image", + "name": "clone", + "terminated": { + "exitCode": 0, + "finishedAt": "2021-03-29T09:50:15Z", + "reason": "Completed", + "startedAt": "2022-05-31T19:13:27Z" + } + } + ], + "taskResults": [ + { + "name": "commit", + "value": "abcd" + }, + { + "name": "url", + "value": "https://git.test.com" + } + ], + "taskSpec": { + "params": [ + { + "description": "Repository URL to clone from.", + "name": "url", + "type": "string" + }, + { + "default": 
"", + "description": "Revision to checkout. (branch, tag, sha, ref, etc...)", + "name": "revision", + "type": "string" + } + ], + "results": [ + { + "description": "The precise commit SHA that was fetched by this Task.", + "name": "commit" + }, + { + "description": "The precise URL that was fetched by this Task.", + "name": "url" + } + ], + "steps": [ + { + "env": [ + { + "name": "HOME", + "value": "$(params.userHome)" + }, + { + "name": "PARAM_URL", + "value": "$(params.url)" + } + ], + "image": "$(params.gitInitImage)", + "name": "clone", + "resources": {}, + "script": "git clone" + } + ] + } + } + }, + "taskrun-build": { + "pipelineTaskName": "build", + "status": { + "completionTime": "2021-03-29T09:50:15Z", + "conditions": [ + { + "lastTransitionTime": "2021-03-29T09:50:15Z", + "message": "All Steps have completed executing", + "reason": "Succeeded", + "status": "True", + "type": "Succeeded" + } + ], + "podName": "build-pod", + "startTime": "2021-03-29T09:50:00Z", + "steps": [ + { + "container": "step-build", + "imageID": "test.io/test/build-image", + "name": "build", + "terminated": { + "exitCode": 0, + "finishedAt": "2022-05-31T19:17:30Z", + "reason": "Completed", + "startedAt": "2021-03-29T09:50:00Z" + } + } + ], + "taskResults": [ + { + "name": "IMAGE_DIGEST", + "value": "sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7" + }, + { + "name": "IMAGE_URL", + "value": "test.io/test/image\n" + } + ], + "taskSpec": { + "params": [ + { + "description": "Git CHAINS URL", + "name": "CHAINS-GIT_URL", + "type": "string" + }, + { + "description": "Git CHAINS Commit", + "name": "CHAINS-GIT_COMMIT", + "type": "string" + } + ], + "results": [ + { + "description": "Digest of the image just built.", + "name": "IMAGE_DIGEST" + }, + { + "description": "URL of the image just built.", + "name": "IMAGE_URL" + } + ], + "steps": [ + { + "command": [ + "buildah", + "build" + ], + "image": "test.io/test/build-image", + "name": "generate" + }, + { + "command": [ 
+ "buildah", + "push" + ], + "image": "test.io/test/build-image", + "name": "push" + } + ] + } + } + } + }, + "provenance": { + "refSource": { + "uri": "github.com/test", + "digest": { + "sha1": "28b123" + }, + "entryPoint": "pipeline.yaml" + } + } + } +} diff --git a/pkg/chains/formats/slsa/testdata/v1beta1/pipelinerun_structured_results.json b/pkg/chains/formats/slsa/testdata/v1beta1/pipelinerun_structured_results.json new file mode 100644 index 0000000000..2e210c985a --- /dev/null +++ b/pkg/chains/formats/slsa/testdata/v1beta1/pipelinerun_structured_results.json @@ -0,0 +1,264 @@ +{ + "spec": { + "params": [ + { + "name": "IMAGE", + "value": "test.io/test/image" + } + ], + "pipelineRef": { + "name": "test-pipeline" + }, + "serviceAccountName": "pipeline" + }, + "status": { + "startTime": "2021-03-29T09:50:00Z", + "completionTime": "2021-03-29T09:50:15Z", + "conditions": [ + { + "lastTransitionTime": "2021-03-29T09:50:15Z", + "message": "Tasks Completed: 2 (Failed: 0, Cancelled 0), Skipped: 0", + "reason": "Succeeded", + "status": "True", + "type": "Succeeded" + } + ], + "pipelineResults": [ + { + "name": "image-ARTIFACT_INPUTS", + "value": { + "uri": "abcd", + "digest": "sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7" + } + }, + { + "name": "image-ARTIFACT_OUTPUTS", + "value": { + "uri": "hello_world", + "sha256": "827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7" + } + } + ], + "pipelineSpec": { + "params": [ + { + "description": "Image path on registry", + "name": "IMAGE", + "type": "string" + } + ], + "tasks": [ + { + "name": "git-clone", + "params": [ + { + "name": "url", + "value": "https://git.test.com" + }, + { + "name": "revision", + "value": "" + } + ], + "taskRef": { + "kind": "ClusterTask", + "name": "git-clone" + } + }, + { + "name": "build", + "params": [ + { + "name": "CHAINS-GIT_COMMIT", + "value": "$(tasks.git-clone.results.commit)" + }, + { + "name": "CHAINS-GIT_URL", + "value": 
"$(tasks.git-clone.results.url)" + } + ], + "taskRef": { + "kind": "ClusterTask", + "name": "build" + } + } + ] + }, + "taskRuns": { + "git-clone": { + "pipelineTaskName": "git-clone", + "status": { + "completionTime": "2021-03-29T09:50:15Z", + "conditions": [ + { + "lastTransitionTime": "2021-03-29T09:50:15Z", + "message": "All Steps have completed executing", + "reason": "Succeeded", + "status": "True", + "type": "Succeeded" + } + ], + "podName": "git-clone-pod", + "startTime": "2021-03-29T09:50:00Z", + "steps": [ + { + "container": "step-clone", + "imageID": "test.io/test/clone-image", + "name": "clone", + "terminated": { + "exitCode": 0, + "finishedAt": "2021-03-29T09:50:15Z", + "reason": "Completed", + "startedAt": "2022-05-31T19:13:27Z" + } + } + ], + "taskResults": [ + { + "name": "commit", + "value": "abcd" + }, + { + "name": "url", + "value": "https://git.test.com" + } + ], + "taskSpec": { + "params": [ + { + "description": "Repository URL to clone from.", + "name": "url", + "type": "string" + }, + { + "default": "", + "description": "Revision to checkout. 
(branch, tag, sha, ref, etc...)", + "name": "revision", + "type": "string" + } + ], + "results": [ + { + "description": "The precise commit SHA that was fetched by this Task.", + "name": "commit" + }, + { + "description": "The precise URL that was fetched by this Task.", + "name": "url" + } + ], + "steps": [ + { + "env": [ + { + "name": "HOME", + "value": "$(params.userHome)" + }, + { + "name": "PARAM_URL", + "value": "$(params.url)" + } + ], + "image": "$(params.gitInitImage)", + "name": "clone", + "resources": {}, + "script": "git clone" + } + ] + } + } + }, + "taskrun-build": { + "pipelineTaskName": "build", + "status": { + "completionTime": "2021-03-29T09:50:15Z", + "conditions": [ + { + "lastTransitionTime": "2021-03-29T09:50:15Z", + "message": "All Steps have completed executing", + "reason": "Succeeded", + "status": "True", + "type": "Succeeded" + } + ], + "podName": "build-pod", + "startTime": "2021-03-29T09:50:00Z", + "steps": [ + { + "container": "step-build", + "imageID": "test.io/test/build-image", + "name": "build", + "terminated": { + "exitCode": 0, + "finishedAt": "2022-05-31T19:17:30Z", + "reason": "Completed", + "startedAt": "2021-03-29T09:50:00Z" + } + } + ], + "taskResults": [ + { + "name": "IMAGE_DIGEST", + "value": "sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7" + }, + { + "name": "IMAGE_URL", + "value": "test.io/test/image\n" + } + ], + "taskSpec": { + "params": [ + { + "description": "Git CHAINS URL", + "name": "CHAINS-GIT_URL", + "type": "string" + }, + { + "description": "Git CHAINS Commit", + "name": "CHAINS-GIT_COMMIT", + "type": "string" + } + ], + "results": [ + { + "description": "Digest of the image just built.", + "name": "IMAGE_DIGEST" + }, + { + "description": "URL of the image just built.", + "name": "IMAGE_URL" + } + ], + "steps": [ + { + "command": [ + "buildah", + "build" + ], + "image": "test.io/test/build-image", + "name": "generate" + }, + { + "command": [ + "buildah", + "push" + ], + "image": 
"test.io/test/build-image", + "name": "push" + } + ] + } + } + } + }, + "provenance": { + "refSource": { + "uri": "github.com/test", + "digest": { + "sha1": "28b123" + }, + "entryPoint": "pipeline.yaml" + } + } + } +} diff --git a/pkg/chains/formats/slsa/testdata/v1beta1/taskrun-multiple-subjects.json b/pkg/chains/formats/slsa/testdata/v1beta1/taskrun-multiple-subjects.json new file mode 100644 index 0000000000..32ddbc30e5 --- /dev/null +++ b/pkg/chains/formats/slsa/testdata/v1beta1/taskrun-multiple-subjects.json @@ -0,0 +1,56 @@ +{ + "spec": { + "params": [], + "taskRef": { + "name": "test-task", + "kind": "Task" + }, + "serviceAccountName": "default" + }, + "status": { + "conditions": [ + { + "type": "Succeeded", + "status": "True", + "lastTransitionTime": "2021-03-29T09:50:15Z", + "reason": "Succeeded", + "message": "All Steps have completed executing" + } + ], + "podName": "test-pod-name", + "steps": [ + { + "name": "step1", + "container": "step-step1", + "imageID": "docker-pullable://gcr.io/test1/test1@sha256:d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6" + } + ], + "taskResults": [ + { + "name": "IMAGES", + "value": "gcr.io/myimage1@sha256:d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6,gcr.io/myimage2@sha256:daa1a56e13c85cf164e7d9e595006649e3a04c47fe4a8261320e18a0bf3b0367" + } + ], + "taskSpec": { + "params": [], + "results": [ + { + "name": "file1_DIGEST", + "description": "Digest of a file to push." + }, + { + "name": "file1", + "description": "some assembled file" + }, + { + "name": "file2_DIGEST", + "description": "Digest of a file to push." 
+ }, + { + "name": "file2", + "description": "some assembled file" + } + ] + } + } +} diff --git a/pkg/chains/formats/slsa/testdata/v1beta1/taskrun1.json b/pkg/chains/formats/slsa/testdata/v1beta1/taskrun1.json new file mode 100644 index 0000000000..a686452516 --- /dev/null +++ b/pkg/chains/formats/slsa/testdata/v1beta1/taskrun1.json @@ -0,0 +1,136 @@ +{ + "metadata": { + "name": "taskrun-build", + "labels": { + "tekton.dev/pipelineTask": "build" + } + }, + "spec": { + "params": [ + { + "name": "IMAGE", + "value": "test.io/test/image" + }, + { + "name": "CHAINS-GIT_COMMIT", + "value": "sha:taskrun" + }, + { + "name": "CHAINS-GIT_URL", + "value": "https://git.test.com" + } + ], + "taskRef": { + "name": "build", + "kind": "Task" + }, + "serviceAccountName": "default" + }, + "status": { + "startTime": "2021-03-29T09:50:00Z", + "completionTime": "2021-03-29T09:50:15Z", + "conditions": [ + { + "type": "Succeeded", + "status": "True", + "lastTransitionTime": "2021-03-29T09:50:15Z", + "reason": "Succeeded", + "message": "All Steps have completed executing" + } + ], + "podName": "test-pod-name", + "steps": [ + { + "name": "step1", + "container": "step-step1", + "imageID": "docker-pullable://gcr.io/test1/test1@sha256:d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6" + }, + { + "name": "step2", + "container": "step-step2", + "imageID": "docker-pullable://gcr.io/test2/test2@sha256:4d6dd704ef58cb214dd826519929e92a978a57cdee43693006139c0080fd6fac" + }, + { + "name": "step3", + "container": "step-step3", + "imageID": "docker-pullable://gcr.io/test3/test3@sha256:f1a8b8549c179f41e27ff3db0fe1a1793e4b109da46586501a8343637b1d0478" + } + ], + "taskResults": [ + { + "name": "IMAGE_DIGEST", + "value": "sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7" + }, + { + "name": "IMAGE_URL", + "value": "gcr.io/my/image" + } + ], + "taskSpec": { + "params": [ + { + "name": "IMAGE", + "type": "string" + }, + { + "name": "filename", + "type": "string" + }, + { 
+ "name": "DOCKERFILE", + "type": "string" + }, + { + "name": "CONTEXT", + "type": "string" + }, + { + "name": "EXTRA_ARGS", + "type": "string" + }, + { + "name": "BUILDER_IMAGE", + "type": "string" + }, { + "name": "CHAINS-GIT_COMMIT", + "type": "string", + "default": "sha:task" + }, { + "name": "CHAINS-GIT_URL", + "type": "string", + "default": "https://defaultgit.test.com" + } + ], + "steps": [ + { + "name": "step1" + }, + { + "name": "step2" + }, + { + "name": "step3" + } + ], + "results": [ + { + "name": "IMAGE_DIGEST", + "description": "Digest of the image just built." + }, + { + "name": "filename_DIGEST", + "description": "Digest of the file just built." + } + ] + }, + "provenance": { + "refSource": { + "uri": "github.com/test", + "digest": { + "sha1": "ab123" + }, + "entryPoint": "build.yaml" + } + } + } +} diff --git a/pkg/chains/formats/slsa/testdata/v1beta1/taskrun2.json b/pkg/chains/formats/slsa/testdata/v1beta1/taskrun2.json new file mode 100644 index 0000000000..958e992057 --- /dev/null +++ b/pkg/chains/formats/slsa/testdata/v1beta1/taskrun2.json @@ -0,0 +1,105 @@ +{ + "metadata": { + "name": "git-clone", + "labels": { + "tekton.dev/pipelineTask": "git-clone" + } + }, + "spec": { + "params": [ + { + "name": "url", + "value": "https://git.test.com" + }, + { + "name": "revision", + "value": "" + } + ], + "taskRef": { + "name": "git-clone", + "kind": "Task" + }, + "serviceAccountName": "default" + }, + "status": { + "startTime": "2021-03-29T09:50:00Z", + "completionTime": "2021-03-29T09:50:15Z", + "conditions": [ + { + "type": "Succeeded", + "status": "True", + "lastTransitionTime": "2021-03-29T09:50:15Z", + "reason": "Succeeded", + "message": "All Steps have completed executing" + } + ], + "podName": "test-pod-name", + "steps": [ + { + "name": "step1", + "container": "step-step1", + "imageID": "docker-pullable://gcr.io/test1/test1@sha256:d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6" + } + ], + "taskResults": [ + { + "name": 
"some-uri_DIGEST", + "value": "sha256:d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6" + }, + { + "name": "some-uri", + "value": "pkg:deb/debian/curl@7.50.3-1" + } + ], + "taskSpec": { + "steps": [ + { + "env": [ + { + "name": "HOME", + "value": "$(params.userHome)" + }, + { + "name": "PARAM_URL", + "value": "$(params.url)" + } + ], + "name": "step1", + "script": "git clone" + } + ], + "params": [ + { + "name": "CHAINS-GIT_COMMIT", + "type": "string", + "default": "sha:taskdefault" + }, + { + "name": "CHAINS-GIT_URL", + "type": "string", + "default": "https://git.test.com" + } + ], + "results": [ + { + "name": "some-uri_DIGEST", + "description": "Digest of a file to push." + }, + { + "name": "some-uri", + "description": "some calculated uri" + } + ] + }, + "provenance": { + "refSource": { + "uri": "github.com/catalog", + "digest": { + "sha1": "x123" + }, + "entryPoint": "git-clone.yaml" + } + } + } +} diff --git a/pkg/chains/formats/slsa/testdata/v2alpha3/pipelinerun1.json b/pkg/chains/formats/slsa/testdata/v2alpha3/pipelinerun1.json new file mode 100644 index 0000000000..7a91957a06 --- /dev/null +++ b/pkg/chains/formats/slsa/testdata/v2alpha3/pipelinerun1.json @@ -0,0 +1,312 @@ +{ + "metadata": { + "name": "pipelinerun-build", + "uid": "abhhf-12354-asjsdbjs23-3435353n" + }, + "spec": { + "params": [ + { + "name": "IMAGE", + "value": "test.io/test/image" + } + ], + "pipelineRef": { + "name": "test-pipeline" + }, + "taskRunTemplate": { + "serviceAccountName": "pipeline" + } + }, + "status": { + "startTime": "2021-03-29T09:50:00Z", + "completionTime": "2021-03-29T09:50:15Z", + "conditions": [ + { + "lastTransitionTime": "2021-03-29T09:50:15Z", + "message": "Tasks Completed: 2 (Failed: 0, Cancelled 0), Skipped: 0", + "reason": "Succeeded", + "status": "True", + "type": "Succeeded" + } + ], + "results": [ + { + "name": "CHAINS-GIT_COMMIT", + "value": "abcd" + }, + { + "name": "CHAINS-GIT_URL", + "value": "https://git.test.com" + }, + { + "name": 
"IMAGE_URL", + "value": "test.io/test/image" + }, + { + "name": "IMAGE_DIGEST", + "value": "sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7" + }, + { + "name": "img-ARTIFACT_INPUTS", + "value": { + "uri": "abc","digest": "sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7" + } + }, + { + "name": "img2-ARTIFACT_OUTPUTS", + "value": { + "uri": "def","digest": "sha256:" + } + }, + { + "name": "img_no_uri-ARTIFACT_OUTPUTS", + "value": { + "digest": "sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7" + } + } + ], + "pipelineSpec": { + "params": [ + { + "description": "Image path on registry", + "name": "IMAGE", + "type": "string" + } + ], + "results": [ + { + "description": "", + "name": "CHAINS-GIT_COMMIT", + "value": "$(tasks.git-clone.results.commit)" + }, + { + "description": "", + "name": "CHAINS-GIT_URL", + "value": "$(tasks.git-clone.results.url)" + }, + { + "description": "", + "name": "IMAGE_URL", + "value": "$(tasks.build.results.IMAGE_URL)" + }, + { + "description": "", + "name": "IMAGE_DIGEST", + "value": "$(tasks.build.results.IMAGE_DIGEST)" + } + ], + "tasks": [ + { + "name": "git-clone", + "params": [ + { + "name": "url", + "value": "https://git.test.com" + }, + { + "name": "revision", + "value": "" + } + ], + "taskRef": { + "kind": "ClusterTask", + "name": "git-clone" + } + }, + { + "name": "build", + "params": [ + { + "name": "CHAINS-GIT_COMMIT", + "value": "$(tasks.git-clone.results.commit)" + }, + { + "name": "CHAINS-GIT_URL", + "value": "$(tasks.git-clone.results.url)" + } + ], + "taskRef": { + "kind": "ClusterTask", + "name": "build" + } + } + ] + }, + "taskRuns": { + "git-clone": { + "pipelineTaskName": "git-clone", + "status": { + "completionTime": "2021-03-29T09:50:15Z", + "conditions": [ + { + "lastTransitionTime": "2021-03-29T09:50:15Z", + "message": "All Steps have completed executing", + "reason": "Succeeded", + "status": "True", + "type": "Succeeded" + } + ], + "podName": 
"git-clone-pod", + "startTime": "2021-03-29T09:50:00Z", + "steps": [ + { + "container": "step-clone", + "imageID": "test.io/test/clone-image", + "name": "clone", + "terminated": { + "exitCode": 0, + "finishedAt": "2021-03-29T09:50:15Z", + "reason": "Completed", + "startedAt": "2022-05-31T19:13:27Z" + } + } + ], + "results": [ + { + "name": "commit", + "value": "abcd" + }, + { + "name": "url", + "value": "https://git.test.com" + } + ], + "taskSpec": { + "params": [ + { + "description": "Repository URL to clone from.", + "name": "url", + "type": "string" + }, + { + "default": "", + "description": "Revision to checkout. (branch, tag, sha, ref, etc...)", + "name": "revision", + "type": "string" + } + ], + "results": [ + { + "description": "The precise commit SHA that was fetched by this Task.", + "name": "commit" + }, + { + "description": "The precise URL that was fetched by this Task.", + "name": "url" + } + ], + "steps": [ + { + "env": [ + { + "name": "HOME", + "value": "$(params.userHome)" + }, + { + "name": "PARAM_URL", + "value": "$(params.url)" + } + ], + "image": "$(params.gitInitImage)", + "name": "clone", + "resources": {}, + "script": "git clone" + } + ] + } + } + }, + "taskrun-build": { + "pipelineTaskName": "build", + "status": { + "completionTime": "2021-03-29T09:50:15Z", + "conditions": [ + { + "lastTransitionTime": "2021-03-29T09:50:15Z", + "message": "All Steps have completed executing", + "reason": "Succeeded", + "status": "True", + "type": "Succeeded" + } + ], + "podName": "build-pod", + "startTime": "2021-03-29T09:50:00Z", + "steps": [ + { + "container": "step-build", + "imageID": "test.io/test/build-image", + "name": "build", + "terminated": { + "exitCode": 0, + "finishedAt": "2022-05-31T19:17:30Z", + "reason": "Completed", + "startedAt": "2021-03-29T09:50:00Z" + } + } + ], + "results": [ + { + "name": "IMAGE_DIGEST", + "value": "sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7" + }, + { + "name": "IMAGE_URL", + "value": 
"test.io/test/image\n" + } + ], + "taskSpec": { + "params": [ + { + "description": "Git CHAINS URL", + "name": "CHAINS-GIT_URL", + "type": "string" + }, + { + "description": "Git CHAINS Commit", + "name": "CHAINS-GIT_COMMIT", + "type": "string" + } + ], + "results": [ + { + "description": "Digest of the image just built.", + "name": "IMAGE_DIGEST" + }, + { + "description": "URL of the image just built.", + "name": "IMAGE_URL" + } + ], + "steps": [ + { + "command": [ + "buildah", + "build" + ], + "image": "test.io/test/build-image", + "name": "generate" + }, + { + "command": [ + "buildah", + "push" + ], + "image": "test.io/test/build-image", + "name": "push" + } + ] + } + } + } + }, + "provenance": { + "refSource": { + "uri": "git+https://github.com/test", + "digest": { + "sha1": "28b123" + }, + "entryPoint": "pipeline.yaml" + } + } + } +} diff --git a/pkg/chains/formats/slsa/testdata/v2alpha3/pipelinerun_structured_results.json b/pkg/chains/formats/slsa/testdata/v2alpha3/pipelinerun_structured_results.json new file mode 100644 index 0000000000..dbaac82e33 --- /dev/null +++ b/pkg/chains/formats/slsa/testdata/v2alpha3/pipelinerun_structured_results.json @@ -0,0 +1,270 @@ +{ + "metadata": { + "name": "pipelinerun-build", + "uid": "abhhf-12354-asjsdbjs23-3435353n" + }, + "spec": { + "params": [ + { + "name": "IMAGE", + "value": "test.io/test/image" + } + ], + "pipelineRef": { + "name": "test-pipeline" + }, + "taskRunTemplate": { + "serviceAccountName": "pipeline" + } + }, + "status": { + "startTime": "2021-03-29T09:50:00Z", + "completionTime": "2021-03-29T09:50:15Z", + "conditions": [ + { + "lastTransitionTime": "2021-03-29T09:50:15Z", + "message": "Tasks Completed: 2 (Failed: 0, Cancelled 0), Skipped: 0", + "reason": "Succeeded", + "status": "True", + "type": "Succeeded" + } + ], + "results": [ + { + "name": "image-ARTIFACT_INPUTS", + "value": { + "uri": "abcd", + "digest": "sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7" + } + }, + { + 
"name": "image-ARTIFACT_OUTPUTS", + "value": { + "uri": "hello_world", + "sha256": "827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7" + } + } + ], + "pipelineSpec": { + "params": [ + { + "description": "Image path on registry", + "name": "IMAGE", + "type": "string" + } + ], + "tasks": [ + { + "name": "git-clone", + "params": [ + { + "name": "url", + "value": "https://git.test.com" + }, + { + "name": "revision", + "value": "" + } + ], + "taskRef": { + "kind": "ClusterTask", + "name": "git-clone" + } + }, + { + "name": "build", + "params": [ + { + "name": "CHAINS-GIT_COMMIT", + "value": "$(tasks.git-clone.results.commit)" + }, + { + "name": "CHAINS-GIT_URL", + "value": "$(tasks.git-clone.results.url)" + } + ], + "taskRef": { + "kind": "ClusterTask", + "name": "build" + } + } + ] + }, + "taskRuns": { + "git-clone": { + "pipelineTaskName": "git-clone", + "status": { + "completionTime": "2021-03-29T09:50:15Z", + "conditions": [ + { + "lastTransitionTime": "2021-03-29T09:50:15Z", + "message": "All Steps have completed executing", + "reason": "Succeeded", + "status": "True", + "type": "Succeeded" + } + ], + "podName": "git-clone-pod", + "startTime": "2021-03-29T09:50:00Z", + "steps": [ + { + "container": "step-clone", + "imageID": "test.io/test/clone-image", + "name": "clone", + "terminated": { + "exitCode": 0, + "finishedAt": "2021-03-29T09:50:15Z", + "reason": "Completed", + "startedAt": "2022-05-31T19:13:27Z" + } + } + ], + "results": [ + { + "name": "commit", + "value": "abcd" + }, + { + "name": "url", + "value": "https://git.test.com" + } + ], + "taskSpec": { + "params": [ + { + "description": "Repository URL to clone from.", + "name": "url", + "type": "string" + }, + { + "default": "", + "description": "Revision to checkout. 
(branch, tag, sha, ref, etc...)", + "name": "revision", + "type": "string" + } + ], + "results": [ + { + "description": "The precise commit SHA that was fetched by this Task.", + "name": "commit" + }, + { + "description": "The precise URL that was fetched by this Task.", + "name": "url" + } + ], + "steps": [ + { + "env": [ + { + "name": "HOME", + "value": "$(params.userHome)" + }, + { + "name": "PARAM_URL", + "value": "$(params.url)" + } + ], + "image": "$(params.gitInitImage)", + "name": "clone", + "resources": {}, + "script": "git clone" + } + ] + } + } + }, + "taskrun-build": { + "pipelineTaskName": "build", + "status": { + "completionTime": "2021-03-29T09:50:15Z", + "conditions": [ + { + "lastTransitionTime": "2021-03-29T09:50:15Z", + "message": "All Steps have completed executing", + "reason": "Succeeded", + "status": "True", + "type": "Succeeded" + } + ], + "podName": "build-pod", + "startTime": "2021-03-29T09:50:00Z", + "steps": [ + { + "container": "step-build", + "imageID": "test.io/test/build-image", + "name": "build", + "terminated": { + "exitCode": 0, + "finishedAt": "2022-05-31T19:17:30Z", + "reason": "Completed", + "startedAt": "2021-03-29T09:50:00Z" + } + } + ], + "results": [ + { + "name": "IMAGE_DIGEST", + "value": "sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7" + }, + { + "name": "IMAGE_URL", + "value": "test.io/test/image\n" + } + ], + "taskSpec": { + "params": [ + { + "description": "Git CHAINS URL", + "name": "CHAINS-GIT_URL", + "type": "string" + }, + { + "description": "Git CHAINS Commit", + "name": "CHAINS-GIT_COMMIT", + "type": "string" + } + ], + "results": [ + { + "description": "Digest of the image just built.", + "name": "IMAGE_DIGEST" + }, + { + "description": "URL of the image just built.", + "name": "IMAGE_URL" + } + ], + "steps": [ + { + "command": [ + "buildah", + "build" + ], + "image": "test.io/test/build-image", + "name": "generate" + }, + { + "command": [ + "buildah", + "push" + ], + "image": 
"test.io/test/build-image", + "name": "push" + } + ] + } + } + } + }, + "provenance": { + "refSource": { + "uri": "git+https://github.com/test", + "digest": { + "sha1": "28b123" + }, + "entryPoint": "pipeline.yaml" + } + } + } +} diff --git a/pkg/chains/formats/slsa/testdata/v2alpha3/taskrun-multiple-subjects.json b/pkg/chains/formats/slsa/testdata/v2alpha3/taskrun-multiple-subjects.json new file mode 100644 index 0000000000..38ac438638 --- /dev/null +++ b/pkg/chains/formats/slsa/testdata/v2alpha3/taskrun-multiple-subjects.json @@ -0,0 +1,56 @@ +{ + "spec": { + "params": [], + "taskRef": { + "name": "test-task", + "kind": "Task" + }, + "serviceAccountName": "default" + }, + "status": { + "conditions": [ + { + "type": "Succeeded", + "status": "True", + "lastTransitionTime": "2021-03-29T09:50:15Z", + "reason": "Succeeded", + "message": "All Steps have completed executing" + } + ], + "podName": "test-pod-name", + "steps": [ + { + "name": "step1", + "container": "step-step1", + "imageID": "docker-pullable://gcr.io/test1/test1@sha256:d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6" + } + ], + "results": [ + { + "name": "IMAGES", + "value": "gcr.io/myimage1@sha256:d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6,gcr.io/myimage2@sha256:daa1a56e13c85cf164e7d9e595006649e3a04c47fe4a8261320e18a0bf3b0367" + } + ], + "taskSpec": { + "params": [], + "results": [ + { + "name": "file1_DIGEST", + "description": "Digest of a file to push." + }, + { + "name": "file1", + "description": "some assembled file" + }, + { + "name": "file2_DIGEST", + "description": "Digest of a file to push." 
+ }, + { + "name": "file2", + "description": "some assembled file" + } + ] + } + } +} diff --git a/pkg/chains/formats/slsa/testdata/v2alpha3/taskrun1.json b/pkg/chains/formats/slsa/testdata/v2alpha3/taskrun1.json new file mode 100644 index 0000000000..20ebbe1237 --- /dev/null +++ b/pkg/chains/formats/slsa/testdata/v2alpha3/taskrun1.json @@ -0,0 +1,141 @@ +{ + "metadata": { + "name": "taskrun-build", + "labels": { + "tekton.dev/pipelineTask": "build" + }, + "uid": "abhhf-12354-asjsdbjs23-3435353n" + }, + "spec": { + "params": [ + { + "name": "IMAGE", + "value": "test.io/test/image" + }, + { + "name": "CHAINS-GIT_COMMIT", + "value": "taskrun" + }, + { + "name": "CHAINS-GIT_URL", + "value": "https://git.test.com" + } + ], + "taskRef": { + "name": "build", + "kind": "Task" + }, + "serviceAccountName": "default" + }, + "status": { + "startTime": "2021-03-29T09:50:00Z", + "completionTime": "2021-03-29T09:50:15Z", + "conditions": [ + { + "type": "Succeeded", + "status": "True", + "lastTransitionTime": "2021-03-29T09:50:15Z", + "reason": "Succeeded", + "message": "All Steps have completed executing" + } + ], + "podName": "test-pod-name", + "steps": [ + { + "name": "step1", + "container": "step-step1", + "imageID": "docker-pullable://gcr.io/test1/test1@sha256:d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6" + }, + { + "name": "step2", + "container": "step-step2", + "imageID": "docker-pullable://gcr.io/test2/test2@sha256:4d6dd704ef58cb214dd826519929e92a978a57cdee43693006139c0080fd6fac" + }, + { + "name": "step3", + "container": "step-step3", + "imageID": "docker-pullable://gcr.io/test3/test3@sha256:f1a8b8549c179f41e27ff3db0fe1a1793e4b109da46586501a8343637b1d0478" + } + ], + "results": [ + { + "name": "IMAGE_DIGEST", + "value": "sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7" + }, + { + "name": "IMAGE_URL", + "value": "gcr.io/my/image" + } + ], + "taskSpec": { + "params": [ + { + "name": "IMAGE", + "type": "string" + }, + { + "name": 
"filename", + "type": "string" + }, + { + "name": "DOCKERFILE", + "type": "string" + }, + { + "name": "CONTEXT", + "type": "string" + }, + { + "name": "EXTRA_ARGS", + "type": "string" + }, + { + "name": "BUILDER_IMAGE", + "type": "string" + }, { + "name": "CHAINS-GIT_COMMIT", + "type": "string", + "default": "task" + }, { + "name": "CHAINS-GIT_URL", + "type": "string", + "default": "https://defaultgit.test.com" + } + ], + "steps": [ + { + "name": "step1" + }, + { + "name": "step2" + }, + { + "name": "step3" + } + ], + "results": [ + { + "name": "IMAGE_DIGEST", + "description": "Digest of the image just built." + }, + { + "name": "filename_DIGEST", + "description": "Digest of the file just built." + } + ] + }, + "provenance": { + "refSource": { + "uri": "git+https://github.com/test", + "digest": { + "sha1": "ab123" + }, + "entryPoint": "build.yaml" + }, + "featureFlags": { + "EnableAPIFields": "beta", + "ResultExtractionMethod": "termination-message" + } + } + } +} diff --git a/pkg/chains/formats/slsa/testdata/v2alpha3/taskrun2.json b/pkg/chains/formats/slsa/testdata/v2alpha3/taskrun2.json new file mode 100644 index 0000000000..1b83d1c2a0 --- /dev/null +++ b/pkg/chains/formats/slsa/testdata/v2alpha3/taskrun2.json @@ -0,0 +1,106 @@ +{ + "metadata": { + "name": "git-clone", + "labels": { + "tekton.dev/pipelineTask": "git-clone" + }, + "uid": "abhhf-12354-asjsdbjs23-3435353n" + }, + "spec": { + "params": [ + { + "name": "url", + "value": "https://git.test.com" + }, + { + "name": "revision", + "value": "" + } + ], + "taskRef": { + "name": "git-clone", + "kind": "Task" + }, + "serviceAccountName": "default" + }, + "status": { + "startTime": "2021-03-29T09:50:00Z", + "completionTime": "2021-03-29T09:50:15Z", + "conditions": [ + { + "type": "Succeeded", + "status": "True", + "lastTransitionTime": "2021-03-29T09:50:15Z", + "reason": "Succeeded", + "message": "All Steps have completed executing" + } + ], + "podName": "test-pod-name", + "steps": [ + { + "name": "step1", + 
"container": "step-step1", + "imageID": "docker-pullable://gcr.io/test1/test1@sha256:d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6" + } + ], + "results": [ + { + "name": "some-uri_DIGEST", + "value": "sha256:d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6" + }, + { + "name": "some-uri", + "value": "pkg:deb/debian/curl@7.50.3-1" + } + ], + "taskSpec": { + "steps": [ + { + "env": [ + { + "name": "HOME", + "value": "$(params.userHome)" + }, + { + "name": "PARAM_URL", + "value": "$(params.url)" + } + ], + "name": "step1", + "script": "git clone" + } + ], + "params": [ + { + "name": "CHAINS-GIT_COMMIT", + "type": "string", + "default": "sha:taskdefault" + }, + { + "name": "CHAINS-GIT_URL", + "type": "string", + "default": "https://git.test.com" + } + ], + "results": [ + { + "name": "some-uri_DIGEST", + "description": "Digest of a file to push." + }, + { + "name": "some-uri", + "description": "some calculated uri" + } + ] + }, + "provenance": { + "refSource": { + "uri": "git+https://github.com/catalog", + "digest": { + "sha1": "x123" + }, + "entryPoint": "git-clone.yaml" + } + } + } +} diff --git a/pkg/chains/formats/slsa/v1/intotoite6.go b/pkg/chains/formats/slsa/v1/intotoite6.go index 4ab3c8d0bf..f4c3bc8d8f 100644 --- a/pkg/chains/formats/slsa/v1/intotoite6.go +++ b/pkg/chains/formats/slsa/v1/intotoite6.go @@ -26,6 +26,8 @@ import ( "github.com/tektoncd/chains/pkg/chains/formats/slsa/v1/taskrun" "github.com/tektoncd/chains/pkg/chains/objects" "github.com/tektoncd/chains/pkg/config" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" + "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" ) const ( @@ -57,9 +59,32 @@ func (i *InTotoIte6) Wrap() bool { func (i *InTotoIte6) CreatePayload(ctx context.Context, obj interface{}) (interface{}, error) { switch v := obj.(type) { - case *objects.TaskRunObject: + case *objects.TaskRunObjectV1: + tro := obj.(*objects.TaskRunObjectV1) + trV1Beta1 := &v1beta1.TaskRun{} //nolint:staticcheck 
+ if err := trV1Beta1.ConvertFrom(ctx, tro.GetObject().(*v1.TaskRun)); err != nil { + return nil, fmt.Errorf("error converting Tekton TaskRun from version v1 to v1beta1: %s", err) + } + return taskrun.GenerateAttestation(ctx, objects.NewTaskRunObjectV1Beta1(trV1Beta1), i.slsaConfig) + case *objects.PipelineRunObjectV1: + pro := obj.(*objects.PipelineRunObjectV1) + prV1Beta1 := &v1beta1.PipelineRun{} //nolint:staticcheck + if err := prV1Beta1.ConvertFrom(ctx, pro.GetObject().(*v1.PipelineRun)); err != nil { + return nil, fmt.Errorf("error converting Tekton PipelineRun from version v1 to v1beta1: %s", err) + } + proV1Beta1 := objects.NewPipelineRunObjectV1Beta1(prV1Beta1) + trs := pro.GetTaskRuns() + for _, tr := range trs { + trV1Beta1 := &v1beta1.TaskRun{} //nolint:staticcheck + if err := trV1Beta1.ConvertFrom(ctx, tr); err != nil { + return nil, fmt.Errorf("error converting Tekton TaskRun from version v1 to v1beta1: %s", err) + } + proV1Beta1.AppendTaskRun(trV1Beta1) + } + return pipelinerun.GenerateAttestation(ctx, proV1Beta1, i.slsaConfig) + case *objects.TaskRunObjectV1Beta1: return taskrun.GenerateAttestation(ctx, v, i.slsaConfig) - case *objects.PipelineRunObject: + case *objects.PipelineRunObjectV1Beta1: return pipelinerun.GenerateAttestation(ctx, v, i.slsaConfig) default: return nil, fmt.Errorf("intoto does not support type: %s", v) diff --git a/pkg/chains/formats/slsa/v1/intotoite6_test.go b/pkg/chains/formats/slsa/v1/intotoite6_test.go index a61bf2489b..e9754161c6 100644 --- a/pkg/chains/formats/slsa/v1/intotoite6_test.go +++ b/pkg/chains/formats/slsa/v1/intotoite6_test.go @@ -44,7 +44,7 @@ var e1BuildFinished = time.Unix(1617011415, 0) func TestTaskRunCreatePayload1(t *testing.T) { ctx := logtesting.TestContextWithLogger(t) - tr, err := objectloader.TaskRunFromFile("../testdata/taskrun1.json") + tr, err := objectloader.TaskRunV1Beta1FromFile("../testdata/v1beta1/taskrun1.json") if err != nil { t.Fatal(err) } @@ -135,7 +135,7 @@ func 
TestTaskRunCreatePayload1(t *testing.T) { } i, _ := NewFormatter(cfg) - got, err := i.CreatePayload(ctx, objects.NewTaskRunObject(tr)) + got, err := i.CreatePayload(ctx, objects.NewTaskRunObjectV1Beta1(tr)) if err != nil { t.Errorf("unexpected error: %s", err.Error()) @@ -148,7 +148,7 @@ func TestTaskRunCreatePayload1(t *testing.T) { func TestPipelineRunCreatePayload(t *testing.T) { ctx := logtesting.TestContextWithLogger(t) - pr, err := objectloader.PipelineRunFromFile("../testdata/pipelinerun1.json") + pr, err := objectloader.PipelineRunV1Beta1FromFile("../testdata/v1beta1/pipelinerun1.json") if err != nil { t.Fatal(err) } @@ -347,15 +347,15 @@ func TestPipelineRunCreatePayload(t *testing.T) { }, } - tr1, err := objectloader.TaskRunFromFile("../testdata/taskrun1.json") + tr1, err := objectloader.TaskRunV1Beta1FromFile("../testdata/v1beta1/taskrun1.json") if err != nil { t.Errorf("error reading taskrun1: %s", err.Error()) } - tr2, err := objectloader.TaskRunFromFile("../testdata/taskrun2.json") + tr2, err := objectloader.TaskRunV1Beta1FromFile("../testdata/v1beta1/taskrun2.json") if err != nil { t.Errorf("error reading taskrun: %s", err.Error()) } - pro := objects.NewPipelineRunObject(pr) + pro := objects.NewPipelineRunObjectV1Beta1(pr) pro.AppendTaskRun(tr1) pro.AppendTaskRun(tr2) @@ -372,7 +372,7 @@ func TestPipelineRunCreatePayload(t *testing.T) { } func TestPipelineRunCreatePayloadChildRefs(t *testing.T) { ctx := logtesting.TestContextWithLogger(t) - pr, err := objectloader.PipelineRunFromFile("../testdata/pipelinerun-childrefs.json") + pr, err := objectloader.PipelineRunV1Beta1FromFile("../testdata/v1beta1/pipelinerun-childrefs.json") if err != nil { t.Fatal(err) } @@ -565,15 +565,15 @@ func TestPipelineRunCreatePayloadChildRefs(t *testing.T) { }, } - tr1, err := objectloader.TaskRunFromFile("../testdata/taskrun1.json") + tr1, err := objectloader.TaskRunV1Beta1FromFile("../testdata/v1beta1/taskrun1.json") if err != nil { t.Errorf("error reading taskrun1: %s", 
err.Error()) } - tr2, err := objectloader.TaskRunFromFile("../testdata/taskrun2.json") + tr2, err := objectloader.TaskRunV1Beta1FromFile("../testdata/v1beta1/taskrun2.json") if err != nil { t.Errorf("error reading taskrun: %s", err.Error()) } - pro := objects.NewPipelineRunObject(pr) + pro := objects.NewPipelineRunObjectV1Beta1(pr) pro.AppendTaskRun(tr1) pro.AppendTaskRun(tr2) @@ -590,7 +590,7 @@ func TestPipelineRunCreatePayloadChildRefs(t *testing.T) { func TestTaskRunCreatePayload2(t *testing.T) { ctx := logtesting.TestContextWithLogger(t) - tr, err := objectloader.TaskRunFromFile("../testdata/taskrun2.json") + tr, err := objectloader.TaskRunV1Beta1FromFile("../testdata/v1beta1/taskrun2.json") if err != nil { t.Fatal(err) } @@ -653,7 +653,7 @@ func TestTaskRunCreatePayload2(t *testing.T) { }, } i, _ := NewFormatter(cfg) - got, err := i.CreatePayload(ctx, objects.NewTaskRunObject(tr)) + got, err := i.CreatePayload(ctx, objects.NewTaskRunObjectV1Beta1(tr)) if err != nil { t.Errorf("unexpected error: %s", err.Error()) @@ -666,7 +666,7 @@ func TestTaskRunCreatePayload2(t *testing.T) { func TestMultipleSubjects(t *testing.T) { ctx := logtesting.TestContextWithLogger(t) - tr, err := objectloader.TaskRunFromFile("../testdata/taskrun-multiple-subjects.json") + tr, err := objectloader.TaskRunV1Beta1FromFile("../testdata/v1beta1/taskrun-multiple-subjects.json") if err != nil { t.Fatal(err) } @@ -724,7 +724,7 @@ func TestMultipleSubjects(t *testing.T) { } i, _ := NewFormatter(cfg) - got, err := i.CreatePayload(ctx, objects.NewTaskRunObject(tr)) + got, err := i.CreatePayload(ctx, objects.NewTaskRunObjectV1Beta1(tr)) if err != nil { t.Errorf("unexpected error: %s", err.Error()) } diff --git a/pkg/chains/formats/slsa/v1/pipelinerun/pipelinerun.go b/pkg/chains/formats/slsa/v1/pipelinerun/pipelinerun.go index e652111e5f..a828e1597a 100644 --- a/pkg/chains/formats/slsa/v1/pipelinerun/pipelinerun.go +++ b/pkg/chains/formats/slsa/v1/pipelinerun/pipelinerun.go @@ -22,7 +22,7 @@ 
import ( slsa "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v0.2" "github.com/tektoncd/chains/pkg/chains/formats/slsa/attest" "github.com/tektoncd/chains/pkg/chains/formats/slsa/extract" - "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/material" + materialv1beta1 "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/material/v1beta1" "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/slsaconfig" "github.com/tektoncd/chains/pkg/chains/objects" "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" @@ -47,10 +47,10 @@ type TaskAttestation struct { Results []v1beta1.TaskRunResult `json:"results,omitempty"` } -func GenerateAttestation(ctx context.Context, pro *objects.PipelineRunObject, slsaConfig *slsaconfig.SlsaConfig) (interface{}, error) { +func GenerateAttestation(ctx context.Context, pro *objects.PipelineRunObjectV1Beta1, slsaConfig *slsaconfig.SlsaConfig) (interface{}, error) { subjects := extract.SubjectDigests(ctx, pro, slsaConfig) - mat, err := material.PipelineMaterials(ctx, pro, slsaConfig) + mat, err := materialv1beta1.PipelineMaterials(ctx, pro, slsaConfig) if err != nil { return nil, err } @@ -74,15 +74,15 @@ func GenerateAttestation(ctx context.Context, pro *objects.PipelineRunObject, sl return att, nil } -func invocation(pro *objects.PipelineRunObject) slsa.ProvenanceInvocation { +func invocation(pro *objects.PipelineRunObjectV1Beta1) slsa.ProvenanceInvocation { var paramSpecs []v1beta1.ParamSpec if ps := pro.Status.PipelineSpec; ps != nil { paramSpecs = ps.Params } - return attest.Invocation(pro, pro.Spec.Params, paramSpecs) + return attest.InvocationV1Beta1(pro, pro.Spec.Params, paramSpecs) } -func buildConfig(ctx context.Context, pro *objects.PipelineRunObject) BuildConfig { +func buildConfig(ctx context.Context, pro *objects.PipelineRunObjectV1Beta1) BuildConfig { logger := logging.FromContext(ctx) tasks := []TaskAttestation{} @@ -104,7 +104,7 @@ func buildConfig(ctx context.Context, pro 
*objects.PipelineRunObject) BuildConfi steps := []attest.StepAttestation{} for i, stepState := range tr.Status.Steps { step := tr.Status.TaskSpec.Steps[i] - steps = append(steps, attest.Step(&step, &stepState)) + steps = append(steps, attest.StepV1Beta1(&step, &stepState)) } after := t.RunAfter @@ -146,7 +146,7 @@ func buildConfig(ctx context.Context, pro *objects.PipelineRunObject) BuildConfi FinishedOn: tr.Status.CompletionTime.Time.UTC(), Status: getStatus(tr.Status.Conditions), Steps: steps, - Invocation: attest.Invocation(tr, params, paramSpecs), + Invocation: attest.InvocationV1Beta1(tr, params, paramSpecs), Results: tr.Status.TaskRunResults, } @@ -162,7 +162,7 @@ func buildConfig(ctx context.Context, pro *objects.PipelineRunObject) BuildConfi return BuildConfig{Tasks: tasks} } -func metadata(pro *objects.PipelineRunObject) *slsa.ProvenanceMetadata { +func metadata(pro *objects.PipelineRunObjectV1Beta1) *slsa.ProvenanceMetadata { m := &slsa.ProvenanceMetadata{} if pro.Status.StartTime != nil { utc := pro.Status.StartTime.Time.UTC() diff --git a/pkg/chains/formats/slsa/v1/pipelinerun/provenance_test.go b/pkg/chains/formats/slsa/v1/pipelinerun/provenance_test.go index 35137767c6..6a10591617 100644 --- a/pkg/chains/formats/slsa/v1/pipelinerun/provenance_test.go +++ b/pkg/chains/formats/slsa/v1/pipelinerun/provenance_test.go @@ -36,31 +36,31 @@ import ( ) // Global pro is only read from, never modified -var pro *objects.PipelineRunObject -var proStructuredResults *objects.PipelineRunObject +var pro *objects.PipelineRunObjectV1Beta1 +var proStructuredResults *objects.PipelineRunObjectV1Beta1 var e1BuildStart = time.Unix(1617011400, 0) var e1BuildFinished = time.Unix(1617011415, 0) func init() { - pro = createPro("../../testdata/pipelinerun1.json") - proStructuredResults = createPro("../../testdata/pipelinerun_structured_results.json") + pro = createPro("../../testdata/v1beta1/pipelinerun1.json") + proStructuredResults = 
createPro("../../testdata/v1beta1/pipelinerun_structured_results.json") } -func createPro(path string) *objects.PipelineRunObject { +func createPro(path string) *objects.PipelineRunObjectV1Beta1 { var err error - pr, err := objectloader.PipelineRunFromFile(path) + pr, err := objectloader.PipelineRunV1Beta1FromFile(path) if err != nil { panic(err) } - tr1, err := objectloader.TaskRunFromFile("../../testdata/taskrun1.json") + tr1, err := objectloader.TaskRunV1Beta1FromFile("../../testdata/v1beta1/taskrun1.json") if err != nil { panic(err) } - tr2, err := objectloader.TaskRunFromFile("../../testdata/taskrun2.json") + tr2, err := objectloader.TaskRunV1Beta1FromFile("../../testdata/v1beta1/taskrun2.json") if err != nil { panic(err) } - p := objects.NewPipelineRunObject(pr) + p := objects.NewPipelineRunObjectV1Beta1(pr) p.AppendTaskRun(tr1) p.AppendTaskRun(tr2) return p @@ -420,7 +420,7 @@ func TestBuildConfigTaskOrder(t *testing.T) { WhenExpressions: tt.whenExpressions, RunAfter: tt.runAfter, } - pro := createPro("../../testdata/pipelinerun1.json") + pro := createPro("../../testdata/v1beta1/pipelinerun1.json") pro.Status.PipelineSpec.Tasks[BUILD_TASK] = pt ctx := logtesting.TestContextWithLogger(t) got := buildConfig(ctx, pro) @@ -461,7 +461,7 @@ func TestMetadataInTimeZone(t *testing.T) { Reproducible: false, } - zoned := objects.NewPipelineRunObject(pro.DeepCopy()) + zoned := objects.NewPipelineRunObjectV1Beta1(pro.DeepCopy()) tz := time.FixedZone("Test Time", int((12 * time.Hour).Seconds())) zoned.Status.StartTime.Time = zoned.Status.StartTime.Time.In(tz) zoned.Status.CompletionTime.Time = zoned.Status.CompletionTime.Time.In(tz) diff --git a/pkg/chains/formats/slsa/v1/taskrun/buildconfig.go b/pkg/chains/formats/slsa/v1/taskrun/buildconfig.go index 39f513d4c9..e9ead2695a 100644 --- a/pkg/chains/formats/slsa/v1/taskrun/buildconfig.go +++ b/pkg/chains/formats/slsa/v1/taskrun/buildconfig.go @@ -36,16 +36,16 @@ type Step struct { Annotations map[string]string 
`json:"annotations"` } -func buildConfig(tro *objects.TaskRunObject) BuildConfig { +func buildConfig(tro *objects.TaskRunObjectV1Beta1) BuildConfig { attestations := []attest.StepAttestation{} for _, stepState := range tro.Status.Steps { step := stepFromTaskRun(stepState.Name, tro) - attestations = append(attestations, attest.Step(step, &stepState)) + attestations = append(attestations, attest.StepV1Beta1(step, &stepState)) } return BuildConfig{Steps: attestations} } -func stepFromTaskRun(name string, tro *objects.TaskRunObject) *v1beta1.Step { +func stepFromTaskRun(name string, tro *objects.TaskRunObjectV1Beta1) *v1beta1.Step { if tro.Status.TaskSpec != nil { for _, s := range tro.Status.TaskSpec.Steps { if s.Name == name { diff --git a/pkg/chains/formats/slsa/v1/taskrun/buildconfig_test.go b/pkg/chains/formats/slsa/v1/taskrun/buildconfig_test.go index 6ae7d9451b..8cb616995f 100644 --- a/pkg/chains/formats/slsa/v1/taskrun/buildconfig_test.go +++ b/pkg/chains/formats/slsa/v1/taskrun/buildconfig_test.go @@ -59,7 +59,7 @@ status: terminated: containerID: containerd://e2fadd134495619cccd1c48d8a9df2aed2afd64e6c62ea55135f90796102231e` - var taskRun *v1beta1.TaskRun + var taskRun *v1beta1.TaskRun //nolint:staticcheck if err := yaml.Unmarshal([]byte(taskrun), &taskRun); err != nil { t.Fatal(err) } @@ -84,7 +84,7 @@ status: }, } - got := buildConfig(objects.NewTaskRunObject(taskRun)) + got := buildConfig(objects.NewTaskRunObjectV1Beta1(taskRun)) if !reflect.DeepEqual(expected, got) { if d := cmp.Diff(expected, got); d != "" { t.Log(d) diff --git a/pkg/chains/formats/slsa/v1/taskrun/provenance_test.go b/pkg/chains/formats/slsa/v1/taskrun/provenance_test.go index 9a5ebe0d5a..f36e5ae360 100644 --- a/pkg/chains/formats/slsa/v1/taskrun/provenance_test.go +++ b/pkg/chains/formats/slsa/v1/taskrun/provenance_test.go @@ -34,7 +34,7 @@ import ( "github.com/tektoncd/chains/pkg/chains/objects" "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" 
"github.com/tektoncd/pipeline/pkg/apis/resource/v1alpha1" - v1 "k8s.io/apimachinery/pkg/apis/meta/v1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" logtesting "knative.dev/pkg/logging/testing" "sigs.k8s.io/yaml" ) @@ -48,8 +48,8 @@ const ( ) func TestMetadata(t *testing.T) { - tr := &v1beta1.TaskRun{ - ObjectMeta: v1.ObjectMeta{ + tr := &v1beta1.TaskRun{ //nolint:staticcheck + ObjectMeta: metav1.ObjectMeta{ Name: "my-taskrun", Namespace: "my-namespace", Annotations: map[string]string{ @@ -58,8 +58,8 @@ func TestMetadata(t *testing.T) { }, Status: v1beta1.TaskRunStatus{ TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - StartTime: &v1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 12, time.UTC)}, - CompletionTime: &v1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, time.UTC)}, + StartTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 12, time.UTC)}, + CompletionTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, time.UTC)}, }, }, } @@ -69,7 +69,7 @@ func TestMetadata(t *testing.T) { BuildStartedOn: &start, BuildFinishedOn: &end, } - got := Metadata(objects.NewTaskRunObject(tr)) + got := Metadata(objects.NewTaskRunObjectV1Beta1(tr)) if !reflect.DeepEqual(expected, got) { t.Fatalf("expected %v got %v", expected, got) } @@ -77,8 +77,8 @@ func TestMetadata(t *testing.T) { func TestMetadataInTimeZone(t *testing.T) { tz := time.FixedZone("Test Time", int((12 * time.Hour).Seconds())) - tr := &v1beta1.TaskRun{ - ObjectMeta: v1.ObjectMeta{ + tr := &v1beta1.TaskRun{ //nolint:staticcheck + ObjectMeta: metav1.ObjectMeta{ Name: "my-taskrun", Namespace: "my-namespace", Annotations: map[string]string{ @@ -87,8 +87,8 @@ func TestMetadataInTimeZone(t *testing.T) { }, Status: v1beta1.TaskRunStatus{ TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - StartTime: &v1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 12, tz)}, - CompletionTime: &v1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, 
tz)}, + StartTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 12, tz)}, + CompletionTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, tz)}, }, }, } @@ -98,7 +98,7 @@ func TestMetadataInTimeZone(t *testing.T) { BuildStartedOn: &start, BuildFinishedOn: &end, } - got := Metadata(objects.NewTaskRunObject(tr)) + got := Metadata(objects.NewTaskRunObjectV1Beta1(tr)) if !reflect.DeepEqual(expected, got) { t.Fatalf("expected %v got %v", expected, got) } @@ -161,7 +161,7 @@ status: default: [] ` - var taskRun *v1beta1.TaskRun + var taskRun *v1beta1.TaskRun //nolint:staticcheck if err := yaml.Unmarshal([]byte(taskrun), &taskRun); err != nil { t.Fatal(err) } @@ -189,7 +189,7 @@ status: }, } - got := invocation(objects.NewTaskRunObject(taskRun)) + got := invocation(objects.NewTaskRunObjectV1Beta1(taskRun)) if !reflect.DeepEqual(expected, got) { if d := cmp.Diff(expected, got); d != "" { t.Log(d) @@ -199,18 +199,18 @@ status: } func TestGetSubjectDigests(t *testing.T) { - tr := &v1beta1.TaskRun{ + tr := &v1beta1.TaskRun{ //nolint:staticcheck Spec: v1beta1.TaskRunSpec{ - Resources: &v1beta1.TaskRunResources{ - Outputs: []v1beta1.TaskResourceBinding{ + Resources: &v1beta1.TaskRunResources{ //nolint:staticcheck + Outputs: []v1beta1.TaskResourceBinding{ //nolint:staticcheck { - PipelineResourceBinding: v1beta1.PipelineResourceBinding{ + PipelineResourceBinding: v1beta1.PipelineResourceBinding{ //nolint:staticcheck Name: "nil-check", }, }, { - PipelineResourceBinding: v1beta1.PipelineResourceBinding{ + PipelineResourceBinding: v1beta1.PipelineResourceBinding{ //nolint:staticcheck Name: "built-image", - ResourceSpec: &v1alpha1.PipelineResourceSpec{ + ResourceSpec: &v1alpha1.PipelineResourceSpec{ //nolint:staticcheck Type: backport.PipelineResourceTypeImage, }, }, @@ -333,7 +333,7 @@ func TestGetSubjectDigests(t *testing.T) { }, } ctx := logtesting.TestContextWithLogger(t) - tro := objects.NewTaskRunObject(tr) + tro := 
objects.NewTaskRunObjectV1Beta1(tr) got := extract.SubjectDigests(ctx, tro, nil) if d := cmp.Diff(want, got, compare.SubjectCompareOption()); d != "" { diff --git a/pkg/chains/formats/slsa/v1/taskrun/taskrun.go b/pkg/chains/formats/slsa/v1/taskrun/taskrun.go index 36f185a3ea..4e9fee3ca9 100644 --- a/pkg/chains/formats/slsa/v1/taskrun/taskrun.go +++ b/pkg/chains/formats/slsa/v1/taskrun/taskrun.go @@ -21,16 +21,16 @@ import ( slsa "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v0.2" "github.com/tektoncd/chains/pkg/chains/formats/slsa/attest" "github.com/tektoncd/chains/pkg/chains/formats/slsa/extract" - "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/material" + materialv1beta1 "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/material/v1beta1" "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/slsaconfig" "github.com/tektoncd/chains/pkg/chains/objects" "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" ) -func GenerateAttestation(ctx context.Context, tro *objects.TaskRunObject, slsaConfig *slsaconfig.SlsaConfig) (interface{}, error) { +func GenerateAttestation(ctx context.Context, tro *objects.TaskRunObjectV1Beta1, slsaConfig *slsaconfig.SlsaConfig) (interface{}, error) { subjects := extract.SubjectDigests(ctx, tro, slsaConfig) - mat, err := material.TaskMaterials(ctx, tro) + mat, err := materialv1beta1.TaskMaterials(ctx, tro) if err != nil { return nil, err } @@ -57,17 +57,17 @@ func GenerateAttestation(ctx context.Context, tro *objects.TaskRunObject, slsaCo // invocation describes the event that kicked off the build // we currently don't set ConfigSource because we don't know // which material the Task definition came from -func invocation(tro *objects.TaskRunObject) slsa.ProvenanceInvocation { +func invocation(tro *objects.TaskRunObjectV1Beta1) slsa.ProvenanceInvocation { var paramSpecs []v1beta1.ParamSpec if ts := tro.Status.TaskSpec; ts != nil { paramSpecs = ts.Params } - return attest.Invocation(tro, 
tro.Spec.Params, paramSpecs) + return attest.InvocationV1Beta1(tro, tro.Spec.Params, paramSpecs) } // Metadata adds taskrun's start time, completion time and reproducibility labels // to the metadata section of the generated provenance. -func Metadata(tro *objects.TaskRunObject) *slsa.ProvenanceMetadata { +func Metadata(tro *objects.TaskRunObjectV1Beta1) *slsa.ProvenanceMetadata { m := &slsa.ProvenanceMetadata{} if tro.Status.StartTime != nil { utc := tro.Status.StartTime.Time.UTC() diff --git a/pkg/chains/formats/slsa/v2alpha1/README.md b/pkg/chains/formats/slsa/v2alpha1/README.md index 6ac8ec267d..fe0060aa26 100644 --- a/pkg/chains/formats/slsa/v2alpha1/README.md +++ b/pkg/chains/formats/slsa/v2alpha1/README.md @@ -2,7 +2,7 @@ When running the following taskrun with bundle resolver referencing the [remote task](https://github.com/tektoncd/catalog/tree/main/task/git-clone/0.9): ```yaml -apiVersion: tekton.dev/v1beta1 +apiVersion: tekton.dev/v1 kind: TaskRun metadata: generateName: bundles-resolver- @@ -44,7 +44,7 @@ The following output was generated. Notice the following below: "builder": { "id": "https://tekton.dev/chains/v2" }, - "buildType": "https://chains.tekton.dev/format/slsa/v2alpha1/type/tekton.dev/v1beta1/TaskRun", + "buildType": "https://chains.tekton.dev/format/slsa/v2alpha1/type/tekton.dev/v1/TaskRun", "invocation": { "configSource": { "uri": "gcr.io/tekton-releases/catalog/upstream/git-clone", @@ -102,7 +102,7 @@ The following output was generated. 
Notice the following below: "EnableProvenanceInStatus": true, "ResultExtractionMethod": "termination-message", "MaxResultSize": 4096, - "CustomTaskVersion": "v1beta1" + "CustomTaskVersion": "v1" } } }, diff --git a/pkg/chains/formats/slsa/v2alpha1/slsav2.go b/pkg/chains/formats/slsa/v2alpha1/slsav2.go index bb7e1b68ee..30bd808971 100644 --- a/pkg/chains/formats/slsa/v2alpha1/slsav2.go +++ b/pkg/chains/formats/slsa/v2alpha1/slsav2.go @@ -24,6 +24,8 @@ import ( "github.com/tektoncd/chains/pkg/chains/formats/slsa/v2alpha1/taskrun" "github.com/tektoncd/chains/pkg/chains/objects" "github.com/tektoncd/chains/pkg/config" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" + "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" ) const ( @@ -50,8 +52,16 @@ func (s *Slsa) Wrap() bool { func (s *Slsa) CreatePayload(ctx context.Context, obj interface{}) (interface{}, error) { switch v := obj.(type) { - case *objects.TaskRunObject: + case *objects.TaskRunObjectV1: + tro := obj.(*objects.TaskRunObjectV1) + trV1Beta1 := &v1beta1.TaskRun{} //nolint:staticcheck + if err := trV1Beta1.ConvertFrom(ctx, tro.GetObject().(*v1.TaskRun)); err != nil { + return nil, fmt.Errorf("error converting Tekton TaskRun from version v1 to v1beta1: %s", err) + } + return taskrun.GenerateAttestation(ctx, s.builderID, s.Type(), objects.NewTaskRunObjectV1Beta1(trV1Beta1)) + case *objects.TaskRunObjectV1Beta1: return taskrun.GenerateAttestation(ctx, s.builderID, s.Type(), v) + default: return nil, fmt.Errorf("intoto does not support type: %s", v) } diff --git a/pkg/chains/formats/slsa/v2alpha1/slsav2_test.go b/pkg/chains/formats/slsa/v2alpha1/slsav2_test.go index c06e126ca7..ef18d4123f 100644 --- a/pkg/chains/formats/slsa/v2alpha1/slsav2_test.go +++ b/pkg/chains/formats/slsa/v2alpha1/slsav2_test.go @@ -47,7 +47,7 @@ var ( func TestTaskRunCreatePayload1(t *testing.T) { ctx := logtesting.TestContextWithLogger(t) - tr, err := objectloader.TaskRunFromFile("../testdata/taskrun1.json") + tr, err := 
objectloader.TaskRunV1Beta1FromFile("../testdata/v1beta1/taskrun1.json") if err != nil { t.Fatal(err) } @@ -117,7 +117,7 @@ func TestTaskRunCreatePayload1(t *testing.T) { }, }, "PodTemplate": (*pod.Template)(nil), - "Resources": (*v1beta1.TaskRunResources)(nil), + "Resources": (*v1beta1.TaskRunResources)(nil), //nolint:staticcheck "Retries": 0, "ServiceAccountName": "default", "SidecarOverrides": []v1beta1.TaskRunSidecarOverride(nil), @@ -162,7 +162,7 @@ func TestTaskRunCreatePayload1(t *testing.T) { } i, _ := NewFormatter(cfg) - got, err := i.CreatePayload(ctx, objects.NewTaskRunObject(tr)) + got, err := i.CreatePayload(ctx, objects.NewTaskRunObjectV1Beta1(tr)) if err != nil { t.Errorf("unexpected error: %s", err.Error()) @@ -174,7 +174,7 @@ func TestTaskRunCreatePayload1(t *testing.T) { func TestTaskRunCreatePayload2(t *testing.T) { ctx := logtesting.TestContextWithLogger(t) - tr, err := objectloader.TaskRunFromFile("../testdata/taskrun2.json") + tr, err := objectloader.TaskRunV1Beta1FromFile("../testdata/v1beta1/taskrun2.json") if err != nil { t.Fatal(err) } @@ -225,7 +225,7 @@ func TestTaskRunCreatePayload2(t *testing.T) { {Name: "revision", Value: v1beta1.ParamValue{Type: "string"}}, }, "PodTemplate": (*pod.Template)(nil), - "Resources": (*v1beta1.TaskRunResources)(nil), + "Resources": (*v1beta1.TaskRunResources)(nil), //nolint:staticcheck "Retries": 0, "ServiceAccountName": "default", "SidecarOverrides": []v1beta1.TaskRunSidecarOverride(nil), @@ -263,7 +263,7 @@ func TestTaskRunCreatePayload2(t *testing.T) { }, } i, _ := NewFormatter(cfg) - got, err := i.CreatePayload(ctx, objects.NewTaskRunObject(tr)) + got, err := i.CreatePayload(ctx, objects.NewTaskRunObjectV1Beta1(tr)) if err != nil { t.Errorf("unexpected error: %s", err.Error()) @@ -276,7 +276,7 @@ func TestTaskRunCreatePayload2(t *testing.T) { func TestMultipleSubjects(t *testing.T) { ctx := logtesting.TestContextWithLogger(t) - tr, err := 
objectloader.TaskRunFromFile("../testdata/taskrun-multiple-subjects.json") + tr, err := objectloader.TaskRunV1Beta1FromFile("../testdata/v1beta1/taskrun-multiple-subjects.json") if err != nil { t.Fatal(err) } @@ -326,7 +326,7 @@ func TestMultipleSubjects(t *testing.T) { "Debug": (*v1beta1.TaskRunDebug)(nil), "Params": v1beta1.Params{}, "PodTemplate": (*pod.Template)(nil), - "Resources": (*v1beta1.TaskRunResources)(nil), + "Resources": (*v1beta1.TaskRunResources)(nil), //nolint:staticcheck "Retries": 0, "ServiceAccountName": "default", "SidecarOverrides": []v1beta1.TaskRunSidecarOverride(nil), @@ -361,7 +361,7 @@ func TestMultipleSubjects(t *testing.T) { } i, _ := NewFormatter(cfg) - got, err := i.CreatePayload(ctx, objects.NewTaskRunObject(tr)) + got, err := i.CreatePayload(ctx, objects.NewTaskRunObjectV1Beta1(tr)) if err != nil { t.Errorf("unexpected error: %s", err.Error()) } diff --git a/pkg/chains/formats/slsa/v2alpha1/taskrun/taskrun.go b/pkg/chains/formats/slsa/v2alpha1/taskrun/taskrun.go index f9c01dbc1a..fe63e4f019 100644 --- a/pkg/chains/formats/slsa/v2alpha1/taskrun/taskrun.go +++ b/pkg/chains/formats/slsa/v2alpha1/taskrun/taskrun.go @@ -22,7 +22,7 @@ import ( "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common" slsa "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v0.2" "github.com/tektoncd/chains/pkg/chains/formats/slsa/extract" - "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/material" + materialv1beta1 "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/material/v1beta1" slsav1 "github.com/tektoncd/chains/pkg/chains/formats/slsa/v1/taskrun" "github.com/tektoncd/chains/pkg/chains/objects" "github.com/tektoncd/chains/pkg/config" @@ -37,9 +37,9 @@ type BuildConfig struct { TaskRunResults []v1beta1.TaskRunResult `json:"taskRunResults"` } -func GenerateAttestation(ctx context.Context, builderID string, payloadType config.PayloadType, tro *objects.TaskRunObject) (interface{}, error) { +func 
GenerateAttestation(ctx context.Context, builderID string, payloadType config.PayloadType, tro *objects.TaskRunObjectV1Beta1) (interface{}, error) { subjects := extract.SubjectDigests(ctx, tro, nil) - mat, err := material.TaskMaterials(ctx, tro) + mat, err := materialv1beta1.TaskMaterials(ctx, tro) if err != nil { return nil, err } @@ -63,7 +63,7 @@ func GenerateAttestation(ctx context.Context, builderID string, payloadType conf return att, nil } -func metadata(tro *objects.TaskRunObject) *slsa.ProvenanceMetadata { +func metadata(tro *objects.TaskRunObjectV1Beta1) *slsa.ProvenanceMetadata { m := slsav1.Metadata(tro) m.Completeness = slsa.ProvenanceComplete{ Parameters: true, @@ -74,7 +74,7 @@ func metadata(tro *objects.TaskRunObject) *slsa.ProvenanceMetadata { // invocation describes the event that kicked off the build // we currently don't set ConfigSource because we don't know // which material the Task definition came from -func invocation(tro *objects.TaskRunObject) slsa.ProvenanceInvocation { +func invocation(tro *objects.TaskRunObjectV1Beta1) slsa.ProvenanceInvocation { i := slsa.ProvenanceInvocation{} if p := tro.Status.Provenance; p != nil && p.RefSource != nil { i.ConfigSource = slsa.ConfigSource{ @@ -94,7 +94,7 @@ func invocation(tro *objects.TaskRunObject) slsa.ProvenanceInvocation { // invocationEnv adds the tekton feature flags that were enabled // for the taskrun. In the future, we can populate versioning information // here as well. 
-func invocationEnv(tro *objects.TaskRunObject) map[string]any { +func invocationEnv(tro *objects.TaskRunObjectV1Beta1) map[string]any { var iEnv map[string]any = make(map[string]any) if tro.Status.Provenance != nil && tro.Status.Provenance.FeatureFlags != nil { iEnv["tekton-pipelines-feature-flags"] = tro.Status.Provenance.FeatureFlags @@ -104,7 +104,7 @@ func invocationEnv(tro *objects.TaskRunObject) map[string]any { // invocationParams adds all fields from the task run object except // TaskRef or TaskSpec since they are in the ConfigSource or buildConfig. -func invocationParams(tro *objects.TaskRunObject) map[string]any { +func invocationParams(tro *objects.TaskRunObjectV1Beta1) map[string]any { var iParams map[string]any = make(map[string]any) skipFields := sets.NewString("TaskRef", "TaskSpec") v := reflect.ValueOf(tro.Spec) diff --git a/pkg/chains/formats/slsa/v2alpha1/taskrun/taskrun_test.go b/pkg/chains/formats/slsa/v2alpha1/taskrun/taskrun_test.go index 3327ce0222..2aff0c355d 100644 --- a/pkg/chains/formats/slsa/v2alpha1/taskrun/taskrun_test.go +++ b/pkg/chains/formats/slsa/v2alpha1/taskrun/taskrun_test.go @@ -53,7 +53,7 @@ const ( ) func TestMetadata(t *testing.T) { - tr := &v1beta1.TaskRun{ + tr := &v1beta1.TaskRun{ //nolint:staticcheck ObjectMeta: v1.ObjectMeta{ Name: "my-taskrun", Namespace: "my-namespace", @@ -74,7 +74,7 @@ func TestMetadata(t *testing.T) { BuildStartedOn: &start, BuildFinishedOn: &end, } - got := slsav1.Metadata(objects.NewTaskRunObject(tr)) + got := slsav1.Metadata(objects.NewTaskRunObjectV1Beta1(tr)) if !reflect.DeepEqual(expected, got) { t.Fatalf("expected %v got %v", expected, got) } @@ -82,7 +82,7 @@ func TestMetadata(t *testing.T) { func TestMetadataInTimeZone(t *testing.T) { tz := time.FixedZone("Test Time", int((12 * time.Hour).Seconds())) - tr := &v1beta1.TaskRun{ + tr := &v1beta1.TaskRun{ //nolint:staticcheck ObjectMeta: v1.ObjectMeta{ Name: "my-taskrun", Namespace: "my-namespace", @@ -103,7 +103,7 @@ func 
TestMetadataInTimeZone(t *testing.T) { BuildStartedOn: &start, BuildFinishedOn: &end, } - got := slsav1.Metadata(objects.NewTaskRunObject(tr)) + got := slsav1.Metadata(objects.NewTaskRunObjectV1Beta1(tr)) if !reflect.DeepEqual(expected, got) { t.Fatalf("expected %v got %v", expected, got) } @@ -167,7 +167,7 @@ status: RunningInEnvWithInjectedSidecars: true ` - var taskRun *v1beta1.TaskRun + var taskRun *v1beta1.TaskRun //nolint:staticcheck if err := yaml.Unmarshal([]byte(taskrun), &taskRun); err != nil { t.Fatal(err) } @@ -192,7 +192,7 @@ status: "ComputeResources": (*corev1.ResourceRequirements)(nil), "Debug": (*v1beta1.TaskRunDebug)(nil), "PodTemplate": (*pod.Template)(nil), - "Resources": (*v1beta1.TaskRunResources)(nil), + "Resources": (*v1beta1.TaskRunResources)(nil), //nolint:staticcheck "Retries": 0, "ServiceAccountName": "", "SidecarOverrides": []v1beta1.TaskRunSidecarOverride(nil), @@ -214,7 +214,7 @@ status: }, }, } - got := invocation(objects.NewTaskRunObject(taskRun)) + got := invocation(objects.NewTaskRunObjectV1Beta1(taskRun)) if !reflect.DeepEqual(expected, got) { if d := cmp.Diff(expected, got); d != "" { t.Log(d) @@ -224,18 +224,18 @@ status: } func TestGetSubjectDigests(t *testing.T) { - tr := &v1beta1.TaskRun{ + tr := &v1beta1.TaskRun{ //nolint:staticcheck Spec: v1beta1.TaskRunSpec{ - Resources: &v1beta1.TaskRunResources{ - Outputs: []v1beta1.TaskResourceBinding{ + Resources: &v1beta1.TaskRunResources{ //nolint:staticcheck + Outputs: []v1beta1.TaskResourceBinding{ //nolint:staticcheck { - PipelineResourceBinding: v1beta1.PipelineResourceBinding{ + PipelineResourceBinding: v1beta1.PipelineResourceBinding{ //nolint:staticcheck Name: "nil-check", }, }, { - PipelineResourceBinding: v1beta1.PipelineResourceBinding{ + PipelineResourceBinding: v1beta1.PipelineResourceBinding{ //nolint:staticcheck Name: "built-image", - ResourceSpec: &v1alpha1.PipelineResourceSpec{ + ResourceSpec: &v1alpha1.PipelineResourceSpec{ //nolint:staticcheck Type: 
backport.PipelineResourceTypeImage, }, }, @@ -357,7 +357,7 @@ func TestGetSubjectDigests(t *testing.T) { }, }, } - tro := objects.NewTaskRunObject(tr) + tro := objects.NewTaskRunObjectV1Beta1(tr) ctx := logtesting.TestContextWithLogger(t) got := extract.SubjectDigests(ctx, tro, nil) diff --git a/pkg/chains/formats/slsa/v2alpha2/internal/external_parameters/external_parameters.go b/pkg/chains/formats/slsa/v2alpha2/internal/external_parameters/external_parameters.go index 27185766a4..03f373c9dd 100644 --- a/pkg/chains/formats/slsa/v2alpha2/internal/external_parameters/external_parameters.go +++ b/pkg/chains/formats/slsa/v2alpha2/internal/external_parameters/external_parameters.go @@ -20,25 +20,24 @@ import ( "fmt" "github.com/tektoncd/chains/pkg/chains/objects" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" ) -func buildConfigSource(provenance *v1beta1.Provenance) map[string]string { +func buildConfigSource(provenance objects.GenericProvenance) map[string]string { ref := "" - for alg, hex := range provenance.RefSource.Digest { + for alg, hex := range provenance.GetRefSourceDigest() { ref = fmt.Sprintf("%s:%s", alg, hex) break } buildConfigSource := map[string]string{ "ref": ref, - "repository": provenance.RefSource.URI, - "path": provenance.RefSource.EntryPoint, + "repository": provenance.GetRefSourceURI(), + "path": provenance.GetRefSourceEntrypoint(), } return buildConfigSource } // PipelineRun adds the pipeline run spec and provenance if available -func PipelineRun(pro *objects.PipelineRunObject) map[string]any { +func PipelineRun(pro *objects.PipelineRunObjectV1Beta1) map[string]any { externalParams := make(map[string]any) if provenance := pro.GetRemoteProvenance(); provenance != nil { @@ -49,7 +48,7 @@ func PipelineRun(pro *objects.PipelineRunObject) map[string]any { } // TaskRun adds the task run spec and provenance if available -func TaskRun(tro *objects.TaskRunObject) map[string]any { +func TaskRun(tro *objects.TaskRunObjectV1Beta1) map[string]any 
{ externalParams := make(map[string]any) if provenance := tro.GetRemoteProvenance(); provenance != nil { diff --git a/pkg/chains/formats/slsa/v2alpha2/internal/external_parameters/external_parameters_test.go b/pkg/chains/formats/slsa/v2alpha2/internal/external_parameters/external_parameters_test.go index a389224676..14a56987b7 100644 --- a/pkg/chains/formats/slsa/v2alpha2/internal/external_parameters/external_parameters_test.go +++ b/pkg/chains/formats/slsa/v2alpha2/internal/external_parameters/external_parameters_test.go @@ -28,13 +28,14 @@ import ( func TestBuildConfigSource(t *testing.T) { digest := map[string]string{"alg1": "hex1", "alg2": "hex2"} - provenance := &v1beta1.Provenance{ - RefSource: &v1beta1.RefSource{ - Digest: digest, - URI: "https://tekton.com", - EntryPoint: "/path/to/entry", - }, - } + provenance := &objects.ProvenanceV1Beta1{ + Provenance: &v1beta1.Provenance{ + RefSource: &v1beta1.RefSource{ + Digest: digest, + URI: "https://tekton.com", + EntryPoint: "/path/to/entry", + }, + }} want := map[string]string{ "repository": "https://tekton.com", @@ -65,20 +66,20 @@ func TestBuildConfigSource(t *testing.T) { } } -func createPro(path string) *objects.PipelineRunObject { - pr, err := objectloader.PipelineRunFromFile(path) +func createPro(path string) *objects.PipelineRunObjectV1Beta1 { + pr, err := objectloader.PipelineRunV1Beta1FromFile(path) if err != nil { panic(err) } - tr1, err := objectloader.TaskRunFromFile("../../../testdata/v2alpha2/taskrun1.json") + tr1, err := objectloader.TaskRunV1Beta1FromFile("../../../testdata/v2alpha2/taskrun1.json") if err != nil { panic(err) } - tr2, err := objectloader.TaskRunFromFile("../../../testdata/v2alpha2/taskrun2.json") + tr2, err := objectloader.TaskRunV1Beta1FromFile("../../../testdata/v2alpha2/taskrun2.json") if err != nil { panic(err) } - p := objects.NewPipelineRunObject(pr) + p := objects.NewPipelineRunObjectV1Beta1(pr) p.AppendTaskRun(tr1) p.AppendTaskRun(tr2) return p @@ -108,11 +109,11 @@ func 
TestPipelineRun(t *testing.T) { } func TestTaskRun(t *testing.T) { - tr, err := objectloader.TaskRunFromFile("../../../testdata/v2alpha2/taskrun1.json") + tr, err := objectloader.TaskRunV1Beta1FromFile("../../../testdata/v2alpha2/taskrun1.json") if err != nil { t.Fatal(err) } - got := TaskRun(objects.NewTaskRunObject(tr)) + got := TaskRun(objects.NewTaskRunObjectV1Beta1(tr)) want := map[string]any{ "runSpec": v1beta1.TaskRunSpec{ diff --git a/pkg/chains/formats/slsa/v2alpha2/internal/internal_parameters/internal_parameters.go b/pkg/chains/formats/slsa/v2alpha2/internal/internal_parameters/internal_parameters.go index 844588fb54..cffe9626d5 100644 --- a/pkg/chains/formats/slsa/v2alpha2/internal/internal_parameters/internal_parameters.go +++ b/pkg/chains/formats/slsa/v2alpha2/internal/internal_parameters/internal_parameters.go @@ -18,14 +18,13 @@ package internalparameters import ( "github.com/tektoncd/chains/pkg/chains/objects" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" ) // SLSAInternalParameters provides the chains config as internalparameters func SLSAInternalParameters(tko objects.TektonObject) map[string]any { internalParams := make(map[string]any) - if provenance := tko.GetProvenance(); provenance != (*v1beta1.Provenance)(nil) && provenance.FeatureFlags != nil { - internalParams["tekton-pipelines-feature-flags"] = *provenance.FeatureFlags + if provenance := tko.GetProvenance(); !provenance.IsNil() && !provenance.FeatureFlagsIsNil() { + internalParams["tekton-pipelines-feature-flags"] = *provenance.GetFeatureFlags() } return internalParams } @@ -33,8 +32,8 @@ func SLSAInternalParameters(tko objects.TektonObject) map[string]any { // TektonInternalParameters provides the chains config as well as annotations and labels func TektonInternalParameters(tko objects.TektonObject) map[string]any { internalParams := make(map[string]any) - if provenance := tko.GetProvenance(); provenance != (*v1beta1.Provenance)(nil) && provenance.FeatureFlags != nil { - 
internalParams["tekton-pipelines-feature-flags"] = *provenance.FeatureFlags + if provenance := tko.GetProvenance(); !provenance.IsNil() && !provenance.FeatureFlagsIsNil() { + internalParams["tekton-pipelines-feature-flags"] = *provenance.GetFeatureFlags() } internalParams["labels"] = tko.GetLabels() internalParams["annotations"] = tko.GetAnnotations() diff --git a/pkg/chains/formats/slsa/v2alpha2/internal/internal_parameters/internal_parameters_test.go b/pkg/chains/formats/slsa/v2alpha2/internal/internal_parameters/internal_parameters_test.go index 95cbee3cc0..0a941591a4 100644 --- a/pkg/chains/formats/slsa/v2alpha2/internal/internal_parameters/internal_parameters_test.go +++ b/pkg/chains/formats/slsa/v2alpha2/internal/internal_parameters/internal_parameters_test.go @@ -26,11 +26,11 @@ import ( ) func TestTektonInternalParameters(t *testing.T) { - tr, err := objectloader.TaskRunFromFile("../../../testdata/v2alpha2/taskrun1.json") + tr, err := objectloader.TaskRunV1Beta1FromFile("../../../testdata/v2alpha2/taskrun1.json") if err != nil { t.Fatal(err) } - tro := objects.NewTaskRunObject(tr) + tro := objects.NewTaskRunObjectV1Beta1(tr) got := TektonInternalParameters(tro) want := map[string]any{ "labels": tro.GetLabels(), @@ -44,11 +44,11 @@ func TestTektonInternalParameters(t *testing.T) { } func TestSLSAInternalParameters(t *testing.T) { - tr, err := objectloader.TaskRunFromFile("../../../testdata/v2alpha2/taskrun1.json") + tr, err := objectloader.TaskRunV1Beta1FromFile("../../../testdata/v2alpha2/taskrun1.json") if err != nil { t.Fatal(err) } - tro := objects.NewTaskRunObject(tr) + tro := objects.NewTaskRunObjectV1Beta1(tr) got := SLSAInternalParameters(tro) want := map[string]any{ "tekton-pipelines-feature-flags": config.FeatureFlags{EnableAPIFields: "beta", ResultExtractionMethod: "termination-message"}, diff --git a/pkg/chains/formats/slsa/v2alpha2/internal/pipelinerun/pipelinerun.go b/pkg/chains/formats/slsa/v2alpha2/internal/pipelinerun/pipelinerun.go index 
cab493d5f6..2820808555 100644 --- a/pkg/chains/formats/slsa/v2alpha2/internal/pipelinerun/pipelinerun.go +++ b/pkg/chains/formats/slsa/v2alpha2/internal/pipelinerun/pipelinerun.go @@ -36,7 +36,7 @@ const ( ) // GenerateAttestation generates a provenance statement with SLSA v1.0 predicate for a pipeline run. -func GenerateAttestation(ctx context.Context, pro *objects.PipelineRunObject, slsaconfig *slsaconfig.SlsaConfig) (interface{}, error) { +func GenerateAttestation(ctx context.Context, pro *objects.PipelineRunObjectV1Beta1, slsaconfig *slsaconfig.SlsaConfig) (interface{}, error) { bp, err := byproducts(pro) if err != nil { return nil, err @@ -67,7 +67,7 @@ func GenerateAttestation(ctx context.Context, pro *objects.PipelineRunObject, sl return att, nil } -func metadata(pro *objects.PipelineRunObject) slsa.BuildMetadata { +func metadata(pro *objects.PipelineRunObjectV1Beta1) slsa.BuildMetadata { m := slsa.BuildMetadata{ InvocationID: string(pro.ObjectMeta.UID), } @@ -83,7 +83,7 @@ func metadata(pro *objects.PipelineRunObject) slsa.BuildMetadata { } // byproducts contains the pipelineRunResults -func byproducts(pro *objects.PipelineRunObject) ([]slsa.ResourceDescriptor, error) { +func byproducts(pro *objects.PipelineRunObjectV1Beta1) ([]slsa.ResourceDescriptor, error) { byProd := []slsa.ResourceDescriptor{} for _, key := range pro.Status.PipelineResults { content, err := json.Marshal(key.Value) @@ -101,7 +101,7 @@ func byproducts(pro *objects.PipelineRunObject) ([]slsa.ResourceDescriptor, erro } // getBuildDefinition get the buildDefinition based on the configured buildType. 
This will default to the slsa buildType -func getBuildDefinition(ctx context.Context, slsaconfig *slsaconfig.SlsaConfig, pro *objects.PipelineRunObject) (slsa.ProvenanceBuildDefinition, error) { +func getBuildDefinition(ctx context.Context, slsaconfig *slsaconfig.SlsaConfig, pro *objects.PipelineRunObjectV1Beta1) (slsa.ProvenanceBuildDefinition, error) { // if buildType is not set in the chains-config, default to slsa build type buildDefinitionType := slsaconfig.BuildType if slsaconfig.BuildType == "" { diff --git a/pkg/chains/formats/slsa/v2alpha2/internal/pipelinerun/pipelinerun_test.go b/pkg/chains/formats/slsa/v2alpha2/internal/pipelinerun/pipelinerun_test.go index 34a12edc48..72cdd99fa5 100644 --- a/pkg/chains/formats/slsa/v2alpha2/internal/pipelinerun/pipelinerun_test.go +++ b/pkg/chains/formats/slsa/v2alpha2/internal/pipelinerun/pipelinerun_test.go @@ -63,7 +63,7 @@ func TestMetadata(t *testing.T) { StartedOn: &start, FinishedOn: &end, } - got := metadata(objects.NewPipelineRunObject(pr)) + got := metadata(objects.NewPipelineRunObjectV1Beta1(pr)) if d := cmp.Diff(want, got); d != "" { t.Fatalf("metadata (-want, +got):\n%s", d) } @@ -94,7 +94,7 @@ func TestMetadataInTimeZone(t *testing.T) { StartedOn: &start, FinishedOn: &end, } - got := metadata(objects.NewPipelineRunObject(pr)) + got := metadata(objects.NewPipelineRunObjectV1Beta1(pr)) if d := cmp.Diff(want, got); d != "" { t.Fatalf("metadata (-want, +got):\n%s", d) } @@ -126,7 +126,7 @@ func TestByProducts(t *testing.T) { MediaType: JsonMediaType, }, } - got, err := byproducts(objects.NewPipelineRunObject(pr)) + got, err := byproducts(objects.NewPipelineRunObjectV1Beta1(pr)) if err != nil { t.Fatalf("Could not extract byproducts: %s", err) } @@ -135,20 +135,20 @@ func TestByProducts(t *testing.T) { } } -func createPro(path string) *objects.PipelineRunObject { - pr, err := objectloader.PipelineRunFromFile(path) +func createPro(path string) *objects.PipelineRunObjectV1Beta1 { + pr, err := 
objectloader.PipelineRunV1Beta1FromFile(path) if err != nil { panic(err) } - tr1, err := objectloader.TaskRunFromFile("../../../testdata/v2alpha2/taskrun1.json") + tr1, err := objectloader.TaskRunV1Beta1FromFile("../../../testdata/v2alpha2/taskrun1.json") if err != nil { panic(err) } - tr2, err := objectloader.TaskRunFromFile("../../../testdata/v2alpha2/taskrun2.json") + tr2, err := objectloader.TaskRunV1Beta1FromFile("../../../testdata/v2alpha2/taskrun2.json") if err != nil { panic(err) } - p := objects.NewPipelineRunObject(pr) + p := objects.NewPipelineRunObjectV1Beta1(pr) p.AppendTaskRun(tr1) p.AppendTaskRun(tr2) return p @@ -275,7 +275,7 @@ func TestGenerateAttestation(t *testing.T) { } } -func getResolvedDependencies(addTasks func(*objects.TaskRunObject) (*v1resourcedescriptor.ResourceDescriptor, error)) []v1resourcedescriptor.ResourceDescriptor { //nolint:staticcheck +func getResolvedDependencies(addTasks func(*objects.TaskRunObjectV1Beta1) (*v1resourcedescriptor.ResourceDescriptor, error)) []v1resourcedescriptor.ResourceDescriptor { //nolint:staticcheck pr := createPro("../../../testdata/v2alpha2/pipelinerun1.json") rd, err := resolveddependencies.PipelineRun(context.Background(), pr, &slsaconfig.SlsaConfig{DeepInspectionEnabled: false}, addTasks) if err != nil { @@ -294,7 +294,7 @@ func TestGetBuildDefinition(t *testing.T) { } tests := []struct { name string - taskContent func(*objects.TaskRunObject) (*v1resourcedescriptor.ResourceDescriptor, error) //nolint:staticcheck + taskContent func(*objects.TaskRunObjectV1Beta1) (*v1resourcedescriptor.ResourceDescriptor, error) //nolint:staticcheck config *slsaconfig.SlsaConfig want slsa.ProvenanceBuildDefinition }{ diff --git a/pkg/chains/formats/slsa/v2alpha2/internal/resolved_dependencies/resolved_dependencies.go b/pkg/chains/formats/slsa/v2alpha2/internal/resolved_dependencies/resolved_dependencies.go index 54fb4e1454..38c8d6dce3 100644 --- 
a/pkg/chains/formats/slsa/v2alpha2/internal/resolved_dependencies/resolved_dependencies.go +++ b/pkg/chains/formats/slsa/v2alpha2/internal/resolved_dependencies/resolved_dependencies.go @@ -22,7 +22,7 @@ import ( "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common" v1 "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v1" - "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/material" + materialv1beta1 "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/material/v1beta1" "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/slsaconfig" "github.com/tektoncd/chains/pkg/chains/objects" "go.uber.org/zap" @@ -44,11 +44,11 @@ const ( // used to toggle the fields in resolvedDependencies. see AddTektonTaskDescriptor // and AddSLSATaskDescriptor -type addTaskDescriptorContent func(*objects.TaskRunObject) (*v1.ResourceDescriptor, error) //nolint:staticcheck +type addTaskDescriptorContent func(*objects.TaskRunObjectV1Beta1) (*v1.ResourceDescriptor, error) //nolint:staticcheck // the more verbose resolved dependency content. this adds the name, uri, digest // and content if possible. -func AddTektonTaskDescriptor(tr *objects.TaskRunObject) (*v1.ResourceDescriptor, error) { //nolint:staticcheck +func AddTektonTaskDescriptor(tr *objects.TaskRunObjectV1Beta1) (*v1.ResourceDescriptor, error) { //nolint:staticcheck rd := v1.ResourceDescriptor{} storedTr, err := json.Marshal(tr) if err != nil { @@ -67,7 +67,7 @@ func AddTektonTaskDescriptor(tr *objects.TaskRunObject) (*v1.ResourceDescriptor, // resolved dependency content for the more generic slsa verifiers. just logs // the name, uri and digest. 
-func AddSLSATaskDescriptor(tr *objects.TaskRunObject) (*v1.ResourceDescriptor, error) { //nolint:staticcheck +func AddSLSATaskDescriptor(tr *objects.TaskRunObjectV1Beta1) (*v1.ResourceDescriptor, error) { //nolint:staticcheck if tr.Status.Provenance != nil && tr.Status.Provenance.RefSource != nil { return &v1.ResourceDescriptor{ Name: pipelineTaskConfigName, @@ -131,7 +131,7 @@ func removeDuplicateResolvedDependencies(resolvedDependencies []v1.ResourceDescr // fromPipelineTask adds the resolved dependencies from pipeline tasks // such as pipeline task uri/digest for remote pipeline tasks and step and sidecar images. -func fromPipelineTask(logger *zap.SugaredLogger, pro *objects.PipelineRunObject, addTasks addTaskDescriptorContent) ([]v1.ResourceDescriptor, error) { +func fromPipelineTask(logger *zap.SugaredLogger, pro *objects.PipelineRunObjectV1Beta1, addTasks addTaskDescriptorContent) ([]v1.ResourceDescriptor, error) { pSpec := pro.Status.PipelineSpec resolvedDependencies := []v1.ResourceDescriptor{} if pSpec != nil { @@ -156,14 +156,14 @@ func fromPipelineTask(logger *zap.SugaredLogger, pro *objects.PipelineRunObject, mats := []common.ProvenanceMaterial{} // add step images - stepMaterials, err := material.FromStepImages(tr) + stepMaterials, err := materialv1beta1.FromStepImages(tr) if err != nil { return nil, err } mats = append(mats, stepMaterials...) 
// add sidecar images - sidecarMaterials, err := material.FromSidecarImages(tr) + sidecarMaterials, err := materialv1beta1.FromSidecarImages(tr) if err != nil { return nil, err } @@ -177,30 +177,30 @@ func fromPipelineTask(logger *zap.SugaredLogger, pro *objects.PipelineRunObject, } // taskDependencies gather all dependencies in a task and adds them to resolvedDependencies -func taskDependencies(ctx context.Context, tr *objects.TaskRunObject) ([]v1.ResourceDescriptor, error) { +func taskDependencies(ctx context.Context, tr *objects.TaskRunObjectV1Beta1) ([]v1.ResourceDescriptor, error) { var resolvedDependencies []v1.ResourceDescriptor var err error mats := []common.ProvenanceMaterial{} // add step and sidecar images - stepMaterials, err := material.FromStepImages(tr) + stepMaterials, err := materialv1beta1.FromStepImages(tr) mats = append(mats, stepMaterials...) if err != nil { return nil, err } - sidecarMaterials, err := material.FromSidecarImages(tr) + sidecarMaterials, err := materialv1beta1.FromSidecarImages(tr) if err != nil { return nil, err } mats = append(mats, sidecarMaterials...) resolvedDependencies = append(resolvedDependencies, convertMaterialsToResolvedDependencies(mats, "")...) - mats = material.FromTaskParamsAndResults(ctx, tr) + mats = materialv1beta1.FromTaskParamsAndResults(ctx, tr) // convert materials to resolved dependencies resolvedDependencies = append(resolvedDependencies, convertMaterialsToResolvedDependencies(mats, inputResultName)...) // add task resources - mats = material.FromTaskResources(ctx, tr) + mats = materialv1beta1.FromTaskResources(ctx, tr) // convert materials to resolved dependencies resolvedDependencies = append(resolvedDependencies, convertMaterialsToResolvedDependencies(mats, pipelineResourceName)...) 
@@ -214,7 +214,7 @@ func taskDependencies(ctx context.Context, tr *objects.TaskRunObject) ([]v1.Reso } // TaskRun constructs `predicate.resolvedDependencies` section by collecting all the artifacts that influence a taskrun such as source code repo and step&sidecar base images. -func TaskRun(ctx context.Context, tro *objects.TaskRunObject) ([]v1.ResourceDescriptor, error) { +func TaskRun(ctx context.Context, tro *objects.TaskRunObjectV1Beta1) ([]v1.ResourceDescriptor, error) { var resolvedDependencies []v1.ResourceDescriptor var err error @@ -238,7 +238,7 @@ func TaskRun(ctx context.Context, tro *objects.TaskRunObject) ([]v1.ResourceDesc } // PipelineRun constructs `predicate.resolvedDependencies` section by collecting all the artifacts that influence a pipeline run such as source code repo and step&sidecar base images. -func PipelineRun(ctx context.Context, pro *objects.PipelineRunObject, slsaconfig *slsaconfig.SlsaConfig, addTasks addTaskDescriptorContent) ([]v1.ResourceDescriptor, error) { +func PipelineRun(ctx context.Context, pro *objects.PipelineRunObjectV1Beta1, slsaconfig *slsaconfig.SlsaConfig, addTasks addTaskDescriptorContent) ([]v1.ResourceDescriptor, error) { var err error var resolvedDependencies []v1.ResourceDescriptor logger := logging.FromContext(ctx) @@ -261,7 +261,7 @@ func PipelineRun(ctx context.Context, pro *objects.PipelineRunObject, slsaconfig resolvedDependencies = append(resolvedDependencies, rds...) // add resolved dependencies from pipeline results - mats := material.FromPipelineParamsAndResults(ctx, pro, slsaconfig) + mats := materialv1beta1.FromPipelineParamsAndResults(ctx, pro, slsaconfig) // convert materials to resolved dependencies resolvedDependencies = append(resolvedDependencies, convertMaterialsToResolvedDependencies(mats, inputResultName)...) 
diff --git a/pkg/chains/formats/slsa/v2alpha2/internal/resolved_dependencies/resolved_dependencies_test.go b/pkg/chains/formats/slsa/v2alpha2/internal/resolved_dependencies/resolved_dependencies_test.go index d8013bdf07..7bb958d9cf 100644 --- a/pkg/chains/formats/slsa/v2alpha2/internal/resolved_dependencies/resolved_dependencies_test.go +++ b/pkg/chains/formats/slsa/v2alpha2/internal/resolved_dependencies/resolved_dependencies_test.go @@ -38,29 +38,29 @@ import ( const digest = "sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b7" -var pro *objects.PipelineRunObject -var proStructuredResults *objects.PipelineRunObject +var pro *objects.PipelineRunObjectV1Beta1 +var proStructuredResults *objects.PipelineRunObjectV1Beta1 func init() { pro = createPro("../../../testdata/v2alpha2/pipelinerun1.json") proStructuredResults = createPro("../../../testdata/v2alpha2/pipelinerun_structured_results.json") } -func createPro(path string) *objects.PipelineRunObject { +func createPro(path string) *objects.PipelineRunObjectV1Beta1 { var err error - pr, err := objectloader.PipelineRunFromFile(path) + pr, err := objectloader.PipelineRunV1Beta1FromFile(path) if err != nil { panic(err) } - tr1, err := objectloader.TaskRunFromFile("../../../testdata/v2alpha2/taskrun1.json") + tr1, err := objectloader.TaskRunV1Beta1FromFile("../../../testdata/v2alpha2/taskrun1.json") if err != nil { panic(err) } - tr2, err := objectloader.TaskRunFromFile("../../../testdata/v2alpha2/taskrun2.json") + tr2, err := objectloader.TaskRunV1Beta1FromFile("../../../testdata/v2alpha2/taskrun2.json") if err != nil { panic(err) } - p := objects.NewPipelineRunObject(pr) + p := objects.NewPipelineRunObjectV1Beta1(pr) p.AppendTaskRun(tr1) p.AppendTaskRun(tr2) return p @@ -68,11 +68,11 @@ func createPro(path string) *objects.PipelineRunObject { func tektonTaskRuns() map[string][]byte { trs := make(map[string][]byte) - tr1, err := objectloader.TaskRunFromFile("../../../testdata/v2alpha2/taskrun1.json") 
+ tr1, err := objectloader.TaskRunV1Beta1FromFile("../../../testdata/v2alpha2/taskrun1.json") if err != nil { panic(err) } - tr2, err := objectloader.TaskRunFromFile("../../../testdata/v2alpha2/taskrun2.json") + tr2, err := objectloader.TaskRunV1Beta1FromFile("../../../testdata/v2alpha2/taskrun2.json") if err != nil { panic(err) } @@ -501,7 +501,7 @@ func TestTaskRun(t *testing.T) { for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { ctx := logtesting.TestContextWithLogger(t) - rd, err := TaskRun(ctx, objects.NewTaskRunObject(tc.taskRun)) + rd, err := TaskRun(ctx, objects.NewTaskRunObjectV1Beta1(tc.taskRun)) if err != nil { t.Fatalf("Did not expect an error but got %v", err) } diff --git a/pkg/chains/formats/slsa/v2alpha2/internal/taskrun/taskrun.go b/pkg/chains/formats/slsa/v2alpha2/internal/taskrun/taskrun.go index 9f53d253f0..fb813b0d67 100644 --- a/pkg/chains/formats/slsa/v2alpha2/internal/taskrun/taskrun.go +++ b/pkg/chains/formats/slsa/v2alpha2/internal/taskrun/taskrun.go @@ -32,7 +32,7 @@ import ( const taskRunResults = "taskRunResults/%s" // GenerateAttestation generates a provenance statement with SLSA v1.0 predicate for a task run. 
-func GenerateAttestation(ctx context.Context, tro *objects.TaskRunObject, slsaConfig *slsaconfig.SlsaConfig) (interface{}, error) { +func GenerateAttestation(ctx context.Context, tro *objects.TaskRunObjectV1Beta1, slsaConfig *slsaconfig.SlsaConfig) (interface{}, error) { bp, err := byproducts(tro) if err != nil { return nil, err @@ -63,7 +63,7 @@ func GenerateAttestation(ctx context.Context, tro *objects.TaskRunObject, slsaCo return att, nil } -func metadata(tro *objects.TaskRunObject) slsa.BuildMetadata { +func metadata(tro *objects.TaskRunObjectV1Beta1) slsa.BuildMetadata { m := slsa.BuildMetadata{ InvocationID: string(tro.ObjectMeta.UID), } @@ -79,7 +79,7 @@ func metadata(tro *objects.TaskRunObject) slsa.BuildMetadata { } // byproducts contains the taskRunResults -func byproducts(tro *objects.TaskRunObject) ([]slsa.ResourceDescriptor, error) { +func byproducts(tro *objects.TaskRunObjectV1Beta1) ([]slsa.ResourceDescriptor, error) { byProd := []slsa.ResourceDescriptor{} for _, key := range tro.Status.TaskRunResults { content, err := json.Marshal(key.Value) @@ -97,7 +97,7 @@ func byproducts(tro *objects.TaskRunObject) ([]slsa.ResourceDescriptor, error) { } // getBuildDefinition get the buildDefinition based on the configured buildType. 
This will default to the slsa buildType -func getBuildDefinition(ctx context.Context, buildType string, tro *objects.TaskRunObject) (slsa.ProvenanceBuildDefinition, error) { +func getBuildDefinition(ctx context.Context, buildType string, tro *objects.TaskRunObjectV1Beta1) (slsa.ProvenanceBuildDefinition, error) { // if buildType is not set in the chains-config, default to slsa build type buildDefinitionType := buildType if buildType == "" { diff --git a/pkg/chains/formats/slsa/v2alpha2/internal/taskrun/taskrun_test.go b/pkg/chains/formats/slsa/v2alpha2/internal/taskrun/taskrun_test.go index 731d74a1cd..9ec6b8ba81 100644 --- a/pkg/chains/formats/slsa/v2alpha2/internal/taskrun/taskrun_test.go +++ b/pkg/chains/formats/slsa/v2alpha2/internal/taskrun/taskrun_test.go @@ -65,7 +65,7 @@ func TestMetadata(t *testing.T) { StartedOn: &start, FinishedOn: &end, } - got := metadata(objects.NewTaskRunObject(tr)) + got := metadata(objects.NewTaskRunObjectV1Beta1(tr)) if d := cmp.Diff(want, got); d != "" { t.Fatalf("metadata (-want, +got):\n%s", d) } @@ -96,7 +96,7 @@ func TestMetadataInTimeZone(t *testing.T) { StartedOn: &start, FinishedOn: &end, } - got := metadata(objects.NewTaskRunObject(tr)) + got := metadata(objects.NewTaskRunObjectV1Beta1(tr)) if d := cmp.Diff(want, got); d != "" { t.Fatalf("metadata (-want, +got):\n%s", d) } @@ -128,7 +128,7 @@ func TestByProducts(t *testing.T) { MediaType: pipelinerun.JsonMediaType, }, } - got, err := byproducts(objects.NewTaskRunObject(tr)) + got, err := byproducts(objects.NewTaskRunObjectV1Beta1(tr)) if err != nil { t.Fatalf("Could not extract byproducts: %s", err) } @@ -139,7 +139,7 @@ func TestByProducts(t *testing.T) { func TestTaskRunGenerateAttestation(t *testing.T) { ctx := logtesting.TestContextWithLogger(t) - tr, err := objectloader.TaskRunFromFile("../../../testdata/v2alpha2/taskrun1.json") + tr, err := objectloader.TaskRunV1Beta1FromFile("../../../testdata/v2alpha2/taskrun1.json") if err != nil { t.Fatal(err) } @@ -225,7 +225,7 
@@ func TestTaskRunGenerateAttestation(t *testing.T) { }, } - got, err := GenerateAttestation(ctx, objects.NewTaskRunObject(tr), &slsaconfig.SlsaConfig{ + got, err := GenerateAttestation(ctx, objects.NewTaskRunObjectV1Beta1(tr), &slsaconfig.SlsaConfig{ BuilderID: "test_builder-1", BuildType: "https://tekton.dev/chains/v2/slsa", }) @@ -238,7 +238,7 @@ func TestTaskRunGenerateAttestation(t *testing.T) { } } -func getResolvedDependencies(tro *objects.TaskRunObject) []v1resourcedescriptor.ResourceDescriptor { +func getResolvedDependencies(tro *objects.TaskRunObjectV1Beta1) []v1resourcedescriptor.ResourceDescriptor { rd, err := resolveddependencies.TaskRun(context.Background(), tro) if err != nil { return []v1resourcedescriptor.ResourceDescriptor{} @@ -247,7 +247,7 @@ func getResolvedDependencies(tro *objects.TaskRunObject) []v1resourcedescriptor. } func TestGetBuildDefinition(t *testing.T) { - tr, err := objectloader.TaskRunFromFile("../../../testdata/v2alpha2/taskrun1.json") + tr, err := objectloader.TaskRunV1Beta1FromFile("../../../testdata/v2alpha2/taskrun1.json") if err != nil { t.Fatal(err) } @@ -259,7 +259,7 @@ func TestGetBuildDefinition(t *testing.T) { "label1": "label1", } - tro := objects.NewTaskRunObject(tr) + tro := objects.NewTaskRunObjectV1Beta1(tr) tests := []struct { name string buildType string @@ -317,12 +317,12 @@ func TestGetBuildDefinition(t *testing.T) { } func TestUnsupportedBuildType(t *testing.T) { - tr, err := objectloader.TaskRunFromFile("../../../testdata/v2alpha2/taskrun1.json") + tr, err := objectloader.TaskRunV1Beta1FromFile("../../../testdata/v2alpha2/taskrun1.json") if err != nil { t.Fatal(err) } - got, err := getBuildDefinition(context.Background(), "bad-buildType", objects.NewTaskRunObject(tr)) + got, err := getBuildDefinition(context.Background(), "bad-buildType", objects.NewTaskRunObjectV1Beta1(tr)) if err == nil { t.Error("getBuildDefinition(): expected error got nil") } diff --git a/pkg/chains/formats/slsa/v2alpha2/slsav2.go 
b/pkg/chains/formats/slsa/v2alpha2/slsav2.go index 2368b459c5..561c191ff5 100644 --- a/pkg/chains/formats/slsa/v2alpha2/slsav2.go +++ b/pkg/chains/formats/slsa/v2alpha2/slsav2.go @@ -26,6 +26,8 @@ import ( "github.com/tektoncd/chains/pkg/chains/formats/slsa/v2alpha2/internal/taskrun" "github.com/tektoncd/chains/pkg/chains/objects" "github.com/tektoncd/chains/pkg/config" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" + "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" ) const ( @@ -56,9 +58,32 @@ func (s *Slsa) Wrap() bool { func (s *Slsa) CreatePayload(ctx context.Context, obj interface{}) (interface{}, error) { switch v := obj.(type) { - case *objects.TaskRunObject: + case *objects.TaskRunObjectV1: + tro := obj.(*objects.TaskRunObjectV1) + trV1Beta1 := &v1beta1.TaskRun{} //nolint:staticcheck + if err := trV1Beta1.ConvertFrom(ctx, tro.GetObject().(*v1.TaskRun)); err != nil { + return nil, fmt.Errorf("error converting Tekton TaskRun from version v1 to v1beta1: %s", err) + } + return taskrun.GenerateAttestation(ctx, objects.NewTaskRunObjectV1Beta1(trV1Beta1), s.slsaConfig) + case *objects.PipelineRunObjectV1: + pro := obj.(*objects.PipelineRunObjectV1) + prV1Beta1 := &v1beta1.PipelineRun{} //nolint:staticcheck + if err := prV1Beta1.ConvertFrom(ctx, pro.GetObject().(*v1.PipelineRun)); err != nil { + return nil, fmt.Errorf("error converting Tekton PipelineRun from version v1 to v1beta1: %s", err) + } + proV1Beta1 := objects.NewPipelineRunObjectV1Beta1(prV1Beta1) + trs := pro.GetTaskRuns() + for _, tr := range trs { + trV1Beta1 := &v1beta1.TaskRun{} //nolint:staticcheck + if err := trV1Beta1.ConvertFrom(ctx, tr); err != nil { + return nil, fmt.Errorf("error converting Tekton TaskRun from version v1 to v1beta1: %s", err) + } + proV1Beta1.AppendTaskRun(trV1Beta1) + } + return pipelinerun.GenerateAttestation(ctx, proV1Beta1, s.slsaConfig) + case *objects.TaskRunObjectV1Beta1: return taskrun.GenerateAttestation(ctx, v, s.slsaConfig) - case 
*objects.PipelineRunObject: + case *objects.PipelineRunObjectV1Beta1: return pipelinerun.GenerateAttestation(ctx, v, s.slsaConfig) default: return nil, fmt.Errorf("intoto does not support type: %s", v) diff --git a/pkg/chains/formats/slsa/v2alpha2/slsav2_test.go b/pkg/chains/formats/slsa/v2alpha2/slsav2_test.go index 38c0107ca9..2b49799e31 100644 --- a/pkg/chains/formats/slsa/v2alpha2/slsav2_test.go +++ b/pkg/chains/formats/slsa/v2alpha2/slsav2_test.go @@ -96,7 +96,7 @@ func TestCorrectPayloadType(t *testing.T) { func TestTaskRunCreatePayload1(t *testing.T) { ctx := logtesting.TestContextWithLogger(t) - tr, err := objectloader.TaskRunFromFile("../testdata/v2alpha2/taskrun1.json") + tr, err := objectloader.TaskRunV1Beta1FromFile("../testdata/v2alpha2/taskrun1.json") if err != nil { t.Fatal(err) } @@ -187,7 +187,7 @@ func TestTaskRunCreatePayload1(t *testing.T) { i, _ := NewFormatter(cfg) - got, err := i.CreatePayload(ctx, objects.NewTaskRunObject(tr)) + got, err := i.CreatePayload(ctx, objects.NewTaskRunObjectV1Beta1(tr)) if err != nil { t.Errorf("unexpected error: %s", err.Error()) @@ -199,7 +199,7 @@ func TestTaskRunCreatePayload1(t *testing.T) { func TestTaskRunCreatePayload2(t *testing.T) { ctx := logtesting.TestContextWithLogger(t) - tr, err := objectloader.TaskRunFromFile("../testdata/v2alpha2/taskrun2.json") + tr, err := objectloader.TaskRunV1Beta1FromFile("../testdata/v2alpha2/taskrun2.json") if err != nil { t.Fatal(err) } @@ -272,7 +272,7 @@ func TestTaskRunCreatePayload2(t *testing.T) { } i, _ := NewFormatter(cfg) - got, err := i.CreatePayload(ctx, objects.NewTaskRunObject(tr)) + got, err := i.CreatePayload(ctx, objects.NewTaskRunObjectV1Beta1(tr)) if err != nil { t.Errorf("unexpected error: %s", err.Error()) @@ -285,7 +285,7 @@ func TestTaskRunCreatePayload2(t *testing.T) { func TestMultipleSubjects(t *testing.T) { ctx := logtesting.TestContextWithLogger(t) - tr, err := objectloader.TaskRunFromFile("../testdata/v2alpha2/taskrun-multiple-subjects.json") + 
tr, err := objectloader.TaskRunV1Beta1FromFile("../testdata/v2alpha2/taskrun-multiple-subjects.json") if err != nil { t.Fatal(err) } @@ -352,7 +352,7 @@ func TestMultipleSubjects(t *testing.T) { } i, _ := NewFormatter(cfg) - got, err := i.CreatePayload(ctx, objects.NewTaskRunObject(tr)) + got, err := i.CreatePayload(ctx, objects.NewTaskRunObjectV1Beta1(tr)) if err != nil { t.Errorf("unexpected error: %s", err.Error()) } @@ -361,20 +361,20 @@ func TestMultipleSubjects(t *testing.T) { } } -func createPro(path string) *objects.PipelineRunObject { - pr, err := objectloader.PipelineRunFromFile(path) +func createPro(path string) *objects.PipelineRunObjectV1Beta1 { + pr, err := objectloader.PipelineRunV1Beta1FromFile(path) if err != nil { panic(err) } - tr1, err := objectloader.TaskRunFromFile("../testdata/v2alpha2/taskrun1.json") + tr1, err := objectloader.TaskRunV1Beta1FromFile("../testdata/v2alpha2/taskrun1.json") if err != nil { panic(err) } - tr2, err := objectloader.TaskRunFromFile("../testdata/v2alpha2/taskrun2.json") + tr2, err := objectloader.TaskRunV1Beta1FromFile("../testdata/v2alpha2/taskrun2.json") if err != nil { panic(err) } - p := objects.NewPipelineRunObject(pr) + p := objects.NewPipelineRunObjectV1Beta1(pr) p.AppendTaskRun(tr1) p.AppendTaskRun(tr2) return p diff --git a/pkg/chains/formats/slsa/v2alpha3/internal/build_types/build_types.go b/pkg/chains/formats/slsa/v2alpha3/internal/build_types/build_types.go new file mode 100644 index 0000000000..ef11ff453d --- /dev/null +++ b/pkg/chains/formats/slsa/v2alpha3/internal/build_types/build_types.go @@ -0,0 +1,22 @@ +/* +Copyright 2023 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package buildtypes + +const ( + SlsaBuildType = "https://tekton.dev/chains/v2/slsa" + TektonBuildType = "https://tekton.dev/chains/v2/slsa-tekton" +) diff --git a/pkg/chains/formats/slsa/v2alpha3/internal/external_parameters/external_parameters.go b/pkg/chains/formats/slsa/v2alpha3/internal/external_parameters/external_parameters.go new file mode 100644 index 0000000000..3ae576bcd4 --- /dev/null +++ b/pkg/chains/formats/slsa/v2alpha3/internal/external_parameters/external_parameters.go @@ -0,0 +1,59 @@ +/* +Copyright 2023 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package externalparameters + +import ( + "fmt" + + "github.com/tektoncd/chains/pkg/chains/objects" +) + +func buildConfigSource(provenance objects.GenericProvenance) map[string]string { + ref := "" + for alg, hex := range provenance.GetRefSourceDigest() { + ref = fmt.Sprintf("%s:%s", alg, hex) + break + } + buildConfigSource := map[string]string{ + "ref": ref, + "repository": provenance.GetRefSourceURI(), + "path": provenance.GetRefSourceEntrypoint(), + } + return buildConfigSource +} + +// PipelineRun adds the pipeline run spec and provenance if available +func PipelineRun(pro *objects.PipelineRunObjectV1) map[string]any { + externalParams := make(map[string]any) + + if provenance := pro.GetRemoteProvenance(); provenance != nil { + externalParams["buildConfigSource"] = buildConfigSource(provenance) + } + externalParams["runSpec"] = pro.Spec + return externalParams +} + +// TaskRun adds the task run spec and provenance if available +func TaskRun(tro *objects.TaskRunObjectV1) map[string]any { + externalParams := make(map[string]any) + + if provenance := tro.GetRemoteProvenance(); provenance != nil { + externalParams["buildConfigSource"] = buildConfigSource(provenance) + } + externalParams["runSpec"] = tro.Spec + return externalParams +} diff --git a/pkg/chains/formats/slsa/v2alpha3/internal/external_parameters/external_parameters_test.go b/pkg/chains/formats/slsa/v2alpha3/internal/external_parameters/external_parameters_test.go new file mode 100644 index 0000000000..c0ec99e9e8 --- /dev/null +++ b/pkg/chains/formats/slsa/v2alpha3/internal/external_parameters/external_parameters_test.go @@ -0,0 +1,136 @@ +/* +Copyright 2023 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package externalparameters + +import ( + "strings" + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/tektoncd/chains/pkg/chains/objects" + "github.com/tektoncd/chains/pkg/internal/objectloader" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" +) + +func TestBuildConfigSource(t *testing.T) { + digest := map[string]string{"alg1": "hex1", "alg2": "hex2"} + provenance := &objects.ProvenanceV1{ + Provenance: &v1.Provenance{ + RefSource: &v1.RefSource{ + Digest: digest, + URI: "https://tekton.com", + EntryPoint: "/path/to/entry", + }, + }, + } + + want := map[string]string{ + "repository": "https://tekton.com", + "path": "/path/to/entry", + } + + got := buildConfigSource(provenance) + + gotRef := strings.Split(got["ref"], ":") + if len(gotRef) != 2 { + t.Errorf("buildConfigSource() does not return the proper ref: want one of: %s got: %s", digest, got["ref"]) + } + refValue, ok := digest[gotRef[0]] + if !ok { + t.Errorf("buildConfigSource() does not contain correct ref: want one of: %s got: %s:%s", digest, gotRef[0], gotRef[1]) + } + + if refValue != gotRef[1] { + t.Errorf("buildConfigSource() does not contain correct ref: want one of: %s got: %s:%s", digest, gotRef[0], gotRef[1]) + } + + if got["repository"] != want["repository"] { + t.Errorf("buildConfigSource() does not contain correct repository: want: %s got: %s", want["repository"], want["repository"]) + } + + if got["path"] != want["path"] { + t.Errorf("buildConfigSource() does not contain correct path: want: %s got: %s", want["path"], got["path"]) + } +} + +func createPro(path string) 
*objects.PipelineRunObjectV1 { + pr, err := objectloader.PipelineRunFromFile(path) + if err != nil { + panic(err) + } + tr1, err := objectloader.TaskRunFromFile("../../../testdata/v2alpha3/taskrun1.json") + if err != nil { + panic(err) + } + tr2, err := objectloader.TaskRunFromFile("../../../testdata/v2alpha3/taskrun2.json") + if err != nil { + panic(err) + } + p := objects.NewPipelineRunObjectV1(pr) + p.AppendTaskRun(tr1) + p.AppendTaskRun(tr2) + return p +} + +func TestPipelineRun(t *testing.T) { + pro := createPro("../../../testdata/v2alpha3/pipelinerun1.json") + + got := PipelineRun(pro) + + want := map[string]any{ + "runSpec": v1.PipelineRunSpec{ + PipelineRef: &v1.PipelineRef{Name: "test-pipeline"}, + Params: v1.Params{ + { + Name: "IMAGE", + Value: v1.ParamValue{Type: "string", StringVal: "test.io/test/image"}, + }, + }, + TaskRunTemplate: v1.PipelineTaskRunTemplate{ + ServiceAccountName: "pipeline", + }, + }, + } + + if diff := cmp.Diff(want, got); diff != "" { + t.Errorf("PipelineRun(): -want +got: %s", diff) + } +} + +func TestTaskRun(t *testing.T) { + tr, err := objectloader.TaskRunFromFile("../../../testdata/v2alpha3/taskrun1.json") + if err != nil { + t.Fatal(err) + } + got := TaskRun(objects.NewTaskRunObjectV1(tr)) + + want := map[string]any{ + "runSpec": v1.TaskRunSpec{ + Params: v1.Params{ + {Name: "IMAGE", Value: v1.ParamValue{Type: "string", StringVal: "test.io/test/image"}}, + {Name: "CHAINS-GIT_COMMIT", Value: v1.ParamValue{Type: "string", StringVal: "taskrun"}}, + {Name: "CHAINS-GIT_URL", Value: v1.ParamValue{Type: "string", StringVal: "https://git.test.com"}}, + }, + ServiceAccountName: "default", + TaskRef: &v1.TaskRef{Name: "build", Kind: "Task"}, + }, + } + + if diff := cmp.Diff(want, got); diff != "" { + t.Errorf("TaskRun(): -want +got: %s", diff) + } +} diff --git a/pkg/chains/formats/slsa/v2alpha3/internal/internal_parameters/internal_parameters.go b/pkg/chains/formats/slsa/v2alpha3/internal/internal_parameters/internal_parameters.go new 
file mode 100644 index 0000000000..cffe9626d5 --- /dev/null +++ b/pkg/chains/formats/slsa/v2alpha3/internal/internal_parameters/internal_parameters.go @@ -0,0 +1,41 @@ +/* +Copyright 2023 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package internalparameters + +import ( + "github.com/tektoncd/chains/pkg/chains/objects" +) + +// SLSAInternalParameters provides the chains config as internalparameters +func SLSAInternalParameters(tko objects.TektonObject) map[string]any { + internalParams := make(map[string]any) + if provenance := tko.GetProvenance(); !provenance.IsNil() && !provenance.FeatureFlagsIsNil() { + internalParams["tekton-pipelines-feature-flags"] = *provenance.GetFeatureFlags() + } + return internalParams +} + +// TektonInternalParameters provides the chains config as well as annotations and labels +func TektonInternalParameters(tko objects.TektonObject) map[string]any { + internalParams := make(map[string]any) + if provenance := tko.GetProvenance(); !provenance.IsNil() && !provenance.FeatureFlagsIsNil() { + internalParams["tekton-pipelines-feature-flags"] = *provenance.GetFeatureFlags() + } + internalParams["labels"] = tko.GetLabels() + internalParams["annotations"] = tko.GetAnnotations() + return internalParams +} diff --git a/pkg/chains/formats/slsa/v2alpha3/internal/internal_parameters/internal_parameters_test.go b/pkg/chains/formats/slsa/v2alpha3/internal/internal_parameters/internal_parameters_test.go new file mode 100644 index 
0000000000..a135039204 --- /dev/null +++ b/pkg/chains/formats/slsa/v2alpha3/internal/internal_parameters/internal_parameters_test.go @@ -0,0 +1,60 @@ +/* +Copyright 2023 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package internalparameters + +import ( + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/tektoncd/chains/pkg/chains/objects" + "github.com/tektoncd/chains/pkg/internal/objectloader" + "github.com/tektoncd/pipeline/pkg/apis/config" +) + +func TestTektonInternalParameters(t *testing.T) { + tr, err := objectloader.TaskRunFromFile("../../../testdata/v2alpha3/taskrun1.json") + if err != nil { + t.Fatal(err) + } + tro := objects.NewTaskRunObjectV1(tr) + got := TektonInternalParameters(tro) + want := map[string]any{ + "labels": tro.GetLabels(), + "annotations": tro.GetAnnotations(), + "tekton-pipelines-feature-flags": config.FeatureFlags{EnableAPIFields: "beta", ResultExtractionMethod: "termination-message"}, + } + + if diff := cmp.Diff(want, got); diff != "" { + t.Errorf("TaskRun(): -want +got: %s", diff) + } +} + +func TestSLSAInternalParameters(t *testing.T) { + tr, err := objectloader.TaskRunFromFile("../../../testdata/v2alpha3/taskrun1.json") + if err != nil { + t.Fatal(err) + } + tro := objects.NewTaskRunObjectV1(tr) + got := SLSAInternalParameters(tro) + want := map[string]any{ + "tekton-pipelines-feature-flags": config.FeatureFlags{EnableAPIFields: "beta", ResultExtractionMethod: "termination-message"}, + } + + if diff := cmp.Diff(want, 
got); diff != "" { + t.Errorf("TaskRun(): -want +got: %s", diff) + } +} diff --git a/pkg/chains/formats/slsa/v2alpha3/internal/pipelinerun/pipelinerun.go b/pkg/chains/formats/slsa/v2alpha3/internal/pipelinerun/pipelinerun.go new file mode 100644 index 0000000000..a0ff91e9bd --- /dev/null +++ b/pkg/chains/formats/slsa/v2alpha3/internal/pipelinerun/pipelinerun.go @@ -0,0 +1,137 @@ +/* +Copyright 2023 The Tekton Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package pipelinerun + +import ( + "context" + "encoding/json" + "fmt" + + intoto "github.com/in-toto/in-toto-golang/in_toto" + slsa "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v1" + "github.com/tektoncd/chains/pkg/chains/formats/slsa/extract" + "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/slsaconfig" + buildtypes "github.com/tektoncd/chains/pkg/chains/formats/slsa/v2alpha3/internal/build_types" + externalparameters "github.com/tektoncd/chains/pkg/chains/formats/slsa/v2alpha3/internal/external_parameters" + internalparameters "github.com/tektoncd/chains/pkg/chains/formats/slsa/v2alpha3/internal/internal_parameters" + resolveddependencies "github.com/tektoncd/chains/pkg/chains/formats/slsa/v2alpha3/internal/resolved_dependencies" + "github.com/tektoncd/chains/pkg/chains/objects" +) + +const ( + pipelineRunResults = "pipelineRunResults/%s" + // JsonMediaType is the media type of json encoded content used in resource descriptors + JsonMediaType = "application/json" +) + +// GenerateAttestation 
generates a provenance statement with SLSA v1.0 predicate for a pipeline run. +func GenerateAttestation(ctx context.Context, pro *objects.PipelineRunObjectV1, slsaconfig *slsaconfig.SlsaConfig) (interface{}, error) { + bp, err := byproducts(pro) + if err != nil { + return nil, err + } + + bd, err := getBuildDefinition(ctx, slsaconfig, pro) + if err != nil { + return nil, err + } + + att := intoto.ProvenanceStatementSLSA1{ + StatementHeader: intoto.StatementHeader{ + Type: intoto.StatementInTotoV01, + PredicateType: slsa.PredicateSLSAProvenance, + Subject: extract.SubjectDigests(ctx, pro, slsaconfig), + }, + Predicate: slsa.ProvenancePredicate{ + BuildDefinition: bd, + RunDetails: slsa.ProvenanceRunDetails{ + Builder: slsa.Builder{ + ID: slsaconfig.BuilderID, + }, + BuildMetadata: metadata(pro), + Byproducts: bp, + }, + }, + } + return att, nil +} + +func metadata(pro *objects.PipelineRunObjectV1) slsa.BuildMetadata { + m := slsa.BuildMetadata{ + InvocationID: string(pro.ObjectMeta.UID), + } + if pro.Status.StartTime != nil { + utc := pro.Status.StartTime.Time.UTC() + m.StartedOn = &utc + } + if pro.Status.CompletionTime != nil { + utc := pro.Status.CompletionTime.Time.UTC() + m.FinishedOn = &utc + } + return m +} + +// byproducts contains the pipelineRunResults +func byproducts(pro *objects.PipelineRunObjectV1) ([]slsa.ResourceDescriptor, error) { + byProd := []slsa.ResourceDescriptor{} + for _, key := range pro.Status.Results { + content, err := json.Marshal(key.Value) + if err != nil { + return nil, err + } + bp := slsa.ResourceDescriptor{ + Name: fmt.Sprintf(pipelineRunResults, key.Name), + Content: content, + MediaType: JsonMediaType, + } + byProd = append(byProd, bp) + } + return byProd, nil +} + +// getBuildDefinition get the buildDefinition based on the configured buildType. 
This will default to the slsa buildType +func getBuildDefinition(ctx context.Context, slsaconfig *slsaconfig.SlsaConfig, pro *objects.PipelineRunObjectV1) (slsa.ProvenanceBuildDefinition, error) { + // if buildType is not set in the chains-config, default to slsa build type + buildDefinitionType := slsaconfig.BuildType + if slsaconfig.BuildType == "" { + buildDefinitionType = buildtypes.SlsaBuildType + } + + switch buildDefinitionType { + case buildtypes.SlsaBuildType: + rd, err := resolveddependencies.PipelineRun(ctx, pro, slsaconfig, resolveddependencies.AddSLSATaskDescriptor) + if err != nil { + return slsa.ProvenanceBuildDefinition{}, err + } + return slsa.ProvenanceBuildDefinition{ + BuildType: buildDefinitionType, + ExternalParameters: externalparameters.PipelineRun(pro), + InternalParameters: internalparameters.SLSAInternalParameters(pro), + ResolvedDependencies: rd, + }, nil + case buildtypes.TektonBuildType: + rd, err := resolveddependencies.PipelineRun(ctx, pro, slsaconfig, resolveddependencies.AddTektonTaskDescriptor) + if err != nil { + return slsa.ProvenanceBuildDefinition{}, err + } + return slsa.ProvenanceBuildDefinition{ + BuildType: buildDefinitionType, + ExternalParameters: externalparameters.PipelineRun(pro), + InternalParameters: internalparameters.TektonInternalParameters(pro), + ResolvedDependencies: rd, + }, nil + default: + return slsa.ProvenanceBuildDefinition{}, fmt.Errorf("unsupported buildType %v", buildDefinitionType) + } +} diff --git a/pkg/chains/formats/slsa/v2alpha3/internal/pipelinerun/pipelinerun_test.go b/pkg/chains/formats/slsa/v2alpha3/internal/pipelinerun/pipelinerun_test.go new file mode 100644 index 0000000000..43d1b56db2 --- /dev/null +++ b/pkg/chains/formats/slsa/v2alpha3/internal/pipelinerun/pipelinerun_test.go @@ -0,0 +1,360 @@ +/* +Copyright 2021 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package pipelinerun + +import ( + "context" + "encoding/json" + "testing" + "time" + + "github.com/google/go-cmp/cmp" + "github.com/in-toto/in-toto-golang/in_toto" + "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common" + slsa "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v1" + v1resourcedescriptor "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v1" + "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/compare" + "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/slsaconfig" + externalparameters "github.com/tektoncd/chains/pkg/chains/formats/slsa/v2alpha3/internal/external_parameters" + internalparameters "github.com/tektoncd/chains/pkg/chains/formats/slsa/v2alpha3/internal/internal_parameters" + resolveddependencies "github.com/tektoncd/chains/pkg/chains/formats/slsa/v2alpha3/internal/resolved_dependencies" + "github.com/tektoncd/chains/pkg/chains/objects" + "github.com/tektoncd/chains/pkg/internal/objectloader" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + logtesting "knative.dev/pkg/logging/testing" +) + +func TestMetadata(t *testing.T) { + pr := &v1.PipelineRun{ //nolint:staticcheck + ObjectMeta: metav1.ObjectMeta{ + Name: "my-taskrun", + Namespace: "my-namespace", + Annotations: map[string]string{ + "chains.tekton.dev/reproducible": "true", + }, + UID: "abhhf-12354-asjsdbjs23-3435353n", + }, + Status: v1.PipelineRunStatus{ + PipelineRunStatusFields: v1.PipelineRunStatusFields{ + StartTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 
12, 12, 12, time.UTC)}, + CompletionTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, time.UTC)}, + }, + }, + } + start := time.Date(1995, time.December, 24, 6, 12, 12, 12, time.UTC) + end := time.Date(1995, time.December, 24, 6, 12, 12, 24, time.UTC) + want := slsa.BuildMetadata{ + InvocationID: "abhhf-12354-asjsdbjs23-3435353n", + StartedOn: &start, + FinishedOn: &end, + } + got := metadata(objects.NewPipelineRunObjectV1(pr)) + if d := cmp.Diff(want, got); d != "" { + t.Fatalf("metadata (-want, +got):\n%s", d) + } +} + +func TestMetadataInTimeZone(t *testing.T) { + tz := time.FixedZone("Test Time", int((12 * time.Hour).Seconds())) + pr := &v1.PipelineRun{ //nolint:staticcheck + ObjectMeta: metav1.ObjectMeta{ + Name: "my-taskrun", + Namespace: "my-namespace", + Annotations: map[string]string{ + "chains.tekton.dev/reproducible": "true", + }, + UID: "abhhf-12354-asjsdbjs23-3435353n", + }, + Status: v1.PipelineRunStatus{ + PipelineRunStatusFields: v1.PipelineRunStatusFields{ + StartTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 12, tz)}, + CompletionTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, tz)}, + }, + }, + } + start := time.Date(1995, time.December, 24, 6, 12, 12, 12, tz).UTC() + end := time.Date(1995, time.December, 24, 6, 12, 12, 24, tz).UTC() + want := slsa.BuildMetadata{ + InvocationID: "abhhf-12354-asjsdbjs23-3435353n", + StartedOn: &start, + FinishedOn: &end, + } + got := metadata(objects.NewPipelineRunObjectV1(pr)) + if d := cmp.Diff(want, got); d != "" { + t.Fatalf("metadata (-want, +got):\n%s", d) + } +} + +func TestByProducts(t *testing.T) { + resultValue := v1.ResultValue{Type: "string", StringVal: "result-value"} + pr := &v1.PipelineRun{ //nolint:staticcheck + Status: v1.PipelineRunStatus{ + PipelineRunStatusFields: v1.PipelineRunStatusFields{ + Results: []v1.PipelineRunResult{ + { + Name: "result-name", + Value: resultValue, + }, + }, + }, + }, + } + + resultBytes, err := 
json.Marshal(resultValue) + if err != nil { + t.Fatalf("Could not marshal results: %s", err) + } + want := []slsa.ResourceDescriptor{ + { + Name: "pipelineRunResults/result-name", + Content: resultBytes, + MediaType: JsonMediaType, + }, + } + got, err := byproducts(objects.NewPipelineRunObjectV1(pr)) + if err != nil { + t.Fatalf("Could not extract byproducts: %s", err) + } + if d := cmp.Diff(want, got); d != "" { + t.Fatalf("byproducts (-want, +got):\n%s", d) + } +} + +func createPro(path string) *objects.PipelineRunObjectV1 { + pr, err := objectloader.PipelineRunFromFile(path) + if err != nil { + panic(err) + } + tr1, err := objectloader.TaskRunFromFile("../../../testdata/v2alpha3/taskrun1.json") + if err != nil { + panic(err) + } + tr2, err := objectloader.TaskRunFromFile("../../../testdata/v2alpha3/taskrun2.json") + if err != nil { + panic(err) + } + p := objects.NewPipelineRunObjectV1(pr) + p.AppendTaskRun(tr1) + p.AppendTaskRun(tr2) + return p +} + +func TestGenerateAttestation(t *testing.T) { + ctx := logtesting.TestContextWithLogger(t) + pr := createPro("../../../testdata/v2alpha3/pipelinerun1.json") + + e1BuildStart := time.Unix(1617011400, 0) + e1BuildFinished := time.Unix(1617011415, 0) + + want := in_toto.ProvenanceStatementSLSA1{ + StatementHeader: in_toto.StatementHeader{ + Type: in_toto.StatementInTotoV01, + PredicateType: slsa.PredicateSLSAProvenance, + Subject: []in_toto.Subject{ + { + Name: "test.io/test/image", + Digest: common.DigestSet{ + "sha256": "827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7", + }, + }, + }, + }, + Predicate: slsa.ProvenancePredicate{ + BuildDefinition: slsa.ProvenanceBuildDefinition{ + BuildType: "https://tekton.dev/chains/v2/slsa", + ExternalParameters: map[string]any{ + "runSpec": pr.Spec, + }, + InternalParameters: map[string]any{}, + ResolvedDependencies: []slsa.ResourceDescriptor{ + { + URI: "git+https://github.com/test", + Digest: common.DigestSet{"sha1": "28b123"}, + Name: "pipeline", + }, + { + 
URI: "git+https://github.com/catalog", + Digest: common.DigestSet{"sha1": "x123"}, + Name: "pipelineTask", + }, + { + URI: "oci://gcr.io/test1/test1", + Digest: common.DigestSet{"sha256": "d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6"}, + }, + { + URI: "git+https://github.com/test", + Digest: common.DigestSet{"sha1": "ab123"}, + Name: "pipelineTask", + }, + { + URI: "oci://gcr.io/test2/test2", + Digest: common.DigestSet{"sha256": "4d6dd704ef58cb214dd826519929e92a978a57cdee43693006139c0080fd6fac"}, + }, + { + URI: "oci://gcr.io/test3/test3", + Digest: common.DigestSet{"sha256": "f1a8b8549c179f41e27ff3db0fe1a1793e4b109da46586501a8343637b1d0478"}, + }, + { + URI: "abc", + Digest: common.DigestSet{"sha256": "827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7"}, + Name: "inputs/result", + }, + {Name: "inputs/result", URI: "git+https://git.test.com.git", Digest: common.DigestSet{"sha1": "abcd"}}, + }, + }, + RunDetails: slsa.ProvenanceRunDetails{ + Builder: slsa.Builder{ + ID: "test_builder-1", + }, + BuildMetadata: slsa.BuildMetadata{ + InvocationID: "abhhf-12354-asjsdbjs23-3435353n", + StartedOn: &e1BuildStart, + FinishedOn: &e1BuildFinished, + }, + Byproducts: []slsa.ResourceDescriptor{ + { + Name: "pipelineRunResults/CHAINS-GIT_COMMIT", + Content: []uint8(`"abcd"`), + MediaType: JsonMediaType, + }, { + Name: "pipelineRunResults/CHAINS-GIT_URL", + Content: []uint8(`"https://git.test.com"`), + MediaType: JsonMediaType, + }, { + Name: "pipelineRunResults/IMAGE_URL", + Content: []uint8(`"test.io/test/image"`), + MediaType: JsonMediaType, + }, { + Name: "pipelineRunResults/IMAGE_DIGEST", + Content: []uint8(`"sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7"`), + MediaType: JsonMediaType, + }, { + Name: "pipelineRunResults/img-ARTIFACT_INPUTS", + Content: []uint8(`{"digest":"sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7","uri":"abc"}`), + MediaType: JsonMediaType, + }, { + Name: 
"pipelineRunResults/img2-ARTIFACT_OUTPUTS", + Content: []uint8(`{"digest":"sha256:","uri":"def"}`), + MediaType: JsonMediaType, + }, { + Name: "pipelineRunResults/img_no_uri-ARTIFACT_OUTPUTS", + Content: []uint8(`{"digest":"sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7"}`), + MediaType: JsonMediaType, + }, + }, + }, + }, + } + + got, err := GenerateAttestation(ctx, pr, &slsaconfig.SlsaConfig{ + BuilderID: "test_builder-1", + DeepInspectionEnabled: false, + BuildType: "https://tekton.dev/chains/v2/slsa", + }) + + if err != nil { + t.Errorf("unwant error: %s", err.Error()) + } + if diff := cmp.Diff(want, got, compare.SLSAV1CompareOptions()...); diff != "" { + t.Errorf("GenerateAttestation(): -want +got: %s", diff) + } +} + +func getResolvedDependencies(addTasks func(*objects.TaskRunObjectV1) (*v1resourcedescriptor.ResourceDescriptor, error)) []v1resourcedescriptor.ResourceDescriptor { //nolint:staticcheck + pr := createPro("../../../testdata/v2alpha3/pipelinerun1.json") + rd, err := resolveddependencies.PipelineRun(context.Background(), pr, &slsaconfig.SlsaConfig{DeepInspectionEnabled: false}, addTasks) + if err != nil { + return []v1resourcedescriptor.ResourceDescriptor{} + } + return rd +} + +func TestGetBuildDefinition(t *testing.T) { + pr := createPro("../../../testdata/v2alpha3/pipelinerun1.json") + pr.Annotations = map[string]string{ + "annotation1": "annotation1", + } + pr.Labels = map[string]string{ + "label1": "label1", + } + tests := []struct { + name string + taskContent func(*objects.TaskRunObjectV1) (*v1resourcedescriptor.ResourceDescriptor, error) //nolint:staticcheck + config *slsaconfig.SlsaConfig + want slsa.ProvenanceBuildDefinition + }{ + { + name: "test slsa build type", + taskContent: resolveddependencies.AddSLSATaskDescriptor, + config: &slsaconfig.SlsaConfig{BuildType: "https://tekton.dev/chains/v2/slsa"}, + want: slsa.ProvenanceBuildDefinition{ + BuildType: "https://tekton.dev/chains/v2/slsa", + ExternalParameters: 
externalparameters.PipelineRun(pr), + InternalParameters: internalparameters.SLSAInternalParameters(pr), + ResolvedDependencies: getResolvedDependencies(resolveddependencies.AddSLSATaskDescriptor), + }, + }, + { + name: "test tekton build type", + config: &slsaconfig.SlsaConfig{BuildType: "https://tekton.dev/chains/v2/slsa-tekton"}, + taskContent: resolveddependencies.AddSLSATaskDescriptor, + want: slsa.ProvenanceBuildDefinition{ + BuildType: "https://tekton.dev/chains/v2/slsa-tekton", + ExternalParameters: externalparameters.PipelineRun(pr), + InternalParameters: internalparameters.TektonInternalParameters(pr), + ResolvedDependencies: getResolvedDependencies(resolveddependencies.AddTektonTaskDescriptor), + }, + }, + { + name: "test default build type", + config: &slsaconfig.SlsaConfig{BuildType: "https://tekton.dev/chains/v2/slsa"}, + taskContent: resolveddependencies.AddSLSATaskDescriptor, + want: slsa.ProvenanceBuildDefinition{ + BuildType: "https://tekton.dev/chains/v2/slsa", + ExternalParameters: externalparameters.PipelineRun(pr), + InternalParameters: internalparameters.SLSAInternalParameters(pr), + ResolvedDependencies: getResolvedDependencies(resolveddependencies.AddSLSATaskDescriptor), + }, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + bd, err := getBuildDefinition(context.Background(), tc.config, pr) + if err != nil { + t.Fatalf("Did not expect an error but got %v", err) + } + + if diff := cmp.Diff(tc.want, bd); diff != "" { + t.Errorf("getBuildDefinition(): -want +got: %v", diff) + } + }) + } +} + +func TestUnsupportedBuildType(t *testing.T) { + pr := createPro("../../../testdata/v2alpha3/pipelinerun1.json") + + got, err := getBuildDefinition(context.Background(), &slsaconfig.SlsaConfig{BuildType: "bad-buildtype"}, pr) + if err == nil { + t.Error("getBuildDefinition(): expected error got nil") + } + if diff := cmp.Diff(slsa.ProvenanceBuildDefinition{}, got); diff != "" { + t.Errorf("getBuildDefinition(): -want +got: 
%s", diff) + } +} diff --git a/pkg/chains/formats/slsa/v2alpha3/internal/resolved_dependencies/resolved_dependencies.go b/pkg/chains/formats/slsa/v2alpha3/internal/resolved_dependencies/resolved_dependencies.go new file mode 100644 index 0000000000..ced8a4fa53 --- /dev/null +++ b/pkg/chains/formats/slsa/v2alpha3/internal/resolved_dependencies/resolved_dependencies.go @@ -0,0 +1,295 @@ +/* +Copyright 2023 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package resolveddependencies + +import ( + "context" + "encoding/json" + "fmt" + + "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common" + slsa "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v1" + "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/material" + "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/slsaconfig" + "github.com/tektoncd/chains/pkg/chains/objects" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" + "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + "go.uber.org/zap" + "knative.dev/pkg/logging" +) + +const ( + // pipelineConfigName is the name of the resolved dependency of the pipelineRef. + pipelineConfigName = "pipeline" + // taskConfigName is the name of the resolved dependency of the top level taskRef. + taskConfigName = "task" + // pipelineTaskConfigName is the name of the resolved dependency of the pipeline task. 
+ pipelineTaskConfigName = "pipelineTask" + // inputResultName is the name of the resolved dependency generated from Type hinted parameters or results. + inputResultName = "inputs/result" + // pipelineResourceName is the name of the resolved dependency of pipeline resource. + pipelineResourceName = "pipelineResource" +) + +// used to toggle the fields in resolvedDependencies. see AddTektonTaskDescriptor +// and AddSLSATaskDescriptor +type addTaskDescriptorContent func(*objects.TaskRunObjectV1) (*slsa.ResourceDescriptor, error) //nolint:staticcheck + +// the more verbose resolved dependency content. this adds the name, uri, digest +// and content if possible. +func AddTektonTaskDescriptor(tr *objects.TaskRunObjectV1) (*slsa.ResourceDescriptor, error) { //nolint:staticcheck + rd := slsa.ResourceDescriptor{} + storedTr, err := json.Marshal(tr) + if err != nil { + return nil, err + } + + rd.Name = pipelineTaskConfigName + rd.Content = storedTr + if tr.Status.Provenance != nil && tr.Status.Provenance.RefSource != nil { + rd.URI = tr.Status.Provenance.RefSource.URI + rd.Digest = tr.Status.Provenance.RefSource.Digest + } + + return &rd, nil +} + +// resolved dependency content for the more generic slsa verifiers. just logs +// the name, uri and digest. 
+func AddSLSATaskDescriptor(tr *objects.TaskRunObjectV1) (*slsa.ResourceDescriptor, error) { //nolint:staticcheck + if tr.Status.Provenance != nil && tr.Status.Provenance.RefSource != nil { + return &slsa.ResourceDescriptor{ + Name: pipelineTaskConfigName, + URI: tr.Status.Provenance.RefSource.URI, + Digest: tr.Status.Provenance.RefSource.Digest, + }, nil + } + return nil, nil +} + +// convertMaterialsToResolvedDependencies converts SLSAv0.2 Materials to resolved dependencies +func convertMaterialsToResolvedDependencies(mats []common.ProvenanceMaterial, name string) []slsa.ResourceDescriptor { + rds := []slsa.ResourceDescriptor{} + for _, mat := range mats { + rd := slsa.ResourceDescriptor{} + rd.URI = mat.URI + rd.Digest = mat.Digest + if len(name) > 0 { + rd.Name = name + } + rds = append(rds, rd) + } + return rds +} + +// removeDuplicateResolvedDependencies removes duplicate resolved dependencies from the slice of resolved dependencies. +// Original order of resolved dependencies is retained. +func removeDuplicateResolvedDependencies(resolvedDependencies []slsa.ResourceDescriptor) ([]slsa.ResourceDescriptor, error) { + out := make([]slsa.ResourceDescriptor, 0, len(resolvedDependencies)) + + // make map to store seen resolved dependencies + seen := map[string]bool{} + for _, resolvedDependency := range resolvedDependencies { + // Since resolvedDependencies contain names, we want to ignore those while checking for duplicates. + // Therefore, make a copy of the resolved dependency that only contains the uri and digest fields. + rDep := slsa.ResourceDescriptor{} + rDep.URI = resolvedDependency.URI + rDep.Digest = resolvedDependency.Digest + // pipelineTasks store content with the slsa-tekton buildType + rDep.Content = resolvedDependency.Content + // This allows us to ignore dependencies that have the same uri and digest. 
+ rd, err := json.Marshal(rDep) + if err != nil { + return nil, err + } + if seen[string(rd)] { + // We dont want to remove the top level pipeline/task config from the resolved dependencies + // because its critical to provide that information in the provenance. In SLSAv0.2 spec, + // we would put this in invocation.ConfigSource. In order to ensure that it is present in + // the resolved dependencies, we dont want to skip it if another resolved dependency from the same + // uri+digest pair was already included before. + if !(resolvedDependency.Name == taskConfigName || resolvedDependency.Name == pipelineConfigName) { + continue + } + } + seen[string(rd)] = true + out = append(out, resolvedDependency) + } + return out, nil +} + +// fromPipelineTask adds the resolved dependencies from pipeline tasks +// such as pipeline task uri/digest for remote pipeline tasks and step and sidecar images. +func fromPipelineTask(logger *zap.SugaredLogger, pro *objects.PipelineRunObjectV1, addTasks addTaskDescriptorContent) ([]slsa.ResourceDescriptor, error) { + pSpec := pro.Status.PipelineSpec + resolvedDependencies := []slsa.ResourceDescriptor{} + if pSpec != nil { + pipelineTasks := append(pSpec.Tasks, pSpec.Finally...) + for _, t := range pipelineTasks { + tr := pro.GetTaskRunFromTask(t.Name) + // Ignore Tasks that did not execute during the PipelineRun. + if tr == nil || tr.Status.CompletionTime == nil { + logger.Infof("taskrun status not found for task %s", t.Name) + continue + } + rd, err := addTasks(tr) + if err != nil { + logger.Errorf("error storing taskRun %s, error: %s", t.Name, err) + continue + } + + if rd != nil { + resolvedDependencies = append(resolvedDependencies, *rd) + } + + mats := []common.ProvenanceMaterial{} + + // add step images + stepMaterials, err := material.FromStepImages(tr) + if err != nil { + return nil, err + } + mats = append(mats, stepMaterials...) 
+ + // add sidecar images + sidecarMaterials, err := material.FromSidecarImages(tr) + if err != nil { + return nil, err + } + mats = append(mats, sidecarMaterials...) + + // convert materials to resolved dependencies + resolvedDependencies = append(resolvedDependencies, convertMaterialsToResolvedDependencies(mats, "")...) + } + } + return resolvedDependencies, nil +} + +// taskDependencies gather all dependencies in a task and adds them to resolvedDependencies +func taskDependencies(ctx context.Context, tro *objects.TaskRunObjectV1) ([]slsa.ResourceDescriptor, error) { + var resolvedDependencies []slsa.ResourceDescriptor + var err error + mats := []common.ProvenanceMaterial{} + + // add step and sidecar images + stepMaterials, err := material.FromStepImages(tro) + mats = append(mats, stepMaterials...) + if err != nil { + return nil, err + } + sidecarMaterials, err := material.FromSidecarImages(tro) + if err != nil { + return nil, err + } + mats = append(mats, sidecarMaterials...) + resolvedDependencies = append(resolvedDependencies, convertMaterialsToResolvedDependencies(mats, "")...) + + mats = material.FromTaskParamsAndResults(ctx, tro) + // convert materials to resolved dependencies + resolvedDependencies = append(resolvedDependencies, convertMaterialsToResolvedDependencies(mats, inputResultName)...) 
+ + // add task resources + // ===== + // convert to v1beta1 and add any task resources + serializedResources := tro.Annotations["tekton.dev/v1beta1-spec-resources"] + var resources v1beta1.TaskRunResources //nolint:staticcheck + shouldReplace := false + if err := json.Unmarshal([]byte(serializedResources), &resources); err == nil { + shouldReplace = true + + } + trV1Beta1 := &v1beta1.TaskRun{} //nolint:staticcheck + fmt.Printf("%v", tro.GetObject().(*v1.TaskRun)) // TODO(review): leftover debug print — remove this line (and the now-unused "fmt" import) before merging + if err := trV1Beta1.ConvertFrom(ctx, tro.GetObject().(*v1.TaskRun)); err == nil { + if shouldReplace { + trV1Beta1.Spec.Resources = &resources //nolint:staticcheck + } + mats = material.FromTaskResources(ctx, trV1Beta1) + + } + + // convert materials to resolved dependencies + resolvedDependencies = append(resolvedDependencies, convertMaterialsToResolvedDependencies(mats, pipelineResourceName)...) + + // remove duplicate resolved dependencies + resolvedDependencies, err = removeDuplicateResolvedDependencies(resolvedDependencies) + if err != nil { + return nil, err + } + + return resolvedDependencies, nil +} + +// TaskRun constructs `predicate.resolvedDependencies` section by collecting all the artifacts that influence a taskrun such as source code repo and step&sidecar base images. +func TaskRun(ctx context.Context, tro *objects.TaskRunObjectV1) ([]slsa.ResourceDescriptor, error) { + var resolvedDependencies []slsa.ResourceDescriptor + var err error + + // add top level task config + if p := tro.Status.Provenance; p != nil && p.RefSource != nil { + rd := slsa.ResourceDescriptor{ + Name: taskConfigName, + URI: p.RefSource.URI, + Digest: p.RefSource.Digest, + } + resolvedDependencies = append(resolvedDependencies, rd) + } + + rds, err := taskDependencies(ctx, tro) + if err != nil { + return nil, err + } + resolvedDependencies = append(resolvedDependencies, rds...) 
+ + return resolvedDependencies, nil +} + +// PipelineRun constructs `predicate.resolvedDependencies` section by collecting all the artifacts that influence a pipeline run such as source code repo and step&sidecar base images. +func PipelineRun(ctx context.Context, pro *objects.PipelineRunObjectV1, slsaconfig *slsaconfig.SlsaConfig, addTasks addTaskDescriptorContent) ([]slsa.ResourceDescriptor, error) { + var err error + var resolvedDependencies []slsa.ResourceDescriptor + logger := logging.FromContext(ctx) + + // add pipeline config to resolved dependencies + if p := pro.Status.Provenance; p != nil && p.RefSource != nil { + rd := slsa.ResourceDescriptor{ + Name: pipelineConfigName, + URI: p.RefSource.URI, + Digest: p.RefSource.Digest, + } + resolvedDependencies = append(resolvedDependencies, rd) + } + + // add resolved dependencies from pipeline tasks + rds, err := fromPipelineTask(logger, pro, addTasks) + if err != nil { + return nil, err + } + resolvedDependencies = append(resolvedDependencies, rds...) + + // add resolved dependencies from pipeline results + mats := material.FromPipelineParamsAndResults(ctx, pro, slsaconfig) + // convert materials to resolved dependencies + resolvedDependencies = append(resolvedDependencies, convertMaterialsToResolvedDependencies(mats, inputResultName)...) 
+ + // remove duplicate resolved dependencies + resolvedDependencies, err = removeDuplicateResolvedDependencies(resolvedDependencies) + if err != nil { + return nil, err + } + return resolvedDependencies, nil +} diff --git a/pkg/chains/formats/slsa/v2alpha3/internal/resolved_dependencies/resolved_dependencies_test.go b/pkg/chains/formats/slsa/v2alpha3/internal/resolved_dependencies/resolved_dependencies_test.go new file mode 100644 index 0000000000..7042a12819 --- /dev/null +++ b/pkg/chains/formats/slsa/v2alpha3/internal/resolved_dependencies/resolved_dependencies_test.go @@ -0,0 +1,647 @@ +/* +Copyright 2023 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package resolveddependencies + +import ( + "encoding/json" + "strings" + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common" + v1slsa "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v1" + "github.com/tektoncd/chains/internal/backport" + "github.com/tektoncd/chains/pkg/artifacts" + "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/compare" + "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/slsaconfig" + "github.com/tektoncd/chains/pkg/chains/objects" + "github.com/tektoncd/chains/pkg/internal/objectloader" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" + "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + "github.com/tektoncd/pipeline/pkg/apis/resource/v1alpha1" + logtesting "knative.dev/pkg/logging/testing" +) + +const digest = "sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b7" + +var pro *objects.PipelineRunObjectV1 +var proStructuredResults *objects.PipelineRunObjectV1 + +func init() { + pro = createPro("../../../testdata/v2alpha3/pipelinerun1.json") + proStructuredResults = createPro("../../../testdata/v2alpha3/pipelinerun_structured_results.json") +} + +func createPro(path string) *objects.PipelineRunObjectV1 { + var err error + pr, err := objectloader.PipelineRunFromFile(path) + if err != nil { + panic(err) + } + tr1, err := objectloader.TaskRunFromFile("../../../testdata/v2alpha3/taskrun1.json") + if err != nil { + panic(err) + } + tr2, err := objectloader.TaskRunFromFile("../../../testdata/v2alpha3/taskrun2.json") + if err != nil { + panic(err) + } + p := objects.NewPipelineRunObjectV1(pr) + p.AppendTaskRun(tr1) + p.AppendTaskRun(tr2) + return p +} + +func tektonTaskRuns() map[string][]byte { + trs := make(map[string][]byte) + tr1, err := objectloader.TaskRunFromFile("../../../testdata/v2alpha3/taskrun1.json") + if err != nil { + panic(err) + } + tr2, err := 
objectloader.TaskRunFromFile("../../../testdata/v2alpha3/taskrun2.json") + if err != nil { + panic(err) + } + + tr1Desc, err := json.Marshal(tr1) + if err != nil { + panic(err) + } + trs[tr1.Name] = tr1Desc + + tr2Desc, err := json.Marshal(tr2) + if err != nil { + panic(err) + } + trs[tr2.Name] = tr2Desc + + return trs +} + +func TestRemoveDuplicates(t *testing.T) { + tests := []struct { + name string + rds []v1slsa.ResourceDescriptor + want []v1slsa.ResourceDescriptor + }{{ + name: "no duplicate resolvedDependencies", + rds: []v1slsa.ResourceDescriptor{ + { + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, { + URI: "oci://gcr.io/cloud-marketplace-containers/google/bazel", + Digest: common.DigestSet{ + "sha256": "010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + }, + }, { + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/sidecar-git-init", + Digest: common.DigestSet{ + "sha256": "a1234f6e7a69617db57b685893256f978436277094c21d43b153994acd8a09567", + }, + }, + }, + want: []v1slsa.ResourceDescriptor{ + { + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, { + URI: "oci://gcr.io/cloud-marketplace-containers/google/bazel", + Digest: common.DigestSet{ + "sha256": "010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + }, + }, { + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/sidecar-git-init", + Digest: common.DigestSet{ + "sha256": "a1234f6e7a69617db57b685893256f978436277094c21d43b153994acd8a09567", + }, + }, + }, + }, { + name: "same uri and digest", + rds: []v1slsa.ResourceDescriptor{ + { + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + 
"sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, { + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, + }, + want: []v1slsa.ResourceDescriptor{ + { + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, + }, + }, { + name: "same uri but different digest", + rds: []v1slsa.ResourceDescriptor{ + { + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, { + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01248", + }, + }, + }, + want: []v1slsa.ResourceDescriptor{ + { + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, { + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01248", + }, + }, + }, + }, { + name: "same uri but different digest, swap order", + rds: []v1slsa.ResourceDescriptor{ + { + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01248", + }, + }, { + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, + }, + want: 
[]v1slsa.ResourceDescriptor{ + { + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01248", + }, + }, { + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, + }, + }, { + name: "task config must be present", + rds: []v1slsa.ResourceDescriptor{ + { + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01248", + }, + }, { + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, { + Name: "task", + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, + }, + want: []v1slsa.ResourceDescriptor{ + { + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01248", + }, + }, { + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, { + Name: "task", + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, + }, + }, { + name: "pipeline config must be present", + rds: []v1slsa.ResourceDescriptor{ + { + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: 
common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01248", + }, + }, { + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, { + Name: "pipeline", + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, + }, + want: []v1slsa.ResourceDescriptor{ + { + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01248", + }, + }, { + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, { + Name: "pipeline", + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, + }, + }} + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + rds, err := removeDuplicateResolvedDependencies(tc.rds) + if err != nil { + t.Fatalf("Did not expect an error but got %v", err) + } + if diff := cmp.Diff(tc.want, rds); diff != "" { + t.Errorf("resolvedDependencies(): -want +got: %s", diff) + } + }) + } +} + +func TestTaskRun(t *testing.T) { + tests := []struct { + name string + obj objects.TektonObject //nolint:staticcheck + want []v1slsa.ResourceDescriptor + }{ + { + name: "resolvedDependencies from pipeline resources", + obj: objects.NewTaskRunObjectV1Beta1(&v1beta1.TaskRun{ //nolint:staticcheck + Spec: v1beta1.TaskRunSpec{ + Resources: &v1beta1.TaskRunResources{ //nolint:all //incompatible with pipelines v0.45 + Inputs: []v1beta1.TaskResourceBinding{ 
//nolint:all //incompatible with pipelines v0.45 + { + PipelineResourceBinding: v1beta1.PipelineResourceBinding{ //nolint:all //incompatible with pipelines v0.45 + Name: "nil-resource-spec", + }, + }, { + PipelineResourceBinding: v1beta1.PipelineResourceBinding{ //nolint:all //incompatible with pipelines v0.45 + Name: "repo", + ResourceSpec: &v1alpha1.PipelineResourceSpec{ //nolint:all //incompatible with pipelines v0.45 + Params: []v1alpha1.ResourceParam{ //nolint:all //incompatible with pipelines v0.45 + {Name: "url", Value: "https://github.com/GoogleContainerTools/distroless"}, + }, + Type: backport.PipelineResourceTypeGit, + }, + }, + }, + }, + }, + }, + Status: v1beta1.TaskRunStatus{ + TaskRunStatusFields: v1beta1.TaskRunStatusFields{ + TaskRunResults: []v1beta1.TaskRunResult{ + { + Name: "img1_input" + "-" + artifacts.ArtifactsInputsResultName, + Value: *v1beta1.NewObject(map[string]string{ + "uri": "gcr.io/foo/bar", + "digest": digest, + }), + }, + }, + ResourcesResult: []v1beta1.PipelineResourceResult{ + { + ResourceName: "repo", + Key: "commit", + Value: "50c56a48cfb3a5a80fa36ed91c739bdac8381cbe", + }, { + ResourceName: "repo", + Key: "url", + Value: "https://github.com/GoogleContainerTools/distroless", + }, + }, + }, + }, + }), + want: []v1slsa.ResourceDescriptor{ + { + Name: "inputs/result", + URI: "gcr.io/foo/bar", + Digest: common.DigestSet{ + "sha256": strings.TrimPrefix(digest, "sha256:"), + }, + }, + { + Name: "pipelineResource", + URI: "git+https://github.com/GoogleContainerTools/distroless.git", + Digest: common.DigestSet{ + "sha1": "50c56a48cfb3a5a80fa36ed91c739bdac8381cbe", + }, + }, + }, + }, + { + name: "resolvedDependencies from remote task", + obj: objects.NewTaskRunObjectV1(&v1.TaskRun{ //nolint:staticcheck + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Provenance: &v1.Provenance{ + RefSource: &v1.RefSource{ + URI: "git+github.com/something.git", + Digest: map[string]string{ + "sha1": "abcd1234", + }, + }, + }, 
+ }, + }, + }), + want: []v1slsa.ResourceDescriptor{ + { + Name: "task", + URI: "git+github.com/something.git", + Digest: common.DigestSet{ + "sha1": "abcd1234", + }, + }, + }, + }, + { + name: "git resolvedDependencies from taskrun params", + obj: objects.NewTaskRunObjectV1(&v1.TaskRun{ //nolint:staticcheck + Spec: v1.TaskRunSpec{ + Params: []v1.Param{{ + Name: "CHAINS-GIT_COMMIT", + Value: *v1.NewStructuredValues("my-commit"), + }, { + Name: "CHAINS-GIT_URL", + Value: *v1.NewStructuredValues("github.com/something"), + }}, + }, + }), + want: []v1slsa.ResourceDescriptor{ + { + Name: "inputs/result", + URI: "git+github.com/something.git", + Digest: common.DigestSet{ + "sha1": "my-commit", + }, + }, + }, + }, + { + name: "resolvedDependencies from step images", + obj: objects.NewTaskRunObjectV1(&v1.TaskRun{ //nolint:staticcheck + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Steps: []v1.StepState{{ + Name: "git-source-repo-jwqcl", + ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, { + Name: "git-source-repo-repeat-again-jwqcl", + ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, { + Name: "build", + ImageID: "gcr.io/cloud-marketplace-containers/google/bazel@sha256:010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + }}, + }, + }, + }), + want: []v1slsa.ResourceDescriptor{ + { + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, + { + URI: "oci://gcr.io/cloud-marketplace-containers/google/bazel", + Digest: common.DigestSet{ + "sha256": "010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + }, + }, + }, + }, + { + name: "resolvedDependencies from step and 
sidecar images", + obj: objects.NewTaskRunObjectV1(&v1.TaskRun{ //nolint:staticcheck + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Steps: []v1.StepState{{ + Name: "git-source-repo-jwqcl", + ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, { + Name: "git-source-repo-repeat-again-jwqcl", + ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, { + Name: "build", + ImageID: "gcr.io/cloud-marketplace-containers/google/bazel@sha256:010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + }}, + Sidecars: []v1.SidecarState{{ + Name: "sidecar-jwqcl", + ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/sidecar-git-init@sha256:a1234f6e7a69617db57b685893256f978436277094c21d43b153994acd8a09567", + }}, + }, + }, + }), + want: []v1slsa.ResourceDescriptor{ + { + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, { + URI: "oci://gcr.io/cloud-marketplace-containers/google/bazel", + Digest: common.DigestSet{ + "sha256": "010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + }, + }, { + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/sidecar-git-init", + Digest: common.DigestSet{ + "sha256": "a1234f6e7a69617db57b685893256f978436277094c21d43b153994acd8a09567", + }, + }, + }, + }} + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + ctx := logtesting.TestContextWithLogger(t) + var input *objects.TaskRunObjectV1 + var err error + if obj, ok := tc.obj.(*objects.TaskRunObjectV1); ok { + input = obj + } + + if trV1Beta1, ok := tc.obj.GetObject().(*v1beta1.TaskRun); ok { //nolint:staticcheck + trV1 := &v1.TaskRun{} + if err := 
trV1Beta1.ConvertTo(ctx, trV1); err == nil { + if trV1Beta1.Spec.Resources != nil { //nolint:staticcheck + jsonData, err := json.Marshal(trV1Beta1.Spec.Resources) //nolint:staticcheck + if err != nil { + t.Errorf("Error serializing to JSON: %v", err) + } + trV1.Annotations["tekton.dev/v1beta1-spec-resources"] = string(jsonData) + } + input = objects.NewTaskRunObjectV1(trV1) + } + } + + rd, err := TaskRun(ctx, input) + if err != nil { + t.Fatalf("Did not expect an error but got %v", err) + } + if diff := cmp.Diff(tc.want, rd); diff != "" { + t.Errorf("ResolvedDependencies(): -want +got: %s", diff) + } + }) + } +} + +func TestPipelineRun(t *testing.T) { + taskRuns := tektonTaskRuns() + tests := []struct { + name string + taskDescriptor addTaskDescriptorContent + want []v1slsa.ResourceDescriptor + }{ + { + name: "test slsa build type", + taskDescriptor: AddSLSATaskDescriptor, + want: []v1slsa.ResourceDescriptor{ + {Name: "pipeline", URI: "git+https://github.com/test", Digest: common.DigestSet{"sha1": "28b123"}}, + {Name: "pipelineTask", URI: "git+https://github.com/catalog", Digest: common.DigestSet{"sha1": "x123"}}, + { + URI: "oci://gcr.io/test1/test1", + Digest: common.DigestSet{"sha256": "d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6"}, + }, + {Name: "pipelineTask", URI: "git+https://github.com/test", Digest: common.DigestSet{"sha1": "ab123"}}, + { + URI: "oci://gcr.io/test2/test2", + Digest: common.DigestSet{"sha256": "4d6dd704ef58cb214dd826519929e92a978a57cdee43693006139c0080fd6fac"}, + }, + { + URI: "oci://gcr.io/test3/test3", + Digest: common.DigestSet{"sha256": "f1a8b8549c179f41e27ff3db0fe1a1793e4b109da46586501a8343637b1d0478"}, + }, + {Name: "inputs/result", URI: "abc", Digest: common.DigestSet{"sha256": "827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7"}}, + {Name: "inputs/result", URI: "git+https://git.test.com.git", Digest: common.DigestSet{"sha1": "abcd"}}, + }, + }, + { + name: "test tekton build type", + 
taskDescriptor: AddTektonTaskDescriptor, + want: []v1slsa.ResourceDescriptor{ + {Name: "pipeline", URI: "git+https://github.com/test", Digest: common.DigestSet{"sha1": "28b123"}}, + {Name: "pipelineTask", URI: "git+https://github.com/catalog", Digest: common.DigestSet{"sha1": "x123"}, Content: taskRuns["git-clone"]}, + { + URI: "oci://gcr.io/test1/test1", + Digest: common.DigestSet{"sha256": "d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6"}, + }, + {Name: "pipelineTask", URI: "git+https://github.com/test", Digest: common.DigestSet{"sha1": "ab123"}, Content: taskRuns["taskrun-build"]}, + { + URI: "oci://gcr.io/test2/test2", + Digest: common.DigestSet{"sha256": "4d6dd704ef58cb214dd826519929e92a978a57cdee43693006139c0080fd6fac"}, + }, + { + URI: "oci://gcr.io/test3/test3", + Digest: common.DigestSet{"sha256": "f1a8b8549c179f41e27ff3db0fe1a1793e4b109da46586501a8343637b1d0478"}, + }, + {Name: "inputs/result", URI: "abc", Digest: common.DigestSet{"sha256": "827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7"}}, + {Name: "inputs/result", URI: "git+https://git.test.com.git", Digest: common.DigestSet{"sha1": "abcd"}}, + }, + }, + } + + ctx := logtesting.TestContextWithLogger(t) + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + got, err := PipelineRun(ctx, pro, &slsaconfig.SlsaConfig{DeepInspectionEnabled: false}, tc.taskDescriptor) + if err != nil { + t.Error(err) + } + if d := cmp.Diff(tc.want, got); d != "" { + t.Errorf("PipelineRunResolvedDependencies(): -want +got: %s", got) + } + }) + } +} + +func TestPipelineRunStructuredResult(t *testing.T) { + want := []v1slsa.ResourceDescriptor{ + {Name: "pipeline", URI: "git+https://github.com/test", Digest: common.DigestSet{"sha1": "28b123"}}, + {Name: "pipelineTask", URI: "git+https://github.com/catalog", Digest: common.DigestSet{"sha1": "x123"}}, + { + URI: "oci://gcr.io/test1/test1", + Digest: common.DigestSet{"sha256": 
"d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6"}, + }, + {Name: "pipelineTask", URI: "git+https://github.com/test", Digest: common.DigestSet{"sha1": "ab123"}}, + { + URI: "oci://gcr.io/test2/test2", + Digest: common.DigestSet{"sha256": "4d6dd704ef58cb214dd826519929e92a978a57cdee43693006139c0080fd6fac"}, + }, + { + URI: "oci://gcr.io/test3/test3", + Digest: common.DigestSet{"sha256": "f1a8b8549c179f41e27ff3db0fe1a1793e4b109da46586501a8343637b1d0478"}, + }, + { + Name: "inputs/result", + URI: "abc", + Digest: common.DigestSet{ + "sha256": "827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7", + }, + }, + { + URI: "git+https://git.test.com.git", + Digest: common.DigestSet{"sha1": "abcd"}, + Name: "inputs/result", + }, + } + ctx := logtesting.TestContextWithLogger(t) + got, err := PipelineRun(ctx, pro, &slsaconfig.SlsaConfig{DeepInspectionEnabled: false}, AddSLSATaskDescriptor) + if err != nil { + t.Errorf("error while extracting resolvedDependencies: %v", err) + } + if diff := cmp.Diff(want, got, compare.SLSAV1CompareOptions()...); diff != "" { + t.Errorf("resolvedDependencies(): -want +got: %s", diff) + } +} diff --git a/pkg/chains/formats/slsa/v2alpha3/internal/taskrun/taskrun.go b/pkg/chains/formats/slsa/v2alpha3/internal/taskrun/taskrun.go new file mode 100644 index 0000000000..53cf079606 --- /dev/null +++ b/pkg/chains/formats/slsa/v2alpha3/internal/taskrun/taskrun.go @@ -0,0 +1,133 @@ +/* +Copyright 2023 The Tekton Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +*/ + +package taskrun + +import ( + "context" + "encoding/json" + "fmt" + + intoto "github.com/in-toto/in-toto-golang/in_toto" + slsa "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v1" + "github.com/tektoncd/chains/pkg/chains/formats/slsa/extract" + "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/slsaconfig" + buildtypes "github.com/tektoncd/chains/pkg/chains/formats/slsa/v2alpha3/internal/build_types" + externalparameters "github.com/tektoncd/chains/pkg/chains/formats/slsa/v2alpha3/internal/external_parameters" + internalparameters "github.com/tektoncd/chains/pkg/chains/formats/slsa/v2alpha3/internal/internal_parameters" + resolveddependencies "github.com/tektoncd/chains/pkg/chains/formats/slsa/v2alpha3/internal/resolved_dependencies" + "github.com/tektoncd/chains/pkg/chains/objects" +) + +const taskRunResults = "taskRunResults/%s" + +// GenerateAttestation generates a provenance statement with SLSA v1.0 predicate for a task run. 
+func GenerateAttestation(ctx context.Context, tro *objects.TaskRunObjectV1, slsaConfig *slsaconfig.SlsaConfig) (interface{}, error) { + bp, err := byproducts(tro) + if err != nil { + return nil, err + } + + bd, err := getBuildDefinition(ctx, slsaConfig.BuildType, tro) + if err != nil { + return nil, err + } + + att := intoto.ProvenanceStatementSLSA1{ + StatementHeader: intoto.StatementHeader{ + Type: intoto.StatementInTotoV01, + PredicateType: slsa.PredicateSLSAProvenance, + Subject: extract.SubjectDigests(ctx, tro, slsaConfig), + }, + Predicate: slsa.ProvenancePredicate{ + BuildDefinition: bd, + RunDetails: slsa.ProvenanceRunDetails{ + Builder: slsa.Builder{ + ID: slsaConfig.BuilderID, + }, + BuildMetadata: metadata(tro), + Byproducts: bp, + }, + }, + } + return att, nil +} + +func metadata(tro *objects.TaskRunObjectV1) slsa.BuildMetadata { + m := slsa.BuildMetadata{ + InvocationID: string(tro.ObjectMeta.UID), + } + if tro.Status.StartTime != nil { + utc := tro.Status.StartTime.Time.UTC() + m.StartedOn = &utc + } + if tro.Status.CompletionTime != nil { + utc := tro.Status.CompletionTime.Time.UTC() + m.FinishedOn = &utc + } + return m +} + +// byproducts contains the taskRunResults +func byproducts(tro *objects.TaskRunObjectV1) ([]slsa.ResourceDescriptor, error) { + byProd := []slsa.ResourceDescriptor{} + for _, key := range tro.Status.Results { + content, err := json.Marshal(key.Value) + if err != nil { + return nil, err + } + bp := slsa.ResourceDescriptor{ + Name: fmt.Sprintf(taskRunResults, key.Name), + Content: content, + MediaType: "application/json", + } + byProd = append(byProd, bp) + } + return byProd, nil +} + +// getBuildDefinition get the buildDefinition based on the configured buildType. 
This will default to the slsa buildType +func getBuildDefinition(ctx context.Context, buildType string, tro *objects.TaskRunObjectV1) (slsa.ProvenanceBuildDefinition, error) { + // if buildType is not set in the chains-config, default to slsa build type + buildDefinitionType := buildType + if buildType == "" { + buildDefinitionType = buildtypes.SlsaBuildType + } + + switch buildDefinitionType { + case buildtypes.SlsaBuildType: + rd, err := resolveddependencies.TaskRun(ctx, tro) + if err != nil { + return slsa.ProvenanceBuildDefinition{}, err + } + return slsa.ProvenanceBuildDefinition{ + BuildType: buildDefinitionType, + ExternalParameters: externalparameters.TaskRun(tro), + InternalParameters: internalparameters.SLSAInternalParameters(tro), + ResolvedDependencies: rd, + }, nil + case buildtypes.TektonBuildType: + rd, err := resolveddependencies.TaskRun(ctx, tro) + if err != nil { + return slsa.ProvenanceBuildDefinition{}, err + } + return slsa.ProvenanceBuildDefinition{ + BuildType: buildDefinitionType, + ExternalParameters: externalparameters.TaskRun(tro), + InternalParameters: internalparameters.TektonInternalParameters(tro), + ResolvedDependencies: rd, + }, nil + default: + return slsa.ProvenanceBuildDefinition{}, fmt.Errorf("unsupported buildType %v", buildType) + } +} diff --git a/pkg/chains/formats/slsa/v2alpha3/internal/taskrun/taskrun_test.go b/pkg/chains/formats/slsa/v2alpha3/internal/taskrun/taskrun_test.go new file mode 100644 index 0000000000..7262b39691 --- /dev/null +++ b/pkg/chains/formats/slsa/v2alpha3/internal/taskrun/taskrun_test.go @@ -0,0 +1,332 @@ +/* +Copyright 2021 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package taskrun + +import ( + "context" + "encoding/json" + "testing" + "time" + + "github.com/google/go-cmp/cmp" + "github.com/in-toto/in-toto-golang/in_toto" + "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common" + slsa "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v1" + + v1resourcedescriptor "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v1" + "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/slsaconfig" + externalparameters "github.com/tektoncd/chains/pkg/chains/formats/slsa/v2alpha3/internal/external_parameters" + internalparameters "github.com/tektoncd/chains/pkg/chains/formats/slsa/v2alpha3/internal/internal_parameters" + "github.com/tektoncd/chains/pkg/chains/formats/slsa/v2alpha3/internal/pipelinerun" + resolveddependencies "github.com/tektoncd/chains/pkg/chains/formats/slsa/v2alpha3/internal/resolved_dependencies" + "github.com/tektoncd/chains/pkg/chains/objects" + "github.com/tektoncd/chains/pkg/internal/objectloader" + "github.com/tektoncd/pipeline/pkg/apis/config" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + logtesting "knative.dev/pkg/logging/testing" +) + +func TestMetadata(t *testing.T) { + tr := &v1.TaskRun{ //nolint:staticcheck + ObjectMeta: metav1.ObjectMeta{ + Name: "my-taskrun", + Namespace: "my-namespace", + Annotations: map[string]string{ + "chains.tekton.dev/reproducible": "true", + }, + UID: "abhhf-12354-asjsdbjs23-3435353n", + }, + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + StartTime: 
&metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 12, time.UTC)}, + CompletionTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, time.UTC)}, + }, + }, + } + start := time.Date(1995, time.December, 24, 6, 12, 12, 12, time.UTC) + end := time.Date(1995, time.December, 24, 6, 12, 12, 24, time.UTC) + want := slsa.BuildMetadata{ + InvocationID: "abhhf-12354-asjsdbjs23-3435353n", + StartedOn: &start, + FinishedOn: &end, + } + got := metadata(objects.NewTaskRunObjectV1(tr)) + if d := cmp.Diff(want, got); d != "" { + t.Fatalf("metadata (-want, +got):\n%s", d) + } +} + +func TestMetadataInTimeZone(t *testing.T) { + tz := time.FixedZone("Test Time", int((12 * time.Hour).Seconds())) + tr := &v1.TaskRun{ //nolint:staticcheck + ObjectMeta: metav1.ObjectMeta{ + Name: "my-taskrun", + Namespace: "my-namespace", + Annotations: map[string]string{ + "chains.tekton.dev/reproducible": "true", + }, + UID: "abhhf-12354-asjsdbjs23-3435353n", + }, + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + StartTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 12, tz)}, + CompletionTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, tz)}, + }, + }, + } + start := time.Date(1995, time.December, 24, 6, 12, 12, 12, tz).UTC() + end := time.Date(1995, time.December, 24, 6, 12, 12, 24, tz).UTC() + want := slsa.BuildMetadata{ + InvocationID: "abhhf-12354-asjsdbjs23-3435353n", + StartedOn: &start, + FinishedOn: &end, + } + got := metadata(objects.NewTaskRunObjectV1(tr)) + if d := cmp.Diff(want, got); d != "" { + t.Fatalf("metadata (-want, +got):\n%s", d) + } +} + +func TestByProducts(t *testing.T) { + resultValue := v1.ResultValue{Type: "string", StringVal: "result-value"} + tr := &v1.TaskRun{ //nolint:staticcheck + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Results: []v1.TaskRunResult{ + { + Name: "result-name", + Value: resultValue, + }, + }, + }, + }, + } + + 
resultBytes, err := json.Marshal(resultValue) + if err != nil { + t.Fatalf("Could not marshal results: %s", err) + } + want := []slsa.ResourceDescriptor{ + { + Name: "taskRunResults/result-name", + Content: resultBytes, + MediaType: pipelinerun.JsonMediaType, + }, + } + got, err := byproducts(objects.NewTaskRunObjectV1(tr)) + if err != nil { + t.Fatalf("Could not extract byproducts: %s", err) + } + if d := cmp.Diff(want, got); d != "" { + t.Fatalf("byproducts (-want, +got):\n%s", d) + } +} + +func TestTaskRunGenerateAttestation(t *testing.T) { + ctx := logtesting.TestContextWithLogger(t) + tr, err := objectloader.TaskRunFromFile("../../../testdata/v2alpha3/taskrun1.json") + if err != nil { + t.Fatal(err) + } + e1BuildStart := time.Unix(1617011400, 0) + e1BuildFinished := time.Unix(1617011415, 0) + + resultValue := v1.ResultValue{Type: "string", StringVal: "sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7"} + resultBytesDigest, err := json.Marshal(resultValue) + if err != nil { + t.Fatalf("Could not marshal results: %s", err) + } + resultValue = v1.ResultValue{Type: "string", StringVal: "gcr.io/my/image"} + resultBytesUri, err := json.Marshal(resultValue) + if err != nil { + t.Fatalf("Could not marshal results: %s", err) + } + + want := in_toto.ProvenanceStatementSLSA1{ + StatementHeader: in_toto.StatementHeader{ + Type: in_toto.StatementInTotoV01, + PredicateType: slsa.PredicateSLSAProvenance, + Subject: []in_toto.Subject{ + { + Name: "gcr.io/my/image", + Digest: common.DigestSet{ + "sha256": "827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7", + }, + }, + }, + }, + Predicate: slsa.ProvenancePredicate{ + BuildDefinition: slsa.ProvenanceBuildDefinition{ + BuildType: "https://tekton.dev/chains/v2/slsa", + ExternalParameters: map[string]any{ + "runSpec": tr.Spec, + }, + InternalParameters: map[string]any{ + "tekton-pipelines-feature-flags": config.FeatureFlags{EnableAPIFields: "beta", ResultExtractionMethod: 
"termination-message"}, + }, + ResolvedDependencies: []slsa.ResourceDescriptor{ + { + URI: "git+https://github.com/test", + Digest: common.DigestSet{"sha1": "ab123"}, + Name: "task", + }, + { + URI: "oci://gcr.io/test1/test1", + Digest: common.DigestSet{"sha256": "d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6"}, + }, + { + URI: "oci://gcr.io/test2/test2", + Digest: common.DigestSet{"sha256": "4d6dd704ef58cb214dd826519929e92a978a57cdee43693006139c0080fd6fac"}, + }, + { + URI: "oci://gcr.io/test3/test3", + Digest: common.DigestSet{"sha256": "f1a8b8549c179f41e27ff3db0fe1a1793e4b109da46586501a8343637b1d0478"}, + }, + {Name: "inputs/result", URI: "git+https://git.test.com.git", Digest: common.DigestSet{"sha1": "taskrun"}}, + }, + }, + RunDetails: slsa.ProvenanceRunDetails{ + Builder: slsa.Builder{ + ID: "test_builder-1", + }, + BuildMetadata: slsa.BuildMetadata{ + InvocationID: "abhhf-12354-asjsdbjs23-3435353n", + StartedOn: &e1BuildStart, + FinishedOn: &e1BuildFinished, + }, + Byproducts: []slsa.ResourceDescriptor{ + { + Name: "taskRunResults/IMAGE_DIGEST", + Content: resultBytesDigest, + MediaType: pipelinerun.JsonMediaType, + }, + { + Name: "taskRunResults/IMAGE_URL", + Content: resultBytesUri, + MediaType: pipelinerun.JsonMediaType, + }, + }, + }, + }, + } + + got, err := GenerateAttestation(ctx, objects.NewTaskRunObjectV1(tr), &slsaconfig.SlsaConfig{ + BuilderID: "test_builder-1", + BuildType: "https://tekton.dev/chains/v2/slsa", + }) + + if err != nil { + t.Errorf("unwant error: %s", err.Error()) + } + if diff := cmp.Diff(want, got); diff != "" { + t.Errorf("GenerateAttestation(): -want +got: %s", diff) + } +} + +func getResolvedDependencies(tro *objects.TaskRunObjectV1) []v1resourcedescriptor.ResourceDescriptor { + rd, err := resolveddependencies.TaskRun(context.Background(), tro) + if err != nil { + return []v1resourcedescriptor.ResourceDescriptor{} + } + return rd +} + +func TestGetBuildDefinition(t *testing.T) { + tr, err := 
objectloader.TaskRunFromFile("../../../testdata/v2alpha3/taskrun1.json") + if err != nil { + t.Fatal(err) + } + + tr.Annotations = map[string]string{ + "annotation1": "annotation1", + } + tr.Labels = map[string]string{ + "label1": "label1", + } + + tro := objects.NewTaskRunObjectV1(tr) + tests := []struct { + name string + buildType string + want slsa.ProvenanceBuildDefinition + err error + }{ + { + name: "test slsa build type", + buildType: "https://tekton.dev/chains/v2/slsa", + want: slsa.ProvenanceBuildDefinition{ + BuildType: "https://tekton.dev/chains/v2/slsa", + ExternalParameters: externalparameters.TaskRun(tro), + InternalParameters: internalparameters.SLSAInternalParameters(tro), + ResolvedDependencies: getResolvedDependencies(tro), + }, + err: nil, + }, + { + name: "test default build type", + buildType: "", + want: slsa.ProvenanceBuildDefinition{ + BuildType: "https://tekton.dev/chains/v2/slsa", + ExternalParameters: externalparameters.TaskRun(tro), + InternalParameters: internalparameters.SLSAInternalParameters(tro), + ResolvedDependencies: getResolvedDependencies(tro), + }, + err: nil, + }, + { + name: "test tekton build type", + buildType: "https://tekton.dev/chains/v2/slsa-tekton", + want: slsa.ProvenanceBuildDefinition{ + BuildType: "https://tekton.dev/chains/v2/slsa-tekton", + ExternalParameters: externalparameters.TaskRun(tro), + InternalParameters: internalparameters.TektonInternalParameters(tro), + ResolvedDependencies: getResolvedDependencies(tro), + }, + err: nil, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + bd, err := getBuildDefinition(context.Background(), tc.buildType, tro) + if err != nil { + t.Fatalf("Did not expect an error but got %v", err) + } + + if diff := cmp.Diff(tc.want, bd); diff != "" { + t.Errorf("getBuildDefinition(): -want +got: %v", diff) + } + + }) + } +} + +func TestUnsupportedBuildType(t *testing.T) { + tr, err := objectloader.TaskRunFromFile("../../../testdata/v2alpha3/taskrun1.json") 
+ if err != nil { + t.Fatal(err) + } + + got, err := getBuildDefinition(context.Background(), "bad-buildType", objects.NewTaskRunObjectV1(tr)) + if err == nil { + t.Error("getBuildDefinition(): expected error got nil") + } + if diff := cmp.Diff(slsa.ProvenanceBuildDefinition{}, got); diff != "" { + t.Errorf("getBuildDefinition(): -want +got: %s", diff) + } +} diff --git a/pkg/chains/formats/slsa/v2alpha3/slsav2.go b/pkg/chains/formats/slsa/v2alpha3/slsav2.go new file mode 100644 index 0000000000..d86dd01802 --- /dev/null +++ b/pkg/chains/formats/slsa/v2alpha3/slsav2.go @@ -0,0 +1,70 @@ +/* +Copyright 2021 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package v2alpha3 + +import ( + "context" + "fmt" + + "github.com/tektoncd/chains/pkg/chains/formats" + "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/slsaconfig" + "github.com/tektoncd/chains/pkg/chains/formats/slsa/v2alpha3/internal/pipelinerun" + "github.com/tektoncd/chains/pkg/chains/formats/slsa/v2alpha3/internal/taskrun" + "github.com/tektoncd/chains/pkg/chains/objects" + "github.com/tektoncd/chains/pkg/config" +) + +const ( + PayloadTypeSlsav2alpha3 = formats.PayloadTypeSlsav2alpha3 +) + +func init() { + formats.RegisterPayloader(PayloadTypeSlsav2alpha3, NewFormatter) +} + +type Slsa struct { + slsaConfig *slsaconfig.SlsaConfig +} + +func NewFormatter(cfg config.Config) (formats.Payloader, error) { + return &Slsa{ + slsaConfig: &slsaconfig.SlsaConfig{ + BuilderID: cfg.Builder.ID, + DeepInspectionEnabled: cfg.Artifacts.PipelineRuns.DeepInspectionEnabled, + BuildType: cfg.BuildDefinition.BuildType, + }, + }, nil +} + +func (s *Slsa) Wrap() bool { + return true +} + +func (s *Slsa) CreatePayload(ctx context.Context, obj interface{}) (interface{}, error) { + switch v := obj.(type) { + case *objects.TaskRunObjectV1: + return taskrun.GenerateAttestation(ctx, v, s.slsaConfig) + case *objects.PipelineRunObjectV1: + return pipelinerun.GenerateAttestation(ctx, v, s.slsaConfig) + default: + return nil, fmt.Errorf("intoto does not support type: %s", v) + } +} + +func (s *Slsa) Type() config.PayloadType { + return formats.PayloadTypeSlsav2alpha3 +} diff --git a/pkg/chains/formats/slsa/v2alpha3/slsav2_test.go b/pkg/chains/formats/slsa/v2alpha3/slsav2_test.go new file mode 100644 index 0000000000..31b4e77e5a --- /dev/null +++ b/pkg/chains/formats/slsa/v2alpha3/slsav2_test.go @@ -0,0 +1,503 @@ +/* +Copyright 2021 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package v2alpha3 + +import ( + "encoding/json" + "testing" + "time" + + "github.com/tektoncd/chains/pkg/chains/formats" + "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/compare" + "github.com/tektoncd/chains/pkg/chains/formats/slsa/v2alpha3/internal/pipelinerun" + "github.com/tektoncd/chains/pkg/chains/objects" + "github.com/tektoncd/chains/pkg/config" + "github.com/tektoncd/chains/pkg/internal/objectloader" + + "github.com/google/go-cmp/cmp" + "github.com/in-toto/in-toto-golang/in_toto" + "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common" + slsa "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v1" + pipelineConfig "github.com/tektoncd/pipeline/pkg/apis/config" + "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + logtesting "knative.dev/pkg/logging/testing" +) + +var ( + e1BuildStart = time.Unix(1617011400, 0) + e1BuildFinished = time.Unix(1617011415, 0) +) + +func TestNewFormatter(t *testing.T) { + t.Run("Ok", func(t *testing.T) { + cfg := config.Config{ + Builder: config.BuilderConfig{ + ID: "testid", + }, + } + f, err := NewFormatter(cfg) + if err != nil { + t.Errorf("Error creating formatter: %s", err) + } + if f == nil { + t.Error("Failed to create formatter") + } + }) +} + +func TestCreatePayloadError(t *testing.T) { + ctx := logtesting.TestContextWithLogger(t) + + cfg := config.Config{ + Builder: config.BuilderConfig{ + ID: "testid", + }, + } + f, _ := NewFormatter(cfg) + + t.Run("Invalid type", func(t *testing.T) { + p, err := f.CreatePayload(ctx, "not a task ref") + + if p != nil { + t.Errorf("Unexpected 
payload") + } + if err == nil { + t.Errorf("Expected error") + } else { + if err.Error() != "intoto does not support type: not a task ref" { + t.Errorf("wrong error returned: '%s'", err.Error()) + } + } + }) + +} + +func TestCorrectPayloadType(t *testing.T) { + var i Slsa + if i.Type() != formats.PayloadTypeSlsav2alpha3 { + t.Errorf("Invalid type returned: %s", i.Type()) + } +} + +func TestTaskRunCreatePayload1(t *testing.T) { + ctx := logtesting.TestContextWithLogger(t) + + tr, err := objectloader.TaskRunFromFile("../testdata/v2alpha3/taskrun1.json") + if err != nil { + t.Fatal(err) + } + + resultValue := v1beta1.ParamValue{Type: "string", StringVal: "sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7"} + resultBytesDigest, err := json.Marshal(resultValue) + if err != nil { + t.Fatalf("Could not marshal results: %s", err) + } + resultValue = v1beta1.ParamValue{Type: "string", StringVal: "gcr.io/my/image"} + resultBytesUri, err := json.Marshal(resultValue) + if err != nil { + t.Fatalf("Could not marshal results: %s", err) + } + + cfg := config.Config{ + Builder: config.BuilderConfig{ + ID: "test_builder-1", + }, + } + expected := in_toto.ProvenanceStatementSLSA1{ + StatementHeader: in_toto.StatementHeader{ + Type: in_toto.StatementInTotoV01, + PredicateType: slsa.PredicateSLSAProvenance, + Subject: []in_toto.Subject{ + { + Name: "gcr.io/my/image", + Digest: common.DigestSet{ + "sha256": "827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7", + }, + }, + }, + }, + Predicate: slsa.ProvenancePredicate{ + BuildDefinition: slsa.ProvenanceBuildDefinition{ + BuildType: "https://tekton.dev/chains/v2/slsa", + ExternalParameters: map[string]any{ + "runSpec": tr.Spec, + }, + InternalParameters: map[string]any{ + "tekton-pipelines-feature-flags": pipelineConfig.FeatureFlags{EnableAPIFields: "beta", ResultExtractionMethod: "termination-message"}, + }, + ResolvedDependencies: []slsa.ResourceDescriptor{ + { + URI: "git+https://github.com/test", + 
Digest: common.DigestSet{"sha1": "ab123"}, + Name: "task", + }, + { + URI: "oci://gcr.io/test1/test1", + Digest: common.DigestSet{"sha256": "d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6"}, + }, + { + URI: "oci://gcr.io/test2/test2", + Digest: common.DigestSet{"sha256": "4d6dd704ef58cb214dd826519929e92a978a57cdee43693006139c0080fd6fac"}, + }, + { + URI: "oci://gcr.io/test3/test3", + Digest: common.DigestSet{"sha256": "f1a8b8549c179f41e27ff3db0fe1a1793e4b109da46586501a8343637b1d0478"}, + }, + {Name: "inputs/result", URI: "git+https://git.test.com.git", Digest: common.DigestSet{"sha1": "taskrun"}}, + }, + }, + RunDetails: slsa.ProvenanceRunDetails{ + Builder: slsa.Builder{ + ID: "test_builder-1", + }, + BuildMetadata: slsa.BuildMetadata{ + InvocationID: "abhhf-12354-asjsdbjs23-3435353n", + StartedOn: &e1BuildStart, + FinishedOn: &e1BuildFinished, + }, + Byproducts: []slsa.ResourceDescriptor{ + { + Name: "taskRunResults/IMAGE_DIGEST", + Content: resultBytesDigest, + MediaType: pipelinerun.JsonMediaType, + }, + { + Name: "taskRunResults/IMAGE_URL", + Content: resultBytesUri, + MediaType: pipelinerun.JsonMediaType, + }, + }, + }, + }, + } + + i, _ := NewFormatter(cfg) + + got, err := i.CreatePayload(ctx, objects.NewTaskRunObjectV1(tr)) + + if err != nil { + t.Errorf("unexpected error: %s", err.Error()) + } + if diff := cmp.Diff(expected, got); diff != "" { + t.Errorf("Slsa.CreatePayload(): -want +got: %s", diff) + } +} + +func TestTaskRunCreatePayload2(t *testing.T) { + ctx := logtesting.TestContextWithLogger(t) + tr, err := objectloader.TaskRunFromFile("../testdata/v2alpha3/taskrun2.json") + if err != nil { + t.Fatal(err) + } + + resultValue := v1beta1.ParamValue{Type: "string", StringVal: "sha256:d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6"} + resultBytesDigest, err := json.Marshal(resultValue) + if err != nil { + t.Fatalf("Could not marshal results: %s", err) + } + resultValue = v1beta1.ParamValue{Type: "string", StringVal: 
"pkg:deb/debian/curl@7.50.3-1"} + resultBytesUri, err := json.Marshal(resultValue) + if err != nil { + t.Fatalf("Could not marshal results: %s", err) + } + + cfg := config.Config{ + Builder: config.BuilderConfig{ + ID: "test_builder-2", + }, + } + expected := in_toto.ProvenanceStatementSLSA1{ + StatementHeader: in_toto.StatementHeader{ + Type: in_toto.StatementInTotoV01, + PredicateType: slsa.PredicateSLSAProvenance, + Subject: nil, + }, + Predicate: slsa.ProvenancePredicate{ + BuildDefinition: slsa.ProvenanceBuildDefinition{ + BuildType: "https://tekton.dev/chains/v2/slsa", + ExternalParameters: map[string]any{ + "runSpec": tr.Spec, + }, + InternalParameters: map[string]any{}, + ResolvedDependencies: []slsa.ResourceDescriptor{ + { + URI: "git+https://github.com/catalog", + Digest: common.DigestSet{"sha1": "x123"}, + Name: "task", + }, + { + URI: "oci://gcr.io/test1/test1", + Digest: common.DigestSet{"sha256": "d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6"}, + }, + {Name: "inputs/result", URI: "git+https://git.test.com.git", Digest: common.DigestSet{"sha1": "sha:taskdefault"}}, + }, + }, + RunDetails: slsa.ProvenanceRunDetails{ + Builder: slsa.Builder{ + ID: "test_builder-2", + }, + BuildMetadata: slsa.BuildMetadata{ + InvocationID: "abhhf-12354-asjsdbjs23-3435353n", + StartedOn: &e1BuildStart, + FinishedOn: &e1BuildFinished, + }, + Byproducts: []slsa.ResourceDescriptor{ + { + Name: "taskRunResults/some-uri_DIGEST", + Content: resultBytesDigest, + MediaType: pipelinerun.JsonMediaType, + }, + { + Name: "taskRunResults/some-uri", + Content: resultBytesUri, + MediaType: pipelinerun.JsonMediaType, + }, + }, + }, + }, + } + + i, _ := NewFormatter(cfg) + got, err := i.CreatePayload(ctx, objects.NewTaskRunObjectV1(tr)) + + if err != nil { + t.Errorf("unexpected error: %s", err.Error()) + } + if diff := cmp.Diff(expected, got); diff != "" { + t.Errorf("Slsa.CreatePayload(): -want +got: %s", diff) + } +} + +func TestMultipleSubjects(t *testing.T) { + ctx 
:= logtesting.TestContextWithLogger(t) + + tr, err := objectloader.TaskRunFromFile("../testdata/v2alpha3/taskrun-multiple-subjects.json") + if err != nil { + t.Fatal(err) + } + + resultValue := v1beta1.ParamValue{ + Type: "string", + StringVal: "gcr.io/myimage1@sha256:d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6,gcr.io/myimage2@sha256:daa1a56e13c85cf164e7d9e595006649e3a04c47fe4a8261320e18a0bf3b0367", + } + resultBytes, err := json.Marshal(resultValue) + if err != nil { + t.Fatalf("Could not marshal results: %s", err) + } + cfg := config.Config{ + Builder: config.BuilderConfig{ + ID: "test_builder-multiple", + }, + } + expected := in_toto.ProvenanceStatementSLSA1{ + StatementHeader: in_toto.StatementHeader{ + Type: in_toto.StatementInTotoV01, + PredicateType: slsa.PredicateSLSAProvenance, + Subject: []in_toto.Subject{ + { + Name: "gcr.io/myimage1", + Digest: common.DigestSet{ + "sha256": "d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6", + }, + }, { + Name: "gcr.io/myimage2", + Digest: common.DigestSet{ + "sha256": "daa1a56e13c85cf164e7d9e595006649e3a04c47fe4a8261320e18a0bf3b0367", + }, + }, + }, + }, + Predicate: slsa.ProvenancePredicate{ + BuildDefinition: slsa.ProvenanceBuildDefinition{ + BuildType: "https://tekton.dev/chains/v2/slsa", + ExternalParameters: map[string]any{ + "runSpec": tr.Spec, + }, + InternalParameters: map[string]any{}, + ResolvedDependencies: []slsa.ResourceDescriptor{ + { + URI: "oci://gcr.io/test1/test1", + Digest: common.DigestSet{"sha256": "d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6"}, + }, + }, + }, + RunDetails: slsa.ProvenanceRunDetails{ + Builder: slsa.Builder{ + ID: "test_builder-multiple", + }, + BuildMetadata: slsa.BuildMetadata{}, + Byproducts: []slsa.ResourceDescriptor{ + { + Name: "taskRunResults/IMAGES", + Content: resultBytes, + MediaType: pipelinerun.JsonMediaType, + }, + }, + }, + }, + } + + i, _ := NewFormatter(cfg) + got, err := i.CreatePayload(ctx, 
objects.NewTaskRunObjectV1(tr)) + if err != nil { + t.Errorf("unexpected error: %s", err.Error()) + } + if diff := cmp.Diff(expected, got); diff != "" { + t.Errorf("Slsa.CreatePayload(): -want +got: %s", diff) + } +} + +func createPro(path string) *objects.PipelineRunObjectV1 { + pr, err := objectloader.PipelineRunFromFile(path) + if err != nil { + panic(err) + } + tr1, err := objectloader.TaskRunFromFile("../testdata/v2alpha3/taskrun1.json") + if err != nil { + panic(err) + } + tr2, err := objectloader.TaskRunFromFile("../testdata/v2alpha3/taskrun2.json") + if err != nil { + panic(err) + } + p := objects.NewPipelineRunObjectV1(pr) + p.AppendTaskRun(tr1) + p.AppendTaskRun(tr2) + return p +} + +func TestPipelineRunCreatePayload1(t *testing.T) { + ctx := logtesting.TestContextWithLogger(t) + + pr := createPro("../testdata/v2alpha3/pipelinerun1.json") + + cfg := config.Config{ + Builder: config.BuilderConfig{ + ID: "test_builder-1", + }, + } + expected := in_toto.ProvenanceStatementSLSA1{ + StatementHeader: in_toto.StatementHeader{ + Type: in_toto.StatementInTotoV01, + PredicateType: slsa.PredicateSLSAProvenance, + Subject: []in_toto.Subject{ + { + Name: "test.io/test/image", + Digest: common.DigestSet{ + "sha256": "827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7", + }, + }, + }, + }, + Predicate: slsa.ProvenancePredicate{ + BuildDefinition: slsa.ProvenanceBuildDefinition{ + BuildType: "https://tekton.dev/chains/v2/slsa", + ExternalParameters: map[string]any{ + "runSpec": pr.Spec, + }, + InternalParameters: map[string]any{}, + ResolvedDependencies: []slsa.ResourceDescriptor{ + { + URI: "git+https://github.com/test", + Digest: common.DigestSet{"sha1": "28b123"}, + Name: "pipeline", + }, + { + URI: "git+https://github.com/catalog", + Digest: common.DigestSet{"sha1": "x123"}, + Name: "pipelineTask", + }, + { + URI: "oci://gcr.io/test1/test1", + Digest: common.DigestSet{"sha256": "d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6"}, + }, + 
{ + URI: "git+https://github.com/test", + Digest: common.DigestSet{"sha1": "ab123"}, + Name: "pipelineTask", + }, + { + URI: "oci://gcr.io/test2/test2", + Digest: common.DigestSet{"sha256": "4d6dd704ef58cb214dd826519929e92a978a57cdee43693006139c0080fd6fac"}, + }, + { + URI: "oci://gcr.io/test3/test3", + Digest: common.DigestSet{"sha256": "f1a8b8549c179f41e27ff3db0fe1a1793e4b109da46586501a8343637b1d0478"}, + }, + { + URI: "abc", + Digest: common.DigestSet{"sha256": "827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7"}, + Name: "inputs/result", + }, + {Name: "inputs/result", URI: "git+https://git.test.com.git", Digest: common.DigestSet{"sha1": "abcd"}}, + }, + }, + RunDetails: slsa.ProvenanceRunDetails{ + Builder: slsa.Builder{ + ID: "test_builder-1", + }, + BuildMetadata: slsa.BuildMetadata{ + InvocationID: "abhhf-12354-asjsdbjs23-3435353n", + StartedOn: &e1BuildStart, + FinishedOn: &e1BuildFinished, + }, + Byproducts: []slsa.ResourceDescriptor{ + { + Name: "pipelineRunResults/CHAINS-GIT_COMMIT", + Content: []uint8(`"abcd"`), + MediaType: pipelinerun.JsonMediaType, + }, { + Name: "pipelineRunResults/CHAINS-GIT_URL", + Content: []uint8(`"https://git.test.com"`), + MediaType: pipelinerun.JsonMediaType, + }, { + Name: "pipelineRunResults/IMAGE_URL", + Content: []uint8(`"test.io/test/image"`), + MediaType: pipelinerun.JsonMediaType, + }, { + Name: "pipelineRunResults/IMAGE_DIGEST", + Content: []uint8(`"sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7"`), + MediaType: pipelinerun.JsonMediaType, + }, { + Name: "pipelineRunResults/img-ARTIFACT_INPUTS", + Content: []uint8(`{"digest":"sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7","uri":"abc"}`), + MediaType: pipelinerun.JsonMediaType, + }, { + Name: "pipelineRunResults/img2-ARTIFACT_OUTPUTS", + Content: []uint8(`{"digest":"sha256:","uri":"def"}`), + MediaType: pipelinerun.JsonMediaType, + }, { + Name: "pipelineRunResults/img_no_uri-ARTIFACT_OUTPUTS", + Content: 
[]uint8(`{"digest":"sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7"}`), + MediaType: pipelinerun.JsonMediaType, + }, + }, + }, + }, + } + + i, _ := NewFormatter(cfg) + + got, err := i.CreatePayload(ctx, pr) + + if err != nil { + t.Errorf("unexpected error: %s", err.Error()) + } + if diff := cmp.Diff(expected, got, compare.SLSAV1CompareOptions()...); diff != "" { + t.Errorf("Slsa.CreatePayload(): -want +got: %s", diff) + } +} diff --git a/pkg/chains/objects/objects.go b/pkg/chains/objects/objects.go index d89204af28..c96d3e7af9 100644 --- a/pkg/chains/objects/objects.go +++ b/pkg/chains/objects/objects.go @@ -19,7 +19,10 @@ import ( "fmt" "strings" + "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common" + "github.com/tektoncd/pipeline/pkg/apis/config" "github.com/tektoncd/pipeline/pkg/apis/pipeline/pod" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" "github.com/tektoncd/pipeline/pkg/client/clientset/versioned" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" @@ -40,14 +43,158 @@ type Object interface { runtime.Object } -// Result is a generic key value store containing the results +// GenericResult is a generic key value store containing the results // of Tekton operations. (eg. PipelineRun and TaskRun results) -type Result struct { +type GenericResult interface { + // GetName returns the name associated with the result. + GetName() string + + // GetStringValue returns the string value of the result. + GetStringValue() string + + // GetObjectValue returns the object value for the specified field. + GetObjectValue(field string) string + + // ObjectValueIsNil checks if the object value is nil. 
+ ObjectValueIsNil() bool +} + +type GenericProvenance interface { + IsNil() bool + RefSourceIsNil() bool + + GetRefSourceURI() string + GetRefSourceDigest() common.DigestSet + GetRefSourceEntrypoint() string + + FeatureFlagsIsNil() bool + GetFeatureFlags() *config.FeatureFlags +} + +// ProvenanceV1 is a struct implementing the GenericProvenance interface. +type ProvenanceV1 struct { + Provenance *v1.Provenance +} + +// RefSourceIsNil checks if the reference source is nil. +func (p *ProvenanceV1) IsNil() bool { + return p.Provenance == nil +} + +// RefSourceIsNil checks if the reference source is nil. +func (p *ProvenanceV1) RefSourceIsNil() bool { + return p.Provenance.RefSource == nil +} + +// GetRefSourceURI returns the URI of the reference source. +func (p *ProvenanceV1) GetRefSourceURI() string { + return p.Provenance.RefSource.URI +} + +// GetRefSourceDigest returns the digest set of the reference source. +func (p *ProvenanceV1) GetRefSourceDigest() common.DigestSet { + return p.Provenance.RefSource.Digest +} + +// GetRefSourceEntrypoint returns the entrypoint of the reference source. +func (p *ProvenanceV1) GetRefSourceEntrypoint() string { + return p.Provenance.RefSource.EntryPoint +} + +func (p *ProvenanceV1) FeatureFlagsIsNil() bool { + return p.Provenance.FeatureFlags == nil +} + +func (p *ProvenanceV1) GetFeatureFlags() *config.FeatureFlags { + return p.Provenance.FeatureFlags +} + +// ProvenanceV1Beta1 is a struct implementing the GenericProvenance interface. +type ProvenanceV1Beta1 struct { + Provenance *v1beta1.Provenance +} + +// RefSourceIsNil checks if the reference source is nil. +func (p *ProvenanceV1Beta1) IsNil() bool { + return p.Provenance == nil +} + +// RefSourceIsNil checks if the reference source is nil. +func (p *ProvenanceV1Beta1) RefSourceIsNil() bool { + return p.Provenance.RefSource == nil +} + +// GetRefSourceURI returns the URI of the reference source. 
+func (p *ProvenanceV1Beta1) GetRefSourceURI() string { + return p.Provenance.RefSource.URI +} + +// GetRefSourceDigest returns the digest set of the reference source. +func (p *ProvenanceV1Beta1) GetRefSourceDigest() common.DigestSet { + return p.Provenance.RefSource.Digest +} + +// GetRefSourceEntrypoint returns the entrypoint of the reference source. +func (p *ProvenanceV1Beta1) GetRefSourceEntrypoint() string { + return p.Provenance.RefSource.EntryPoint +} + +func (p *ProvenanceV1Beta1) FeatureFlagsIsNil() bool { + return p.Provenance.FeatureFlags == nil +} + +func (p *ProvenanceV1Beta1) GetFeatureFlags() *config.FeatureFlags { + return p.Provenance.FeatureFlags +} + +// ResultV1 is a generic key value store containing the results +// of Tekton operations. (eg. PipelineRun and TaskRun results) +type ResultV1 struct { + Name string + Type v1.ResultsType + Value v1.ParamValue +} + +func (res ResultV1) GetName() string { + return res.Name +} + +func (res ResultV1) GetStringValue() string { + return res.Value.StringVal +} + +func (res ResultV1) GetObjectValue(field string) string { + return res.Value.ObjectVal[field] +} + +func (res ResultV1) ObjectValueIsNil() bool { + return res.Value.ObjectVal == nil +} + +// ResultV1Beta1 is a generic key value store containing the results +// of Tekton operations. (eg. PipelineRun and TaskRun results) +type ResultV1Beta1 struct { Name string Type v1beta1.ResultsType Value v1beta1.ParamValue } +func (res ResultV1Beta1) GetName() string { + return res.Name +} + +func (res ResultV1Beta1) GetStringValue() string { + return res.Value.StringVal +} + +func (res ResultV1Beta1) GetObjectValue(field string) string { + return res.Value.ObjectVal[field] +} + +func (res ResultV1Beta1) ObjectValueIsNil() bool { + return res.Value.ObjectVal == nil +} + // Tekton object is an extended Kubernetes object with operations specific // to Tekton objects. 
type TektonObject interface { @@ -57,8 +204,8 @@ type TektonObject interface { GetObject() interface{} GetLatestAnnotations(ctx context.Context, clientSet versioned.Interface) (map[string]string, error) Patch(ctx context.Context, clientSet versioned.Interface, patchBytes []byte) error - GetResults() []Result - GetProvenance() *v1beta1.Provenance + GetResults() []GenericResult + GetProvenance() GenericProvenance GetServiceAccountName() string GetPullSecrets() []string IsDone() bool @@ -66,70 +213,74 @@ type TektonObject interface { SupportsTaskRunArtifact() bool SupportsPipelineRunArtifact() bool SupportsOCIArtifact() bool - GetRemoteProvenance() *v1beta1.Provenance + GetRemoteProvenance() GenericProvenance IsRemote() bool } func NewTektonObject(i interface{}) (TektonObject, error) { switch o := i.(type) { - case *v1beta1.PipelineRun: - return NewPipelineRunObject(o), nil - case *v1beta1.TaskRun: - return NewTaskRunObject(o), nil + case *v1.PipelineRun: + return NewPipelineRunObjectV1(o), nil + case *v1.TaskRun: + return NewTaskRunObjectV1(o), nil + case *v1beta1.PipelineRun: //nolint:staticcheck + return NewPipelineRunObjectV1Beta1(o), nil + case *v1beta1.TaskRun: //nolint:staticcheck + return NewTaskRunObjectV1Beta1(o), nil default: return nil, errors.New("unrecognized type when attempting to create tekton object") } } -// TaskRunObject extends v1beta1.TaskRun with additional functions. -type TaskRunObject struct { - *v1beta1.TaskRun +// TaskRunObjectV1 extends v1.TaskRun with additional functions. 
+type TaskRunObjectV1 struct { + *v1.TaskRun } -var _ TektonObject = &TaskRunObject{} +var _ TektonObject = &TaskRunObjectV1{} -func NewTaskRunObject(tr *v1beta1.TaskRun) *TaskRunObject { - return &TaskRunObject{ +func NewTaskRunObjectV1(tr *v1.TaskRun) *TaskRunObjectV1 { + return &TaskRunObjectV1{ tr, } } // Get the TaskRun GroupVersionKind -func (tro *TaskRunObject) GetGVK() string { +func (tro *TaskRunObjectV1) GetGVK() string { return fmt.Sprintf("%s/%s", tro.GetGroupVersionKind().GroupVersion().String(), tro.GetGroupVersionKind().Kind) } -func (tro *TaskRunObject) GetKindName() string { +func (tro *TaskRunObjectV1) GetKindName() string { return strings.ToLower(tro.GetGroupVersionKind().Kind) } -func (tro *TaskRunObject) GetProvenance() *v1beta1.Provenance { - return tro.Status.Provenance +func (tro *TaskRunObjectV1) GetProvenance() GenericProvenance { + return &ProvenanceV1{tro.Status.Provenance} } // Get the latest annotations on the TaskRun -func (tro *TaskRunObject) GetLatestAnnotations(ctx context.Context, clientSet versioned.Interface) (map[string]string, error) { - tr, err := clientSet.TektonV1beta1().TaskRuns(tro.Namespace).Get(ctx, tro.Name, metav1.GetOptions{}) +func (tro *TaskRunObjectV1) GetLatestAnnotations(ctx context.Context, clientSet versioned.Interface) (map[string]string, error) { + tr, err := clientSet.TektonV1().TaskRuns(tro.Namespace).Get(ctx, tro.Name, metav1.GetOptions{}) return tr.Annotations, err } // Get the base TaskRun object -func (tro *TaskRunObject) GetObject() interface{} { +func (tro *TaskRunObjectV1) GetObject() interface{} { return tro.TaskRun } // Patch the original TaskRun object -func (tro *TaskRunObject) Patch(ctx context.Context, clientSet versioned.Interface, patchBytes []byte) error { - _, err := clientSet.TektonV1beta1().TaskRuns(tro.Namespace).Patch( +func (tro *TaskRunObjectV1) Patch(ctx context.Context, clientSet versioned.Interface, patchBytes []byte) error { + _, err := 
clientSet.TektonV1().TaskRuns(tro.Namespace).Patch( ctx, tro.Name, types.MergePatchType, patchBytes, metav1.PatchOptions{}) return err } // Get the TaskRun results -func (tro *TaskRunObject) GetResults() []Result { - res := []Result{} - for _, key := range tro.Status.TaskRunResults { - res = append(res, Result{ +func (tro *TaskRunObjectV1) GetResults() []GenericResult { + res := []GenericResult{} + for _, key := range tro.Status.Results { + res = append(res, ResultV1{ Name: key.Name, Value: key.Value, }) @@ -137,7 +288,7 @@ func (tro *TaskRunObject) GetResults() []Result { return res } -func (tro *TaskRunObject) GetStepImages() []string { +func (tro *TaskRunObjectV1) GetStepImages() []string { images := []string{} for _, stepState := range tro.Status.Steps { images = append(images, stepState.ImageID) @@ -145,7 +296,7 @@ func (tro *TaskRunObject) GetStepImages() []string { return images } -func (tro *TaskRunObject) GetSidecarImages() []string { +func (tro *TaskRunObjectV1) GetSidecarImages() []string { images := []string{} for _, sidecarState := range tro.Status.Sidecars { images = append(images, sidecarState.ImageID) @@ -154,35 +305,35 @@ func (tro *TaskRunObject) GetSidecarImages() []string { } // Get the ServiceAccount declared in the TaskRun -func (tro *TaskRunObject) GetServiceAccountName() string { +func (tro *TaskRunObjectV1) GetServiceAccountName() string { return tro.Spec.ServiceAccountName } // Get the imgPullSecrets from the pod template -func (tro *TaskRunObject) GetPullSecrets() []string { +func (tro *TaskRunObjectV1) GetPullSecrets() []string { return getPodPullSecrets(tro.Spec.PodTemplate) } -func (tro *TaskRunObject) SupportsTaskRunArtifact() bool { +func (tro *TaskRunObjectV1) SupportsTaskRunArtifact() bool { return true } -func (tro *TaskRunObject) SupportsPipelineRunArtifact() bool { +func (tro *TaskRunObjectV1) SupportsPipelineRunArtifact() bool { return false } -func (tro *TaskRunObject) SupportsOCIArtifact() bool { +func (tro *TaskRunObjectV1) 
SupportsOCIArtifact() bool { return true } -func (tro *TaskRunObject) GetRemoteProvenance() *v1beta1.Provenance { +func (tro *TaskRunObjectV1) GetRemoteProvenance() GenericProvenance { if t := tro.Status.Provenance; t != nil && t.RefSource != nil && tro.IsRemote() { - return tro.Status.Provenance + return &ProvenanceV1{tro.Status.Provenance} } return nil } -func (tro *TaskRunObject) IsRemote() bool { +func (tro *TaskRunObjectV1) IsRemote() bool { isRemoteTask := false if tro.Spec.TaskRef != nil { if tro.Spec.TaskRef.Resolver != "" && tro.Spec.TaskRef.Resolver != "Cluster" { @@ -192,58 +343,58 @@ func (tro *TaskRunObject) IsRemote() bool { return isRemoteTask } -// PipelineRunObject extends v1beta1.PipelineRun with additional functions. -type PipelineRunObject struct { +// PipelineRunObjectV1 extends v1.PipelineRun with additional functions. +type PipelineRunObjectV1 struct { // The base PipelineRun - *v1beta1.PipelineRun + *v1.PipelineRun // taskRuns that were apart of this PipelineRun - taskRuns []*v1beta1.TaskRun + taskRuns []*v1.TaskRun } -var _ TektonObject = &PipelineRunObject{} +var _ TektonObject = &PipelineRunObjectV1{} -func NewPipelineRunObject(pr *v1beta1.PipelineRun) *PipelineRunObject { - return &PipelineRunObject{ +func NewPipelineRunObjectV1(pr *v1.PipelineRun) *PipelineRunObjectV1 { + return &PipelineRunObjectV1{ PipelineRun: pr, } } // Get the PipelineRun GroupVersionKind -func (pro *PipelineRunObject) GetGVK() string { +func (pro *PipelineRunObjectV1) GetGVK() string { return fmt.Sprintf("%s/%s", pro.GetGroupVersionKind().GroupVersion().String(), pro.GetGroupVersionKind().Kind) } -func (pro *PipelineRunObject) GetKindName() string { +func (pro *PipelineRunObjectV1) GetKindName() string { return strings.ToLower(pro.GetGroupVersionKind().Kind) } // Request the current annotations on the PipelineRun object -func (pro *PipelineRunObject) GetLatestAnnotations(ctx context.Context, clientSet versioned.Interface) (map[string]string, error) { - pr, err := 
clientSet.TektonV1beta1().PipelineRuns(pro.Namespace).Get(ctx, pro.Name, metav1.GetOptions{}) +func (pro *PipelineRunObjectV1) GetLatestAnnotations(ctx context.Context, clientSet versioned.Interface) (map[string]string, error) { + pr, err := clientSet.TektonV1().PipelineRuns(pro.Namespace).Get(ctx, pro.Name, metav1.GetOptions{}) return pr.Annotations, err } // Get the base PipelineRun -func (pro *PipelineRunObject) GetObject() interface{} { +func (pro *PipelineRunObjectV1) GetObject() interface{} { return pro.PipelineRun } // Patch the original PipelineRun object -func (pro *PipelineRunObject) Patch(ctx context.Context, clientSet versioned.Interface, patchBytes []byte) error { - _, err := clientSet.TektonV1beta1().PipelineRuns(pro.Namespace).Patch( +func (pro *PipelineRunObjectV1) Patch(ctx context.Context, clientSet versioned.Interface, patchBytes []byte) error { + _, err := clientSet.TektonV1().PipelineRuns(pro.Namespace).Patch( ctx, pro.Name, types.MergePatchType, patchBytes, metav1.PatchOptions{}) return err } -func (pro *PipelineRunObject) GetProvenance() *v1beta1.Provenance { - return pro.Status.Provenance +func (pro *PipelineRunObjectV1) GetProvenance() GenericProvenance { + return &ProvenanceV1{pro.Status.Provenance} } // Get the resolved Pipelinerun results -func (pro *PipelineRunObject) GetResults() []Result { - res := []Result{} - for _, key := range pro.Status.PipelineResults { - res = append(res, Result{ +func (pro *PipelineRunObjectV1) GetResults() []GenericResult { + res := []GenericResult{} + for _, key := range pro.Status.Results { + res = append(res, ResultV1{ Name: key.Name, Value: key.Value, }) @@ -252,56 +403,61 @@ func (pro *PipelineRunObject) GetResults() []Result { } // Get the ServiceAccount declared in the PipelineRun -func (pro *PipelineRunObject) GetServiceAccountName() string { - return pro.Spec.ServiceAccountName +func (pro *PipelineRunObjectV1) GetServiceAccountName() string { + return pro.Spec.TaskRunTemplate.ServiceAccountName } // 
Get the ServiceAccount declared in the PipelineRun -func (pro *PipelineRunObject) IsSuccessful() bool { +func (pro *PipelineRunObjectV1) IsSuccessful() bool { return pro.Status.GetCondition(apis.ConditionSucceeded).IsTrue() } // Append TaskRuns to this PipelineRun -func (pro *PipelineRunObject) AppendTaskRun(tr *v1beta1.TaskRun) { +func (pro *PipelineRunObjectV1) AppendTaskRun(tr *v1.TaskRun) { pro.taskRuns = append(pro.taskRuns, tr) } +// Append TaskRuns to this PipelineRun +func (pro *PipelineRunObjectV1) GetTaskRuns() []*v1.TaskRun { //nolint:staticcheck + return pro.taskRuns +} + // Get the associated TaskRun via the Task name -func (pro *PipelineRunObject) GetTaskRunFromTask(taskName string) *TaskRunObject { +func (pro *PipelineRunObjectV1) GetTaskRunFromTask(taskName string) *TaskRunObjectV1 { for _, tr := range pro.taskRuns { val, ok := tr.Labels[PipelineTaskLabel] if ok && val == taskName { - return NewTaskRunObject(tr) + return NewTaskRunObjectV1(tr) } } return nil } // Get the imgPullSecrets from the pod template -func (pro *PipelineRunObject) GetPullSecrets() []string { - return getPodPullSecrets(pro.Spec.PodTemplate) +func (pro *PipelineRunObjectV1) GetPullSecrets() []string { + return getPodPullSecrets(pro.Spec.TaskRunTemplate.PodTemplate) } -func (pro *PipelineRunObject) SupportsTaskRunArtifact() bool { +func (pro *PipelineRunObjectV1) SupportsTaskRunArtifact() bool { return false } -func (pro *PipelineRunObject) SupportsPipelineRunArtifact() bool { +func (pro *PipelineRunObjectV1) SupportsPipelineRunArtifact() bool { return true } -func (pro *PipelineRunObject) SupportsOCIArtifact() bool { +func (pro *PipelineRunObjectV1) SupportsOCIArtifact() bool { return false } -func (pro *PipelineRunObject) GetRemoteProvenance() *v1beta1.Provenance { +func (pro *PipelineRunObjectV1) GetRemoteProvenance() GenericProvenance { if p := pro.Status.Provenance; p != nil && p.RefSource != nil && pro.IsRemote() { - return pro.Status.Provenance + return 
&ProvenanceV1{pro.Status.Provenance} } return nil } -func (pro *PipelineRunObject) IsRemote() bool { +func (pro *PipelineRunObjectV1) IsRemote() bool { isRemotePipeline := false if pro.Spec.PipelineRef != nil { if pro.Spec.PipelineRef.Resolver != "" && pro.Spec.PipelineRef.Resolver != "Cluster" { @@ -321,3 +477,233 @@ func getPodPullSecrets(podTemplate *pod.Template) []string { } return imgPullSecrets } + +// PipelineRunObjectV1Beta1 extends v1.PipelineRun with additional functions. +type PipelineRunObjectV1Beta1 struct { + // The base PipelineRun + *v1beta1.PipelineRun + // taskRuns that were apart of this PipelineRun + taskRuns []*v1beta1.TaskRun //nolint:staticcheck +} + +var _ TektonObject = &PipelineRunObjectV1Beta1{} + +func NewPipelineRunObjectV1Beta1(pr *v1beta1.PipelineRun) *PipelineRunObjectV1Beta1 { //nolint:staticcheck + return &PipelineRunObjectV1Beta1{ + PipelineRun: pr, + } +} + +// Get the PipelineRun GroupVersionKind +func (pro *PipelineRunObjectV1Beta1) GetGVK() string { + return fmt.Sprintf("%s/%s", pro.GetGroupVersionKind().GroupVersion().String(), pro.GetGroupVersionKind().Kind) +} + +func (pro *PipelineRunObjectV1Beta1) GetKindName() string { + return strings.ToLower(pro.GetGroupVersionKind().Kind) +} + +// Request the current annotations on the PipelineRun object +func (pro *PipelineRunObjectV1Beta1) GetLatestAnnotations(ctx context.Context, clientSet versioned.Interface) (map[string]string, error) { + pr, err := clientSet.TektonV1beta1().PipelineRuns(pro.Namespace).Get(ctx, pro.Name, metav1.GetOptions{}) + return pr.Annotations, err +} + +// Get the base PipelineRun +func (pro *PipelineRunObjectV1Beta1) GetObject() interface{} { + return pro.PipelineRun +} + +// Patch the original PipelineRun object +func (pro *PipelineRunObjectV1Beta1) Patch(ctx context.Context, clientSet versioned.Interface, patchBytes []byte) error { + _, err := clientSet.TektonV1beta1().PipelineRuns(pro.Namespace).Patch( + ctx, pro.Name, types.MergePatchType, patchBytes, 
metav1.PatchOptions{}) + return err +} + +func (pro *PipelineRunObjectV1Beta1) GetProvenance() GenericProvenance { + return &ProvenanceV1Beta1{pro.Status.Provenance} +} + +// Get the resolved Pipelinerun results +func (pro *PipelineRunObjectV1Beta1) GetResults() []GenericResult { + res := []GenericResult{} + for _, key := range pro.Status.PipelineResults { + res = append(res, ResultV1Beta1{ + Name: key.Name, + Value: key.Value, + }) + } + return res +} + +// Get the ServiceAccount declared in the PipelineRun +func (pro *PipelineRunObjectV1Beta1) GetServiceAccountName() string { + return pro.Spec.ServiceAccountName +} + +// Get the ServiceAccount declared in the PipelineRun +func (pro *PipelineRunObjectV1Beta1) IsSuccessful() bool { + return pro.Status.GetCondition(apis.ConditionSucceeded).IsTrue() +} + +// Append TaskRuns to this PipelineRun +func (pro *PipelineRunObjectV1Beta1) AppendTaskRun(tr *v1beta1.TaskRun) { //nolint:staticcheck + pro.taskRuns = append(pro.taskRuns, tr) +} + +// Get the associated TaskRun via the Task name +func (pro *PipelineRunObjectV1Beta1) GetTaskRunFromTask(taskName string) *TaskRunObjectV1Beta1 { + for _, tr := range pro.taskRuns { + val, ok := tr.Labels[PipelineTaskLabel] + if ok && val == taskName { + return NewTaskRunObjectV1Beta1(tr) + } + } + return nil +} + +// Get the imgPullSecrets from the pod template +func (pro *PipelineRunObjectV1Beta1) GetPullSecrets() []string { + return getPodPullSecrets(pro.Spec.PodTemplate) +} + +func (pro *PipelineRunObjectV1Beta1) SupportsTaskRunArtifact() bool { + return false +} + +func (pro *PipelineRunObjectV1Beta1) SupportsPipelineRunArtifact() bool { + return true +} + +func (pro *PipelineRunObjectV1Beta1) SupportsOCIArtifact() bool { + return false +} + +func (pro *PipelineRunObjectV1Beta1) GetRemoteProvenance() GenericProvenance { + if p := pro.Status.Provenance; p != nil && p.RefSource != nil && pro.IsRemote() { + return &ProvenanceV1Beta1{pro.Status.Provenance} + } + return nil +} + +func 
(pro *PipelineRunObjectV1Beta1) IsRemote() bool { + isRemotePipeline := false + if pro.Spec.PipelineRef != nil { + if pro.Spec.PipelineRef.Resolver != "" && pro.Spec.PipelineRef.Resolver != "Cluster" { + isRemotePipeline = true + } + } + return isRemotePipeline +} + +// TaskRunObjectV1Beta1 extends v1beta1.TaskRun with additional functions. +type TaskRunObjectV1Beta1 struct { + *v1beta1.TaskRun +} + +var _ TektonObject = &TaskRunObjectV1Beta1{} + +func NewTaskRunObjectV1Beta1(tr *v1beta1.TaskRun) *TaskRunObjectV1Beta1 { //nolint:staticcheck + return &TaskRunObjectV1Beta1{ + tr, + } +} + +// Get the TaskRun GroupVersionKind +func (tro *TaskRunObjectV1Beta1) GetGVK() string { + return fmt.Sprintf("%s/%s", tro.GetGroupVersionKind().GroupVersion().String(), tro.GetGroupVersionKind().Kind) +} + +func (tro *TaskRunObjectV1Beta1) GetKindName() string { + return strings.ToLower(tro.GetGroupVersionKind().Kind) +} + +func (tro *TaskRunObjectV1Beta1) GetProvenance() GenericProvenance { + return &ProvenanceV1Beta1{tro.Status.Provenance} +} + +// Get the latest annotations on the TaskRun +func (tro *TaskRunObjectV1Beta1) GetLatestAnnotations(ctx context.Context, clientSet versioned.Interface) (map[string]string, error) { + tr, err := clientSet.TektonV1beta1().TaskRuns(tro.Namespace).Get(ctx, tro.Name, metav1.GetOptions{}) + return tr.Annotations, err +} + +// Get the base TaskRun object +func (tro *TaskRunObjectV1Beta1) GetObject() interface{} { + return tro.TaskRun +} + +// Patch the original TaskRun object +func (tro *TaskRunObjectV1Beta1) Patch(ctx context.Context, clientSet versioned.Interface, patchBytes []byte) error { + _, err := clientSet.TektonV1beta1().TaskRuns(tro.Namespace).Patch( + ctx, tro.Name, types.MergePatchType, patchBytes, metav1.PatchOptions{}) + return err +} + +// Get the TaskRun results +func (tro *TaskRunObjectV1Beta1) GetResults() []GenericResult { + res := []GenericResult{} + for _, key := range tro.Status.TaskRunResults { + res = append(res, 
ResultV1Beta1{ + Name: key.Name, + Value: key.Value, + }) + } + return res +} + +func (tro *TaskRunObjectV1Beta1) GetStepImages() []string { + images := []string{} + for _, stepState := range tro.Status.Steps { + images = append(images, stepState.ImageID) + } + return images +} + +func (tro *TaskRunObjectV1Beta1) GetSidecarImages() []string { + images := []string{} + for _, sidecarState := range tro.Status.Sidecars { + images = append(images, sidecarState.ImageID) + } + return images +} + +// Get the ServiceAccount declared in the TaskRun +func (tro *TaskRunObjectV1Beta1) GetServiceAccountName() string { + return tro.Spec.ServiceAccountName +} + +// Get the imgPullSecrets from the pod template +func (tro *TaskRunObjectV1Beta1) GetPullSecrets() []string { + return getPodPullSecrets(tro.Spec.PodTemplate) +} + +func (tro *TaskRunObjectV1Beta1) SupportsTaskRunArtifact() bool { + return true +} + +func (tro *TaskRunObjectV1Beta1) SupportsPipelineRunArtifact() bool { + return false +} + +func (tro *TaskRunObjectV1Beta1) SupportsOCIArtifact() bool { + return true +} + +func (tro *TaskRunObjectV1Beta1) GetRemoteProvenance() GenericProvenance { + if t := tro.Status.Provenance; t != nil && t.RefSource != nil && tro.IsRemote() { + return &ProvenanceV1Beta1{tro.Status.Provenance} + } + return nil +} + +func (tro *TaskRunObjectV1Beta1) IsRemote() bool { + isRemoteTask := false + if tro.Spec.TaskRef != nil { + if tro.Spec.TaskRef.Resolver != "" && tro.Spec.TaskRef.Resolver != "Cluster" { + isRemoteTask = true + } + } + return isRemoteTask +} diff --git a/pkg/chains/objects/objects_test.go b/pkg/chains/objects/objects_test.go index 61e9817f3d..cb1d86229b 100644 --- a/pkg/chains/objects/objects_test.go +++ b/pkg/chains/objects/objects_test.go @@ -19,7 +19,7 @@ import ( "github.com/google/go-cmp/cmp" "github.com/stretchr/testify/assert" "github.com/tektoncd/pipeline/pkg/apis/pipeline/pod" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 
"github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" ) @@ -38,8 +38,8 @@ func getEmptyTemplate() *pod.PodTemplate { return &pod.PodTemplate{} } -func getTaskRun() *v1beta1.TaskRun { - return &v1beta1.TaskRun{ +func getTaskRun() *v1.TaskRun { + return &v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ Name: "foo", Namespace: "objects-test", @@ -47,47 +47,47 @@ func getTaskRun() *v1beta1.TaskRun { PipelineTaskLabel: "foo-task", }, }, - Spec: v1beta1.TaskRunSpec{ + Spec: v1.TaskRunSpec{ ServiceAccountName: "taskrun-sa", - Params: []v1beta1.Param{ + Params: []v1.Param{ { Name: "runtime-param", - Value: *v1beta1.NewStructuredValues("runtime-value"), + Value: *v1.NewStructuredValues("runtime-value"), }, }, }, - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - Provenance: &v1beta1.Provenance{ - RefSource: &v1beta1.RefSource{ + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Provenance: &v1.Provenance{ + RefSource: &v1.RefSource{ URI: "https://github.com/tektoncd/chains", Digest: map[string]string{"sha1": "abcdef"}, EntryPoint: "pkg/chains/objects.go", }, }, - TaskSpec: &v1beta1.TaskSpec{ - Params: []v1beta1.ParamSpec{ + TaskSpec: &v1.TaskSpec{ + Params: []v1.ParamSpec{ { Name: "param1", - Default: v1beta1.NewStructuredValues("default-value"), + Default: v1.NewStructuredValues("default-value"), }, }, }, - TaskRunResults: []v1beta1.TaskRunResult{ + Results: []v1.TaskRunResult{ { Name: "img1_input_ARTIFACT_INPUTS", - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": "gcr.io/foo/bar", "digest": "sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b7", }), }, - {Name: "mvn1_ARTIFACT_URI", Value: *v1beta1.NewStructuredValues("projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/com.google.guava:guava:31.0-jre")}, - {Name: "mvn1_ARTIFACT_DIGEST", Value: 
*v1beta1.NewStructuredValues("sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b5")}, + {Name: "mvn1_ARTIFACT_URI", Value: *v1.NewStructuredValues("projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/com.google.guava:guava:31.0-jre")}, + {Name: "mvn1_ARTIFACT_DIGEST", Value: *v1.NewStructuredValues("sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b5")}, }, - Steps: []v1beta1.StepState{{ + Steps: []v1.StepState{{ ImageID: "step-image", }}, - Sidecars: []v1beta1.SidecarState{{ + Sidecars: []v1.SidecarState{{ ImageID: "sidecar-image", }}, }, @@ -95,48 +95,50 @@ func getTaskRun() *v1beta1.TaskRun { } } -func getPipelineRun() *v1beta1.PipelineRun { - return &v1beta1.PipelineRun{ +func getPipelineRun() *v1.PipelineRun { + return &v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Name: "foo", Namespace: "objects-test", }, - Spec: v1beta1.PipelineRunSpec{ - ServiceAccountName: "pipelinerun-sa", - Params: []v1beta1.Param{ + Spec: v1.PipelineRunSpec{ + TaskRunTemplate: v1.PipelineTaskRunTemplate{ + ServiceAccountName: "pipelinerun-sa", + }, + Params: []v1.Param{ { Name: "runtime-param", - Value: *v1beta1.NewStructuredValues("runtime-value"), + Value: *v1.NewStructuredValues("runtime-value"), }, }, }, - Status: v1beta1.PipelineRunStatus{ - PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ - Provenance: &v1beta1.Provenance{ - RefSource: &v1beta1.RefSource{ + Status: v1.PipelineRunStatus{ + PipelineRunStatusFields: v1.PipelineRunStatusFields{ + Provenance: &v1.Provenance{ + RefSource: &v1.RefSource{ URI: "https://github.com/tektoncd/chains", Digest: map[string]string{"sha1": "abcdef"}, EntryPoint: "pkg/chains/objects.go", }, }, - PipelineSpec: &v1beta1.PipelineSpec{ - Params: []v1beta1.ParamSpec{ + PipelineSpec: &v1.PipelineSpec{ + Params: []v1.ParamSpec{ { Name: "param1", - Default: v1beta1.NewStructuredValues("default-value"), + Default: v1.NewStructuredValues("default-value"), }, }, }, - 
PipelineResults: []v1beta1.PipelineRunResult{ + Results: []v1.PipelineRunResult{ { Name: "img1_input_ARTIFACT_INPUTS", - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": "gcr.io/foo/bar", "digest": "sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b7", }), }, - {Name: "mvn1_ARTIFACT_URI", Value: *v1beta1.NewStructuredValues("projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/com.google.guava:guava:31.0-jre")}, - {Name: "mvn1_ARTIFACT_DIGEST", Value: *v1beta1.NewStructuredValues("sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b5")}, + {Name: "mvn1_ARTIFACT_URI", Value: *v1.NewStructuredValues("projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/com.google.guava:guava:31.0-jre")}, + {Name: "mvn1_ARTIFACT_DIGEST", Value: *v1.NewStructuredValues("sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b5")}, }, }, }, @@ -170,7 +172,7 @@ func TestTaskRun_ImagePullSecrets(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - tr := NewTaskRunObject(getTaskRun()) + tr := NewTaskRunObjectV1(getTaskRun()) tr.Spec.PodTemplate = tt.template secret := tr.GetPullSecrets() assert.ElementsMatch(t, secret, tt.want) @@ -206,8 +208,8 @@ func TestPipelineRun_ImagePullSecrets(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - pr := NewPipelineRunObject(getPipelineRun()) - pr.Spec.PodTemplate = tt.template + pr := NewPipelineRunObjectV1(getPipelineRun()) + pr.Spec.TaskRunTemplate.PodTemplate = tt.template secret := pr.GetPullSecrets() assert.ElementsMatch(t, secret, tt.want) }) @@ -217,15 +219,15 @@ func TestPipelineRun_ImagePullSecrets(t *testing.T) { func TestPipelineRun_GetProvenance(t *testing.T) { t.Run("TestPipelineRun_GetProvenance", func(t *testing.T) { - pr := NewPipelineRunObject(getPipelineRun()) + pr := NewPipelineRunObjectV1(getPipelineRun()) got := 
pr.GetProvenance() - want := &v1beta1.Provenance{ - RefSource: &v1beta1.RefSource{ + want := &ProvenanceV1{&v1.Provenance{ + RefSource: &v1.RefSource{ URI: "https://github.com/tektoncd/chains", Digest: map[string]string{"sha1": "abcdef"}, EntryPoint: "pkg/chains/objects.go", }, - } + }} if d := cmp.Diff(want, got); d != "" { t.Fatalf("metadata (-want, +got):\n%s", d) } @@ -236,15 +238,15 @@ func TestPipelineRun_GetProvenance(t *testing.T) { func TestTaskRun_GetProvenance(t *testing.T) { t.Run("TestTaskRun_GetProvenance", func(t *testing.T) { - tr := NewTaskRunObject(getTaskRun()) + tr := NewTaskRunObjectV1(getTaskRun()) got := tr.GetProvenance() - want := &v1beta1.Provenance{ - RefSource: &v1beta1.RefSource{ + want := &ProvenanceV1{&v1.Provenance{ + RefSource: &v1.RefSource{ URI: "https://github.com/tektoncd/chains", Digest: map[string]string{"sha1": "abcdef"}, EntryPoint: "pkg/chains/objects.go", }, - } + }} if d := cmp.Diff(want, got); d != "" { t.Fatalf("metadata (-want, +got):\n%s", d) } @@ -255,18 +257,18 @@ func TestTaskRun_GetProvenance(t *testing.T) { func TestPipelineRun_GetResults(t *testing.T) { t.Run("TestPipelineRun_GetResults", func(t *testing.T) { - pr := NewPipelineRunObject(getPipelineRun()) + pr := NewPipelineRunObjectV1(getPipelineRun()) got := pr.GetResults() - assert.ElementsMatch(t, got, []Result{ - { + assert.ElementsMatch(t, got, []GenericResult{ + ResultV1{ Name: "img1_input_ARTIFACT_INPUTS", - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": "gcr.io/foo/bar", "digest": "sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b7", }), }, - {Name: "mvn1_ARTIFACT_URI", Value: *v1beta1.NewStructuredValues("projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/com.google.guava:guava:31.0-jre")}, - {Name: "mvn1_ARTIFACT_DIGEST", Value: *v1beta1.NewStructuredValues("sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b5")}, + ResultV1{Name: 
"mvn1_ARTIFACT_URI", Value: *v1.NewStructuredValues("projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/com.google.guava:guava:31.0-jre")}, + ResultV1{Name: "mvn1_ARTIFACT_DIGEST", Value: *v1.NewStructuredValues("sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b5")}, }) }) @@ -275,7 +277,7 @@ func TestPipelineRun_GetResults(t *testing.T) { func TestTaskRun_GetStepImages(t *testing.T) { t.Run("TestTaskRun_GetStepImages", func(t *testing.T) { - tr := NewTaskRunObject(getTaskRun()) + tr := NewTaskRunObjectV1(getTaskRun()) got := tr.GetStepImages() want := []string{"step-image"} if d := cmp.Diff(want, got); d != "" { @@ -288,7 +290,7 @@ func TestTaskRun_GetStepImages(t *testing.T) { func TestTaskRun_GetSidecarImages(t *testing.T) { t.Run("TestTaskRun_GetSidecarImages", func(t *testing.T) { - tr := NewTaskRunObject(getTaskRun()) + tr := NewTaskRunObjectV1(getTaskRun()) got := tr.GetSidecarImages() want := []string{"sidecar-image"} if d := cmp.Diff(want, got); d != "" { @@ -301,55 +303,55 @@ func TestTaskRun_GetSidecarImages(t *testing.T) { func TestTaskRun_GetResults(t *testing.T) { t.Run("TestTaskRun_GetResults", func(t *testing.T) { - pr := NewTaskRunObject(getTaskRun()) + pr := NewTaskRunObjectV1(getTaskRun()) got := pr.GetResults() - assert.ElementsMatch(t, got, []Result{ - { + assert.ElementsMatch(t, got, []GenericResult{ + ResultV1{ Name: "img1_input_ARTIFACT_INPUTS", - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": "gcr.io/foo/bar", "digest": "sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b7", }), }, - {Name: "mvn1_ARTIFACT_URI", Value: *v1beta1.NewStructuredValues("projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/com.google.guava:guava:31.0-jre")}, - {Name: "mvn1_ARTIFACT_DIGEST", Value: *v1beta1.NewStructuredValues("sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b5")}, + ResultV1{Name: 
"mvn1_ARTIFACT_URI", Value: *v1.NewStructuredValues("projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/com.google.guava:guava:31.0-jre")}, + ResultV1{Name: "mvn1_ARTIFACT_DIGEST", Value: *v1.NewStructuredValues("sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b5")}, }) }) } func TestPipelineRun_GetGVK(t *testing.T) { - assert.Equal(t, "tekton.dev/v1beta1/PipelineRun", NewPipelineRunObject(getPipelineRun()).GetGVK()) + assert.Equal(t, "tekton.dev/v1/PipelineRun", NewPipelineRunObjectV1(getPipelineRun()).GetGVK()) } func TestTaskRun_GetGVK(t *testing.T) { - assert.Equal(t, "tekton.dev/v1beta1/TaskRun", NewTaskRunObject(getTaskRun()).GetGVK()) + assert.Equal(t, "tekton.dev/v1/TaskRun", NewTaskRunObjectV1(getTaskRun()).GetGVK()) } func TestPipelineRun_GetKindName(t *testing.T) { - assert.Equal(t, "pipelinerun", NewPipelineRunObject(getPipelineRun()).GetKindName()) + assert.Equal(t, "pipelinerun", NewPipelineRunObjectV1(getPipelineRun()).GetKindName()) } func TestTaskRun_GetKindName(t *testing.T) { - assert.Equal(t, "taskrun", NewTaskRunObject(getTaskRun()).GetKindName()) + assert.Equal(t, "taskrun", NewTaskRunObjectV1(getTaskRun()).GetKindName()) } func TestPipelineRun_GetServiceAccountName(t *testing.T) { - assert.Equal(t, "pipelinerun-sa", NewPipelineRunObject(getPipelineRun()).GetServiceAccountName()) + assert.Equal(t, "pipelinerun-sa", NewPipelineRunObjectV1(getPipelineRun()).GetServiceAccountName()) } func TestTaskRun_GetServiceAccountName(t *testing.T) { - assert.Equal(t, "taskrun-sa", NewTaskRunObject(getTaskRun()).GetServiceAccountName()) + assert.Equal(t, "taskrun-sa", NewTaskRunObjectV1(getTaskRun()).GetServiceAccountName()) } func TestNewTektonObject(t *testing.T) { tro, err := NewTektonObject(getTaskRun()) assert.NoError(t, err) - assert.IsType(t, &TaskRunObject{}, tro) + assert.IsType(t, &TaskRunObjectV1{}, tro) pro, err := NewTektonObject(getPipelineRun()) assert.NoError(t, err) - assert.IsType(t, 
&PipelineRunObject{}, pro) + assert.IsType(t, &PipelineRunObjectV1{}, pro) unknown, err := NewTektonObject("someting-else") assert.Nil(t, unknown) @@ -357,7 +359,7 @@ func TestNewTektonObject(t *testing.T) { } func TestPipelineRun_GetTaskRunFromTask(t *testing.T) { - pro := NewPipelineRunObject(getPipelineRun()) + pro := NewPipelineRunObjectV1(getPipelineRun()) assert.Nil(t, pro.GetTaskRunFromTask("missing")) assert.Nil(t, pro.GetTaskRunFromTask("foo-task")) @@ -369,14 +371,14 @@ func TestPipelineRun_GetTaskRunFromTask(t *testing.T) { } func TestProvenanceExists(t *testing.T) { - pro := NewPipelineRunObject(getPipelineRun()) - provenance := &v1beta1.Provenance{ - RefSource: &v1beta1.RefSource{ + pro := NewPipelineRunObjectV1(getPipelineRun()) + provenance := &ProvenanceV1{&v1.Provenance{ + RefSource: &v1.RefSource{ URI: "tekton.com", }, - } - pro.Status.Provenance = &v1beta1.Provenance{ - RefSource: &v1beta1.RefSource{ + }} + pro.Status.Provenance = &v1.Provenance{ + RefSource: &v1.RefSource{ URI: "tekton.com", }, } @@ -384,14 +386,14 @@ func TestProvenanceExists(t *testing.T) { } func TestPipelineRunRemoteProvenance(t *testing.T) { - pro := NewPipelineRunObject(getPipelineRun()) - provenance := &v1beta1.Provenance{ - RefSource: &v1beta1.RefSource{ + pro := NewPipelineRunObjectV1(getPipelineRun()) + provenance := &ProvenanceV1{&v1.Provenance{ + RefSource: &v1.RefSource{ URI: "tekton.com", }, - } - pro.Status.Provenance = &v1beta1.Provenance{ - RefSource: &v1beta1.RefSource{ + }} + pro.Status.Provenance = &v1.Provenance{ + RefSource: &v1.RefSource{ URI: "tekton.com", }, } @@ -399,14 +401,14 @@ func TestPipelineRunRemoteProvenance(t *testing.T) { } func TestTaskRunRemoteProvenance(t *testing.T) { - tro := NewTaskRunObject(getTaskRun()) - provenance := &v1beta1.Provenance{ - RefSource: &v1beta1.RefSource{ + tro := NewTaskRunObjectV1(getTaskRun()) + provenance := &ProvenanceV1{&v1.Provenance{ + RefSource: &v1.RefSource{ URI: "tekton.com", }, - } - tro.Status.Provenance 
= &v1beta1.Provenance{ - RefSource: &v1beta1.RefSource{ + }} + tro.Status.Provenance = &v1.Provenance{ + RefSource: &v1.RefSource{ URI: "tekton.com", }, } @@ -414,9 +416,9 @@ func TestTaskRunRemoteProvenance(t *testing.T) { } func TestPipelineRunIsRemote(t *testing.T) { - pro := NewPipelineRunObject(getPipelineRun()) - pro.Spec.PipelineRef = &v1beta1.PipelineRef{ - ResolverRef: v1beta1.ResolverRef{ + pro := NewPipelineRunObjectV1(getPipelineRun()) + pro.Spec.PipelineRef = &v1.PipelineRef{ + ResolverRef: v1.ResolverRef{ Resolver: "Bundle", }, } @@ -424,9 +426,9 @@ func TestPipelineRunIsRemote(t *testing.T) { } func TestTaskRunIsRemote(t *testing.T) { - tro := NewTaskRunObject(getTaskRun()) - tro.Spec.TaskRef = &v1beta1.TaskRef{ - ResolverRef: v1beta1.ResolverRef{ + tro := NewTaskRunObjectV1(getTaskRun()) + tro.Spec.TaskRef = &v1.TaskRef{ + ResolverRef: v1.ResolverRef{ Resolver: "Bundle", }, } diff --git a/pkg/chains/rekor_test.go b/pkg/chains/rekor_test.go index cbc8075c93..ce53fdc1bf 100644 --- a/pkg/chains/rekor_test.go +++ b/pkg/chains/rekor_test.go @@ -18,8 +18,8 @@ import ( "github.com/tektoncd/chains/pkg/chains/objects" "github.com/tektoncd/chains/pkg/config" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" - v1 "k8s.io/apimachinery/pkg/apis/meta/v1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" ) func TestShouldUploadTlog(t *testing.T) { @@ -77,13 +77,13 @@ func TestShouldUploadTlog(t *testing.T) { for _, test := range tests { t.Run(test.description, func(t *testing.T) { - tr := &v1beta1.TaskRun{ - ObjectMeta: v1.ObjectMeta{ + tr := &v1.TaskRun{ + ObjectMeta: metav1.ObjectMeta{ Annotations: test.annotations, }, } cfg := config.Config{Transparency: test.cfg} - trObj := objects.NewTaskRunObject(tr) + trObj := objects.NewTaskRunObjectV1(tr) got := shouldUploadTlog(cfg, trObj) if got != test.expected { t.Fatalf("got (%v) doesn't match expected (%v)", got, test.expected) diff --git 
a/pkg/chains/signing_test.go b/pkg/chains/signing_test.go index 4660a04c57..0a8cba2f09 100644 --- a/pkg/chains/signing_test.go +++ b/pkg/chains/signing_test.go @@ -26,7 +26,7 @@ import ( "github.com/tektoncd/chains/pkg/chains/storage" "github.com/tektoncd/chains/pkg/config" "github.com/tektoncd/chains/pkg/test/tekton" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" fakepipelineclient "github.com/tektoncd/pipeline/pkg/client/injection/client/fake" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/util/sets" @@ -40,13 +40,13 @@ func TestSigner_Sign(t *testing.T) { // - generates payloads // - stores them in the configured systems // - marks the object as signed - tro := objects.NewTaskRunObject(&v1beta1.TaskRun{ + tro := objects.NewTaskRunObjectV1(&v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ Name: "foo", }, }) - pro := objects.NewPipelineRunObject(&v1beta1.PipelineRun{ + pro := objects.NewPipelineRunObjectV1(&v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Name: "foo", }, @@ -186,14 +186,14 @@ func TestSigner_Sign(t *testing.T) { func TestSigner_Transparency(t *testing.T) { newTaskRun := func(name string) objects.TektonObject { - return objects.NewTaskRunObject(&v1beta1.TaskRun{ + return objects.NewTaskRunObjectV1(&v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ Name: name, }, }) } newPipelineRun := func(name string) objects.TektonObject { - return objects.NewPipelineRunObject(&v1beta1.PipelineRun{ + return objects.NewPipelineRunObjectV1(&v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Name: name, }, @@ -202,12 +202,12 @@ func TestSigner_Transparency(t *testing.T) { setAnnotation := func(obj objects.TektonObject, key, value string) { // TODO: opportunity to add code reuse switch o := obj.GetObject().(type) { - case *v1beta1.PipelineRun: + case *v1.PipelineRun: if o.Annotations == nil { o.Annotations = make(map[string]string) } o.Annotations[key] = value - case *v1beta1.TaskRun: + case 
*v1.TaskRun: if o.Annotations == nil { o.Annotations = make(map[string]string) } diff --git a/pkg/chains/storage/docdb/docdb_test.go b/pkg/chains/storage/docdb/docdb_test.go index 95e0386590..9de09f119d 100644 --- a/pkg/chains/storage/docdb/docdb_test.go +++ b/pkg/chains/storage/docdb/docdb_test.go @@ -19,7 +19,7 @@ import ( "github.com/tektoncd/chains/pkg/chains/objects" "github.com/tektoncd/chains/pkg/config" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" "gocloud.dev/docstore" _ "gocloud.dev/docstore/memdocstore" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" @@ -43,7 +43,7 @@ func TestBackend_StorePayload(t *testing.T) { { name: "no error", args: args{ - rawPayload: &v1beta1.TaskRun{ObjectMeta: metav1.ObjectMeta{UID: "foo"}}, + rawPayload: &v1.TaskRun{ObjectMeta: metav1.ObjectMeta{UID: "foo"}}, signature: "signature", key: "foo", }, @@ -51,7 +51,7 @@ func TestBackend_StorePayload(t *testing.T) { { name: "no error - PipelineRun", args: args{ - rawPayload: &v1beta1.PipelineRun{ObjectMeta: metav1.ObjectMeta{UID: "foo"}}, + rawPayload: &v1.PipelineRun{ObjectMeta: metav1.ObjectMeta{UID: "foo"}}, signature: "signature", key: "moo", }, diff --git a/pkg/chains/storage/gcs/gcs.go b/pkg/chains/storage/gcs/gcs.go index 1c42406dc6..d4901ffabd 100644 --- a/pkg/chains/storage/gcs/gcs.go +++ b/pkg/chains/storage/gcs/gcs.go @@ -26,7 +26,7 @@ import ( "github.com/tektoncd/chains/pkg/chains/signing" "github.com/tektoncd/chains/pkg/chains/storage/api" "github.com/tektoncd/chains/pkg/config" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" ) const ( @@ -68,13 +68,13 @@ func NewStorageBackend(ctx context.Context, cfg config.Config) (*Backend, error) func (b *Backend) StorePayload(ctx context.Context, obj objects.TektonObject, rawPayload []byte, signature string, opts config.StorageOpts) error { logger := logging.FromContext(ctx) - if tr, isTaskRun 
:= obj.GetObject().(*v1beta1.TaskRun); isTaskRun { + if tr, isTaskRun := obj.GetObject().(*v1.TaskRun); isTaskRun { store := &TaskRunStorer{ writer: b.writer, key: opts.ShortKey, } - // TODO(https://github.com/tektoncd/chains/issues/665) currently using deprecated v1beta1 APIs until we add full v1 support - if _, err := store.Store(ctx, &api.StoreRequest[*v1beta1.TaskRun, *in_toto.Statement]{ + // Now using v1 APIs natively (see https://github.com/tektoncd/chains/issues/665) + if _, err := store.Store(ctx, &api.StoreRequest[*v1.TaskRun, *in_toto.Statement]{ Object: obj, Artifact: tr, // We don't actually use payload - we store the raw bundle values directly. @@ -89,13 +89,13 @@ func (b *Backend) StorePayload(ctx context.Context, obj objects.TektonObject, ra logger.Errorf("error writing to GCS: %w", err) return err } - } else if pr, isPipelineRun := obj.GetObject().(*v1beta1.PipelineRun); isPipelineRun { + } else if pr, isPipelineRun := obj.GetObject().(*v1.PipelineRun); isPipelineRun { store := &PipelineRunStorer{ writer: b.writer, key: opts.ShortKey, } - // TODO(https://github.com/tektoncd/chains/issues/665) currently using deprecated v1beta1 APIs until we add full v1 support - if _, err := store.Store(ctx, &api.StoreRequest[*v1beta1.PipelineRun, *in_toto.Statement]{ + // Now using v1 APIs natively (see https://github.com/tektoncd/chains/issues/665) + if _, err := store.Store(ctx, &api.StoreRequest[*v1.PipelineRun, *in_toto.Statement]{ Object: obj, Artifact: pr, // We don't actually use payload - we store the raw bundle values directly. 
@@ -111,7 +111,7 @@ func (b *Backend) StorePayload(ctx context.Context, obj objects.TektonObject, ra return err } } else { - return fmt.Errorf("type %T not supported - supported types: [*v1beta1.TaskRun, *v1beta1.PipelineRun]", obj.GetObject()) + return fmt.Errorf("type %T not supported - supported types: [*v1.TaskRun, *v1.PipelineRun]", obj.GetObject()) } return nil } @@ -151,9 +151,9 @@ func (b *Backend) RetrieveSignatures(ctx context.Context, obj objects.TektonObje var object string switch t := obj.GetObject().(type) { - case *v1beta1.TaskRun: + case *v1.TaskRun: object = taskRunSigName(t, opts) - case *v1beta1.PipelineRun: + case *v1.PipelineRun: object = pipelineRunSigname(t, opts) default: return nil, fmt.Errorf("unsupported TektonObject type: %T", t) @@ -174,9 +174,9 @@ func (b *Backend) RetrievePayloads(ctx context.Context, obj objects.TektonObject var object string switch t := obj.GetObject().(type) { - case *v1beta1.TaskRun: + case *v1.TaskRun: object = taskRunPayloadName(t, opts) - case *v1beta1.PipelineRun: + case *v1.PipelineRun: object = pipelineRunPayloadName(t, opts) default: return nil, fmt.Errorf("unsupported TektonObject type: %T", t) @@ -207,29 +207,29 @@ func (b *Backend) retrieveObject(ctx context.Context, object string) (string, er } //nolint:staticcheck -func taskRunSigName(tr *v1beta1.TaskRun, opts config.StorageOpts) string { +func taskRunSigName(tr *v1.TaskRun, opts config.StorageOpts) string { return fmt.Sprintf(SignatureNameFormatTaskRun, tr.Namespace, tr.Name, opts.ShortKey) } //nolint:staticcheck -func taskRunPayloadName(tr *v1beta1.TaskRun, opts config.StorageOpts) string { +func taskRunPayloadName(tr *v1.TaskRun, opts config.StorageOpts) string { return fmt.Sprintf(PayloadNameFormatTaskRun, tr.Namespace, tr.Name, opts.ShortKey) } //nolint:staticcheck -func pipelineRunSigname(pr *v1beta1.PipelineRun, opts config.StorageOpts) string { +func pipelineRunSigname(pr *v1.PipelineRun, opts config.StorageOpts) string { return 
fmt.Sprintf(SignatureNameFormatPipelineRun, pr.Namespace, pr.Name, opts.ShortKey) } //nolint:staticcheck -func pipelineRunPayloadName(pr *v1beta1.PipelineRun, opts config.StorageOpts) string { +func pipelineRunPayloadName(pr *v1.PipelineRun, opts config.StorageOpts) string { return fmt.Sprintf(PayloadNameFormatPipelineRun, pr.Namespace, pr.Name, opts.ShortKey) } //nolint:staticcheck var ( - _ api.Storer[*v1beta1.TaskRun, *in_toto.Statement] = &TaskRunStorer{} - _ api.Storer[*v1beta1.PipelineRun, *in_toto.Statement] = &PipelineRunStorer{} + _ api.Storer[*v1.TaskRun, *in_toto.Statement] = &TaskRunStorer{} + _ api.Storer[*v1.PipelineRun, *in_toto.Statement] = &PipelineRunStorer{} ) // TaskRunStorer stores TaskRuns in GCS. @@ -244,7 +244,7 @@ type TaskRunStorer struct { // Store stores the TaskRun chains information in GCS // //nolint:staticcheck -func (s *TaskRunStorer) Store(ctx context.Context, req *api.StoreRequest[*v1beta1.TaskRun, *in_toto.Statement]) (*api.StoreResponse, error) { +func (s *TaskRunStorer) Store(ctx context.Context, req *api.StoreRequest[*v1.TaskRun, *in_toto.Statement]) (*api.StoreResponse, error) { tr := req.Artifact key := s.key if key == "" { @@ -268,7 +268,7 @@ type PipelineRunStorer struct { // Store stores the PipelineRun chains information in GCS // //nolint:staticcheck -func (s *PipelineRunStorer) Store(ctx context.Context, req *api.StoreRequest[*v1beta1.PipelineRun, *in_toto.Statement]) (*api.StoreResponse, error) { +func (s *PipelineRunStorer) Store(ctx context.Context, req *api.StoreRequest[*v1.PipelineRun, *in_toto.Statement]) (*api.StoreResponse, error) { pr := req.Artifact key := s.key if key == "" { diff --git a/pkg/chains/storage/gcs/gcs_test.go b/pkg/chains/storage/gcs/gcs_test.go index 1ff15f53af..5ceb110efb 100644 --- a/pkg/chains/storage/gcs/gcs_test.go +++ b/pkg/chains/storage/gcs/gcs_test.go @@ -23,7 +23,7 @@ import ( "github.com/tektoncd/chains/pkg/chains/objects" "github.com/tektoncd/chains/pkg/config" - 
"github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/types" rtesting "knative.dev/pkg/reconciler/testing" @@ -32,8 +32,8 @@ import ( //nolint:staticcheck func TestBackend_StorePayload(t *testing.T) { type args struct { - tr *v1beta1.TaskRun - pr *v1beta1.PipelineRun + tr *v1.TaskRun + pr *v1.PipelineRun signed []byte signature string opts config.StorageOpts @@ -46,14 +46,14 @@ func TestBackend_StorePayload(t *testing.T) { { name: "no error, intoto", args: args{ - tr: &v1beta1.TaskRun{ + tr: &v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ Namespace: "foo", Name: "bar", UID: types.UID("uid"), }, }, - pr: &v1beta1.PipelineRun{ + pr: &v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Namespace: "foo", Name: "bar", @@ -68,14 +68,14 @@ func TestBackend_StorePayload(t *testing.T) { { name: "no error, tekton", args: args{ - tr: &v1beta1.TaskRun{ + tr: &v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ Namespace: "foo", Name: "bar", UID: types.UID("uid"), }, }, - pr: &v1beta1.PipelineRun{ + pr: &v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Namespace: "foo", Name: "bar", @@ -98,7 +98,7 @@ func TestBackend_StorePayload(t *testing.T) { reader: mockGcsRead, cfg: config.Config{Storage: config.StorageConfigs{GCS: config.GCSStorageConfig{Bucket: "foo"}}}, } - trObj := objects.NewTaskRunObject(tt.args.tr) + trObj := objects.NewTaskRunObjectV1(tt.args.tr) if err := b.StorePayload(ctx, trObj, tt.args.signed, tt.args.signature, tt.args.opts); (err != nil) != tt.wantErr { t.Errorf("Backend.StorePayload() error = %v, wantErr %v", err, tt.wantErr) } @@ -121,7 +121,7 @@ func TestBackend_StorePayload(t *testing.T) { t.Errorf("wrong signature, expected %s, got %s", tt.args.signed, gotPayload[objectPayload]) } - prObj := objects.NewPipelineRunObject(tt.args.pr) + prObj := objects.NewPipelineRunObjectV1(tt.args.pr) if err := b.StorePayload(ctx, prObj, tt.args.signed, 
tt.args.signature, tt.args.opts); (err != nil) != tt.wantErr { t.Errorf("Backend.StorePayload() error = %v, wantErr %v", err, tt.wantErr) } diff --git a/pkg/chains/storage/grafeas/grafeas_test.go b/pkg/chains/storage/grafeas/grafeas_test.go index 295ceacc50..c68867e9df 100644 --- a/pkg/chains/storage/grafeas/grafeas_test.go +++ b/pkg/chains/storage/grafeas/grafeas_test.go @@ -29,7 +29,7 @@ import ( "github.com/tektoncd/chains/pkg/chains/formats" "github.com/tektoncd/chains/pkg/chains/formats/slsa/extract" "github.com/tektoncd/chains/pkg/chains/objects" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" "google.golang.org/grpc" "google.golang.org/grpc/codes" "google.golang.org/grpc/credentials/insecure" @@ -66,17 +66,17 @@ const ( var ( // clone taskrun // -------------- - cloneTaskRun = &v1beta1.TaskRun{ + cloneTaskRun = &v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ Namespace: "default", Name: "git-clone", UID: types.UID("uid-task1"), }, - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - TaskRunResults: []v1beta1.TaskRunResult{ - {Name: "CHAINS-GIT_COMMIT", Value: *v1beta1.NewStructuredValues(commitSHA)}, - {Name: "CHAINS-GIT_URL", Value: *v1beta1.NewStructuredValues(repoURL)}, + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Results: []v1.TaskRunResult{ + {Name: "CHAINS-GIT_COMMIT", Value: *v1.NewStructuredValues(commitSHA)}, + {Name: "CHAINS-GIT_URL", Value: *v1.NewStructuredValues(repoURL)}, }, }, }, @@ -100,19 +100,19 @@ var ( artifactIdentifier2 = fmt.Sprintf("%s@sha256:%s", artifactURL2, artifactDigest2) // artifact build taskrun - buildTaskRun = &v1beta1.TaskRun{ + buildTaskRun = &v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ Namespace: "default", Name: "artifact-build", UID: types.UID("uid-task2"), }, - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - TaskRunResults: []v1beta1.TaskRunResult{ - {Name: 
"IMAGE_DIGEST", Value: *v1beta1.NewStructuredValues("sha256:" + artifactDigest1)}, - {Name: "IMAGE_URL", Value: *v1beta1.NewStructuredValues(artifactURL1)}, - {Name: "x_ARTIFACT_DIGEST", Value: *v1beta1.NewStructuredValues("sha256:" + artifactDigest2)}, - {Name: "x_ARTIFACT_URI", Value: *v1beta1.NewStructuredValues(artifactURL2)}, + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Results: []v1.TaskRunResult{ + {Name: "IMAGE_DIGEST", Value: *v1.NewStructuredValues("sha256:" + artifactDigest1)}, + {Name: "IMAGE_URL", Value: *v1.NewStructuredValues(artifactURL1)}, + {Name: "x_ARTIFACT_DIGEST", Value: *v1.NewStructuredValues("sha256:" + artifactDigest2)}, + {Name: "x_ARTIFACT_URI", Value: *v1.NewStructuredValues(artifactURL2)}, }, }, }, @@ -139,23 +139,23 @@ var ( } // ci pipelinerun - ciPipeline = &v1beta1.PipelineRun{ + ciPipeline = &v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Namespace: "default", Name: "ci-pipeline", UID: types.UID("uid-pipeline"), }, - Status: v1beta1.PipelineRunStatus{ - PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ - PipelineResults: []v1beta1.PipelineRunResult{ + Status: v1.PipelineRunStatus{ + PipelineRunStatusFields: v1.PipelineRunStatusFields{ + Results: []v1.PipelineRunResult{ // the results from task 1 - clone - {Name: "CHAINS-GIT_COMMIT", Value: *v1beta1.NewStructuredValues(commitSHA)}, - {Name: "CHAINS-GIT_URL", Value: *v1beta1.NewStructuredValues(repoURL)}, + {Name: "CHAINS-GIT_COMMIT", Value: *v1.NewStructuredValues(commitSHA)}, + {Name: "CHAINS-GIT_URL", Value: *v1.NewStructuredValues(repoURL)}, // the results from task 2 - build - {Name: "IMAGE_DIGEST", Value: *v1beta1.NewStructuredValues("sha256:" + artifactDigest1)}, - {Name: "IMAGE_URL", Value: *v1beta1.NewStructuredValues(artifactURL1)}, - {Name: "x_ARTIFACT_DIGEST", Value: *v1beta1.NewStructuredValues("sha256:" + artifactDigest2)}, - {Name: "x_ARTIFACT_URI", Value: *v1beta1.NewStructuredValues(artifactURL2)}, + {Name: "IMAGE_DIGEST", 
Value: *v1.NewStructuredValues("sha256:" + artifactDigest1)}, + {Name: "IMAGE_URL", Value: *v1.NewStructuredValues(artifactURL1)}, + {Name: "x_ARTIFACT_DIGEST", Value: *v1.NewStructuredValues("sha256:" + artifactDigest2)}, + {Name: "x_ARTIFACT_URI", Value: *v1.NewStructuredValues(artifactURL2)}, }, }, }, @@ -261,7 +261,7 @@ func TestGrafeasBackend_StoreAndRetrieve(t *testing.T) { { name: "intoto for clone taskrun, no error, no occurrences created because no artifacts were built.", args: args{ - runObject: &objects.TaskRunObject{ + runObject: &objects.TaskRunObjectV1{ TaskRun: cloneTaskRun, }, payload: getRawPayload(t, cloneTaskRunProvenance), @@ -274,7 +274,7 @@ func TestGrafeasBackend_StoreAndRetrieve(t *testing.T) { { name: "intoto for build taskrun, no error, 2 BUILD occurrences should be created for the 2 artifacts generated.", args: args{ - runObject: &objects.TaskRunObject{ + runObject: &objects.TaskRunObjectV1{ TaskRun: buildTaskRun, }, payload: getRawPayload(t, buildTaskRunProvenance), @@ -287,7 +287,7 @@ func TestGrafeasBackend_StoreAndRetrieve(t *testing.T) { { name: "simplesigning for the build taskrun, no error, 1 ATTESTATION occurrence should be created for the artifact specified in storageopts.key", args: args{ - runObject: &objects.TaskRunObject{ + runObject: &objects.TaskRunObjectV1{ TaskRun: buildTaskRun, }, payload: []byte("attestation payload"), @@ -300,7 +300,7 @@ func TestGrafeasBackend_StoreAndRetrieve(t *testing.T) { { name: "intoto for the ci pipeline, no error, 2 occurences should be created for the pipelinerun for the 2 artifact generated.", args: args{ - runObject: &objects.PipelineRunObject{ + runObject: &objects.PipelineRunObjectV1{ PipelineRun: ciPipeline, }, payload: getRawPayload(t, ciPipelineRunProvenance), @@ -313,7 +313,7 @@ func TestGrafeasBackend_StoreAndRetrieve(t *testing.T) { { name: "tekton format for a taskrun, error, only simplesigning and intoto are supported", args: args{ - runObject: &objects.TaskRunObject{ + runObject: 
&objects.TaskRunObjectV1{ TaskRun: buildTaskRun, }, payload: []byte("foo"), @@ -584,7 +584,7 @@ func setupConnection() (*grpc.ClientConn, pb.GrafeasClient, error) { return conn, client, nil } -// --------------------- Mocked GrafeasV1Beta1Server interface ----------------- +// --------------------- Mocked GrafeasV1Server interface ----------------- type mockGrafeasServer struct { // Embed for forward compatibility. // Tests will keep working if more methods are added in the future. diff --git a/pkg/chains/storage/oci/oci_test.go b/pkg/chains/storage/oci/oci_test.go index 36d5a37a70..eedd13188b 100644 --- a/pkg/chains/storage/oci/oci_test.go +++ b/pkg/chains/storage/oci/oci_test.go @@ -33,7 +33,7 @@ import ( "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common" slsa "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v0.2" "github.com/sigstore/sigstore/pkg/signature/payload" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" remotetest "github.com/tektoncd/pipeline/test" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/client-go/kubernetes" @@ -43,13 +43,13 @@ import ( const namespace = "oci-test" var ( - tr = &v1beta1.TaskRun{ + tr = &v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ Name: "foo", Namespace: namespace, }, } - pr = &v1beta1.PipelineRun{ + pr = &v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Name: "foo", Namespace: namespace, @@ -116,7 +116,7 @@ func TestBackend_StorePayload(t *testing.T) { }{{ name: "simplesigning payload", fields: fields{ - object: objects.NewTaskRunObject(tr), + object: objects.NewTaskRunObjectV1(tr), }, args: args{ payload: simple, @@ -129,7 +129,7 @@ func TestBackend_StorePayload(t *testing.T) { }, { name: "into-to payload", fields: fields{ - object: objects.NewTaskRunObject(tr), + object: objects.NewTaskRunObjectV1(tr), }, args: args{ payload: intotoStatement, @@ -142,7 +142,7 @@ func TestBackend_StorePayload(t *testing.T) { }, { name: "no 
subject", fields: fields{ - object: objects.NewTaskRunObject(tr), + object: objects.NewTaskRunObjectV1(tr), }, args: args{ payload: in_toto.Statement{}, @@ -155,7 +155,7 @@ func TestBackend_StorePayload(t *testing.T) { }, { name: "simplesigning payload", fields: fields{ - object: objects.NewPipelineRunObject(pr), + object: objects.NewPipelineRunObjectV1(pr), }, args: args{ payload: simple, @@ -168,7 +168,7 @@ func TestBackend_StorePayload(t *testing.T) { }, { name: "into-to payload", fields: fields{ - object: objects.NewPipelineRunObject(pr), + object: objects.NewPipelineRunObjectV1(pr), }, args: args{ payload: intotoStatement, @@ -181,7 +181,7 @@ func TestBackend_StorePayload(t *testing.T) { }, { name: "in-toto-and-simple-payload", fields: fields{ - object: objects.NewTaskRunObject(tr), + object: objects.NewTaskRunObjectV1(tr), }, args: args{ payload: simple, @@ -194,7 +194,7 @@ func TestBackend_StorePayload(t *testing.T) { }, { name: "tekton-and-simple-payload", fields: fields{ - object: objects.NewTaskRunObject(tr), + object: objects.NewTaskRunObjectV1(tr), }, args: args{ payload: simple, @@ -207,7 +207,7 @@ func TestBackend_StorePayload(t *testing.T) { }, { name: "no subject", fields: fields{ - object: objects.NewPipelineRunObject(pr), + object: objects.NewPipelineRunObjectV1(pr), }, args: args{ payload: in_toto.Statement{}, diff --git a/pkg/chains/storage/pubsub/pubsub_test.go b/pkg/chains/storage/pubsub/pubsub_test.go index 7e76202c3b..17e86cfae5 100644 --- a/pkg/chains/storage/pubsub/pubsub_test.go +++ b/pkg/chains/storage/pubsub/pubsub_test.go @@ -23,9 +23,9 @@ import ( "github.com/tektoncd/chains/pkg/chains/formats" "github.com/tektoncd/chains/pkg/chains/objects" "github.com/tektoncd/chains/pkg/config" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" "gocloud.dev/pubsub" - v1 "k8s.io/apimachinery/pkg/apis/meta/v1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" logtesting 
"knative.dev/pkg/logging/testing" rtesting "knative.dev/pkg/reconciler/testing" ) @@ -36,7 +36,7 @@ func TestBackend_StorePayload(t *testing.T) { logger := logtesting.TestLogger(t) type fields struct { - tr *v1beta1.TaskRun + tr *v1.TaskRun cfg config.Config } type args struct { @@ -53,8 +53,8 @@ func TestBackend_StorePayload(t *testing.T) { { name: "no subject", fields: fields{ - tr: &v1beta1.TaskRun{ - ObjectMeta: v1.ObjectMeta{ + tr: &v1.TaskRun{ + ObjectMeta: metav1.ObjectMeta{ Name: "foo", Namespace: "bar", }, @@ -109,7 +109,7 @@ func TestBackend_StorePayload(t *testing.T) { } }() - trObj := objects.NewTaskRunObject(tt.fields.tr) + trObj := objects.NewTaskRunObjectV1(tt.fields.tr) // Store the payload. if err := b.StorePayload(ctx, trObj, tt.args.rawPayload, tt.args.signature, tt.args.storageOpts); (err != nil) != tt.wantErr { t.Errorf("Backend.StorePayload() error = %v, wantErr %v", err, tt.wantErr) diff --git a/pkg/chains/storage/tekton/tekton_test.go b/pkg/chains/storage/tekton/tekton_test.go index 679d55439b..0a04023297 100644 --- a/pkg/chains/storage/tekton/tekton_test.go +++ b/pkg/chains/storage/tekton/tekton_test.go @@ -21,7 +21,7 @@ import ( "github.com/tektoncd/chains/pkg/chains/objects" "github.com/tektoncd/chains/pkg/config" "github.com/tektoncd/chains/pkg/test/tekton" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" fakepipelineclient "github.com/tektoncd/pipeline/pkg/client/injection/client/fake" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" rtesting "knative.dev/pkg/reconciler/testing" @@ -42,15 +42,15 @@ func TestBackend_StorePayload(t *testing.T) { A: "foo", B: 3, }, - object: objects.NewTaskRunObject(&v1beta1.TaskRun{ + object: objects.NewTaskRunObjectV1(&v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ Name: "foo", Namespace: "bar", }, - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - TaskRunResults: []v1beta1.TaskRunResult{ - {Name: "IMAGE_URL", 
Value: *v1beta1.NewStructuredValues("mockImage")}, + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Results: []v1.TaskRunResult{ + {Name: "IMAGE_URL", Value: *v1.NewStructuredValues("mockImage")}, }, }, }, @@ -62,15 +62,15 @@ func TestBackend_StorePayload(t *testing.T) { A: "foo", B: 3, }, - object: objects.NewPipelineRunObject(&v1beta1.PipelineRun{ + object: objects.NewPipelineRunObjectV1(&v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Name: "foo", Namespace: "bar", }, - Status: v1beta1.PipelineRunStatus{ - PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ - PipelineResults: []v1beta1.PipelineRunResult{ - {Name: "IMAGE_URL", Value: *v1beta1.NewStructuredValues("mockImage")}, + Status: v1.PipelineRunStatus{ + PipelineRunStatusFields: v1.PipelineRunStatusFields{ + Results: []v1.PipelineRunResult{ + {Name: "IMAGE_URL", Value: *v1.NewStructuredValues("mockImage")}, }, }, }, diff --git a/pkg/chains/verifier.go b/pkg/chains/verifier.go index bd0964567a..e21125a557 100644 --- a/pkg/chains/verifier.go +++ b/pkg/chains/verifier.go @@ -21,7 +21,7 @@ import ( "github.com/tektoncd/chains/pkg/chains/objects" "github.com/tektoncd/chains/pkg/chains/storage" "github.com/tektoncd/chains/pkg/config" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" versioned "github.com/tektoncd/pipeline/pkg/client/clientset/versioned" "k8s.io/apimachinery/pkg/util/sets" "k8s.io/client-go/kubernetes" @@ -29,7 +29,7 @@ import ( ) type Verifier interface { - VerifyTaskRun(ctx context.Context, tr *v1beta1.TaskRun) error + VerifyTaskRun(ctx context.Context, tr *v1.TaskRun) error } type TaskRunVerifier struct { @@ -38,7 +38,7 @@ type TaskRunVerifier struct { SecretPath string } -func (tv *TaskRunVerifier) VerifyTaskRun(ctx context.Context, tr *v1beta1.TaskRun) error { +func (tv *TaskRunVerifier) VerifyTaskRun(ctx context.Context, tr *v1.TaskRun) error { // Get all the things we might need (storage 
backends, signers and formatters) cfg := *config.FromContext(ctx) logger := logging.FromContext(ctx) @@ -50,7 +50,7 @@ func (tv *TaskRunVerifier) VerifyTaskRun(ctx context.Context, tr *v1beta1.TaskRu &artifacts.OCIArtifact{}, } - trObj := objects.NewTaskRunObject(tr) + trObj := objects.NewTaskRunObjectV1(tr) // Storage allBackends, err := storage.InitializeBackends(ctx, tv.Pipelineclientset, tv.KubeClient, cfg) diff --git a/pkg/config/config.go b/pkg/config/config.go index 15c49e878e..087ed49365 100644 --- a/pkg/config/config.go +++ b/pkg/config/config.go @@ -266,12 +266,12 @@ func NewConfigFromMap(data map[string]string) (*Config, error) { if err := cm.Parse(data, // Artifact-specific configs // TaskRuns - asString(taskrunFormatKey, &cfg.Artifacts.TaskRuns.Format, "in-toto", "slsa/v1", "slsa/v2alpha1", "slsa/v2alpha2"), + asString(taskrunFormatKey, &cfg.Artifacts.TaskRuns.Format, "in-toto", "slsa/v1", "slsa/v2alpha1", "slsa/v2alpha2", "slsa/v2alpha3"), asStringSet(taskrunStorageKey, &cfg.Artifacts.TaskRuns.StorageBackend, sets.New[string]("tekton", "oci", "gcs", "docdb", "grafeas", "kafka")), asString(taskrunSignerKey, &cfg.Artifacts.TaskRuns.Signer, "x509", "kms"), // PipelineRuns - asString(pipelinerunFormatKey, &cfg.Artifacts.PipelineRuns.Format, "in-toto", "slsa/v1", "slsa/v2alpha2"), + asString(pipelinerunFormatKey, &cfg.Artifacts.PipelineRuns.Format, "in-toto", "slsa/v1", "slsa/v2alpha2", "slsa/v2alpha3"), asStringSet(pipelinerunStorageKey, &cfg.Artifacts.PipelineRuns.StorageBackend, sets.New[string]("tekton", "oci", "docdb", "grafeas")), asString(pipelinerunSignerKey, &cfg.Artifacts.PipelineRuns.Signer, "x509", "kms"), asBool(pipelinerunEnableDeepInspectionKey, &cfg.Artifacts.PipelineRuns.DeepInspectionEnabled), diff --git a/pkg/internal/objectloader/objectloader.go b/pkg/internal/objectloader/objectloader.go index b2cb90f9a7..cc0c6e4847 100644 --- a/pkg/internal/objectloader/objectloader.go +++ b/pkg/internal/objectloader/objectloader.go @@ -20,27 +20,52 
@@ import ( "encoding/json" "os" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" ) -func TaskRunFromFile(f string) (*v1beta1.TaskRun, error) { +func TaskRunFromFile(f string) (*v1.TaskRun, error) { contents, err := os.ReadFile(f) if err != nil { return nil, err } - var tr v1beta1.TaskRun + var tr v1.TaskRun if err := json.Unmarshal(contents, &tr); err != nil { return nil, err } return &tr, nil } -func PipelineRunFromFile(f string) (*v1beta1.PipelineRun, error) { +func PipelineRunFromFile(f string) (*v1.PipelineRun, error) { contents, err := os.ReadFile(f) if err != nil { return nil, err } - var pr v1beta1.PipelineRun + var pr v1.PipelineRun + if err := json.Unmarshal(contents, &pr); err != nil { + return nil, err + } + return &pr, nil +} + +func TaskRunV1Beta1FromFile(f string) (*v1beta1.TaskRun, error) { //nolint:staticcheck + contents, err := os.ReadFile(f) + if err != nil { + return nil, err + } + var tr v1beta1.TaskRun //nolint:staticcheck + if err := json.Unmarshal(contents, &tr); err != nil { + return nil, err + } + return &tr, nil +} + +func PipelineRunV1Beta1FromFile(f string) (*v1beta1.PipelineRun, error) { //nolint:staticcheck + contents, err := os.ReadFile(f) + if err != nil { + return nil, err + } + var pr v1beta1.PipelineRun //nolint:staticcheck if err := json.Unmarshal(contents, &pr); err != nil { return nil, err } diff --git a/pkg/reconciler/pipelinerun/controller.go b/pkg/reconciler/pipelinerun/controller.go index bc3d7ad47e..fc9a9c19b3 100644 --- a/pkg/reconciler/pipelinerun/controller.go +++ b/pkg/reconciler/pipelinerun/controller.go @@ -19,11 +19,11 @@ import ( "github.com/tektoncd/chains/pkg/chains" "github.com/tektoncd/chains/pkg/chains/storage" "github.com/tektoncd/chains/pkg/config" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" pipelineclient "github.com/tektoncd/pipeline/pkg/client/injection/client" - 
pipelineruninformer "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/pipelinerun" - taskruninformer "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/taskrun" - pipelinerunreconciler "github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1beta1/pipelinerun" + pipelineruninformer "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1/pipelinerun" + taskruninformer "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1/taskrun" + pipelinerunreconciler "github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/pipelinerun" "k8s.io/client-go/tools/cache" kubeclient "knative.dev/pkg/client/injection/kube/client" "knative.dev/pkg/configmap" @@ -33,7 +33,7 @@ import ( _ "github.com/tektoncd/chains/pkg/chains/formats/all" ) -func NewController(ctx context.Context, cmw configmap.Watcher) *controller.Impl { +func NewControllerV1(ctx context.Context, cmw configmap.Watcher) *controller.Impl { logger := logging.FromContext(ctx) pipelineRunInformer := pipelineruninformer.Get(ctx) taskRunInformer := taskruninformer.Get(ctx) @@ -77,12 +77,66 @@ func NewController(ctx context.Context, cmw configmap.Watcher) *controller.Impl c.Tracker = impl.Tracker - pipelineRunInformer.Informer().AddEventHandler(controller.HandleAll(impl.Enqueue)) + pipelineRunInformer.Informer().AddEventHandler(controller.HandleAll(impl.Enqueue)) //nolint:errcheck - taskRunInformer.Informer().AddEventHandler(cache.FilteringResourceEventHandler{ - FilterFunc: controller.FilterController(&v1beta1.PipelineRun{}), + taskRunInformer.Informer().AddEventHandler(cache.FilteringResourceEventHandler{ //nolint:errcheck + FilterFunc: controller.FilterController(&v1.PipelineRun{}), Handler: controller.HandleAll(impl.EnqueueControllerOf), }) return impl } + +// func NewControllerV1Beta1(ctx context.Context, cmw configmap.Watcher) *controller.Impl { +// logger := logging.FromContext(ctx) +// 
pipelineRunInformer := v1beta1pipelineruninformer.Get(ctx) +// taskRunInformer := v1beta1taskruninformer.Get(ctx) + +// kubeClient := kubeclient.Get(ctx) +// pipelineClient := pipelineclient.Get(ctx) + +// psSigner := &chains.ObjectSigner{ +// SecretPath: SecretPath, +// Pipelineclientset: pipelineClient, +// } + +// c := &ReconcilerV1Beta1{ +// PipelineRunSigner: psSigner, +// Pipelineclientset: pipelineClient, +// TaskRunLister: taskRunInformer.Lister(), +// } +// impl := v1beta1pipelinerunreconciler.NewImpl(ctx, c, func(impl *controller.Impl) controller.Options { +// cfgStore := config.NewConfigStore(logger, func(name string, value interface{}) { +// // get updated config +// cfg := *value.(*config.Config) + +// // get all backends for storing provenance +// backends, err := storage.InitializeBackends(ctx, pipelineClient, kubeClient, cfg) +// if err != nil { +// logger.Error(err) +// } +// psSigner.Backends = backends +// }) + +// // setup watches for the config names provided by client +// cfgStore.WatchConfigs(cmw) + +// return controller.Options{ +// // The chains reconciler shouldn't mutate the pipelinerun's status. +// SkipStatusUpdates: true, +// ConfigStore: cfgStore, +// FinalizerName: "chains.tekton.dev/pipelinerun", // TODO: unique name required? 
+// } +// }) + +// c.Tracker = impl.Tracker + +// pipelineRunInformer.Informer().AddEventHandler(controller.HandleAll(impl.Enqueue)) //nolint:errcheck + +// taskRunInformer.Informer().AddEventHandler(cache.FilteringResourceEventHandler{ //nolint:errcheck +// FilterFunc: controller.FilterController(&v1beta1.PipelineRun{}), //nolint:staticcheck +// Handler: controller.HandleAll(impl.EnqueueControllerOf), +// }) + +// return impl +// } diff --git a/pkg/reconciler/pipelinerun/pipelinerun.go b/pkg/reconciler/pipelinerun/pipelinerun.go index 069aa80d36..11ea60509b 100644 --- a/pkg/reconciler/pipelinerun/pipelinerun.go +++ b/pkg/reconciler/pipelinerun/pipelinerun.go @@ -19,10 +19,10 @@ import ( signing "github.com/tektoncd/chains/pkg/chains" "github.com/tektoncd/chains/pkg/chains/objects" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" "github.com/tektoncd/pipeline/pkg/client/clientset/versioned" - pipelinerunreconciler "github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1beta1/pipelinerun" - listers "github.com/tektoncd/pipeline/pkg/client/listers/pipeline/v1beta1" + pipelinerunreconciler "github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/pipelinerun" + listers "github.com/tektoncd/pipeline/pkg/client/listers/pipeline/v1" "k8s.io/apimachinery/pkg/api/errors" "knative.dev/pkg/logging" pkgreconciler "knative.dev/pkg/reconciler" @@ -47,7 +47,7 @@ var _ pipelinerunreconciler.Finalizer = (*Reconciler)(nil) // ReconcileKind handles a changed or created PipelineRun. // This is the main entrypoint for chains business logic. 
-func (r *Reconciler) ReconcileKind(ctx context.Context, pr *v1beta1.PipelineRun) pkgreconciler.Event { +func (r *Reconciler) ReconcileKind(ctx context.Context, pr *v1.PipelineRun) pkgreconciler.Event { log := logging.FromContext(ctx).With("pipelinerun", fmt.Sprintf("%s/%s", pr.Namespace, pr.Name)) return r.FinalizeKind(logging.WithLogger(ctx, log), pr) } @@ -56,13 +56,13 @@ func (r *Reconciler) ReconcileKind(ctx context.Context, pr *v1beta1.PipelineRun) // We utilize finalizers to ensure that we get a crack at signing every pipelinerun // that we see flowing through the system. If we don't add a finalizer, it could // get cleaned up before we see the final state and sign it. -func (r *Reconciler) FinalizeKind(ctx context.Context, pr *v1beta1.PipelineRun) pkgreconciler.Event { +func (r *Reconciler) FinalizeKind(ctx context.Context, pr *v1.PipelineRun) pkgreconciler.Event { // Check to make sure the PipelineRun is finished. if !pr.IsDone() { logging.FromContext(ctx).Infof("pipelinerun is still running") return nil } - pro := objects.NewPipelineRunObject(pr) + pro := objects.NewPipelineRunObjectV1(pr) // Check to see if it has already been signed. if signing.Reconciled(ctx, r.Pipelineclientset, pro) { @@ -72,21 +72,8 @@ func (r *Reconciler) FinalizeKind(ctx context.Context, pr *v1beta1.PipelineRun) // Get TaskRun names depending on whether embeddedstatus feature is set or not var trs []string - if len(pr.Status.ChildReferences) == 0 || len(pr.Status.TaskRuns) > 0 || len(pr.Status.Runs) > 0 { //nolint:all //incompatible with pipelines v0.45 - for trName, ptrs := range pr.Status.TaskRuns { //nolint:all //incompatible with pipelines v0.45 - // TaskRuns within a PipelineRun may not have been finalized yet if the PipelineRun timeout - // has exceeded. 
Wait to process the PipelineRun on the next update, see - // https://github.com/tektoncd/pipeline/issues/4916 - if ptrs.Status == nil || ptrs.Status.CompletionTime == nil { - logging.FromContext(ctx).Infof("taskrun %s within pipelinerun is not yet finalized: embedded status is not complete", trName) - return nil - } - trs = append(trs, trName) - } - } else { - for _, cr := range pr.Status.ChildReferences { - trs = append(trs, cr.Name) - } + for _, cr := range pr.Status.ChildReferences { + trs = append(trs, cr.Name) } // Signing both taskruns and pipelineruns causes a race condition when using oci storage @@ -111,7 +98,7 @@ func (r *Reconciler) FinalizeKind(ctx context.Context, pr *v1beta1.PipelineRun) logging.FromContext(ctx).Infof("taskrun %s within pipelinerun is not yet finalized: status is not complete", name) return r.trackTaskRun(tr, pr) } - reconciled := signing.Reconciled(ctx, r.Pipelineclientset, objects.NewTaskRunObject(tr)) + reconciled := signing.Reconciled(ctx, r.Pipelineclientset, objects.NewTaskRunObjectV1(tr)) if !reconciled { logging.FromContext(ctx).Infof("taskrun %s within pipelinerun is not yet reconciled", name) return r.trackTaskRun(tr, pr) @@ -125,9 +112,9 @@ func (r *Reconciler) FinalizeKind(ctx context.Context, pr *v1beta1.PipelineRun) return nil } -func (r *Reconciler) trackTaskRun(tr *v1beta1.TaskRun, pr *v1beta1.PipelineRun) error { +func (r *Reconciler) trackTaskRun(tr *v1.TaskRun, pr *v1.PipelineRun) error { ref := tracker.Reference{ - APIVersion: "tekton.dev/v1beta1", + APIVersion: "tekton.dev/v1", Kind: "TaskRun", Namespace: tr.Namespace, Name: tr.Name, diff --git a/pkg/reconciler/pipelinerun/pipelinerun_test.go b/pkg/reconciler/pipelinerun/pipelinerun_test.go index be6bb0aa7d..efd8a8711a 100644 --- a/pkg/reconciler/pipelinerun/pipelinerun_test.go +++ b/pkg/reconciler/pipelinerun/pipelinerun_test.go @@ -23,14 +23,13 @@ import ( "github.com/tektoncd/chains/pkg/config" "github.com/tektoncd/chains/pkg/internal/mocksigner" 
"github.com/tektoncd/chains/pkg/test/tekton" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" - informers "github.com/tektoncd/pipeline/pkg/client/informers/externalversions/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" + informers "github.com/tektoncd/pipeline/pkg/client/informers/externalversions/pipeline/v1" fakepipelineclient "github.com/tektoncd/pipeline/pkg/client/injection/client/fake" - fakepipelineruninformer "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/pipelinerun/fake" - faketaskruninformer "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/taskrun/fake" + fakepipelineruninformer "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1/pipelinerun/fake" + faketaskruninformer "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1/taskrun/fake" corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" - v1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/types" "knative.dev/pkg/apis" duckv1 "knative.dev/pkg/apis/duck/v1" @@ -46,17 +45,17 @@ func TestReconciler_Reconcile(t *testing.T) { tests := []struct { name string key string - pipelineRuns []*v1beta1.PipelineRun + pipelineRuns []*v1.PipelineRun }{ { name: "no pipelineRuns", key: "foo/bar", - pipelineRuns: []*v1beta1.PipelineRun{}, + pipelineRuns: []*v1.PipelineRun{}, }, { name: "found PipelineRun", key: "foo/bar", - pipelineRuns: []*v1beta1.PipelineRun{ + pipelineRuns: []*v1.PipelineRun{ { ObjectMeta: metav1.ObjectMeta{ Name: "bar", @@ -78,7 +77,7 @@ func TestReconciler_Reconcile(t *testing.T) { Name: config.ChainsConfig, }, }) - ctl := NewController(ctx, configMapWatcher) + ctl := NewControllerV1(ctx, configMapWatcher) if la, ok := ctl.Reconciler.(pkgreconciler.LeaderAware); ok { if err := la.Promote(pkgreconciler.UniversalBucket(), func(pkgreconciler.Bucket, types.NamespacedName) {}); err != nil { @@ -93,13 +92,13 @@ func 
TestReconciler_Reconcile(t *testing.T) { } } -func setupData(ctx context.Context, t *testing.T, prs []*v1beta1.PipelineRun) informers.PipelineRunInformer { +func setupData(ctx context.Context, t *testing.T, prs []*v1.PipelineRun) informers.PipelineRunInformer { pri := fakepipelineruninformer.Get(ctx) c := fakepipelineclient.Get(ctx) for _, pa := range prs { pa := pa.DeepCopy() // Avoid assumptions that the informer's copy is modified. - if _, err := c.TektonV1beta1().PipelineRuns(pa.Namespace).Create(ctx, pa, metav1.CreateOptions{}); err != nil { + if _, err := c.TektonV1().PipelineRuns(pa.Namespace).Create(ctx, pa, metav1.CreateOptions{}); err != nil { t.Fatal(err) } } @@ -111,20 +110,20 @@ func TestReconciler_handlePipelineRun(t *testing.T) { tests := []struct { name string - pr *v1beta1.PipelineRun - taskruns []*v1beta1.TaskRun + pr *v1.PipelineRun + taskruns []*v1.TaskRun shouldSign bool wantErr bool }{ { name: "complete, already signed", - pr: &v1beta1.PipelineRun{ + pr: &v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Name: "pipelinerun", Namespace: "default", Annotations: map[string]string{signing.ChainsAnnotation: "true"}, }, - Status: v1beta1.PipelineRunStatus{ + Status: v1.PipelineRunStatus{ Status: duckv1.Status{ Conditions: []apis.Condition{{Type: apis.ConditionSucceeded}}, }}, @@ -133,13 +132,13 @@ func TestReconciler_handlePipelineRun(t *testing.T) { }, { name: "complete, not already signed", - pr: &v1beta1.PipelineRun{ + pr: &v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Name: "pipelinerun", Namespace: "default", Annotations: map[string]string{}, }, - Status: v1beta1.PipelineRunStatus{ + Status: v1.PipelineRunStatus{ Status: duckv1.Status{ Conditions: []apis.Condition{{Type: apis.ConditionSucceeded}}, }}, @@ -148,13 +147,13 @@ func TestReconciler_handlePipelineRun(t *testing.T) { }, { name: "not complete, not already signed", - pr: &v1beta1.PipelineRun{ + pr: &v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Name: "pipelinerun", Namespace: "default", 
Annotations: map[string]string{}, }, - Status: v1beta1.PipelineRunStatus{ + Status: v1.PipelineRunStatus{ Status: duckv1.Status{ Conditions: []apis.Condition{}, }}, @@ -163,31 +162,19 @@ func TestReconciler_handlePipelineRun(t *testing.T) { }, { name: "taskruns completed with full taskrun status", - pr: &v1beta1.PipelineRun{ + pr: &v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Name: "pipelinerun", Namespace: "default", Annotations: map[string]string{}, }, - Status: v1beta1.PipelineRunStatus{ + Status: v1.PipelineRunStatus{ Status: duckv1.Status{ Conditions: []apis.Condition{{Type: apis.ConditionSucceeded}}, }, - PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ - TaskRuns: map[string]*v1beta1.PipelineRunTaskRunStatus{ - "taskrun1": { - PipelineTaskName: "task1", - Status: &v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - CompletionTime: &metav1.Time{}, - }, - }, - }, - }, - }, }, }, - taskruns: []*v1beta1.TaskRun{ + taskruns: []*v1.TaskRun{ { ObjectMeta: metav1.ObjectMeta{ Name: "taskrun1", @@ -196,9 +183,9 @@ func TestReconciler_handlePipelineRun(t *testing.T) { "chains.tekton.dev/signed": "true", }, }, - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - CompletionTime: &v1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, time.UTC)}, + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + CompletionTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, time.UTC)}, }, }, }, @@ -208,19 +195,19 @@ func TestReconciler_handlePipelineRun(t *testing.T) { }, { name: "taskruns completed with child references", - pr: &v1beta1.PipelineRun{ + pr: &v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Name: "pipelinerun", Namespace: "default", Annotations: map[string]string{}, }, - Status: v1beta1.PipelineRunStatus{ + Status: v1.PipelineRunStatus{ Status: duckv1.Status{ Conditions: []apis.Condition{{Type: apis.ConditionSucceeded}}, }, - PipelineRunStatusFields: 
v1beta1.PipelineRunStatusFields{ - ChildReferences: []v1beta1.ChildStatusReference{ - v1beta1.ChildStatusReference{ + PipelineRunStatusFields: v1.PipelineRunStatusFields{ + ChildReferences: []v1.ChildStatusReference{ + { Name: "taskrun1", PipelineTaskName: "task1", }, @@ -228,7 +215,7 @@ func TestReconciler_handlePipelineRun(t *testing.T) { }, }, }, - taskruns: []*v1beta1.TaskRun{ + taskruns: []*v1.TaskRun{ { ObjectMeta: metav1.ObjectMeta{ Name: "taskrun1", @@ -237,9 +224,9 @@ func TestReconciler_handlePipelineRun(t *testing.T) { "chains.tekton.dev/signed": "true", }, }, - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - CompletionTime: &v1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, time.UTC)}, + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + CompletionTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, time.UTC)}, }, }, }, @@ -247,58 +234,21 @@ func TestReconciler_handlePipelineRun(t *testing.T) { shouldSign: true, wantErr: false, }, - { - name: "taskruns not yet completed", - pr: &v1beta1.PipelineRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "pipelinerun", - Namespace: "default", - Annotations: map[string]string{}, - }, - Status: v1beta1.PipelineRunStatus{ - Status: duckv1.Status{ - Conditions: []apis.Condition{{Type: apis.ConditionSucceeded}}, - }, - PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ - TaskRuns: map[string]*v1beta1.PipelineRunTaskRunStatus{ - "taskrun1": { - PipelineTaskName: "task1", - Status: &v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - CompletionTime: &metav1.Time{}, - }, - }, - }, - }, - }, - }, - }, - taskruns: []*v1beta1.TaskRun{ - { - ObjectMeta: metav1.ObjectMeta{ - Name: "taskrun1", - Namespace: "default", - }, - }, - }, - shouldSign: false, - wantErr: true, - }, { name: "taskruns not yet completed with child references", - pr: &v1beta1.PipelineRun{ + pr: &v1.PipelineRun{ ObjectMeta: 
metav1.ObjectMeta{ Name: "pipelinerun", Namespace: "default", Annotations: map[string]string{}, }, - Status: v1beta1.PipelineRunStatus{ + Status: v1.PipelineRunStatus{ Status: duckv1.Status{ Conditions: []apis.Condition{{Type: apis.ConditionSucceeded}}, }, - PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ - ChildReferences: []v1beta1.ChildStatusReference{ - v1beta1.ChildStatusReference{ + PipelineRunStatusFields: v1.PipelineRunStatusFields{ + ChildReferences: []v1.ChildStatusReference{ + { Name: "taskrun1", PipelineTaskName: "task1", }, @@ -306,7 +256,7 @@ func TestReconciler_handlePipelineRun(t *testing.T) { }, }, }, - taskruns: []*v1beta1.TaskRun{ + taskruns: []*v1.TaskRun{ { ObjectMeta: metav1.ObjectMeta{ Name: "taskrun1", @@ -317,50 +267,21 @@ func TestReconciler_handlePipelineRun(t *testing.T) { shouldSign: false, wantErr: true, }, - { - name: "missing taskrun", - pr: &v1beta1.PipelineRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "pipelinerun", - Namespace: "default", - Annotations: map[string]string{}, - }, - Status: v1beta1.PipelineRunStatus{ - Status: duckv1.Status{ - Conditions: []apis.Condition{{Type: apis.ConditionSucceeded}}, - }, - PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ - TaskRuns: map[string]*v1beta1.PipelineRunTaskRunStatus{ - "taskrun1": { - PipelineTaskName: "task1", - Status: &v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - CompletionTime: &metav1.Time{}, - }, - }, - }, - }, - }, - }, - }, - shouldSign: false, - wantErr: false, - }, { name: "missing taskrun with child references", - pr: &v1beta1.PipelineRun{ + pr: &v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Name: "pipelinerun", Namespace: "default", Annotations: map[string]string{}, }, - Status: v1beta1.PipelineRunStatus{ + Status: v1.PipelineRunStatus{ Status: duckv1.Status{ Conditions: []apis.Condition{{Type: apis.ConditionSucceeded}}, }, - PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ - ChildReferences: 
[]v1beta1.ChildStatusReference{ - v1beta1.ChildStatusReference{ + PipelineRunStatusFields: v1.PipelineRunStatusFields{ + ChildReferences: []v1.ChildStatusReference{ + { Name: "taskrun1", PipelineTaskName: "task1", }, @@ -377,7 +298,7 @@ func TestReconciler_handlePipelineRun(t *testing.T) { signer := &mocksigner.Signer{} ctx, _ := rtesting.SetupFakeContext(t) c := fakepipelineclient.Get(ctx) - tekton.CreateObject(t, ctx, c, objects.NewPipelineRunObject(tt.pr)) + tekton.CreateObject(t, ctx, c, objects.NewPipelineRunObjectV1(tt.pr)) tri := faketaskruninformer.Get(ctx) r := &Reconciler{ @@ -397,7 +318,7 @@ func TestReconciler_handlePipelineRun(t *testing.T) { t.Fatalf("TaskRun not added to informer: %v, namespace: %v", err, tt.pr.Namespace) } } - + ctx = config.ToContext(ctx, &config.Config{}) if err := r.ReconcileKind(ctx, tt.pr); err != nil && !tt.wantErr { t.Errorf("Reconciler.handlePipelineRun() error = %v", err) } diff --git a/pkg/reconciler/taskrun/controller.go b/pkg/reconciler/taskrun/controller.go index dbbb1cdab9..2f4fd47abe 100644 --- a/pkg/reconciler/taskrun/controller.go +++ b/pkg/reconciler/taskrun/controller.go @@ -20,8 +20,8 @@ import ( "github.com/tektoncd/chains/pkg/chains/storage" "github.com/tektoncd/chains/pkg/config" pipelineclient "github.com/tektoncd/pipeline/pkg/client/injection/client" - taskruninformer "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/taskrun" - taskrunreconciler "github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1beta1/taskrun" + taskruninformer "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1/taskrun" + taskrunreconciler "github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/taskrun" kubeclient "knative.dev/pkg/client/injection/kube/client" "knative.dev/pkg/configmap" "knative.dev/pkg/controller" @@ -30,7 +30,7 @@ import ( _ "github.com/tektoncd/chains/pkg/chains/formats/all" ) -func NewController(ctx context.Context, cmw 
configmap.Watcher) *controller.Impl { +func NewControllerV1(ctx context.Context, cmw configmap.Watcher) *controller.Impl { logger := logging.FromContext(ctx) taskRunInformer := taskruninformer.Get(ctx) @@ -42,7 +42,7 @@ func NewController(ctx context.Context, cmw configmap.Watcher) *controller.Impl Pipelineclientset: pipelineClient, } - c := &Reconciler{ + c := &ReconcilerV1{ TaskRunSigner: tsSigner, Pipelineclientset: pipelineClient, } @@ -70,7 +70,52 @@ func NewController(ctx context.Context, cmw configmap.Watcher) *controller.Impl } }) - taskRunInformer.Informer().AddEventHandler(controller.HandleAll(impl.Enqueue)) + taskRunInformer.Informer().AddEventHandler(controller.HandleAll(impl.Enqueue)) //nolint:errcheck return impl } + +// func NewControllerV1Beta1(ctx context.Context, cmw configmap.Watcher) *controller.Impl { +// logger := logging.FromContext(ctx) +// taskRunInformer := v1beta1taskruninformer.Get(ctx) + +// kubeClient := kubeclient.Get(ctx) +// pipelineClient := pipelineclient.Get(ctx) + +// tsSigner := &chains.ObjectSigner{ +// SecretPath: SecretPath, +// Pipelineclientset: pipelineClient, +// } + +// c := &ReconcilerV1Beta1{ +// TaskRunSigner: tsSigner, +// Pipelineclientset: pipelineClient, +// } +// impl := v1beta1taskrunreconciler.NewImpl(ctx, c, func(impl *controller.Impl) controller.Options { +// cfgStore := config.NewConfigStore(logger, func(name string, value interface{}) { +// // get updated config +// cfg := *value.(*config.Config) + +// // get all backends for storing provenance +// backends, err := storage.InitializeBackends(ctx, pipelineClient, kubeClient, cfg) +// if err != nil { +// logger.Error(err) +// } +// tsSigner.Backends = backends +// }) + +// // setup watches for the config names provided by client +// cfgStore.WatchConfigs(cmw) + +// return controller.Options{ +// // The chains reconciler shouldn't mutate the taskrun's status. 
+// SkipStatusUpdates: true, +// ConfigStore: cfgStore, +// FinalizerName: "chains.tekton.dev", +// } +// }) + +// taskRunInformer.Informer().AddEventHandler(controller.HandleAll(impl.Enqueue)) //nolint:errcheck + +// return impl +// } diff --git a/pkg/reconciler/taskrun/taskrun.go b/pkg/reconciler/taskrun/taskrun.go index 18fa87d15b..9186918707 100644 --- a/pkg/reconciler/taskrun/taskrun.go +++ b/pkg/reconciler/taskrun/taskrun.go @@ -18,9 +18,9 @@ import ( signing "github.com/tektoncd/chains/pkg/chains" "github.com/tektoncd/chains/pkg/chains/objects" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" "github.com/tektoncd/pipeline/pkg/client/clientset/versioned" - taskrunreconciler "github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1beta1/taskrun" + taskrunreconciler "github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/taskrun" "knative.dev/pkg/logging" pkgreconciler "knative.dev/pkg/reconciler" ) @@ -30,18 +30,18 @@ const ( SecretPath = "/etc/signing-secrets" ) -type Reconciler struct { +type ReconcilerV1 struct { TaskRunSigner signing.Signer Pipelineclientset versioned.Interface } // Check that our Reconciler implements taskrunreconciler.Interface and taskrunreconciler.Finalizer -var _ taskrunreconciler.Interface = (*Reconciler)(nil) -var _ taskrunreconciler.Finalizer = (*Reconciler)(nil) +var _ taskrunreconciler.Interface = (*ReconcilerV1)(nil) +var _ taskrunreconciler.Finalizer = (*ReconcilerV1)(nil) // ReconcileKind handles a changed or created TaskRun. // This is the main entrypoint for chains business logic. 
-func (r *Reconciler) ReconcileKind(ctx context.Context, tr *v1beta1.TaskRun) pkgreconciler.Event { +func (r *ReconcilerV1) ReconcileKind(ctx context.Context, tr *v1.TaskRun) pkgreconciler.Event { return r.FinalizeKind(ctx, tr) } @@ -49,14 +49,14 @@ func (r *Reconciler) ReconcileKind(ctx context.Context, tr *v1beta1.TaskRun) pkg // We utilize finalizers to ensure that we get a crack at signing every taskrun // that we see flowing through the system. If we don't add a finalizer, it could // get cleaned up before we see the final state and sign it. -func (r *Reconciler) FinalizeKind(ctx context.Context, tr *v1beta1.TaskRun) pkgreconciler.Event { +func (r *ReconcilerV1) FinalizeKind(ctx context.Context, tr *v1.TaskRun) pkgreconciler.Event { // Check to make sure the TaskRun is finished. if !tr.IsDone() { logging.FromContext(ctx).Infof("taskrun %s/%s is still running", tr.Namespace, tr.Name) return nil } - obj := objects.NewTaskRunObject(tr) + obj := objects.NewTaskRunObjectV1(tr) // Check to see if it has already been signed. 
if signing.Reconciled(ctx, r.Pipelineclientset, obj) { diff --git a/pkg/reconciler/taskrun/taskrun_test.go b/pkg/reconciler/taskrun/taskrun_test.go index d0e8a2b36c..6337fc218b 100644 --- a/pkg/reconciler/taskrun/taskrun_test.go +++ b/pkg/reconciler/taskrun/taskrun_test.go @@ -22,10 +22,10 @@ import ( "github.com/tektoncd/chains/pkg/config" "github.com/tektoncd/chains/pkg/internal/mocksigner" "github.com/tektoncd/chains/pkg/test/tekton" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" - informers "github.com/tektoncd/pipeline/pkg/client/informers/externalversions/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" + informers "github.com/tektoncd/pipeline/pkg/client/informers/externalversions/pipeline/v1" fakepipelineclient "github.com/tektoncd/pipeline/pkg/client/injection/client/fake" - faketaskruninformer "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/taskrun/fake" + faketaskruninformer "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1/taskrun/fake" corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/types" @@ -42,17 +42,17 @@ func TestReconciler_Reconcile(t *testing.T) { tests := []struct { name string key string - taskRuns []*v1beta1.TaskRun + taskRuns []*v1.TaskRun }{ { name: "no taskruns", key: "foo/bar", - taskRuns: []*v1beta1.TaskRun{}, + taskRuns: []*v1.TaskRun{}, }, { name: "found taskrun", key: "foo/bar", - taskRuns: []*v1beta1.TaskRun{ + taskRuns: []*v1.TaskRun{ { ObjectMeta: metav1.ObjectMeta{ Name: "bar", @@ -74,7 +74,7 @@ func TestReconciler_Reconcile(t *testing.T) { Name: config.ChainsConfig, }, }) - ctl := NewController(ctx, configMapWatcher) + ctl := NewControllerV1(ctx, configMapWatcher) if la, ok := ctl.Reconciler.(pkgreconciler.LeaderAware); ok { if err := la.Promote(pkgreconciler.UniversalBucket(), func(pkgreconciler.Bucket, types.NamespacedName) {}); err != nil { @@ -89,13 +89,13 @@ func 
TestReconciler_Reconcile(t *testing.T) { } } -func setupData(ctx context.Context, t *testing.T, trs []*v1beta1.TaskRun) informers.TaskRunInformer { +func setupData(ctx context.Context, t *testing.T, trs []*v1.TaskRun) informers.TaskRunInformer { tri := faketaskruninformer.Get(ctx) c := fakepipelineclient.Get(ctx) for _, ta := range trs { ta := ta.DeepCopy() // Avoid assumptions that the informer's copy is modified. - if _, err := c.TektonV1beta1().TaskRuns(ta.Namespace).Create(ctx, ta, metav1.CreateOptions{}); err != nil { + if _, err := c.TektonV1().TaskRuns(ta.Namespace).Create(ctx, ta, metav1.CreateOptions{}); err != nil { t.Fatal(err) } } @@ -107,16 +107,16 @@ func TestReconciler_handleTaskRun(t *testing.T) { tests := []struct { name string - tr *v1beta1.TaskRun + tr *v1.TaskRun shouldSign bool }{ { name: "complete, already signed", - tr: &v1beta1.TaskRun{ + tr: &v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ Annotations: map[string]string{signing.ChainsAnnotation: "true"}, }, - Status: v1beta1.TaskRunStatus{ + Status: v1.TaskRunStatus{ Status: duckv1.Status{ Conditions: []apis.Condition{{Type: apis.ConditionSucceeded}}, }}, @@ -125,11 +125,11 @@ func TestReconciler_handleTaskRun(t *testing.T) { }, { name: "complete, not already signed", - tr: &v1beta1.TaskRun{ + tr: &v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ Annotations: map[string]string{}, }, - Status: v1beta1.TaskRunStatus{ + Status: v1.TaskRunStatus{ Status: duckv1.Status{ Conditions: []apis.Condition{{Type: apis.ConditionSucceeded}}, }}, @@ -138,11 +138,11 @@ func TestReconciler_handleTaskRun(t *testing.T) { }, { name: "not complete, not already signed", - tr: &v1beta1.TaskRun{ + tr: &v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ Annotations: map[string]string{}, }, - Status: v1beta1.TaskRunStatus{ + Status: v1.TaskRunStatus{ Status: duckv1.Status{ Conditions: []apis.Condition{}, }}, @@ -155,12 +155,13 @@ func TestReconciler_handleTaskRun(t *testing.T) { signer := &mocksigner.Signer{} ctx, _ := 
rtesting.SetupFakeContext(t) c := fakepipelineclient.Get(ctx) - tekton.CreateObject(t, ctx, c, objects.NewTaskRunObject(tt.tr)) + tekton.CreateObject(t, ctx, c, objects.NewTaskRunObjectV1(tt.tr)) - r := &Reconciler{ + r := &ReconcilerV1{ TaskRunSigner: signer, Pipelineclientset: c, } + ctx = config.ToContext(ctx, &config.Config{}) if err := r.ReconcileKind(ctx, tt.tr); err != nil { t.Errorf("Reconciler.handleTaskRun() error = %v", err) } diff --git a/pkg/test/tekton/tekton.go b/pkg/test/tekton/tekton.go index d74bf1461c..dd2014fe31 100644 --- a/pkg/test/tekton/tekton.go +++ b/pkg/test/tekton/tekton.go @@ -19,6 +19,7 @@ import ( "testing" "github.com/tektoncd/chains/pkg/chains/objects" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" pipelineclientset "github.com/tektoncd/pipeline/pkg/client/clientset/versioned" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" @@ -27,28 +28,47 @@ import ( func CreateObject(t *testing.T, ctx context.Context, ps pipelineclientset.Interface, obj objects.TektonObject) objects.TektonObject { switch o := obj.GetObject().(type) { - case *v1beta1.PipelineRun: + case *v1.PipelineRun: + pr, err := ps.TektonV1().PipelineRuns(obj.GetNamespace()).Create(ctx, o, metav1.CreateOptions{}) + if err != nil { + t.Fatalf("error creating pipelinerun: %v", err) + } + return objects.NewPipelineRunObjectV1(pr) + case *v1.TaskRun: + tr, err := ps.TektonV1().TaskRuns(obj.GetNamespace()).Create(ctx, o, metav1.CreateOptions{}) + if err != nil { + t.Fatalf("error creating taskrun: %v", err) + } + return objects.NewTaskRunObjectV1(tr) + case *v1beta1.PipelineRun: //nolint:staticcheck pr, err := ps.TektonV1beta1().PipelineRuns(obj.GetNamespace()).Create(ctx, o, metav1.CreateOptions{}) if err != nil { t.Fatalf("error creating pipelinerun: %v", err) } - return objects.NewPipelineRunObject(pr) - case *v1beta1.TaskRun: + return objects.NewPipelineRunObjectV1Beta1(pr) + case *v1beta1.TaskRun: 
//nolint:staticcheck tr, err := ps.TektonV1beta1().TaskRuns(obj.GetNamespace()).Create(ctx, o, metav1.CreateOptions{}) if err != nil { t.Fatalf("error creating taskrun: %v", err) } - return objects.NewTaskRunObject(tr) + return objects.NewTaskRunObjectV1Beta1(tr) } return nil } // Passing in TektonObject since it encapsulates namespace, name, and type. func GetObject(t *testing.T, ctx context.Context, ps pipelineclientset.Interface, obj objects.TektonObject) (objects.TektonObject, error) { + if obj == nil { + t.Fatalf("nil object received %T", obj.GetObject()) + } switch obj.GetObject().(type) { - case *v1beta1.PipelineRun: + case *v1.PipelineRun: return GetPipelineRun(t, ctx, ps, obj.GetNamespace(), obj.GetName()) - case *v1beta1.TaskRun: + case *v1.TaskRun: + return GetTaskRun(t, ctx, ps, obj.GetNamespace(), obj.GetName()) + case *v1beta1.PipelineRun: //nolint:staticcheck + return GetPipelineRun(t, ctx, ps, obj.GetNamespace(), obj.GetName()) + case *v1beta1.TaskRun: //nolint:staticcheck return GetTaskRun(t, ctx, ps, obj.GetNamespace(), obj.GetName()) } t.Fatalf("unknown object type %T", obj.GetObject()) @@ -56,29 +76,39 @@ func GetObject(t *testing.T, ctx context.Context, ps pipelineclientset.Interface } func GetPipelineRun(t *testing.T, ctx context.Context, ps pipelineclientset.Interface, namespace, name string) (objects.TektonObject, error) { - pr, err := ps.TektonV1beta1().PipelineRuns(namespace).Get(ctx, name, metav1.GetOptions{}) + pr, err := ps.TektonV1().PipelineRuns(namespace).Get(ctx, name, metav1.GetOptions{}) if err != nil { t.Fatalf("error getting pipelinerun: %v", err) } - return objects.NewPipelineRunObject(pr), nil + return objects.NewPipelineRunObjectV1(pr), nil } func GetTaskRun(t *testing.T, ctx context.Context, ps pipelineclientset.Interface, namespace, name string) (objects.TektonObject, error) { - tr, err := ps.TektonV1beta1().TaskRuns(namespace).Get(ctx, name, metav1.GetOptions{}) + tr, err := ps.TektonV1().TaskRuns(namespace).Get(ctx, name, 
metav1.GetOptions{}) if err != nil { t.Fatalf("error getting taskrun: %v", err) } - return objects.NewTaskRunObject(tr), nil + return objects.NewTaskRunObjectV1(tr), nil } func WatchObject(t *testing.T, ctx context.Context, ps pipelineclientset.Interface, obj objects.TektonObject) (watch.Interface, error) { switch o := obj.GetObject().(type) { - case *v1beta1.PipelineRun: + case *v1.PipelineRun: + return ps.TektonV1().PipelineRuns(obj.GetNamespace()).Watch(ctx, metav1.SingleObject(metav1.ObjectMeta{ + Name: o.GetName(), + Namespace: o.GetNamespace(), + })) + case *v1.TaskRun: + return ps.TektonV1().TaskRuns(obj.GetNamespace()).Watch(ctx, metav1.SingleObject(metav1.ObjectMeta{ + Name: o.GetName(), + Namespace: o.GetNamespace(), + })) + case *v1beta1.PipelineRun: //nolint:staticcheck return ps.TektonV1beta1().PipelineRuns(obj.GetNamespace()).Watch(ctx, metav1.SingleObject(metav1.ObjectMeta{ Name: o.GetName(), Namespace: o.GetNamespace(), })) - case *v1beta1.TaskRun: + case *v1beta1.TaskRun: //nolint:staticcheck return ps.TektonV1beta1().TaskRuns(obj.GetNamespace()).Watch(ctx, metav1.SingleObject(metav1.ObjectMeta{ Name: o.GetName(), Namespace: o.GetNamespace(), diff --git a/test/clients.go b/test/clients.go index d6138af32f..da51ef6c1b 100644 --- a/test/clients.go +++ b/test/clients.go @@ -110,7 +110,7 @@ func setup(ctx context.Context, t *testing.T, opts setupOpts) (*clients, string, imageDest := fmt.Sprintf("%s/%s", c.internalRegistry, opts.kanikoTaskImage) t.Logf("Creating Kaniko task referencing image %s", imageDest) task := kanikoTask(t, namespace, imageDest) - if _, err := c.PipelineClient.TektonV1beta1().Tasks(namespace).Create(ctx, task, metav1.CreateOptions{}); err != nil { + if _, err := c.PipelineClient.TektonV1().Tasks(namespace).Create(ctx, task, metav1.CreateOptions{}); err != nil { t.Fatalf("error creating task: %s", err) } } diff --git a/test/e2e_test.go b/test/e2e_test.go index 1f8c135ed4..96e08e1dc8 100644 --- a/test/e2e_test.go +++ 
b/test/e2e_test.go @@ -21,6 +21,7 @@ package test import ( "bytes" + "context" "crypto" "crypto/ecdsa" "encoding/base64" @@ -45,6 +46,7 @@ import ( "github.com/tektoncd/chains/pkg/chains/objects" "github.com/tektoncd/chains/pkg/chains/provenance" "github.com/tektoncd/chains/pkg/test/tekton" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" "github.com/tektoncd/pipeline/pkg/apis/resource/v1alpha1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" @@ -214,10 +216,13 @@ func TestOCISigning(t *testing.T) { t.Cleanup(cleanup) // Setup the right config. - resetConfig := setConfigMap(ctx, t, c, map[string]string{"artifacts.oci.storage": "tekton", "artifacts.taskrun.format": "in-toto"}) + resetConfig := setConfigMap(ctx, t, c, map[string]string{ + "artifacts.oci.storage": "tekton", + "artifacts.taskrun.format": "in-toto", + }) t.Cleanup(resetConfig) - tro := getTaskRunObject(ns) + tro := getTaskRunObjectV1(ns) createdTro := tekton.CreateObject(t, ctx, c.PipelineClient, tro) @@ -234,8 +239,15 @@ func TestOCISigning(t *testing.T) { // Let's fetch the signature and body: t.Log(obj.GetAnnotations()) + if _, ok := obj.GetAnnotations()["chains.tekton.dev/signature-586789aa031f"]; !ok { + t.Fatal("TaskRun missing expected signature annotation: chains.tekton.dev/signature-586789aa031f") + } + if _, ok := obj.GetAnnotations()["chains.tekton.dev/payload-586789aa031f"]; !ok { + t.Fatal("TaskRun missing expected payload annotation: chains.tekton.dev/payload-586789aa031f") + } - sig, body := obj.GetAnnotations()["chains.tekton.dev/signature-05f95b26ed10"], obj.GetAnnotations()["chains.tekton.dev/payload-05f95b26ed10"] + sig, body := obj.GetAnnotations()["chains.tekton.dev/signature-586789aa031f"], + obj.GetAnnotations()["chains.tekton.dev/payload-586789aa031f"] // base64 decode them sigBytes, err := base64.StdEncoding.DecodeString(sig) if err != nil { @@ -417,7 +429,7 @@ func TestOCIStorage(t *testing.T) { imageName := 
"chains-test-oci-storage" image := fmt.Sprintf("%s/%s", c.internalRegistry, imageName) task := kanikoTask(t, ns, image) - if _, err := c.PipelineClient.TektonV1beta1().Tasks(ns).Create(ctx, task, metav1.CreateOptions{}); err != nil { + if _, err := c.PipelineClient.TektonV1().Tasks(ns).Create(ctx, task, metav1.CreateOptions{}); err != nil { t.Fatalf("error creating task: %s", err) } @@ -537,7 +549,7 @@ func TestRetryFailed(t *testing.T) { registry: true, kanikoTaskImage: "chains-test-tr-retryfailed", }, - getObject: getTaskRunObject, + getObject: getTaskRunObjectV1, }, { name: "pipelinerun", @@ -635,30 +647,61 @@ var imageTaskRun = v1beta1.TaskRun{ } func getTaskRunObject(ns string) objects.TektonObject { - o := objects.NewTaskRunObject(&imageTaskRun) + trV1 := &v1.TaskRun{} + imageTaskRun.ConvertTo(context.Background(), trV1) + o := objects.NewTaskRunObjectV1(trV1) + o.Namespace = ns + return o +} + +func getTaskRunObjectWithParams(ns string, params []v1.Param) objects.TektonObject { + trV1 := &v1.TaskRun{} + imageTaskRun.ConvertTo(context.Background(), trV1) + o := objects.NewTaskRunObjectV1(trV1) + o.Namespace = ns + o.Spec.Params = params + return o +} + +func taskRunFromFile(f string) (*v1.TaskRun, error) { + contents, err := os.ReadFile(f) + if err != nil { + return nil, err + } + var tr v1.TaskRun + if err := json.Unmarshal(contents, &tr); err != nil { + return nil, err + } + return &tr, nil +} + +func getTaskRunObjectV1(ns string) objects.TektonObject { + tr, _ := taskRunFromFile("testdata/type-hinting/taskrun.json") + o := objects.NewTaskRunObjectV1(tr) o.Namespace = ns return o } -func getTaskRunObjectWithParams(ns string, params []v1beta1.Param) objects.TektonObject { - o := objects.NewTaskRunObject(&imageTaskRun) +func getTaskRunObjectV1WithParams(ns string, params []v1.Param) objects.TektonObject { + tr, _ := taskRunFromFile("testdata/type-hinting/taskrun.json") + o := objects.NewTaskRunObjectV1(tr) o.Namespace = ns o.Spec.Params = params return o } 
-var imagePipelineRun = v1beta1.PipelineRun{ +var imagePipelineRun = v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ GenerateName: "image-pipelinerun", Annotations: map[string]string{chains.RekorAnnotation: "true"}, }, - Spec: v1beta1.PipelineRunSpec{ - PipelineSpec: &v1beta1.PipelineSpec{ - Tasks: []v1beta1.PipelineTask{{ + Spec: v1.PipelineRunSpec{ + PipelineSpec: &v1.PipelineSpec{ + Tasks: []v1.PipelineTask{{ Name: "echo", - TaskSpec: &v1beta1.EmbeddedTask{ - TaskSpec: v1beta1.TaskSpec{ - Steps: []v1beta1.Step{ + TaskSpec: &v1.EmbeddedTask{ + TaskSpec: v1.TaskSpec{ + Steps: []v1.Step{ { Image: "busybox", Script: "echo success", @@ -672,13 +715,13 @@ var imagePipelineRun = v1beta1.PipelineRun{ } func getPipelineRunObject(ns string) objects.TektonObject { - o := objects.NewPipelineRunObject(&imagePipelineRun) + o := objects.NewPipelineRunObjectV1(&imagePipelineRun) o.Namespace = ns return o } -func getPipelineRunObjectWithParams(ns string, params []v1beta1.Param) objects.TektonObject { - o := objects.NewPipelineRunObject(&imagePipelineRun) +func getPipelineRunObjectWithParams(ns string, params []v1.Param) objects.TektonObject { + o := objects.NewPipelineRunObjectV1(&imagePipelineRun) o.Namespace = ns o.Spec.Params = params return o @@ -688,7 +731,7 @@ func TestProvenanceMaterials(t *testing.T) { tests := []struct { name string cm map[string]string - getObjectWithParams func(ns string, params []v1beta1.Param) objects.TektonObject + getObjectWithParams func(ns string, params []v1.Param) objects.TektonObject payloadKey string }{ { @@ -725,10 +768,10 @@ func TestProvenanceMaterials(t *testing.T) { commit := "my-git-commit" url := "https://my-git-url" - params := []v1beta1.Param{{ - Name: "CHAINS-GIT_COMMIT", Value: *v1beta1.NewStructuredValues(commit), + params := []v1.Param{{ + Name: "CHAINS-GIT_COMMIT", Value: *v1.NewStructuredValues(commit), }, { - Name: "CHAINS-GIT_URL", Value: *v1beta1.NewStructuredValues(url), + Name: "CHAINS-GIT_URL", Value: 
*v1.NewStructuredValues(url), }} obj := test.getObjectWithParams(ns, params) @@ -774,9 +817,9 @@ func TestProvenanceMaterials(t *testing.T) { } if test.name == "pipelinerun" { - pr := signedObj.GetObject().(*v1beta1.PipelineRun) + pr := signedObj.GetObject().(*v1.PipelineRun) for _, cr := range pr.Status.ChildReferences { - taskRun, err := c.PipelineClient.TektonV1beta1().TaskRuns(ns).Get(ctx, cr.Name, metav1.GetOptions{}) + taskRun, err := c.PipelineClient.TektonV1().TaskRuns(ns).Get(ctx, cr.Name, metav1.GetOptions{}) if err != nil { t.Errorf("Did not expect an error but got %v", err) } @@ -790,7 +833,7 @@ func TestProvenanceMaterials(t *testing.T) { } } } else { - tr := signedObj.GetObject().(*v1beta1.TaskRun) + tr := signedObj.GetObject().(*v1.TaskRun) for _, step := range tr.Status.Steps { want = append(want, provenance.ProvenanceMaterial{ URI: artifacts.OCIScheme + "" + strings.Split(step.ImageID, "@")[0], diff --git a/test/examples_test.go b/test/examples_test.go index dcce933ac2..702d2f780a 100644 --- a/test/examples_test.go +++ b/test/examples_test.go @@ -46,7 +46,7 @@ import ( "github.com/tektoncd/chains/pkg/chains/objects" "github.com/tektoncd/chains/pkg/test/tekton" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" "sigs.k8s.io/yaml" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" @@ -71,64 +71,88 @@ type TestExample struct { // https://github.com/tektoncd/pipeline/blob/main/test/examples_test.go func TestExamples(t *testing.T) { tests := []TestExample{ + // { + // name: "taskrun-examples-slsa-v1", + // cm: map[string]string{ + // "artifacts.taskrun.format": "slsa/v1", + // "artifacts.oci.storage": "tekton", + // }, + // getExampleObjects: getTaskRunExamples, + // payloadKey: "chains.tekton.dev/payload-taskrun-%s", + // signatureKey: "chains.tekton.dev/signature-taskrun-%s", + // outputLocation: "slsa/v1", + // predicate: "slsav0.1", + // }, + // { + // name: "pipelinerun-examples-slsa-v1", 
+ // cm: map[string]string{ + // "artifacts.pipelinerun.format": "slsa/v1", + // "artifacts.pipelinerun.storage": "tekton", + // }, + // getExampleObjects: getPipelineRunExamples, + // payloadKey: "chains.tekton.dev/payload-pipelinerun-%s", + // signatureKey: "chains.tekton.dev/signature-pipelinerun-%s", + // outputLocation: "slsa/v1", + // predicate: "slsav0.1", + // }, + // { + // name: "taskrun-examples-slsa-v2", + // cm: map[string]string{ + // "artifacts.taskrun.format": "slsa/v2alpha1", + // "artifacts.oci.storage": "tekton", + // }, + // getExampleObjects: getTaskRunExamples, + // payloadKey: "chains.tekton.dev/payload-taskrun-%s", + // signatureKey: "chains.tekton.dev/signature-taskrun-%s", + // outputLocation: "slsa/v2", + // predicate: "slsav0.2", + // }, + // { + // name: "taskrun-examples-slsa-v2alpha2", + // cm: map[string]string{ + // "artifacts.taskrun.format": "slsa/v2alpha2", + // "artifacts.oci.storage": "tekton", + // }, + // getExampleObjects: getTaskRunExamples, + // payloadKey: "chains.tekton.dev/payload-taskrun-%s", + // signatureKey: "chains.tekton.dev/signature-taskrun-%s", + // outputLocation: "slsa/v2alpha2", + // predicate: "slsav1.0", + // }, + // { + // name: "pipelinerun-examples-slsa-v2alpha2", + // cm: map[string]string{ + // "artifacts.pipelinerun.format": "slsa/v2alpha2", + // "artifacts.oci.storage": "tekton", + // }, + // getExampleObjects: getPipelineRunExamples, + // payloadKey: "chains.tekton.dev/payload-pipelinerun-%s", + // signatureKey: "chains.tekton.dev/signature-pipelinerun-%s", + // outputLocation: "slsa/v2alpha2", + // predicate: "slsav1.0", + // }, { - name: "taskrun-examples-slsa-v1", + name: "taskrun-examples-slsa-v2alpha3", cm: map[string]string{ - "artifacts.taskrun.format": "slsa/v1", + "artifacts.taskrun.format": "slsa/v2alpha3", "artifacts.oci.storage": "tekton", }, getExampleObjects: getTaskRunExamples, payloadKey: "chains.tekton.dev/payload-taskrun-%s", signatureKey: "chains.tekton.dev/signature-taskrun-%s", 
- outputLocation: "slsa/v1", - predicate: "slsav0.1", - }, - { - name: "pipelinerun-examples-slsa-v1", - cm: map[string]string{ - "artifacts.pipelinerun.format": "slsa/v1", - "artifacts.pipelinerun.storage": "tekton", - }, - getExampleObjects: getPipelineRunExamples, - payloadKey: "chains.tekton.dev/payload-pipelinerun-%s", - signatureKey: "chains.tekton.dev/signature-pipelinerun-%s", - outputLocation: "slsa/v1", - predicate: "slsav0.1", - }, - { - name: "taskrun-examples-slsa-v2", - cm: map[string]string{ - "artifacts.taskrun.format": "slsa/v2alpha1", - "artifacts.oci.storage": "tekton", - }, - getExampleObjects: getTaskRunExamples, - payloadKey: "chains.tekton.dev/payload-taskrun-%s", - signatureKey: "chains.tekton.dev/signature-taskrun-%s", - outputLocation: "slsa/v2", - predicate: "slsav0.2", - }, - { - name: "taskrun-examples-slsa-v2alpha2", - cm: map[string]string{ - "artifacts.taskrun.format": "slsa/v2alpha2", - "artifacts.oci.storage": "tekton", - }, - getExampleObjects: getTaskRunExamples, - payloadKey: "chains.tekton.dev/payload-taskrun-%s", - signatureKey: "chains.tekton.dev/signature-taskrun-%s", - outputLocation: "slsa/v2alpha2", + outputLocation: "slsa/v2alpha3", predicate: "slsav1.0", }, { - name: "pipelinerun-examples-slsa-v2alpha2", + name: "pipelinerun-examples-slsa-v2alpha3", cm: map[string]string{ - "artifacts.pipelinerun.format": "slsa/v2alpha2", + "artifacts.pipelinerun.format": "slsa/v2alpha3", "artifacts.oci.storage": "tekton", }, getExampleObjects: getPipelineRunExamples, payloadKey: "chains.tekton.dev/payload-pipelinerun-%s", signatureKey: "chains.tekton.dev/signature-pipelinerun-%s", - outputLocation: "slsa/v2alpha2", + outputLocation: "slsa/v2alpha3", predicate: "slsav1.0", }, } @@ -250,10 +274,10 @@ func (v *verifier) Public() crypto.PublicKey { func expectedProvenanceSLSA1(t *testing.T, ctx context.Context, example string, obj objects.TektonObject, outputLocation string, ns string, c *clients) intoto.ProvenanceStatementSLSA1 { switch 
obj.(type) { - case *objects.TaskRunObject: + case *objects.TaskRunObjectV1: f := expectedTaskRunProvenanceFormat(t, example, obj, outputLocation) return expectedAttestationSLSA1(t, example, f, outputLocation) - case *objects.PipelineRunObject: + case *objects.PipelineRunObjectV1: f := expectedPipelineRunProvenanceFormat(t, ctx, example, obj, outputLocation, ns, c) return expectedAttestationSLSA1(t, example, f, outputLocation) default: @@ -264,10 +288,10 @@ func expectedProvenanceSLSA1(t *testing.T, ctx context.Context, example string, func expectedProvenance(t *testing.T, ctx context.Context, example string, obj objects.TektonObject, outputLocation string, ns string, c *clients) intoto.ProvenanceStatement { switch obj.(type) { - case *objects.TaskRunObject: + case *objects.TaskRunObjectV1: f := expectedTaskRunProvenanceFormat(t, example, obj, outputLocation) return expectedAttestation(t, example, f, outputLocation) - case *objects.PipelineRunObject: + case *objects.PipelineRunObjectV1: f := expectedPipelineRunProvenanceFormat(t, ctx, example, obj, outputLocation, ns, c) return expectedAttestation(t, example, f, outputLocation) default: @@ -294,7 +318,7 @@ type Format struct { } func expectedTaskRunProvenanceFormat(t *testing.T, example string, obj objects.TektonObject, outputLocation string) Format { - tr := obj.GetObject().(*v1beta1.TaskRun) + tr := obj.GetObject().(*v1.TaskRun) name := tr.Name if tr.Spec.TaskRef != nil { @@ -330,7 +354,7 @@ func expectedTaskRunProvenanceFormat(t *testing.T, example string, obj objects.T } func expectedPipelineRunProvenanceFormat(t *testing.T, ctx context.Context, example string, obj objects.TektonObject, outputLocation string, ns string, c *clients) Format { - pr := obj.GetObject().(*v1beta1.PipelineRun) + pr := obj.GetObject().(*v1.PipelineRun) buildStartTimes := []string{} buildFinishedTimes := []string{} @@ -338,7 +362,7 @@ func expectedPipelineRunProvenanceFormat(t *testing.T, ctx context.Context, exam uriDigestSet := 
make(map[string]bool) for _, cr := range pr.Status.ChildReferences { - taskRun, err := c.PipelineClient.TektonV1beta1().TaskRuns(ns).Get(ctx, cr.Name, metav1.GetOptions{}) + taskRun, err := c.PipelineClient.TektonV1().TaskRuns(ns).Get(ctx, cr.Name, metav1.GetOptions{}) if err != nil { t.Errorf("Did not expect an error but got %v", err) } @@ -465,12 +489,12 @@ func taskRunFromExample(t *testing.T, ns, example string) objects.TektonObject { if err != nil { t.Fatal(err) } - var tr *v1beta1.TaskRun + var tr *v1.TaskRun if err := yaml.Unmarshal(contents, &tr); err != nil { t.Fatal(err) } tr.Namespace = ns - return objects.NewTaskRunObject(tr) + return objects.NewTaskRunObjectV1(tr) } func pipelineRunFromExample(t *testing.T, ns, example string) objects.TektonObject { @@ -478,12 +502,12 @@ func pipelineRunFromExample(t *testing.T, ns, example string) objects.TektonObje if err != nil { t.Fatal(err) } - var pr *v1beta1.PipelineRun + var pr *v1.PipelineRun if err := yaml.Unmarshal(contents, &pr); err != nil { t.Fatal(err) } pr.Namespace = ns - return objects.NewPipelineRunObject(pr) + return objects.NewPipelineRunObjectV1(pr) } func ignoreEnvironmentAnnotationsAndLabels(key string, value any) bool { diff --git a/test/kaniko.go b/test/kaniko.go index 401b1cdde0..4d9591aeae 100644 --- a/test/kaniko.go +++ b/test/kaniko.go @@ -23,76 +23,76 @@ import ( "github.com/google/go-containerregistry/pkg/name" "github.com/tektoncd/chains/pkg/chains" "github.com/tektoncd/chains/pkg/chains/objects" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" - v1 "k8s.io/api/core/v1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" + corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" ) const taskName = "kaniko-task" func kanikoPipelineRun(ns string) objects.TektonObject { - imagePipelineRun := v1beta1.PipelineRun{ + imagePipelineRun := v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ GenerateName: "image-pipelinerun", Namespace: ns, Annotations: 
map[string]string{chains.RekorAnnotation: "true"}, }, - Spec: v1beta1.PipelineRunSpec{ - PipelineSpec: &v1beta1.PipelineSpec{ - Tasks: []v1beta1.PipelineTask{{ + Spec: v1.PipelineRunSpec{ + PipelineSpec: &v1.PipelineSpec{ + Tasks: []v1.PipelineTask{{ Name: "kaniko", - TaskRef: &v1beta1.TaskRef{ + TaskRef: &v1.TaskRef{ Name: "kaniko-task", - Kind: v1beta1.NamespacedTaskKind, + Kind: v1.NamespacedTaskKind, }, }}, - Results: []v1beta1.PipelineResult{{ + Results: []v1.PipelineResult{{ Name: "IMAGE_URL", - Value: *v1beta1.NewStructuredValues("$(tasks.kaniko.results.IMAGE_URL)"), + Value: *v1.NewStructuredValues("$(tasks.kaniko.results.IMAGE_URL)"), }, { Name: "IMAGE_DIGEST", - Value: *v1beta1.NewStructuredValues("$(tasks.kaniko.results.IMAGE_DIGEST)"), + Value: *v1.NewStructuredValues("$(tasks.kaniko.results.IMAGE_DIGEST)"), }}, }, }, } - return objects.NewPipelineRunObject(&imagePipelineRun) + return objects.NewPipelineRunObjectV1(&imagePipelineRun) } func kanikoTaskRun(namespace string) objects.TektonObject { - tr := &v1beta1.TaskRun{ + tr := &v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ GenerateName: "kaniko-taskrun", Namespace: namespace, }, - Spec: v1beta1.TaskRunSpec{ - TaskRef: &v1beta1.TaskRef{ + Spec: v1.TaskRunSpec{ + TaskRef: &v1.TaskRef{ Name: taskName, }, }, } - return objects.NewTaskRunObject(tr) + return objects.NewTaskRunObjectV1(tr) } -func kanikoTask(t *testing.T, namespace, destinationImage string) *v1beta1.Task { +func kanikoTask(t *testing.T, namespace, destinationImage string) *v1.Task { ref, err := name.ParseReference(destinationImage) if err != nil { t.Fatalf("unable to parse image name: %v", err) } - return &v1beta1.Task{ + return &v1.Task{ ObjectMeta: metav1.ObjectMeta{ Name: taskName, Namespace: namespace, }, - Spec: v1beta1.TaskSpec{ - Results: []v1beta1.TaskResult{ + Spec: v1.TaskSpec{ + Results: []v1.TaskResult{ {Name: "IMAGE_URL"}, {Name: "IMAGE_DIGEST"}, }, - Steps: []v1beta1.Step{{ + Steps: []v1.Step{{ Name: "create-dockerfile", Image: 
"bash:latest", - VolumeMounts: []v1.VolumeMount{{ + VolumeMounts: []corev1.VolumeMount{{ Name: "dockerfile", MountPath: "/dockerfile", }}, @@ -109,23 +109,23 @@ func kanikoTask(t *testing.T, namespace, destinationImage string) *v1beta1.Task // Need this to push the image to the insecure registry "--insecure", }, - VolumeMounts: []v1.VolumeMount{{ + VolumeMounts: []corev1.VolumeMount{{ Name: "dockerfile", MountPath: "/dockerfile", }}, }, { Name: "save-image-url", Image: "bash:latest", - VolumeMounts: []v1.VolumeMount{{ + VolumeMounts: []corev1.VolumeMount{{ Name: "dockerfile", MountPath: "/dockerfile", }}, Script: fmt.Sprintf("#!/usr/bin/env bash\necho %s | tee $(results.IMAGE_URL.path)", ref.String()), }, }, - Volumes: []v1.Volume{{ + Volumes: []corev1.Volume{{ Name: "dockerfile", - VolumeSource: v1.VolumeSource{EmptyDir: &v1.EmptyDirVolumeSource{}}, + VolumeSource: corev1.VolumeSource{EmptyDir: &corev1.EmptyDirVolumeSource{}}, }}, }, } @@ -144,14 +144,14 @@ cosign verify --allow-insecure-registry --key cosign.pub %s cosign verify-attestation --allow-insecure-registry --key cosign.pub %s` script = fmt.Sprintf(script, publicKey, destinationImage, destinationImage) - return objects.NewTaskRunObject(&v1beta1.TaskRun{ + return objects.NewTaskRunObjectV1(&v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ GenerateName: "verify-kaniko-taskrun", Namespace: namespace, }, - Spec: v1beta1.TaskRunSpec{ - TaskSpec: &v1beta1.TaskSpec{ - Steps: []v1beta1.Step{{ + Spec: v1.TaskRunSpec{ + TaskSpec: &v1.TaskSpec{ + Steps: []v1.Step{{ Name: "verify-image", Image: "gcr.io/projectsigstore/cosign/ci/cosign:d764e8b89934dc1043bd1b13112a66641c63a038@sha256:228c37f9f37415efbd6a4ff16aae81197206ce1410a227bcab8ac8b039b36237", Script: script, diff --git a/test/test_utils.go b/test/test_utils.go index 6d322a7fde..392fe978ad 100644 --- a/test/test_utils.go +++ b/test/test_utils.go @@ -35,7 +35,7 @@ import ( chainsstorage "github.com/tektoncd/chains/pkg/chains/storage" 
"github.com/tektoncd/chains/pkg/config" "github.com/tektoncd/chains/pkg/test/tekton" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" pipelineclientset "github.com/tektoncd/pipeline/pkg/client/clientset/versioned" corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" @@ -44,9 +44,9 @@ import ( "k8s.io/client-go/kubernetes" ) -func getTr(ctx context.Context, t *testing.T, c pipelineclientset.Interface, name, ns string) (tr *v1beta1.TaskRun) { +func getTr(ctx context.Context, t *testing.T, c pipelineclientset.Interface, name, ns string) (tr *v1.TaskRun) { t.Helper() - tr, err := c.TektonV1beta1().TaskRuns(ns).Get(ctx, name, metav1.GetOptions{}) + tr, err := c.TektonV1().TaskRuns(ns).Get(ctx, name, metav1.GetOptions{}) if err != nil { t.Error(err) } @@ -116,16 +116,16 @@ func signed(obj objects.TektonObject) bool { return ok } -var simpleTaskspec = v1beta1.TaskSpec{ - Steps: []v1beta1.Step{{ +var simpleTaskspec = v1.TaskSpec{ + Steps: []v1.Step{{ Image: "busybox", Script: "echo true", }}, } -var simpleTaskRun = v1beta1.TaskRun{ +var simpleTaskRun = v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{GenerateName: "test-task-"}, - Spec: v1beta1.TaskRunSpec{TaskSpec: &simpleTaskspec}, + Spec: v1.TaskRunSpec{TaskSpec: &simpleTaskspec}, } func makeBucket(t *testing.T, client *storage.Client) (string, func()) { @@ -216,11 +216,11 @@ func printDebugging(t *testing.T, obj objects.TektonObject) { kind := obj.GetObjectKind().GroupVersionKind().Kind t.Logf("============================== %s logs ==============================", obj.GetGVK()) - output, _ := exec.Command("tkn", kind, "logs", "-n", obj.GetNamespace(), obj.GetName()).CombinedOutput() + output, _ := exec.Command("tkn", strings.ToLower(kind), "logs", "-n", obj.GetNamespace(), obj.GetName()).CombinedOutput() t.Log(string(output)) t.Logf("============================== %s describe ==============================", obj.GetGVK()) - output, _ = 
exec.Command("tkn", kind, "describe", "-n", obj.GetNamespace(), obj.GetName()).CombinedOutput() + output, _ = exec.Command("tkn", strings.ToLower(kind), "describe", "-n", obj.GetNamespace(), obj.GetName()).CombinedOutput() t.Log(string(output)) t.Log("============================== chains controller logs ==============================") @@ -251,10 +251,16 @@ func verifySignature(ctx context.Context, t *testing.T, c *clients, obj objects. var configuredBackends []string var key string switch obj.GetObject().(type) { - case *objects.TaskRunObject: + case *objects.TaskRunObjectV1: configuredBackends = sets.List[string](cfg.Artifacts.TaskRuns.StorageBackend) key = fmt.Sprintf("taskrun-%s", obj.GetUID()) - case *objects.PipelineRunObject: + case *objects.PipelineRunObjectV1: + configuredBackends = sets.List[string](cfg.Artifacts.PipelineRuns.StorageBackend) + key = fmt.Sprintf("pipelinerun-%s", obj.GetUID()) + case *objects.TaskRunObjectV1Beta1: + configuredBackends = sets.List[string](cfg.Artifacts.TaskRuns.StorageBackend) + key = fmt.Sprintf("taskrun-%s", obj.GetUID()) + case *objects.PipelineRunObjectV1Beta1: configuredBackends = sets.List[string](cfg.Artifacts.PipelineRuns.StorageBackend) key = fmt.Sprintf("pipelinerun-%s", obj.GetUID()) } diff --git a/test/testdata/slsa/v2alpha2/pipeline-output-image.json b/test/testdata/slsa/v2alpha2/pipeline-output-image.json index b7da1c6273..67a67f4f2c 100644 --- a/test/testdata/slsa/v2alpha2/pipeline-output-image.json +++ b/test/testdata/slsa/v2alpha2/pipeline-output-image.json @@ -77,7 +77,9 @@ } ] }, - "timeout": "1h0m0s" + "timeouts": { + "pipeline": "1h0m0s" + } } }, "resolvedDependencies": [ diff --git a/test/testdata/slsa/v2alpha3/pipeline-output-image.json b/test/testdata/slsa/v2alpha3/pipeline-output-image.json new file mode 100644 index 0000000000..168426c439 --- /dev/null +++ b/test/testdata/slsa/v2alpha3/pipeline-output-image.json @@ -0,0 +1,126 @@ +{ + "_type": "https://in-toto.io/Statement/v0.1", + 
"predicateType": "https://slsa.dev/provenance/v1", + "subject": [ + { + "name": "gcr.io/foo/bar", + "digest": { + "sha256": "05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b5" + } + } + ], + "predicate": { + "buildDefinition": { + "buildType": "https://tekton.dev/chains/v2/slsa", + "externalParameters": { + "runSpec": { + "taskRunTemplate": { + "serviceAccountName": "default" + }, + "params": [ + { + "name": "CHAINS-GIT_COMMIT", + "value": "my-git-commit" + },{ + "name": "CHAINS-GIT_URL", + "value": "https://my-git-url" + } + ], + "pipelineSpec": { + "results": [ + { + "name": "IMAGE_URL", + "description": "", + "value": "$(tasks.buildimage.results.IMAGE_URL)" + }, + { + "name": "IMAGE_DIGEST", + "description": "", + "value": "$(tasks.buildimage.results.IMAGE_DIGEST)" + } + ], + "tasks": [ + { + "name": "buildimage", + "taskSpec": { + "metadata": {}, + "steps": [ + { + "name": "create-dockerfile", + "image": "distroless.dev/busybox@sha256:186312fcf3f381b5fc1dd80b1afc0d316f3ed39fb4add8ff900d1f0c7c49a92c", + "computeResources": {}, + "script": "#!/usr/bin/env sh\necho 'gcr.io/foo/bar' | tee $(results.IMAGE_URL.path)\necho 'sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b5' | tee $(results.IMAGE_DIGEST.path)", + "volumeMounts": [ + { + "mountPath": "/dockerfile", + "name": "dockerfile" + } + ] + } + ], + "spec": null, + "results": [ + { + "name": "IMAGE_URL", + "type": "string" + },{ + "name": "IMAGE_DIGEST", + "type": "string" + } + ], + "volumes": [ + { + "emptyDir": {}, + "name": "dockerfile" + } + ] + + } + } + ] + }, + "timeouts": { + "pipeline": "1h0m0s" + } + } + }, + "resolvedDependencies": [ + {{range .URIDigest}} + { + "uri": "{{.URI}}", + "digest": { + "sha256": "{{.Digest}}" + } + }, + {{end}} + { + "uri": "git+https://my-git-url.git", + "digest": {"sha1": "my-git-commit"}, + "name": "inputs/result" + } + ] + }, + "runDetails": { + "builder": { + "id": "https://tekton.dev/chains/v2" + }, + "metadata": { + 
"invocationID": "{{.UID}}", + "startedOn": "{{.PipelineStartedOn}}", + "finishedOn": "{{.PipelineFinishedOn}}" + }, + "byproducts": [ + { + "name": "pipelineRunResults/IMAGE_URL", + "mediaType": "application/json", + "content": "Imdjci5pby9mb28vYmFyXG4i" + }, + { + "name": "pipelineRunResults/IMAGE_DIGEST", + "mediaType": "application/json", + "content": "InNoYTI1NjowNWY5NWIyNmVkMTA2NjhiNzE4M2MxZTJkYTk4NjEwZTkxMzcyZmE5ZjUxMDA0NmQ0Y2U1ODEyYWRkYWQ4NmI1XG4i" + } + ] + } + } +} diff --git a/test/testdata/slsa/v2alpha3/task-output-image.json b/test/testdata/slsa/v2alpha3/task-output-image.json new file mode 100644 index 0000000000..696ce8d528 --- /dev/null +++ b/test/testdata/slsa/v2alpha3/task-output-image.json @@ -0,0 +1,74 @@ +{ + "_type": "https://in-toto.io/Statement/v0.1", + "predicateType": "https://slsa.dev/provenance/v1", + "subject": [ + { + "name": "gcr.io/foo/bar", + "digest": { + "sha256": "05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b5" + } + } + ], + "predicate": { + "buildDefinition": { + "buildType": "https://tekton.dev/chains/v2/slsa", + "externalParameters": { + "runSpec": { + "serviceAccountName": "default", + "taskSpec": { + "steps": [ + { + "name": "create-image", + "image": "busybox", + "computeResources": {}, + "script": "#!/usr/bin/env sh\necho 'gcr.io/foo/bar' | tee $(results.IMAGE_URL.path)\necho 'sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b5' | tee $(results.IMAGE_DIGEST.path)" + } + ], + "results": [ + { + "name": "IMAGE_URL", + "type": "string" + },{ + "name": "IMAGE_DIGEST", + "type": "string" + } + ] + }, + "timeout": "1h0m0s" + } + }, + "resolvedDependencies": [ + {{range .URIDigest}} + { + "uri": "{{.URI}}", + "digest": { + "sha256": "{{.Digest}}" + } + } + {{end}} + ] + }, + "runDetails": { + "builder": { + "id": "https://tekton.dev/chains/v2" + }, + "metadata": { + "invocationID": "{{.UID}}", + "startedOn": "{{index .BuildStartTimes 0}}", + "finishedOn": "{{index .BuildFinishedTimes 0}}" + 
}, + "byproducts": [ + { + "name": "taskRunResults/IMAGE_DIGEST", + "mediaType": "application/json", + "content": "InNoYTI1NjowNWY5NWIyNmVkMTA2NjhiNzE4M2MxZTJkYTk4NjEwZTkxMzcyZmE5ZjUxMDA0NmQ0Y2U1ODEyYWRkYWQ4NmI1XG4i" + }, + { + "name": "taskRunResults/IMAGE_URL", + "mediaType": "application/json", + "content": "Imdjci5pby9mb28vYmFyXG4i" + } + ] + } + } +} diff --git a/test/testdata/type-hinting/taskrun.json b/test/testdata/type-hinting/taskrun.json new file mode 100644 index 0000000000..655878c215 --- /dev/null +++ b/test/testdata/type-hinting/taskrun.json @@ -0,0 +1,33 @@ +{ + "apiVersion": "tekton.dev/v1", + "kind": "TaskRun", + "metadata": { + "name": "image-build", + "annotations": { + "chains.tekton.dev/rekor": "true" + } + }, + "spec": { + "taskSpec": { + "results": [ + { + "name": "first-image-IMAGE_URL", + "type": "string", + "description": "The precise URL of the OCI image built." + }, + { + "name": "first-image-IMAGE_DIGEST", + "type": "string", + "description": "The algorithm and digest of the OCI image built." 
+ } + ], + "steps": [ + { + "name": "dummy-build", + "image": "bash:latest", + "script": "#!/usr/bin/env bash\necho -n \"gcr.io/foo/bar\" | tee $(results.first-image-IMAGE_URL.path)\necho -n \"sha256:586789aa031fafc7d78a5393cdc772e0b55107ea54bb8bcf3f2cdac6c6da51ee\" | tee $(results.first-image-IMAGE_DIGEST.path)\n" + } + ] + } + } +} \ No newline at end of file diff --git a/vendor/github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/pipelinerun/fake/fake.go b/vendor/github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/pipelinerun/fake/fake.go deleted file mode 100644 index 2b97a8ec5d..0000000000 --- a/vendor/github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/pipelinerun/fake/fake.go +++ /dev/null @@ -1,40 +0,0 @@ -/* -Copyright 2020 The Tekton Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Code generated by injection-gen. DO NOT EDIT. 
- -package fake - -import ( - context "context" - - fake "github.com/tektoncd/pipeline/pkg/client/injection/informers/factory/fake" - pipelinerun "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/pipelinerun" - controller "knative.dev/pkg/controller" - injection "knative.dev/pkg/injection" -) - -var Get = pipelinerun.Get - -func init() { - injection.Fake.RegisterInformer(withInformer) -} - -func withInformer(ctx context.Context) (context.Context, controller.Informer) { - f := fake.Get(ctx) - inf := f.Tekton().V1beta1().PipelineRuns() - return context.WithValue(ctx, pipelinerun.Key{}, inf), inf.Informer() -} diff --git a/vendor/github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/pipelinerun/pipelinerun.go b/vendor/github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/pipelinerun/pipelinerun.go deleted file mode 100644 index 29efdfecd4..0000000000 --- a/vendor/github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/pipelinerun/pipelinerun.go +++ /dev/null @@ -1,52 +0,0 @@ -/* -Copyright 2020 The Tekton Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Code generated by injection-gen. DO NOT EDIT. 
- -package pipelinerun - -import ( - context "context" - - v1beta1 "github.com/tektoncd/pipeline/pkg/client/informers/externalversions/pipeline/v1beta1" - factory "github.com/tektoncd/pipeline/pkg/client/injection/informers/factory" - controller "knative.dev/pkg/controller" - injection "knative.dev/pkg/injection" - logging "knative.dev/pkg/logging" -) - -func init() { - injection.Default.RegisterInformer(withInformer) -} - -// Key is used for associating the Informer inside the context.Context. -type Key struct{} - -func withInformer(ctx context.Context) (context.Context, controller.Informer) { - f := factory.Get(ctx) - inf := f.Tekton().V1beta1().PipelineRuns() - return context.WithValue(ctx, Key{}, inf), inf.Informer() -} - -// Get extracts the typed informer from the context. -func Get(ctx context.Context) v1beta1.PipelineRunInformer { - untyped := ctx.Value(Key{}) - if untyped == nil { - logging.FromContext(ctx).Panic( - "Unable to fetch github.com/tektoncd/pipeline/pkg/client/informers/externalversions/pipeline/v1beta1.PipelineRunInformer from context.") - } - return untyped.(v1beta1.PipelineRunInformer) -} diff --git a/vendor/github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/taskrun/fake/fake.go b/vendor/github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/taskrun/fake/fake.go deleted file mode 100644 index 79919b4612..0000000000 --- a/vendor/github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/taskrun/fake/fake.go +++ /dev/null @@ -1,40 +0,0 @@ -/* -Copyright 2020 The Tekton Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Code generated by injection-gen. DO NOT EDIT. - -package fake - -import ( - context "context" - - fake "github.com/tektoncd/pipeline/pkg/client/injection/informers/factory/fake" - taskrun "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/taskrun" - controller "knative.dev/pkg/controller" - injection "knative.dev/pkg/injection" -) - -var Get = taskrun.Get - -func init() { - injection.Fake.RegisterInformer(withInformer) -} - -func withInformer(ctx context.Context) (context.Context, controller.Informer) { - f := fake.Get(ctx) - inf := f.Tekton().V1beta1().TaskRuns() - return context.WithValue(ctx, taskrun.Key{}, inf), inf.Informer() -} diff --git a/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1beta1/pipelinerun/controller.go b/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/pipelinerun/controller.go similarity index 99% rename from vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1beta1/pipelinerun/controller.go rename to vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/pipelinerun/controller.go index f93cc4afdc..b1efcea654 100644 --- a/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1beta1/pipelinerun/controller.go +++ b/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/pipelinerun/controller.go @@ -26,7 +26,7 @@ import ( versionedscheme "github.com/tektoncd/pipeline/pkg/client/clientset/versioned/scheme" client 
"github.com/tektoncd/pipeline/pkg/client/injection/client" - pipelinerun "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/pipelinerun" + pipelinerun "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1/pipelinerun" zap "go.uber.org/zap" corev1 "k8s.io/api/core/v1" labels "k8s.io/apimachinery/pkg/labels" diff --git a/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1beta1/pipelinerun/reconciler.go b/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/pipelinerun/reconciler.go similarity index 86% rename from vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1beta1/pipelinerun/reconciler.go rename to vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/pipelinerun/reconciler.go index 219d8418e9..f49825001b 100644 --- a/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1beta1/pipelinerun/reconciler.go +++ b/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/pipelinerun/reconciler.go @@ -23,12 +23,12 @@ import ( json "encoding/json" fmt "fmt" - v1beta1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" versioned "github.com/tektoncd/pipeline/pkg/client/clientset/versioned" - pipelinev1beta1 "github.com/tektoncd/pipeline/pkg/client/listers/pipeline/v1beta1" + pipelinev1 "github.com/tektoncd/pipeline/pkg/client/listers/pipeline/v1" zap "go.uber.org/zap" "go.uber.org/zap/zapcore" - v1 "k8s.io/api/core/v1" + corev1 "k8s.io/api/core/v1" equality "k8s.io/apimachinery/pkg/api/equality" errors "k8s.io/apimachinery/pkg/api/errors" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" @@ -43,40 +43,40 @@ import ( ) // Interface defines the strongly typed interfaces to be implemented by a -// controller reconciling v1beta1.PipelineRun. +// controller reconciling v1.PipelineRun. 
type Interface interface { - // ReconcileKind implements custom logic to reconcile v1beta1.PipelineRun. Any changes + // ReconcileKind implements custom logic to reconcile v1.PipelineRun. Any changes // to the objects .Status or .Finalizers will be propagated to the stored // object. It is recommended that implementors do not call any update calls // for the Kind inside of ReconcileKind, it is the responsibility of the calling // controller to propagate those properties. The resource passed to ReconcileKind // will always have an empty deletion timestamp. - ReconcileKind(ctx context.Context, o *v1beta1.PipelineRun) reconciler.Event + ReconcileKind(ctx context.Context, o *v1.PipelineRun) reconciler.Event } // Finalizer defines the strongly typed interfaces to be implemented by a -// controller finalizing v1beta1.PipelineRun. +// controller finalizing v1.PipelineRun. type Finalizer interface { - // FinalizeKind implements custom logic to finalize v1beta1.PipelineRun. Any changes + // FinalizeKind implements custom logic to finalize v1.PipelineRun. Any changes // to the objects .Status or .Finalizers will be ignored. Returning a nil or // Normal type reconciler.Event will allow the finalizer to be deleted on // the resource. The resource passed to FinalizeKind will always have a set // deletion timestamp. - FinalizeKind(ctx context.Context, o *v1beta1.PipelineRun) reconciler.Event + FinalizeKind(ctx context.Context, o *v1.PipelineRun) reconciler.Event } // ReadOnlyInterface defines the strongly typed interfaces to be implemented by a -// controller reconciling v1beta1.PipelineRun if they want to process resources for which +// controller reconciling v1.PipelineRun if they want to process resources for which // they are not the leader. type ReadOnlyInterface interface { - // ObserveKind implements logic to observe v1beta1.PipelineRun. + // ObserveKind implements logic to observe v1.PipelineRun. // This method should not write to the API. 
- ObserveKind(ctx context.Context, o *v1beta1.PipelineRun) reconciler.Event + ObserveKind(ctx context.Context, o *v1.PipelineRun) reconciler.Event } -type doReconcile func(ctx context.Context, o *v1beta1.PipelineRun) reconciler.Event +type doReconcile func(ctx context.Context, o *v1.PipelineRun) reconciler.Event -// reconcilerImpl implements controller.Reconciler for v1beta1.PipelineRun resources. +// reconcilerImpl implements controller.Reconciler for v1.PipelineRun resources. type reconcilerImpl struct { // LeaderAwareFuncs is inlined to help us implement reconciler.LeaderAware. reconciler.LeaderAwareFuncs @@ -85,7 +85,7 @@ type reconcilerImpl struct { Client versioned.Interface // Listers index properties about resources. - Lister pipelinev1beta1.PipelineRunLister + Lister pipelinev1.PipelineRunLister // Recorder is an event recorder for recording Event resources to the // Kubernetes API. @@ -112,7 +112,7 @@ var _ controller.Reconciler = (*reconcilerImpl)(nil) // Check that our generated Reconciler is always LeaderAware. var _ reconciler.LeaderAware = (*reconcilerImpl)(nil) -func NewReconciler(ctx context.Context, logger *zap.SugaredLogger, client versioned.Interface, lister pipelinev1beta1.PipelineRunLister, recorder record.EventRecorder, r Interface, options ...controller.Options) controller.Reconciler { +func NewReconciler(ctx context.Context, logger *zap.SugaredLogger, client versioned.Interface, lister pipelinev1.PipelineRunLister, recorder record.EventRecorder, r Interface, options ...controller.Options) controller.Reconciler { // Check the options function input. It should be 0 or 1. 
if len(options) > 1 { logger.Fatal("Up to one options struct is supported, found: ", len(options)) @@ -267,7 +267,7 @@ func (r *reconcilerImpl) Reconcile(ctx context.Context, key string) error { default: if err = r.updateStatus(ctx, logger, original, resource); err != nil { logger.Warnw("Failed to update resource status", zap.Error(err)) - r.Recorder.Eventf(resource, v1.EventTypeWarning, "UpdateFailed", + r.Recorder.Eventf(resource, corev1.EventTypeWarning, "UpdateFailed", "Failed to update status for %q: %v", resource.Name, err) return err } @@ -293,7 +293,7 @@ func (r *reconcilerImpl) Reconcile(ctx context.Context, key string) error { // This is a wrapped error, don't emit an event. } else { logger.Errorw("Returned an error", zap.Error(reconcileEvent)) - r.Recorder.Event(resource, v1.EventTypeWarning, "InternalError", reconcileEvent.Error()) + r.Recorder.Event(resource, corev1.EventTypeWarning, "InternalError", reconcileEvent.Error()) } return reconcileEvent } @@ -301,13 +301,13 @@ func (r *reconcilerImpl) Reconcile(ctx context.Context, key string) error { return nil } -func (r *reconcilerImpl) updateStatus(ctx context.Context, logger *zap.SugaredLogger, existing *v1beta1.PipelineRun, desired *v1beta1.PipelineRun) error { +func (r *reconcilerImpl) updateStatus(ctx context.Context, logger *zap.SugaredLogger, existing *v1.PipelineRun, desired *v1.PipelineRun) error { existing = existing.DeepCopy() return reconciler.RetryUpdateConflicts(func(attempts int) (err error) { // The first iteration tries to use the injectionInformer's state, subsequent attempts fetch the latest state via API. 
if attempts > 0 { - getter := r.Client.TektonV1beta1().PipelineRuns(desired.Namespace) + getter := r.Client.TektonV1().PipelineRuns(desired.Namespace) existing, err = getter.Get(ctx, desired.Name, metav1.GetOptions{}) if err != nil { @@ -328,7 +328,7 @@ func (r *reconcilerImpl) updateStatus(ctx context.Context, logger *zap.SugaredLo existing.Status = desired.Status - updater := r.Client.TektonV1beta1().PipelineRuns(existing.Namespace) + updater := r.Client.TektonV1().PipelineRuns(existing.Namespace) _, err = updater.UpdateStatus(ctx, existing, metav1.UpdateOptions{}) return err @@ -338,7 +338,7 @@ func (r *reconcilerImpl) updateStatus(ctx context.Context, logger *zap.SugaredLo // updateFinalizersFiltered will update the Finalizers of the resource. // TODO: this method could be generic and sync all finalizers. For now it only // updates defaultFinalizerName or its override. -func (r *reconcilerImpl) updateFinalizersFiltered(ctx context.Context, resource *v1beta1.PipelineRun, desiredFinalizers sets.String) (*v1beta1.PipelineRun, error) { +func (r *reconcilerImpl) updateFinalizersFiltered(ctx context.Context, resource *v1.PipelineRun, desiredFinalizers sets.String) (*v1.PipelineRun, error) { // Don't modify the informers copy. 
existing := resource.DeepCopy() @@ -376,21 +376,21 @@ func (r *reconcilerImpl) updateFinalizersFiltered(ctx context.Context, resource return resource, err } - patcher := r.Client.TektonV1beta1().PipelineRuns(resource.Namespace) + patcher := r.Client.TektonV1().PipelineRuns(resource.Namespace) resourceName := resource.Name updated, err := patcher.Patch(ctx, resourceName, types.MergePatchType, patch, metav1.PatchOptions{}) if err != nil { - r.Recorder.Eventf(existing, v1.EventTypeWarning, "FinalizerUpdateFailed", + r.Recorder.Eventf(existing, corev1.EventTypeWarning, "FinalizerUpdateFailed", "Failed to update finalizers for %q: %v", resourceName, err) } else { - r.Recorder.Eventf(updated, v1.EventTypeNormal, "FinalizerUpdate", + r.Recorder.Eventf(updated, corev1.EventTypeNormal, "FinalizerUpdate", "Updated %q finalizers", resource.GetName()) } return updated, err } -func (r *reconcilerImpl) setFinalizerIfFinalizer(ctx context.Context, resource *v1beta1.PipelineRun) (*v1beta1.PipelineRun, error) { +func (r *reconcilerImpl) setFinalizerIfFinalizer(ctx context.Context, resource *v1.PipelineRun) (*v1.PipelineRun, error) { if _, ok := r.reconciler.(Finalizer); !ok { return resource, nil } @@ -406,7 +406,7 @@ func (r *reconcilerImpl) setFinalizerIfFinalizer(ctx context.Context, resource * return r.updateFinalizersFiltered(ctx, resource, finalizers) } -func (r *reconcilerImpl) clearFinalizer(ctx context.Context, resource *v1beta1.PipelineRun, reconcileEvent reconciler.Event) (*v1beta1.PipelineRun, error) { +func (r *reconcilerImpl) clearFinalizer(ctx context.Context, resource *v1.PipelineRun, reconcileEvent reconciler.Event) (*v1.PipelineRun, error) { if _, ok := r.reconciler.(Finalizer); !ok { return resource, nil } @@ -419,7 +419,7 @@ func (r *reconcilerImpl) clearFinalizer(ctx context.Context, resource *v1beta1.P if reconcileEvent != nil { var event *reconciler.ReconcilerEvent if reconciler.EventAs(reconcileEvent, &event) { - if event.EventType == v1.EventTypeNormal { + 
if event.EventType == corev1.EventTypeNormal { finalizers.Delete(r.finalizerName) } } diff --git a/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1beta1/pipelinerun/state.go b/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/pipelinerun/state.go similarity index 94% rename from vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1beta1/pipelinerun/state.go rename to vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/pipelinerun/state.go index 39399186d9..35540fcf4e 100644 --- a/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1beta1/pipelinerun/state.go +++ b/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/pipelinerun/state.go @@ -21,7 +21,7 @@ package pipelinerun import ( fmt "fmt" - v1beta1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" types "k8s.io/apimachinery/pkg/types" cache "k8s.io/client-go/tools/cache" reconciler "knative.dev/pkg/reconciler" @@ -83,7 +83,7 @@ func (s *state) isNotLeaderNorObserver() bool { return false } -func (s *state) reconcileMethodFor(o *v1beta1.PipelineRun) (string, doReconcile) { +func (s *state) reconcileMethodFor(o *v1.PipelineRun) (string, doReconcile) { if o.GetDeletionTimestamp().IsZero() { if s.isLeader { return reconciler.DoReconcileKind, s.reconciler.ReconcileKind diff --git a/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/taskrun/controller.go b/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/taskrun/controller.go new file mode 100644 index 0000000000..2cf0767987 --- /dev/null +++ b/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/taskrun/controller.go @@ -0,0 +1,170 @@ +/* +Copyright 2020 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not 
use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Code generated by injection-gen. DO NOT EDIT. + +package taskrun + +import ( + context "context" + fmt "fmt" + reflect "reflect" + strings "strings" + + versionedscheme "github.com/tektoncd/pipeline/pkg/client/clientset/versioned/scheme" + client "github.com/tektoncd/pipeline/pkg/client/injection/client" + taskrun "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1/taskrun" + zap "go.uber.org/zap" + corev1 "k8s.io/api/core/v1" + labels "k8s.io/apimachinery/pkg/labels" + types "k8s.io/apimachinery/pkg/types" + watch "k8s.io/apimachinery/pkg/watch" + scheme "k8s.io/client-go/kubernetes/scheme" + v1 "k8s.io/client-go/kubernetes/typed/core/v1" + record "k8s.io/client-go/tools/record" + kubeclient "knative.dev/pkg/client/injection/kube/client" + controller "knative.dev/pkg/controller" + logging "knative.dev/pkg/logging" + logkey "knative.dev/pkg/logging/logkey" + reconciler "knative.dev/pkg/reconciler" +) + +const ( + defaultControllerAgentName = "taskrun-controller" + defaultFinalizerName = "taskruns.tekton.dev" +) + +// NewImpl returns a controller.Impl that handles queuing and feeding work from +// the queue through an implementation of controller.Reconciler, delegating to +// the provided Interface and optional Finalizer methods. OptionsFn is used to return +// controller.ControllerOptions to be used by the internal reconciler. 
+func NewImpl(ctx context.Context, r Interface, optionsFns ...controller.OptionsFn) *controller.Impl { + logger := logging.FromContext(ctx) + + // Check the options function input. It should be 0 or 1. + if len(optionsFns) > 1 { + logger.Fatal("Up to one options function is supported, found: ", len(optionsFns)) + } + + taskrunInformer := taskrun.Get(ctx) + + lister := taskrunInformer.Lister() + + var promoteFilterFunc func(obj interface{}) bool + var promoteFunc = func(bkt reconciler.Bucket) {} + + rec := &reconcilerImpl{ + LeaderAwareFuncs: reconciler.LeaderAwareFuncs{ + PromoteFunc: func(bkt reconciler.Bucket, enq func(reconciler.Bucket, types.NamespacedName)) error { + + // Signal promotion event + promoteFunc(bkt) + + all, err := lister.List(labels.Everything()) + if err != nil { + return err + } + for _, elt := range all { + if promoteFilterFunc != nil { + if ok := promoteFilterFunc(elt); !ok { + continue + } + } + enq(bkt, types.NamespacedName{ + Namespace: elt.GetNamespace(), + Name: elt.GetName(), + }) + } + return nil + }, + }, + Client: client.Get(ctx), + Lister: lister, + reconciler: r, + finalizerName: defaultFinalizerName, + } + + ctrType := reflect.TypeOf(r).Elem() + ctrTypeName := fmt.Sprintf("%s.%s", ctrType.PkgPath(), ctrType.Name()) + ctrTypeName = strings.ReplaceAll(ctrTypeName, "/", ".") + + logger = logger.With( + zap.String(logkey.ControllerType, ctrTypeName), + zap.String(logkey.Kind, "tekton.dev.TaskRun"), + ) + + impl := controller.NewContext(ctx, rec, controller.ControllerOptions{WorkQueueName: ctrTypeName, Logger: logger}) + agentName := defaultControllerAgentName + + // Pass impl to the options. Save any optional results. 
+ for _, fn := range optionsFns { + opts := fn(impl) + if opts.ConfigStore != nil { + rec.configStore = opts.ConfigStore + } + if opts.FinalizerName != "" { + rec.finalizerName = opts.FinalizerName + } + if opts.AgentName != "" { + agentName = opts.AgentName + } + if opts.SkipStatusUpdates { + rec.skipStatusUpdates = true + } + if opts.DemoteFunc != nil { + rec.DemoteFunc = opts.DemoteFunc + } + if opts.PromoteFilterFunc != nil { + promoteFilterFunc = opts.PromoteFilterFunc + } + if opts.PromoteFunc != nil { + promoteFunc = opts.PromoteFunc + } + } + + rec.Recorder = createRecorder(ctx, agentName) + + return impl +} + +func createRecorder(ctx context.Context, agentName string) record.EventRecorder { + logger := logging.FromContext(ctx) + + recorder := controller.GetEventRecorder(ctx) + if recorder == nil { + // Create event broadcaster + logger.Debug("Creating event broadcaster") + eventBroadcaster := record.NewBroadcaster() + watches := []watch.Interface{ + eventBroadcaster.StartLogging(logger.Named("event-broadcaster").Infof), + eventBroadcaster.StartRecordingToSink( + &v1.EventSinkImpl{Interface: kubeclient.Get(ctx).CoreV1().Events("")}), + } + recorder = eventBroadcaster.NewRecorder(scheme.Scheme, corev1.EventSource{Component: agentName}) + go func() { + <-ctx.Done() + for _, w := range watches { + w.Stop() + } + }() + } + + return recorder +} + +func init() { + versionedscheme.AddToScheme(scheme.Scheme) +} diff --git a/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/taskrun/reconciler.go b/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/taskrun/reconciler.go new file mode 100644 index 0000000000..30a208a556 --- /dev/null +++ b/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/taskrun/reconciler.go @@ -0,0 +1,432 @@ +/* +Copyright 2020 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance 
with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Code generated by injection-gen. DO NOT EDIT. + +package taskrun + +import ( + context "context" + json "encoding/json" + fmt "fmt" + + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" + versioned "github.com/tektoncd/pipeline/pkg/client/clientset/versioned" + pipelinev1 "github.com/tektoncd/pipeline/pkg/client/listers/pipeline/v1" + zap "go.uber.org/zap" + "go.uber.org/zap/zapcore" + corev1 "k8s.io/api/core/v1" + equality "k8s.io/apimachinery/pkg/api/equality" + errors "k8s.io/apimachinery/pkg/api/errors" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + labels "k8s.io/apimachinery/pkg/labels" + types "k8s.io/apimachinery/pkg/types" + sets "k8s.io/apimachinery/pkg/util/sets" + record "k8s.io/client-go/tools/record" + controller "knative.dev/pkg/controller" + kmp "knative.dev/pkg/kmp" + logging "knative.dev/pkg/logging" + reconciler "knative.dev/pkg/reconciler" +) + +// Interface defines the strongly typed interfaces to be implemented by a +// controller reconciling v1.TaskRun. +type Interface interface { + // ReconcileKind implements custom logic to reconcile v1.TaskRun. Any changes + // to the objects .Status or .Finalizers will be propagated to the stored + // object. It is recommended that implementors do not call any update calls + // for the Kind inside of ReconcileKind, it is the responsibility of the calling + // controller to propagate those properties. The resource passed to ReconcileKind + // will always have an empty deletion timestamp. 
+ ReconcileKind(ctx context.Context, o *v1.TaskRun) reconciler.Event +} + +// Finalizer defines the strongly typed interfaces to be implemented by a +// controller finalizing v1.TaskRun. +type Finalizer interface { + // FinalizeKind implements custom logic to finalize v1.TaskRun. Any changes + // to the objects .Status or .Finalizers will be ignored. Returning a nil or + // Normal type reconciler.Event will allow the finalizer to be deleted on + // the resource. The resource passed to FinalizeKind will always have a set + // deletion timestamp. + FinalizeKind(ctx context.Context, o *v1.TaskRun) reconciler.Event +} + +// ReadOnlyInterface defines the strongly typed interfaces to be implemented by a +// controller reconciling v1.TaskRun if they want to process resources for which +// they are not the leader. +type ReadOnlyInterface interface { + // ObserveKind implements logic to observe v1.TaskRun. + // This method should not write to the API. + ObserveKind(ctx context.Context, o *v1.TaskRun) reconciler.Event +} + +type doReconcile func(ctx context.Context, o *v1.TaskRun) reconciler.Event + +// reconcilerImpl implements controller.Reconciler for v1.TaskRun resources. +type reconcilerImpl struct { + // LeaderAwareFuncs is inlined to help us implement reconciler.LeaderAware. + reconciler.LeaderAwareFuncs + + // Client is used to write back status updates. + Client versioned.Interface + + // Listers index properties about resources. + Lister pipelinev1.TaskRunLister + + // Recorder is an event recorder for recording Event resources to the + // Kubernetes API. + Recorder record.EventRecorder + + // configStore allows for decorating a context with config maps. + // +optional + configStore reconciler.ConfigStore + + // reconciler is the implementation of the business logic of the resource. + reconciler Interface + + // finalizerName is the name of the finalizer to reconcile. 
+ finalizerName string + + // skipStatusUpdates configures whether or not this reconciler automatically updates + // the status of the reconciled resource. + skipStatusUpdates bool +} + +// Check that our Reconciler implements controller.Reconciler. +var _ controller.Reconciler = (*reconcilerImpl)(nil) + +// Check that our generated Reconciler is always LeaderAware. +var _ reconciler.LeaderAware = (*reconcilerImpl)(nil) + +func NewReconciler(ctx context.Context, logger *zap.SugaredLogger, client versioned.Interface, lister pipelinev1.TaskRunLister, recorder record.EventRecorder, r Interface, options ...controller.Options) controller.Reconciler { + // Check the options function input. It should be 0 or 1. + if len(options) > 1 { + logger.Fatal("Up to one options struct is supported, found: ", len(options)) + } + + // Fail fast when users inadvertently implement the other LeaderAware interface. + // For the typed reconcilers, Promote shouldn't take any arguments. + if _, ok := r.(reconciler.LeaderAware); ok { + logger.Fatalf("%T implements the incorrect LeaderAware interface. Promote() should not take an argument as genreconciler handles the enqueuing automatically.", r) + } + + rec := &reconcilerImpl{ + LeaderAwareFuncs: reconciler.LeaderAwareFuncs{ + PromoteFunc: func(bkt reconciler.Bucket, enq func(reconciler.Bucket, types.NamespacedName)) error { + all, err := lister.List(labels.Everything()) + if err != nil { + return err + } + for _, elt := range all { + // TODO: Consider letting users specify a filter in options. 
+ enq(bkt, types.NamespacedName{ + Namespace: elt.GetNamespace(), + Name: elt.GetName(), + }) + } + return nil + }, + }, + Client: client, + Lister: lister, + Recorder: recorder, + reconciler: r, + finalizerName: defaultFinalizerName, + } + + for _, opts := range options { + if opts.ConfigStore != nil { + rec.configStore = opts.ConfigStore + } + if opts.FinalizerName != "" { + rec.finalizerName = opts.FinalizerName + } + if opts.SkipStatusUpdates { + rec.skipStatusUpdates = true + } + if opts.DemoteFunc != nil { + rec.DemoteFunc = opts.DemoteFunc + } + } + + return rec +} + +// Reconcile implements controller.Reconciler +func (r *reconcilerImpl) Reconcile(ctx context.Context, key string) error { + logger := logging.FromContext(ctx) + + // Initialize the reconciler state. This will convert the namespace/name + // string into a distinct namespace and name, determine if this instance of + // the reconciler is the leader, and any additional interfaces implemented + // by the reconciler. Returns an error is the resource key is invalid. + s, err := newState(key, r) + if err != nil { + logger.Error("Invalid resource key: ", key) + return nil + } + + // If we are not the leader, and we don't implement either ReadOnly + // observer interfaces, then take a fast-path out. + if s.isNotLeaderNorObserver() { + return controller.NewSkipKey(key) + } + + // If configStore is set, attach the frozen configuration to the context. + if r.configStore != nil { + ctx = r.configStore.ToContext(ctx) + } + + // Add the recorder to context. + ctx = controller.WithEventRecorder(ctx, r.Recorder) + + // Get the resource with this namespace/name. + + getter := r.Lister.TaskRuns(s.namespace) + + original, err := getter.Get(s.name) + + if errors.IsNotFound(err) { + // The resource may no longer exist, in which case we stop processing and call + // the ObserveDeletion handler if appropriate. 
+ logger.Debugf("Resource %q no longer exists", key) + if del, ok := r.reconciler.(reconciler.OnDeletionInterface); ok { + return del.ObserveDeletion(ctx, types.NamespacedName{ + Namespace: s.namespace, + Name: s.name, + }) + } + return nil + } else if err != nil { + return err + } + + // Don't modify the informers copy. + resource := original.DeepCopy() + + var reconcileEvent reconciler.Event + + name, do := s.reconcileMethodFor(resource) + // Append the target method to the logger. + logger = logger.With(zap.String("targetMethod", name)) + switch name { + case reconciler.DoReconcileKind: + // Set and update the finalizer on resource if r.reconciler + // implements Finalizer. + if resource, err = r.setFinalizerIfFinalizer(ctx, resource); err != nil { + return fmt.Errorf("failed to set finalizers: %w", err) + } + + // Reconcile this copy of the resource and then write back any status + // updates regardless of whether the reconciliation errored out. + reconcileEvent = do(ctx, resource) + + case reconciler.DoFinalizeKind: + // For finalizing reconcilers, if this resource being marked for deletion + // and reconciled cleanly (nil or normal event), remove the finalizer. + reconcileEvent = do(ctx, resource) + + if resource, err = r.clearFinalizer(ctx, resource, reconcileEvent); err != nil { + return fmt.Errorf("failed to clear finalizers: %w", err) + } + + case reconciler.DoObserveKind: + // Observe any changes to this resource, since we are not the leader. + reconcileEvent = do(ctx, resource) + + } + + // Synchronize the status. + switch { + case r.skipStatusUpdates: + // This reconciler implementation is configured to skip resource updates. + // This may mean this reconciler does not observe spec, but reconciles external changes. + case equality.Semantic.DeepEqual(original.Status, resource.Status): + // If we didn't change anything then don't call updateStatus. 
+ // This is important because the copy we loaded from the injectionInformer's + // cache may be stale and we don't want to overwrite a prior update + // to status with this stale state. + case !s.isLeader: + // High-availability reconcilers may have many replicas watching the resource, but only + // the elected leader is expected to write modifications. + logger.Warn("Saw status changes when we aren't the leader!") + default: + if err = r.updateStatus(ctx, logger, original, resource); err != nil { + logger.Warnw("Failed to update resource status", zap.Error(err)) + r.Recorder.Eventf(resource, corev1.EventTypeWarning, "UpdateFailed", + "Failed to update status for %q: %v", resource.Name, err) + return err + } + } + + // Report the reconciler event, if any. + if reconcileEvent != nil { + var event *reconciler.ReconcilerEvent + if reconciler.EventAs(reconcileEvent, &event) { + logger.Infow("Returned an event", zap.Any("event", reconcileEvent)) + r.Recorder.Event(resource, event.EventType, event.Reason, event.Error()) + + // the event was wrapped inside an error, consider the reconciliation as failed + if _, isEvent := reconcileEvent.(*reconciler.ReconcilerEvent); !isEvent { + return reconcileEvent + } + return nil + } + + if controller.IsSkipKey(reconcileEvent) { + // This is a wrapped error, don't emit an event. + } else if ok, _ := controller.IsRequeueKey(reconcileEvent); ok { + // This is a wrapped error, don't emit an event. 
+ } else { + logger.Errorw("Returned an error", zap.Error(reconcileEvent)) + r.Recorder.Event(resource, corev1.EventTypeWarning, "InternalError", reconcileEvent.Error()) + } + return reconcileEvent + } + + return nil +} + +func (r *reconcilerImpl) updateStatus(ctx context.Context, logger *zap.SugaredLogger, existing *v1.TaskRun, desired *v1.TaskRun) error { + existing = existing.DeepCopy() + return reconciler.RetryUpdateConflicts(func(attempts int) (err error) { + // The first iteration tries to use the injectionInformer's state, subsequent attempts fetch the latest state via API. + if attempts > 0 { + + getter := r.Client.TektonV1().TaskRuns(desired.Namespace) + + existing, err = getter.Get(ctx, desired.Name, metav1.GetOptions{}) + if err != nil { + return err + } + } + + // If there's nothing to update, just return. + if equality.Semantic.DeepEqual(existing.Status, desired.Status) { + return nil + } + + if logger.Desugar().Core().Enabled(zapcore.DebugLevel) { + if diff, err := kmp.SafeDiff(existing.Status, desired.Status); err == nil && diff != "" { + logger.Debug("Updating status with: ", diff) + } + } + + existing.Status = desired.Status + + updater := r.Client.TektonV1().TaskRuns(existing.Namespace) + + _, err = updater.UpdateStatus(ctx, existing, metav1.UpdateOptions{}) + return err + }) +} + +// updateFinalizersFiltered will update the Finalizers of the resource. +// TODO: this method could be generic and sync all finalizers. For now it only +// updates defaultFinalizerName or its override. +func (r *reconcilerImpl) updateFinalizersFiltered(ctx context.Context, resource *v1.TaskRun, desiredFinalizers sets.String) (*v1.TaskRun, error) { + // Don't modify the informers copy. + existing := resource.DeepCopy() + + var finalizers []string + + // If there's nothing to update, just return. + existingFinalizers := sets.NewString(existing.Finalizers...) + + if desiredFinalizers.Has(r.finalizerName) { + if existingFinalizers.Has(r.finalizerName) { + // Nothing to do. 
+ return resource, nil + } + // Add the finalizer. + finalizers = append(existing.Finalizers, r.finalizerName) + } else { + if !existingFinalizers.Has(r.finalizerName) { + // Nothing to do. + return resource, nil + } + // Remove the finalizer. + existingFinalizers.Delete(r.finalizerName) + finalizers = existingFinalizers.List() + } + + mergePatch := map[string]interface{}{ + "metadata": map[string]interface{}{ + "finalizers": finalizers, + "resourceVersion": existing.ResourceVersion, + }, + } + + patch, err := json.Marshal(mergePatch) + if err != nil { + return resource, err + } + + patcher := r.Client.TektonV1().TaskRuns(resource.Namespace) + + resourceName := resource.Name + updated, err := patcher.Patch(ctx, resourceName, types.MergePatchType, patch, metav1.PatchOptions{}) + if err != nil { + r.Recorder.Eventf(existing, corev1.EventTypeWarning, "FinalizerUpdateFailed", + "Failed to update finalizers for %q: %v", resourceName, err) + } else { + r.Recorder.Eventf(updated, corev1.EventTypeNormal, "FinalizerUpdate", + "Updated %q finalizers", resource.GetName()) + } + return updated, err +} + +func (r *reconcilerImpl) setFinalizerIfFinalizer(ctx context.Context, resource *v1.TaskRun) (*v1.TaskRun, error) { + if _, ok := r.reconciler.(Finalizer); !ok { + return resource, nil + } + + finalizers := sets.NewString(resource.Finalizers...) + + // If this resource is not being deleted, mark the finalizer. + if resource.GetDeletionTimestamp().IsZero() { + finalizers.Insert(r.finalizerName) + } + + // Synchronize the finalizers filtered by r.finalizerName. + return r.updateFinalizersFiltered(ctx, resource, finalizers) +} + +func (r *reconcilerImpl) clearFinalizer(ctx context.Context, resource *v1.TaskRun, reconcileEvent reconciler.Event) (*v1.TaskRun, error) { + if _, ok := r.reconciler.(Finalizer); !ok { + return resource, nil + } + if resource.GetDeletionTimestamp().IsZero() { + return resource, nil + } + + finalizers := sets.NewString(resource.Finalizers...) 
+ + if reconcileEvent != nil { + var event *reconciler.ReconcilerEvent + if reconciler.EventAs(reconcileEvent, &event) { + if event.EventType == corev1.EventTypeNormal { + finalizers.Delete(r.finalizerName) + } + } + } else { + finalizers.Delete(r.finalizerName) + } + + // Synchronize the finalizers filtered by r.finalizerName. + return r.updateFinalizersFiltered(ctx, resource, finalizers) +} diff --git a/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/taskrun/state.go b/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/taskrun/state.go new file mode 100644 index 0000000000..b989b339ba --- /dev/null +++ b/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/taskrun/state.go @@ -0,0 +1,97 @@ +/* +Copyright 2020 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Code generated by injection-gen. DO NOT EDIT. + +package taskrun + +import ( + fmt "fmt" + + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" + types "k8s.io/apimachinery/pkg/types" + cache "k8s.io/client-go/tools/cache" + reconciler "knative.dev/pkg/reconciler" +) + +// state is used to track the state of a reconciler in a single run. +type state struct { + // key is the original reconciliation key from the queue. + key string + // namespace is the namespace split from the reconciliation key. + namespace string + // name is the name split from the reconciliation key. 
+ name string + // reconciler is the reconciler. + reconciler Interface + // roi is the read only interface cast of the reconciler. + roi ReadOnlyInterface + // isROI (Read Only Interface) the reconciler only observes reconciliation. + isROI bool + // isLeader the instance of the reconciler is the elected leader. + isLeader bool +} + +func newState(key string, r *reconcilerImpl) (*state, error) { + // Convert the namespace/name string into a distinct namespace and name. + namespace, name, err := cache.SplitMetaNamespaceKey(key) + if err != nil { + return nil, fmt.Errorf("invalid resource key: %s", key) + } + + roi, isROI := r.reconciler.(ReadOnlyInterface) + + isLeader := r.IsLeaderFor(types.NamespacedName{ + Namespace: namespace, + Name: name, + }) + + return &state{ + key: key, + namespace: namespace, + name: name, + reconciler: r.reconciler, + roi: roi, + isROI: isROI, + isLeader: isLeader, + }, nil +} + +// isNotLeaderNorObserver checks to see if this reconciler with the current +// state is enabled to do any work or not. +// isNotLeaderNorObserver returns true when there is no work possible for the +// reconciler. +func (s *state) isNotLeaderNorObserver() bool { + if !s.isLeader && !s.isROI { + // If we are not the leader, and we don't implement the ReadOnly + // interface, then take a fast-path out. 
+ return true + } + return false +} + +func (s *state) reconcileMethodFor(o *v1.TaskRun) (string, doReconcile) { + if o.GetDeletionTimestamp().IsZero() { + if s.isLeader { + return reconciler.DoReconcileKind, s.reconciler.ReconcileKind + } else if s.isROI { + return reconciler.DoObserveKind, s.roi.ObserveKind + } + } else if fin, ok := s.reconciler.(Finalizer); s.isLeader && ok { + return reconciler.DoFinalizeKind, fin.FinalizeKind + } + return "unknown", nil +} diff --git a/vendor/modules.txt b/vendor/modules.txt index cfaedee968..e35b106d11 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -1187,6 +1187,8 @@ github.com/josharian/intern # github.com/json-iterator/go v1.1.12 ## explicit; go 1.12 github.com/json-iterator/go +# github.com/jstemmer/go-junit-report v1.0.0 +## explicit; go 1.2 # github.com/julz/importas v0.1.0 ## explicit; go 1.15 github.com/julz/importas @@ -1778,11 +1780,9 @@ github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/clu github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/clustertask/fake github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/customrun github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/customrun/fake -github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/pipelinerun -github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/pipelinerun/fake github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/taskrun -github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/taskrun/fake -github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1beta1/pipelinerun +github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/pipelinerun +github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/taskrun github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1beta1/taskrun 
github.com/tektoncd/pipeline/pkg/client/listers/pipeline/v1 github.com/tektoncd/pipeline/pkg/client/listers/pipeline/v1alpha1