From f35a5ff878600ed0cb099443bf133f4b5834a008 Mon Sep 17 00:00:00 2001
From: Edgar Garcia <63310723+edgarulg@users.noreply.github.com>
Date: Wed, 11 Dec 2024 21:59:23 -0600
Subject: [PATCH] fix(pipelineRef): add resolvedExpectedArtifacts from
 pipelineTrigger to PipelineRefTrigger (#4816)

* fix(pipelineRef): add resolvedExpectedArtifacts from pipelineTrigger to PipelineRefTrigger

* fix(pipelineRef): add tests around PipelineRefTrigger

---------

Co-authored-by: Jason
(cherry picked from commit 3c65fe032d836e21d71c9e1f08c28fddb9f3a28d)
---
 .../PipelineRefTriggerDeserializerSupplier.kt |   5 +-
 .../pipeline/persistence/ExecutionMapper.kt   |  16 +-
 .../persistence/PipelineRefTrigger.kt         |   5 +-
 ...elineRefTriggerDeserializerSupplierTest.kt | 143 ++++++++++++++++++
 .../persistence/ExecutionMapperTest.kt        |  80 +++++++++-
 5 files changed, 237 insertions(+), 12 deletions(-)
 create mode 100644 orca-sql/src/test/kotlin/com/netflix/spinnaker/orca/sql/PipelineRefTriggerDeserializerSupplierTest.kt

diff --git a/orca-sql/src/main/kotlin/com/netflix/spinnaker/orca/sql/PipelineRefTriggerDeserializerSupplier.kt b/orca-sql/src/main/kotlin/com/netflix/spinnaker/orca/sql/PipelineRefTriggerDeserializerSupplier.kt
index 9654002042..800d5edb1b 100644
--- a/orca-sql/src/main/kotlin/com/netflix/spinnaker/orca/sql/PipelineRefTriggerDeserializerSupplier.kt
+++ b/orca-sql/src/main/kotlin/com/netflix/spinnaker/orca/sql/PipelineRefTriggerDeserializerSupplier.kt
@@ -61,7 +61,10 @@ class PipelineRefTriggerDeserializerSupplier(
       isStrategy = get("strategy")?.booleanValue() == true,
       parentExecutionId = parentExecutionId,
       parentPipelineStageId = get("parentPipelineStageId")?.textValue()
-    )
+    ).apply {
+      resolvedExpectedArtifacts = get("resolvedExpectedArtifacts")?.listValue(parser) ?: mutableListOf()
+      other = get("other")?.mapValue(parser) ?: mutableMapOf()
+    }
   }
 }
 
diff --git a/orca-sql/src/main/kotlin/com/netflix/spinnaker/orca/sql/pipeline/persistence/ExecutionMapper.kt b/orca-sql/src/main/kotlin/com/netflix/spinnaker/orca/sql/pipeline/persistence/ExecutionMapper.kt
index 888645a54e..d98ed79a09 100644
--- a/orca-sql/src/main/kotlin/com/netflix/spinnaker/orca/sql/pipeline/persistence/ExecutionMapper.kt
+++ b/orca-sql/src/main/kotlin/com/netflix/spinnaker/orca/sql/pipeline/persistence/ExecutionMapper.kt
@@ -26,7 +26,6 @@ import com.netflix.spinnaker.orca.api.pipeline.models.StageExecution
 import java.sql.ResultSet
 import org.jooq.DSLContext
 import org.jooq.impl.DSL.field
-import org.jooq.impl.DSL.name
 import org.slf4j.LoggerFactory
 import java.nio.charset.StandardCharsets
 
@@ -142,11 +141,7 @@ class ExecutionMapper(
   fun convertPipelineRefTrigger(execution: PipelineExecution, context: DSLContext) {
     val trigger = execution.trigger
     if (trigger is PipelineRefTrigger) {
-      val parentExecution = context
-        .selectExecution(execution.type, compressionProperties)
-        .where(field("id").eq(trigger.parentExecutionId))
-        .fetchExecutions(mapper, 200, compressionProperties, context, pipelineRefEnabled)
-        .firstOrNull()
+      val parentExecution = fetchParentExecution(execution.type, trigger, context)
 
       if (parentExecution == null) {
         // If someone deletes the parent execution, we'll be unable to load the full, valid child pipeline. Rather than
@@ -160,4 +155,13 @@
       execution.trigger = trigger.toPipelineTrigger(parentExecution)
     }
   }
+
+  @VisibleForTesting
+  fun fetchParentExecution(type: ExecutionType, trigger: PipelineRefTrigger, context: DSLContext): PipelineExecution? {
+    return context
+      .selectExecution(type, compressionProperties)
+      .where(field("id").eq(trigger.parentExecutionId))
+      .fetchExecutions(mapper, 200, compressionProperties, context, pipelineRefEnabled)
+      .firstOrNull()
+  }
 }
diff --git a/orca-sql/src/main/kotlin/com/netflix/spinnaker/orca/sql/pipeline/persistence/PipelineRefTrigger.kt b/orca-sql/src/main/kotlin/com/netflix/spinnaker/orca/sql/pipeline/persistence/PipelineRefTrigger.kt
index b73f8aabdc..c68c465715 100644
--- a/orca-sql/src/main/kotlin/com/netflix/spinnaker/orca/sql/pipeline/persistence/PipelineRefTrigger.kt
+++ b/orca-sql/src/main/kotlin/com/netflix/spinnaker/orca/sql/pipeline/persistence/PipelineRefTrigger.kt
@@ -100,5 +100,8 @@ data class PipelineRefTrigger(
     isStrategy = isStrategy,
     parentExecution = parentExecution,
     parentPipelineStageId = parentPipelineStageId
-  )
+  ).apply {
+    this.resolvedExpectedArtifacts = this@PipelineRefTrigger.resolvedExpectedArtifacts
+    this.other = this@PipelineRefTrigger.other
+  }
 }
diff --git a/orca-sql/src/test/kotlin/com/netflix/spinnaker/orca/sql/PipelineRefTriggerDeserializerSupplierTest.kt b/orca-sql/src/test/kotlin/com/netflix/spinnaker/orca/sql/PipelineRefTriggerDeserializerSupplierTest.kt
new file mode 100644
index 0000000000..02559e62fe
--- /dev/null
+++ b/orca-sql/src/test/kotlin/com/netflix/spinnaker/orca/sql/PipelineRefTriggerDeserializerSupplierTest.kt
@@ -0,0 +1,143 @@
+/*
+ * Copyright 2024 Harness Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.netflix.spinnaker.orca.sql
+
+import com.fasterxml.jackson.databind.ObjectMapper
+import com.fasterxml.jackson.databind.node.JsonNodeFactory
+import com.fasterxml.jackson.databind.node.ObjectNode
+import com.netflix.spinnaker.orca.sql.pipeline.persistence.PipelineRefTrigger
+import dev.minutest.junit.JUnit5Minutests
+import dev.minutest.rootContext
+import org.junit.jupiter.api.Assertions.assertTrue
+import org.junit.jupiter.api.Assertions.assertFalse
+import org.junit.jupiter.api.Assertions.assertEquals
+
+class PipelineRefTriggerDeserializerSupplierTest : JUnit5Minutests {
+
+  fun tests() = rootContext {
+    context("pipelineRef feature is disabled") {
+      val deserializerSupplier = PipelineRefTriggerDeserializerSupplier(pipelineRefEnabled = false)
+      val jsonNodeFactory = JsonNodeFactory.instance
+
+      test("predicate is true when the trigger is a pipelineRef") {
+        val node = jsonNodeFactory.objectNode().apply {
+          put("type", "pipelineRef")
+        }
+        assertTrue(deserializerSupplier.predicate(node))
+      }
+
+      test("predicate is false when the trigger is not a pipelineRef") {
+        val node = jsonNodeFactory.objectNode().apply {
+          put("type", "manual")
+        }
+        assertFalse(deserializerSupplier.predicate(node))
+      }
+    }
+
+    context("pipelineRef feature is enabled") {
+      val deserializerSupplier = PipelineRefTriggerDeserializerSupplier(pipelineRefEnabled = true)
+      val jsonNodeFactory = JsonNodeFactory.instance
+
+      test("predicate is true when the trigger is a pipelineRef") {
+        val node = jsonNodeFactory.objectNode().apply {
+          put("type", "pipelineRef")
+        }
+        assertTrue(deserializerSupplier.predicate(node))
+      }
+
+      test("predicate is true when the trigger has parentExecution") {
+        val node = jsonNodeFactory.objectNode().apply {
+          set<ObjectNode>("parentExecution", jsonNodeFactory.objectNode().put("id", "execution-id"))
+        }
+        assertTrue(deserializerSupplier.predicate(node))
+      }
+
+      test("predicate is false when the trigger is not a pipelineRef") {
+        val node = jsonNodeFactory.objectNode().apply {
+          put("type", "manual")
+        }
+        assertFalse(deserializerSupplier.predicate(node))
+      }
+
+    }
+
+    context("deserializing pipelineRef") {
+      val deserializerSupplier = PipelineRefTriggerDeserializerSupplier(pipelineRefEnabled = true)
+      val jsonNodeFactory = JsonNodeFactory.instance
+      val jsonParser = ObjectMapper().createParser("")
+
+      test("all fields in pipelineRef are added") {
+        val node = jsonNodeFactory.objectNode().apply {
+          put("correlationId", "correlation-id")
+          put("user", "test-user")
+          set<ObjectNode>("parameters", jsonNodeFactory.objectNode().put("key1", "value1"))
+          set<ObjectNode>("artifacts", jsonNodeFactory.arrayNode().add(jsonNodeFactory.objectNode().put("type", "artifact-type")))
+          put("rebake", true)
+          put("dryRun", false)
+          put("strategy", true)
+          put("parentExecutionId", "parent-execution-id")
+          set<ObjectNode>("resolvedExpectedArtifacts", jsonNodeFactory.arrayNode().add(jsonNodeFactory.objectNode().put("id", "resolved-artifact-id")))
+          set<ObjectNode>("other", jsonNodeFactory.objectNode().put("extra1", "value1"))
+        }
+
+        val trigger = deserializerSupplier.deserializer(node, jsonParser) as PipelineRefTrigger
+
+        assertEquals("correlation-id", trigger.correlationId)
+        assertEquals("test-user", trigger.user)
+        assertEquals(mapOf("key1" to "value1"), trigger.parameters)
+        assertEquals(1, trigger.artifacts.size)
+        assertTrue(trigger.notifications.isEmpty())
+        assertTrue(trigger.isRebake)
+        assertFalse(trigger.isDryRun)
+        assertTrue(trigger.isStrategy)
+        assertEquals("parent-execution-id", trigger.parentExecutionId)
+        assertEquals(1, trigger.resolvedExpectedArtifacts.size)
+        assertEquals(1, trigger.other.size)
+      }
+
+      test("pipelineTrigger is deserialized into pipelineRef") {
+        val node = jsonNodeFactory.objectNode().apply {
+          put("correlationId", "correlation-id")
+          put("user", "test-user")
+          set<ObjectNode>("parameters", jsonNodeFactory.objectNode().put("key1", "value1"))
+          set<ObjectNode>("artifacts", jsonNodeFactory.arrayNode().add(jsonNodeFactory.objectNode().put("type", "artifact-type")))
+          put("rebake", true)
+          put("dryRun", false)
+          put("strategy", true)
+          set<ObjectNode>("parentExecution", jsonNodeFactory.objectNode().put("id", "parent-execution-id-from-pipeline-trigger"))
+          set<ObjectNode>("resolvedExpectedArtifacts", jsonNodeFactory.arrayNode().add(jsonNodeFactory.objectNode().put("id", "resolved-artifact-id")))
+          set<ObjectNode>("other", jsonNodeFactory.objectNode().put("extra1", "value1"))
+        }
+
+        val trigger = deserializerSupplier.deserializer(node, jsonParser) as PipelineRefTrigger
+
+        assertEquals("correlation-id", trigger.correlationId)
+        assertEquals("test-user", trigger.user)
+        assertEquals(mapOf("key1" to "value1"), trigger.parameters)
+        assertEquals(1, trigger.artifacts.size)
+        assertTrue(trigger.notifications.isEmpty())
+        assertTrue(trigger.isRebake)
+        assertFalse(trigger.isDryRun)
+        assertTrue(trigger.isStrategy)
+        assertEquals("parent-execution-id-from-pipeline-trigger", trigger.parentExecutionId)
+        assertEquals(1, trigger.resolvedExpectedArtifacts.size)
+        assertEquals(1, trigger.other.size)
+      }
+    }
+  }
+
+}
diff --git a/orca-sql/src/test/kotlin/com/netflix/spinnaker/orca/sql/pipeline/persistence/ExecutionMapperTest.kt b/orca-sql/src/test/kotlin/com/netflix/spinnaker/orca/sql/pipeline/persistence/ExecutionMapperTest.kt
index c27626a03a..4f8b35478a 100644
--- a/orca-sql/src/test/kotlin/com/netflix/spinnaker/orca/sql/pipeline/persistence/ExecutionMapperTest.kt
+++ b/orca-sql/src/test/kotlin/com/netflix/spinnaker/orca/sql/pipeline/persistence/ExecutionMapperTest.kt
@@ -18,14 +18,27 @@ package com.netflix.spinnaker.orca.sql.pipeline.persistence
 import com.fasterxml.jackson.databind.ObjectMapper
 import com.netflix.spinnaker.config.CompressionType
 import com.netflix.spinnaker.config.ExecutionCompressionProperties
+import com.netflix.spinnaker.kork.artifacts.model.Artifact
+import com.netflix.spinnaker.kork.artifacts.model.ExpectedArtifact
+import com.netflix.spinnaker.orca.api.pipeline.models.ExecutionType
 import com.netflix.spinnaker.orca.api.pipeline.models.PipelineExecution
 import com.netflix.spinnaker.orca.pipeline.model.DefaultTrigger
-import com.nhaarman.mockito_kotlin.*
+import com.netflix.spinnaker.orca.pipeline.model.PipelineExecutionImpl
+import com.netflix.spinnaker.orca.pipeline.model.PipelineTrigger
+import com.nhaarman.mockito_kotlin.mock
+import com.nhaarman.mockito_kotlin.doReturn
+import com.nhaarman.mockito_kotlin.verify
+import com.nhaarman.mockito_kotlin.any
+import com.nhaarman.mockito_kotlin.times
 import dev.minutest.junit.JUnit5Minutests
 import dev.minutest.rootContext
 import org.assertj.core.api.Assertions.assertThat
 import org.jooq.DSLContext
 import org.mockito.Mockito
+import strikt.api.expectThat
+import strikt.assertions.isA
+import strikt.assertions.isEmpty
+import strikt.assertions.isEqualTo
 import java.io.ByteArrayOutputStream
 import java.nio.charset.StandardCharsets
 import java.sql.ResultSet
@@ -69,16 +82,75 @@ class ExecutionMapperTest : JUnit5Minutests {
       val compressionProperties = ExecutionCompressionProperties().apply {
         enabled = false
       }
-      val mapper = ExecutionMapper(mapper = ObjectMapper(), stageBatchSize = 200, compressionProperties, true)
-      val mockedExecution = mock<PipelineExecution>()
       val database: DSLContext = Mockito.mock(DSLContext::class.java, Mockito.RETURNS_DEEP_STUBS)
 
       test("conversion ignored when trigger is not PipelineRef") {
+        val mockedExecution = mock<PipelineExecution>()
+        val mapper = ExecutionMapper(mapper = ObjectMapper(), stageBatchSize = 200, compressionProperties = compressionProperties, true)
+        val spyMapper = Mockito.spy(mapper)
+
         doReturn(DefaultTrigger(type = "default")).`when`(mockedExecution).trigger
-        mapper.convertPipelineRefTrigger(mockedExecution, database)
+        spyMapper.convertPipelineRefTrigger(mockedExecution, database)
+        verify(mockedExecution, times(1)).trigger
+        verify(spyMapper, times(0)).fetchParentExecution(any(), any(), any())
+      }
+
+      test("conversion is aborted when trigger is PipelineRef but parentExecution not found") {
+        val mockedExecution = mock<PipelineExecution>()
+        val mapper = ExecutionMapper(mapper = ObjectMapper(), stageBatchSize = 200, compressionProperties = compressionProperties, true)
+        val spyMapper = Mockito.spy(mapper)
+
+        doReturn(PipelineRefTrigger(parentExecutionId = "test-parent-id")).`when`(mockedExecution).trigger
+        doReturn(ExecutionType.PIPELINE).`when`(mockedExecution).type
+        doReturn(null).`when`(spyMapper).fetchParentExecution(any(), any(), any())
+        spyMapper.convertPipelineRefTrigger(mockedExecution, database)
         verify(mockedExecution, times(1)).trigger
+        verify(spyMapper, times(1)).fetchParentExecution(any(), any(), any())
       }
 
+      test("conversion is processed when trigger is PipelineRef") {
+        val correlationId = "test-correlation"
+        val parentExecutionId = "test-execution"
+        val parameters = mutableMapOf<String, Any>("test-parameter" to "test-body")
+        val artifacts = mutableListOf(Artifact.builder().build())
+        val resolvedExpectedArtifact = mutableListOf(ExpectedArtifact.builder().boundArtifact(Artifact.builder().build()).build())
+        val otherTest = mutableMapOf<String, Any>("test-other" to "other-body")
+
+        val execution = PipelineExecutionImpl(ExecutionType.PIPELINE, "test-app").apply {
+          trigger = PipelineRefTrigger(
+            correlationId = correlationId,
+            parentExecutionId = parentExecutionId,
+            parameters = parameters,
+            artifacts = artifacts
+          ).apply {
+            resolvedExpectedArtifacts = resolvedExpectedArtifact
+            other = otherTest
+          }
+        }
+
+        val mockedParentExecution = mock<PipelineExecution>()
+        val mapper = ExecutionMapper(mapper = ObjectMapper(), stageBatchSize = 200, compressionProperties = compressionProperties, true)
+        val spyMapper = Mockito.spy(mapper)
+
+        doReturn(mockedParentExecution).`when`(spyMapper).fetchParentExecution(any(), any(), any())
+
+        spyMapper.convertPipelineRefTrigger(execution, database)
+
+        expectThat(execution.trigger) {
+          isA<PipelineTrigger>()
+          get { this.correlationId }.isEqualTo(correlationId)
+          get { this.parameters }.isEqualTo(parameters)
+          get { this.artifacts }.isEqualTo(artifacts)
+          get { this.resolvedExpectedArtifacts }.isEqualTo(resolvedExpectedArtifact)
+          get { this.other }.isEqualTo(otherTest)
+          get { this.notifications }.isEmpty()
+        }
+
+        expectThat(execution.trigger as PipelineTrigger)
+          .get(PipelineTrigger::parentExecution).isEqualTo(mockedParentExecution)
+
+        verify(spyMapper, times(1)).fetchParentExecution(any(), any(), any())
+      }
     }
   }
 }
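
Reviewer note, appended after the patch rather than inside it: both toPipelineTrigger() and the SQL deserializer need the explicit .apply { ... } copies because resolvedExpectedArtifacts and other live on the Trigger base type rather than in the PipelineRefTrigger constructor, so a constructor-style conversion silently drops them. The Kotlin sketch below illustrates only that pattern; BaseTrigger, RefTrigger and FullTrigger are hypothetical stand-ins for this note, not Orca classes.

// Minimal, self-contained sketch of why base-type fields must be copied by hand.
abstract class BaseTrigger {
  var resolvedExpectedArtifacts: MutableList<String> = mutableListOf()
  var other: MutableMap<String, Any> = mutableMapOf()
}

class FullTrigger(val parentExecution: String) : BaseTrigger()

class RefTrigger(val parentExecutionId: String) : BaseTrigger() {
  // Constructor-style conversion: anything held on the base type is lost unless
  // copied explicitly, which is what the .apply { ... } blocks in this patch do.
  fun toFullTrigger(parentExecution: String): FullTrigger =
    FullTrigger(parentExecution).apply {
      resolvedExpectedArtifacts = this@RefTrigger.resolvedExpectedArtifacts
      other = this@RefTrigger.other
    }
}

fun main() {
  val ref = RefTrigger(parentExecutionId = "parent-id").apply {
    resolvedExpectedArtifacts = mutableListOf("resolved-artifact-id")
    other["extra1"] = "value1"
  }
  val full = ref.toFullTrigger(parentExecution = "parent-execution")
  // Both base-type fields survive the conversion, mirroring the behavior the tests assert.
  check(full.resolvedExpectedArtifacts == listOf("resolved-artifact-id"))
  check(full.other["extra1"] == "value1")
  println("resolvedExpectedArtifacts and other survive the conversion")
}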