Skip to content

Commit

Permalink
Remove @Singular annotation
Browse files Browse the repository at this point in the history
While building Orca with the upgraded Spring Boot version 2.3.12, the google error-prone package throws an IndexOutOfBoundsException, as shown below:

orca/orca-api/src/main/java/com/netflix/spinnaker/orca/api/pipeline/TaskResult.java:32: error: An unhandled exception was thrown by the Error Prone static analysis plugin.
@Builder
^
     Please report this at https://github.com/google/error-prone/issues/new and include the following:

     error-prone version: 2.4.0
     BugPattern: FallThrough
     Stack Trace:
     java.lang.IndexOutOfBoundsException
        at java.base/java.nio.HeapCharBuffer.subSequence(HeapCharBuffer.java:633)
        at java.base/java.nio.HeapCharBuffer.subSequence(HeapCharBuffer.java:41)
        at com.google.errorprone.bugpatterns.FallThrough.matchSwitch(FallThrough.java:70)
        at com.google.errorprone.scanner.ErrorProneScanner.processMatchers(ErrorProneScanner.java:451)
        at com.google.errorprone.scanner.ErrorProneScanner.visitSwitch(ErrorProneScanner.java:825)
        at com.google.errorprone.scanner.ErrorProneScanner.visitSwitch(ErrorProneScanner.java:152)
        at jdk.compiler/com.sun.tools.javac.tree.JCTree$JCSwitch.accept(JCTree.java:1229)
        at jdk.compiler/com.sun.source.util.TreePathScanner.scan(TreePathScanner.java:82)

The error is caused by the Lombok library upgrade from 1.18.16 to 1.18.20, pulled in as a transitive dependency of Spring Boot. It is similar to the issue mentioned below:
google/error-prone#2575

Fix:
Remove the @Singular annotation from classes that use the Lombok builder pattern.

Reference:
projectlombok/lombok#2221
  • Loading branch information
j-sandy committed Nov 24, 2021
1 parent c5716ad commit e0d57a2
Show file tree
Hide file tree
Showing 10 changed files with 46 additions and 25 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@
import lombok.Builder;
import lombok.Data;
import lombok.NonNull;
import lombok.Singular;

/** Represents the state of a {@link TaskExecution}. */
@Beta
Expand All @@ -49,19 +48,15 @@ public final class TaskResult {
* <p>Data stored in the context will be available to other tasks within this stage, but not to
* tasks in other stages.
*/
@Immutable
@Singular("context")
private final Map<String, ?> context;
@Immutable private final Map<String, ?> context;

/**
* Pipeline-scoped data.
*
* <p>Data stored in outputs will be available (via {@link StageExecution#getContext()} to tasks
* in later stages of the pipeline.
*/
@Immutable
@Singular("output")
private final Map<String, ?> outputs;
@Immutable private final Map<String, ?> outputs;

/**
* Creates a new TaskResult with the provided {@link ExecutionStatus}.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
import com.netflix.spinnaker.orca.clouddriver.model.OperationContext;
import com.netflix.spinnaker.orca.clouddriver.model.SubmitOperationResult;
import java.time.Duration;
import java.util.Collections;
import javax.annotation.Nonnull;
import org.springframework.stereotype.Component;

Expand All @@ -44,7 +45,7 @@ public TaskResult execute(@Nonnull StageExecution stage) {

TaskResult.TaskResultBuilder builder = TaskResult.builder(ExecutionStatus.SUCCEEDED);
if (result.getId() != null) {
builder.context("kato.last.task.id", result.getId());
builder.context(Collections.singletonMap("kato.last.task.id", result.getId()));
}

return builder.build();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
import com.netflix.spinnaker.orca.deploymentmonitor.models.EvaluateHealthRequest;
import com.netflix.spinnaker.orca.deploymentmonitor.models.EvaluateHealthResponse;
import java.time.Instant;
import java.util.Collections;
import java.util.concurrent.TimeUnit;
import javax.annotation.Nonnull;
import org.springframework.beans.factory.annotation.Autowired;
Expand Down Expand Up @@ -85,7 +86,8 @@ private TaskResult.TaskResultBuilder processDirective(
switch (directive) {
case COMPLETE:
// TODO(mvulfson): Actually implement this case in the stages
return TaskResult.builder(ExecutionStatus.SUCCEEDED).output("skipToPercentage", 100);
return TaskResult.builder(ExecutionStatus.SUCCEEDED)
.outputs(Collections.singletonMap("skipToPercentage", 100));

case CONTINUE:
return TaskResult.builder(ExecutionStatus.SUCCEEDED);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -184,7 +184,7 @@ private TaskResult handleError(
e);

return TaskResult.builder(ExecutionStatus.RUNNING)
.context("deployMonitorHttpRetryCount", ++currentRetryCount)
.context(Collections.singletonMap("deployMonitorHttpRetryCount", ++currentRetryCount))
.build();
}
}
Expand Down Expand Up @@ -246,14 +246,18 @@ TaskResult buildTaskResult(
response.getNextStep().getDirective(), "Health evaluation results are unknown");
StatusExplanation explanation = new StatusExplanation(summary, response.getStatusReason());

return taskResultBuilder.context("deploymentMonitorReasons", explanation).build();
return taskResultBuilder
.context(Collections.singletonMap("deploymentMonitorReasons", explanation))
.build();
}

private TaskResult buildTaskResult(
TaskResult.TaskResultBuilder taskResultBuilder, String summary) {
StatusExplanation explanation = new StatusExplanation(summary);

return taskResultBuilder.context("deploymentMonitorReasons", explanation).build();
return taskResultBuilder
.context(Collections.singletonMap("deploymentMonitorReasons", explanation))
.build();
}

private DeploymentMonitorDefinition getDeploymentMonitorDefinition(StageExecution stage) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ public TaskResult execute(@Nonnull StageExecution stage) {
String changeSetName = (String) result.get("changeSetName");
log.info("CloudFormation ChangeSet {} empty. Requesting to be deleted.", changeSetName);
return TaskResult.builder(ExecutionStatus.SUCCEEDED)
.context("deleteChangeSet", true)
.context(Collections.singletonMap("deleteChangeSet", true))
.outputs(stack)
.build();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import com.netflix.spinnaker.orca.api.pipeline.models.StageExecution;
import com.netflix.spinnaker.orca.clouddriver.OortService;
import com.netflix.spinnaker.orca.clouddriver.tasks.servicebroker.AbstractWaitForServiceTask;
import java.util.Collections;
import java.util.Map;
import java.util.Optional;
import javax.annotation.Nonnull;
Expand Down Expand Up @@ -56,12 +57,16 @@ public TaskResult execute(@Nonnull StageExecution stage) {
.orElse("Failed to get last operation description")
.toString();
taskResultBuilder
.output(
"lastOperationStatus",
Optional.ofNullable(serviceInstance.get("status")).orElse("").toString())
.output("lastOperationDescription", lastOperationDescription);
.outputs(
Collections.singletonMap(
"lastOperationStatus",
Optional.ofNullable(serviceInstance.get("status")).orElse("").toString()))
.outputs(
Collections.singletonMap(
"lastOperationDescription", lastOperationDescription));
if (status == ExecutionStatus.TERMINAL) {
taskResultBuilder.output("failureMessage", lastOperationDescription);
taskResultBuilder.outputs(
Collections.singletonMap("failureMessage", lastOperationDescription));
}
});

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
import com.netflix.spinnaker.orca.api.pipeline.models.StageExecution;
import com.netflix.spinnaker.orca.pipeline.persistence.ExecutionNotFoundException;
import com.netflix.spinnaker.orca.pipeline.persistence.ExecutionRepository;
import java.util.Collections;
import java.util.concurrent.TimeUnit;
import javax.annotation.Nonnull;
import javax.validation.constraints.NotNull;
Expand Down Expand Up @@ -69,16 +70,20 @@ public TaskResult execute(@Nonnull StageExecution stage) {

if (status.isComplete()) {
return TaskResult.builder(ExecutionStatus.CANCELED)
.context("reason", format("Depended-on execution completed with status %s", status))
.context(
Collections.singletonMap(
"reason", format("Depended-on execution completed with status %s", status)))
.build();
}

return TaskResult.RUNNING;
} catch (ExecutionNotFoundException e) {
return TaskResult.builder(ExecutionStatus.TERMINAL)
.context(
"error",
format("Execution (%s) %s not found.", context.executionType, context.executionId))
Collections.singletonMap(
"error",
format(
"Execution (%s) %s not found.", context.executionType, context.executionId)))
.build();
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
import com.netflix.spinnaker.orca.front50.Front50Service;
import com.netflix.spinnaker.orca.front50.model.PluginInfo;
import java.time.Duration;
import java.util.Collections;
import java.util.Objects;
import javax.annotation.Nonnull;
import org.slf4j.Logger;
Expand Down Expand Up @@ -58,10 +59,16 @@ public TaskResult execute(@Nonnull StageExecution stage) {
return pluginInfo.getReleases().stream()
.filter(it -> it.getVersion().equals(version))
.findFirst()
.map(r -> TaskResult.builder(ExecutionStatus.SUCCEEDED).context("release", r).build())
.map(
r ->
TaskResult.builder(ExecutionStatus.SUCCEEDED)
.context(Collections.singletonMap("release", r))
.build())
.orElse(
TaskResult.builder(ExecutionStatus.TERMINAL)
.output("message", format("No release found for version '%s'", version))
.outputs(
Collections.singletonMap(
"message", format("No release found for version '%s'", version)))
.build());
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ import com.netflix.spinnaker.orca.api.pipeline.models.ExecutionStatus.SUCCEEDED
import com.netflix.spinnaker.orca.api.pipeline.models.StageExecution
import com.netflix.spinnaker.orca.ext.mapTo
import com.netflix.spinnaker.orca.kayenta.KayentaService
import java.util.Collections;
import org.slf4j.LoggerFactory
import org.springframework.stereotype.Component

Expand All @@ -30,6 +31,6 @@ class ResolveKayentaConfigIdTask(
} else if (candidates.size > 1) {
throw UserException("Found more than one canary configId for configName $configName and application $currentApplication")
}
return TaskResult.builder(SUCCEEDED).context("canaryConfigId", candidates[0].id).build()
return TaskResult.builder(SUCCEEDED).context(Collections.singletonMap("canaryConfigId", candidates[0].id)).build()
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ import com.netflix.spinnaker.orca.ext.mapTo
import com.netflix.spinnaker.orca.kayenta.CanaryExecutionRequest
import com.netflix.spinnaker.orca.kayenta.KayentaService
import com.netflix.spinnaker.orca.kayenta.RunCanaryContext
import java.util.Collections;
import org.slf4j.LoggerFactory
import org.springframework.stereotype.Component

Expand Down Expand Up @@ -57,6 +58,6 @@ class RunKayentaCanaryTask(
"canaryConfigId" to context.canaryConfigId
)

return TaskResult.builder(SUCCEEDED).context("canaryPipelineExecutionId", canaryPipelineExecutionId).outputs(outputs).build()
return TaskResult.builder(SUCCEEDED).context(Collections.singletonMap("canaryPipelineExecutionId", canaryPipelineExecutionId)).outputs(outputs).build()
}
}

0 comments on commit e0d57a2

Please sign in to comment.