Fix bug in calculating when ad-hoc tasks are out of attempts (#192907)
In this PR, I'm fixing a bug where ad-hoc tasks would get one fewer retry attempt in failure scenarios when using the mget claim strategy.
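
The root cause: with mget, the runner sees the task already marked as running, with `attempts` incremented for the run that is about to happen, so the existing `attempts >= maxAttempts` check fired one run too early. A minimal sketch of the comparison (`maxAttempts = 3` is an assumed value for illustration, not taken from the code):

```typescript
// Illustration only, not the Task Manager source. Assume an ad-hoc task allowed 3 attempts.
const maxAttempts = 3;

// Under mget, `attempts` already counts the run that is about to execute.
const attemptsAtCheckTime = 3; // the task is about to start its 3rd (final) allowed run

const oldCheck = attemptsAtCheckTime >= maxAttempts; // true  -> the 3rd run is skipped (the bug)
const newCheck = attemptsAtCheckTime > maxAttempts;  // false -> the 3rd run executes (the fix)
console.log({ oldCheck, newCheck });
```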

## To verify

1. Apply the following diff to your code (a config-based alternative to the `config.ts` change is noted after these steps)
```diff
diff --git a/x-pack/plugins/stack_connectors/server/connector_types/server_log/index.ts b/x-pack/plugins/stack_connectors/server/connector_types/server_log/index.ts
index 0275b2bdc2f..d481c3820a1 100644
--- a/x-pack/plugins/stack_connectors/server/connector_types/server_log/index.ts
+++ b/x-pack/plugins/stack_connectors/server/connector_types/server_log/index.ts
@@ -77,6 +77,10 @@ export function getConnectorType(): ServerLogConnectorType {
 async function executor(
   execOptions: ServerLogConnectorTypeExecutorOptions
 ): Promise<ConnectorTypeExecutorResult<void>> {
+
+  console.log('*** Server log execution');
+  throw new Error('Fail');
+
   const { actionId, params, logger } = execOptions;

   const sanitizedMessage = withoutControlCharacters(params.message);
diff --git a/x-pack/plugins/task_manager/server/config.ts b/x-pack/plugins/task_manager/server/config.ts
index db07494ef4f..07e277f8d16 100644
--- a/x-pack/plugins/task_manager/server/config.ts
+++ b/x-pack/plugins/task_manager/server/config.ts
@@ -202,7 +202,7 @@ export const configSchema = schema.object(
       max: 100,
       min: 1,
     }),
-    claim_strategy: schema.string({ defaultValue: CLAIM_STRATEGY_UPDATE_BY_QUERY }),
+    claim_strategy: schema.string({ defaultValue: CLAIM_STRATEGY_MGET }),
     request_timeouts: requestTimeoutsConfig,
   },
   {
diff --git a/x-pack/plugins/task_manager/server/lib/get_retry_at.ts b/x-pack/plugins/task_manager/server/lib/get_retry_at.ts
index 278ba18642d..c8fb911d500 100644
--- a/x-pack/plugins/task_manager/server/lib/get_retry_at.ts
+++ b/x-pack/plugins/task_manager/server/lib/get_retry_at.ts
@@ -54,6 +54,7 @@ export function getRetryDate({
 }

 export function calculateDelayBasedOnAttempts(attempts: number) {
+  return 10 * 1000;
   // Return 30s for the first retry attempt
   if (attempts === 1) {
     return 30 * 1000;
```
2. Create an always firing rule that runs every hour and triggers a server log action on check intervals
3. Let the rule run and observe the server log action running and failing three times (instead of two, as it did before this fix)
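
Note on step 1: since `claim_strategy` is exposed in the task manager config schema (see the `config.ts` hunk in the diff), it may also be possible to switch strategies by setting `xpack.task_manager.claim_strategy: mget` in `kibana.yml` rather than changing the default in code. The exact setting name is an assumption based on the schema key, so verify it against your Kibana version.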
mikecote authored Sep 13, 2024
1 parent b31d119 commit 36eedc1
Showing 2 changed files with 30 additions and 0 deletions.
The first changed file adds two tests to the TaskManagerRunner suite:

```diff
@@ -2304,6 +2304,32 @@ describe('TaskManagerRunner', () => {
 
       expect(runner.isAdHocTaskAndOutOfAttempts).toEqual(true);
     });
+
+    it(`should return true if attempts = max attempts and in claiming status`, async () => {
+      const { runner } = await pendingStageSetup({
+        instance: {
+          id: 'foo',
+          taskType: 'testbar',
+          attempts: 5,
+          status: TaskStatus.Claiming,
+        },
+      });
+
+      expect(runner.isAdHocTaskAndOutOfAttempts).toEqual(true);
+    });
+
+    it(`should return false if attempts = max attempts and in running status`, async () => {
+      const { runner } = await pendingStageSetup({
+        instance: {
+          id: 'foo',
+          taskType: 'testbar',
+          attempts: 5,
+          status: TaskStatus.Running,
+        },
+      });
+
+      expect(runner.isAdHocTaskAndOutOfAttempts).toEqual(false);
+    });
   });
 
   describe('removeTask()', () => {
```
The second changed file updates the `isAdHocTaskAndOutOfAttempts` getter on `TaskManagerRunner`:

```diff
@@ -294,6 +294,10 @@ export class TaskManagerRunner implements TaskRunner {
    * running a task, the task should be deleted instead of ran.
    */
   public get isAdHocTaskAndOutOfAttempts() {
+    if (this.instance.task.status === 'running') {
+      // This function gets called with tasks marked as running when using MGET, so attempts is already incremented
+      return !this.instance.task.schedule && this.instance.task.attempts > this.getMaxAttempts();
+    }
     return !this.instance.task.schedule && this.instance.task.attempts >= this.getMaxAttempts();
   }
 
```
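
For context, here is a rough walk-through of the attempts bookkeeping this getter relies on under mget. It is an illustrative sketch only, not the Task Manager claiming code, and it assumes three max attempts for the ad-hoc action task (consistent with the three failures expected in the verification steps):

```typescript
// Illustrative simulation of the attempts bookkeeping under mget; this is not
// the actual Task Manager claiming code. maxAttempts = 3 is an assumed value.
interface SimTask {
  attempts: number;
  status: 'idle' | 'running';
  schedule?: object; // ad-hoc tasks have no schedule
}

const maxAttempts = 3;

// Mirrors the fixed getter: when the task is already marked running, `attempts`
// already counts the current run, so the comparison uses `>` instead of `>=`.
const isOutOfAttempts = (task: SimTask) =>
  task.status === 'running'
    ? !task.schedule && task.attempts > maxAttempts
    : !task.schedule && task.attempts >= maxAttempts;

const task: SimTask = { attempts: 0, status: 'idle' };

for (let run = 1; run <= 4; run++) {
  // mget claiming marks the task as running and bumps attempts before the runner checks it
  task.status = 'running';
  task.attempts += 1;

  if (isOutOfAttempts(task)) {
    console.log(`run ${run}: out of attempts, the task is deleted instead of run`);
    break;
  }
  console.log(`run ${run}: executes (and fails, with the server log connector throwing)`);
  task.status = 'idle'; // the failed run is rescheduled for a retry
}
// With the fix: runs 1-3 execute, run 4 is out of attempts.
// With the old `>=` comparison in the running branch, run 3 would already be skipped.
```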
