Skip to content

Commit

Permalink
[FTR] Collapse Alerting API Helpers Impl (elastic#192216)
Browse files Browse the repository at this point in the history
## Summary
Resolves: elastic#192201

- Expose `TryWithRetriesOptions`
  - Tune timeouts to pass CI
- Add attempt count debug info to `retry/retry_for_success.ts`
  - Helps with tuning timeout params
- Move exposure of `AlertingApiProvider` from
`x-pack/test_serverless/api_integration/services/index.ts` ->
`x-pack/test_serverless/shared/services/deployment_agnostic_services.ts`
- This exposes the alerting api under Deployment Agnostic Services (DA),
and DA is exposed within
`x-pack/test_serverless/functional/services/index.ts` (Shared Services
[Serverless])
- Collapse helper script functions into just another object literal
stanza within `AlertingApiProvider`
- Update all references
- Refactor alerting api to use `retry` service, instead of p-retry
(following [this pr](elastic#178660))

### Additional debug logging

Run in debug mode (add `-v`):
```
node scripts/functional_tests \
  --config x-pack/test_serverless/api_integration/test_suites/search/common_configs/config.group1.ts \
  --grep "Summary actions" \
  -v
```

#### After
```
       │ sill retry.tryWithRetries('Alerting API - waitForDocumentInIndex, retryOptions: {"retryCount":5,"retryDelay":200}', [object AsyncFunction], [object Object])
         │ debg --- retry.tryWithRetries error: index_not_found_exception
         │      	Root causes:
         │      		index_not_found_exception: no such index [alert-action-es-query] - Attempt #: 1
         │ sill es.search([object Object])
         │ debg --- retry.tryWithRetries failed again with the same message... - Attempt #: 2
         │ sill es.search([object Object])
         │ debg --- retry.tryWithRetries failed again with the same message... - Attempt #: 3
         │ sill es.search([object Object])
         │ debg --- retry.tryWithRetries failed again with the same message... - Attempt #: 4
         │ sill es.search([object Object])
         │ debg --- retry.tryWithRetries failed again with the same message... - Attempt #: 5

...
// Msg after all attempts fail:

       │   Error: retry.tryWithRetries reached the limit of attempts waiting for 'Alerting API - waitForDocumentInIndex, retryOptions: {"retryCount":5,"retryDelay":200}': 5 out of 5
       │   ResponseError: index_not_found_exception
       │   	Root causes:
       │   		index_not_found_exception: no such index [alert-action-es-query]
       │       at SniffingTransport._request (node_modules/@elastic/transport/src/Transport.ts:601:17)
       │       at processTicksAndRejections (node:internal/process/task_queues:95:5)
       │       at /Users/trezworkbox/dev/main.worktrees/cleanup-alerting-api/node_modules/@elastic/transport/src/Transport.ts:704:22
       │       at SniffingTransport.request (node_modules/@elastic/transport/src/Transport.ts:701:14)
       │       at Proxy.SearchApi (node_modules/@elastic/elasticsearch/src/api/api/search.ts:96:10)
       │       at alerting_api.ts:123:28
       │       at runAttempt (retry_for_success.ts:30:15)
       │       at retryForSuccess (retry_for_success.ts:99:21)
       │       at Proxy.tryWithRetries (retry.ts:113:12)
       │       at Object.waitForDocumentInIndex (alerting_api.ts:120:14)
       │       at Context.<anonymous> (summary_actions.ts:146:20)
       │       at Object.apply (wrap_function.js:74:16)
       │       at Object.apply (wrap_function.js:74:16)
       │       at onFailure (retry_for_success.ts:18:9)
       │       at retryForSuccess (retry_for_success.ts:75:7)
       │       at Proxy.tryWithRetries (retry.ts:113:12)
       │       at Object.waitForDocumentInIndex (alerting_api.ts:120:14)
       │       at Context.<anonymous> (summary_actions.ts:146:20)
       │       at Object.apply (wrap_function.js:74:16)
       │       at Object.apply (wrap_function.js:74:16)
```
### Notes
Was put back in draft to address the additional scope detailed in the issue
linked above.

---------

Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>
(cherry picked from commit 9d22e8c)
  • Loading branch information
wayneseymour committed Oct 25, 2024
1 parent e5416f3 commit 821993b
Show file tree
Hide file tree
Showing 17 changed files with 1,306 additions and 1,445 deletions.
2 changes: 1 addition & 1 deletion packages/kbn-ftr-common-functional-services/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ import { KibanaServerProvider } from './services/kibana_server';
export { KibanaServerProvider } from './services/kibana_server';
export type KibanaServer = ProvidedType<typeof KibanaServerProvider>;

export { RetryService } from './services/retry';
export { RetryService, type TryWithRetriesOptions } from './services/retry';

import { EsArchiverProvider } from './services/es_archiver';
export type EsArchiver = ProvidedType<typeof EsArchiverProvider>;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,4 +7,4 @@
* License v3.0 only", or the "Server Side Public License, v 1".
*/

export { RetryService } from './retry';
export { RetryService, type TryWithRetriesOptions } from './retry';
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ import { FtrService } from '../ftr_provider_context';
import { retryForSuccess } from './retry_for_success';
import { retryForTruthy } from './retry_for_truthy';

interface TryWithRetriesOptions {
export interface TryWithRetriesOptions {
retryCount: number;
retryDelay?: number;
timeout?: number;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,7 @@ export async function retryForSuccess<T>(log: ToolingLog, options: Options<T>) {
if (lastError && onFailureBlock) {
const before = await runAttempt(onFailureBlock);
if ('error' in before) {
log.debug(`--- onRetryBlock error: ${before.error.message}`);
log.debug(`--- onRetryBlock error: ${before.error.message} - Attempt #: ${attemptCounter}`);
}
}

Expand All @@ -104,9 +104,13 @@ export async function retryForSuccess<T>(log: ToolingLog, options: Options<T>) {

if ('error' in attempt) {
if (lastError && lastError.message === attempt.error.message) {
log.debug(`--- ${methodName} failed again with the same message...`);
log.debug(
`--- ${methodName} failed again with the same message... - Attempt #: ${attemptCounter}`
);
} else {
log.debug(`--- ${methodName} error: ${attempt.error.message}`);
log.debug(
`--- ${methodName} error: ${attempt.error.message} - Attempt #: ${attemptCounter}`
);
}

lastError = attempt.error;
Expand Down
176 changes: 0 additions & 176 deletions x-pack/test_serverless/api_integration/services/alerting_api.ts

This file was deleted.

2 changes: 0 additions & 2 deletions x-pack/test_serverless/api_integration/services/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@ import { GenericFtrProviderContext } from '@kbn/test';
import { services as deploymentAgnosticSharedServices } from '../../shared/services/deployment_agnostic_services';
import { services as svlSharedServices } from '../../shared/services';

import { AlertingApiProvider } from './alerting_api';
import { SamlToolsProvider } from './saml_tools';
import { SvlCasesServiceProvider } from './svl_cases';
import { SloApiProvider } from './slo_api';
Expand All @@ -35,7 +34,6 @@ export const services = {

// serverless FTR services
...svlSharedServices,
alertingApi: AlertingApiProvider,
samlTools: SamlToolsProvider,
svlCases: SvlCasesServiceProvider,
sloApi: SloApiProvider,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -42,22 +42,18 @@ import {
ALERT_PREVIOUS_ACTION_GROUP,
} from '@kbn/rule-data-utils';
import { FtrProviderContext } from '../../../ftr_provider_context';
import { createEsQueryRule } from './helpers/alerting_api_helper';
import { waitForAlertInIndex, waitForNumRuleRuns } from './helpers/alerting_wait_for_helpers';
import { ObjectRemover } from '../../../../shared/lib';
import { InternalRequestHeader, RoleCredentials } from '../../../../shared/services';
import { RoleCredentials } from '../../../../shared/services';

const OPEN_OR_ACTIVE = new Set(['open', 'active']);

export default function ({ getService }: FtrProviderContext) {
const svlCommonApi = getService('svlCommonApi');
const svlUserManager = getService('svlUserManager');
const supertestWithoutAuth = getService('supertestWithoutAuth');
let roleAdmin: RoleCredentials;
let internalReqHeader: InternalRequestHeader;
const supertest = getService('supertest');
const esClient = getService('es');
const objectRemover = new ObjectRemover(supertest);
const alertingApi = getService('alertingApi');

describe('Alert documents', function () {
// Timeout of 360000ms exceeded
Expand All @@ -68,7 +64,6 @@ export default function ({ getService }: FtrProviderContext) {

before(async () => {
roleAdmin = await svlUserManager.createM2mApiKeyWithRoleScope('admin');
internalReqHeader = svlCommonApi.getInternalRequestHeader();
});

afterEach(async () => {
Expand All @@ -80,10 +75,8 @@ export default function ({ getService }: FtrProviderContext) {
});

it('should generate an alert document for an active alert', async () => {
const createdRule = await createEsQueryRule({
supertestWithoutAuth,
const createdRule = await alertingApi.helpers.createEsQueryRule({
roleAuthc: roleAdmin,
internalReqHeader,
consumer: 'alerts',
name: 'always fire',
ruleTypeId: RULE_TYPE_ID,
Expand All @@ -103,17 +96,15 @@ export default function ({ getService }: FtrProviderContext) {

// get the first alert document written
const testStart1 = new Date();
await waitForNumRuleRuns({
supertestWithoutAuth,
await alertingApi.helpers.waitForNumRuleRuns({
roleAuthc: roleAdmin,
internalReqHeader,
numOfRuns: 1,
ruleId,
esClient,
testStart: testStart1,
});

const alResp1 = await waitForAlertInIndex({
const alResp1 = await alertingApi.helpers.waitForAlertInIndex({
esClient,
filter: testStart1,
indexName: ALERT_INDEX,
Expand Down Expand Up @@ -206,10 +197,8 @@ export default function ({ getService }: FtrProviderContext) {
});

it('should update an alert document for an ongoing alert', async () => {
const createdRule = await createEsQueryRule({
supertestWithoutAuth,
const createdRule = await alertingApi.helpers.createEsQueryRule({
roleAuthc: roleAdmin,
internalReqHeader,
consumer: 'alerts',
name: 'always fire',
ruleTypeId: RULE_TYPE_ID,
Expand All @@ -229,17 +218,15 @@ export default function ({ getService }: FtrProviderContext) {

// get the first alert document written
const testStart1 = new Date();
await waitForNumRuleRuns({
supertestWithoutAuth,
await alertingApi.helpers.waitForNumRuleRuns({
roleAuthc: roleAdmin,
internalReqHeader,
numOfRuns: 1,
ruleId,
esClient,
testStart: testStart1,
});

const alResp1 = await waitForAlertInIndex({
const alResp1 = await alertingApi.helpers.waitForAlertInIndex({
esClient,
filter: testStart1,
indexName: ALERT_INDEX,
Expand All @@ -249,17 +236,15 @@ export default function ({ getService }: FtrProviderContext) {

// wait for another run, get the updated alert document
const testStart2 = new Date();
await waitForNumRuleRuns({
supertestWithoutAuth,
await alertingApi.helpers.waitForNumRuleRuns({
roleAuthc: roleAdmin,
internalReqHeader,
numOfRuns: 1,
ruleId,
esClient,
testStart: testStart2,
});

const alResp2 = await waitForAlertInIndex({
const alResp2 = await alertingApi.helpers.waitForAlertInIndex({
esClient,
filter: testStart2,
indexName: ALERT_INDEX,
Expand Down
Loading

0 comments on commit 821993b

Please sign in to comment.