diff --git a/keep-ui/features/presets/presets-manager/ui/alerts-rules-builder.tsx b/keep-ui/features/presets/presets-manager/ui/alerts-rules-builder.tsx
index fbae12675c..a742ae595d 100644
--- a/keep-ui/features/presets/presets-manager/ui/alerts-rules-builder.tsx
+++ b/keep-ui/features/presets/presets-manager/ui/alerts-rules-builder.tsx
@@ -14,10 +14,10 @@ import "react-querybuilder/dist/query-builder.scss";
import { Table } from "@tanstack/react-table";
import { FiExternalLink, FiSave } from "react-icons/fi";
import { AlertDto } from "@/entities/alerts/model";
-import { TrashIcon, XMarkIcon } from "@heroicons/react/24/outline";
+import { TrashIcon } from "@heroicons/react/24/outline";
import { TbDatabaseImport } from "react-icons/tb";
import { components, GroupBase, MenuListProps } from "react-select";
-import { MonacoEditor, Select } from "@/shared/ui";
+import { Select } from "@/shared/ui";
import { useConfig } from "@/utils/hooks/useConfig";
import { IoSearchOutline } from "react-icons/io5";
import { usePathname, useRouter, useSearchParams } from "next/navigation";
@@ -360,8 +360,8 @@ export const AlertsRulesBuilder = ({
operators: getOperators(id),
}))
: customFields
- ? customFields
- : [];
+ ? customFields
+ : [];
const onImportSQL = () => {
setImportSQLOpen(true);
diff --git a/keep-ui/features/workflows/ai-assistant/ui/AddTriggerUI.tsx b/keep-ui/features/workflows/ai-assistant/ui/AddTriggerUI.tsx
index ca5deb3f2a..a3969abb83 100644
--- a/keep-ui/features/workflows/ai-assistant/ui/AddTriggerUI.tsx
+++ b/keep-ui/features/workflows/ai-assistant/ui/AddTriggerUI.tsx
@@ -1,12 +1,12 @@
import { useState, useCallback, useEffect } from "react";
import { useWorkflowStore } from "@/entities/workflows";
-import { WF_DEBUG_INFO } from "../../builder/ui/debug-settings";
import { Button } from "@/components/ui";
import { JsonCard } from "@/shared/ui";
import { StepPreview } from "./StepPreview";
import { SuggestionResult, SuggestionStatus } from "./SuggestionStatus";
import { getErrorMessage } from "../lib/utils";
import { V2StepTrigger } from "@/entities/workflows/model/types";
+import { useConfig } from "@/utils/hooks/useConfig";
type AddTriggerUIPropsCommon = {
trigger: V2StepTrigger;
@@ -34,6 +34,7 @@ export const AddTriggerUI = ({
}: AddTriggerUIProps) => {
const [isAddingTrigger, setIsAddingTrigger] = useState(false);
const { addNodeBetween, getNextEdge } = useWorkflowStore();
+ const { data: config } = useConfig();
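+  // Debug cards are gated by the backend-provided KEEP_WORKFLOW_DEBUG flag instead of the old build-time WF_DEBUG_INFO constant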
const handleAddTrigger = useCallback(() => {
if (isAddingTrigger) {
@@ -85,7 +86,9 @@ export const AddTriggerUI = ({
if (status === "complete") {
return (
-        {WF_DEBUG_INFO && <JsonCard title="trigger" json={trigger} />}
+        {config?.KEEP_WORKFLOW_DEBUG && (
+          <JsonCard title="trigger" json={trigger} />
+        )}
Do you want to add this trigger to the workflow?
@@ -94,7 +97,9 @@ export const AddTriggerUI = ({
}
return (
-    {WF_DEBUG_INFO && <JsonCard title="trigger" json={trigger} />}
+    {config?.KEEP_WORKFLOW_DEBUG && (
+      <JsonCard title="trigger" json={trigger} />
+    )}
Do you want to add this trigger to the workflow?
diff --git a/keep-ui/features/workflows/ai-assistant/ui/StepPreview.tsx b/keep-ui/features/workflows/ai-assistant/ui/StepPreview.tsx
index 6b951209b8..87d3cda7d1 100644
--- a/keep-ui/features/workflows/ai-assistant/ui/StepPreview.tsx
+++ b/keep-ui/features/workflows/ai-assistant/ui/StepPreview.tsx
@@ -6,8 +6,8 @@ import { normalizeStepType } from "../../builder/lib/utils";
import { stringify } from "yaml";
import { getTriggerDescriptionFromStep } from "@/entities/workflows/lib/getTriggerDescription";
import { getYamlFromStep } from "../lib/utils";
-import { WF_DEBUG_INFO } from "../../builder/ui/debug-settings";
import { JsonCard, MonacoEditor } from "@/shared/ui";
+import { useConfig } from "@/utils/hooks/useConfig";
function getStepIconUrl(data: V2Step | V2StepTrigger) {
const { type } = data || {};
@@ -25,6 +25,7 @@ export const StepPreview = ({
step: V2Step | V2StepTrigger;
className?: string;
}) => {
+ const { data: config } = useConfig();
const yamlDefinition = getYamlFromStep(step);
const yaml = yamlDefinition ? stringify(yamlDefinition) : null;
@@ -33,7 +34,7 @@ export const StepPreview = ({
return (
-    {WF_DEBUG_INFO && <JsonCard title="step" json={step} />}
+    {config?.KEEP_WORKFLOW_DEBUG && <JsonCard title="step" json={step} />}
{
- acc[filter.attribute] = filter.value;
- return acc;
- },
-        {} as Record<string, string>
- ),
+ cel: args.args.alertFilters,
};
const trigger = getTriggerDefinitionFromCopilotAction(
@@ -480,7 +470,9 @@ export function WorkflowBuilderChat({
parameters: [
{
name: "incidentEvents",
- description: `The events of the incident trigger, one of: ${IncidentEventEnum.options.map((o) => `"${o}"`).join(", ")}`,
+ description: `The events of the incident trigger, one of: ${IncidentEventEnum.options
+ .map((o) => `"${o}"`)
+ .join(", ")}`,
type: "string[]",
required: true,
},
@@ -1063,7 +1055,7 @@ Example: 'node_123__empty_true'`,
}
>
{/* Debug info */}
- {WF_DEBUG_INFO && (
+ {config?.KEEP_WORKFLOW_DEBUG && (
) => {
updateSelectedNodeData(key, value);
@@ -32,10 +44,19 @@ export function TriggerEditor() {
};
const updateAlertFilter = (filter: string, value: string) => {
- const currentFilters = properties.alert || {};
- const updatedFilters = { ...currentFilters, [filter]: value };
- updateV2Properties({ alert: updatedFilters });
- saveNodeDataDebounced("properties", { alert: updatedFilters });
+ const currentProperties = properties.alert || {};
+ if (!currentProperties.filters) {
+ currentProperties.filters = {};
+ }
+ const newProperties = { ...currentProperties, [filter]: value };
+ updateV2Properties({ alert: newProperties });
+ saveNodeDataDebounced("properties", newProperties);
+ };
+
+ const updateAlertCel = (value: string) => {
+ const currentProperties = properties.alert || {};
+ updateV2Properties({ alert: { ...currentProperties, cel: value } });
+ saveNodeDataDebounced("properties", { ...currentProperties, cel: value });
};
const addFilter = () => {
@@ -46,9 +67,9 @@ export function TriggerEditor() {
};
const deleteFilter = (filter: string) => {
- const currentFilters = { ...properties.alert };
- delete currentFilters[filter];
- updateV2Properties({ alert: currentFilters });
+ const currentProperties = { ...properties.alert };
+ delete currentProperties.filters[filter];
+ updateV2Properties({ alert: currentProperties });
};
const triggerKeys = ["alert", "incident", "interval", "manual"];
@@ -63,6 +84,8 @@ export function TriggerEditor() {
const error = validationErrors?.[selectedTriggerKey];
const renderTriggerContent = () => {
+ const { data: alertFields } = useFacetPotentialFields("alerts");
+
switch (selectedTriggerKey) {
case "manual":
return (
@@ -85,45 +108,90 @@ export function TriggerEditor() {
case "alert":
return (
<>
- Alert filter
- {error && {error} }
-
-
- Add Filter
-
+ {error && (
+
+ {Array.isArray(error) ? error[0] : error}
+
+ )}
+
+
+ CEL Expression
+ {
+ window.open(`${docsUrl}/overview/cel`, "_blank");
+ }}
+ tooltip="Read more about CEL expressions"
+ />
+
+
+ updateAlertCel(value)}
+ onClearValue={() => updateAlertCel("")}
+ fieldsForSuggestions={alertFields}
+ />
+
+
- {properties.alert &&
- Object.keys(properties.alert ?? {}).map((filter) => (
-
-
{filter}
-
-
- updateAlertFilter(filter, e.target.value)
- }
- value={
- (properties.alert as any)[filter] || ("" as string)
- }
- />
- deleteFilter(filter)}
- />
+
+
Alert filter (deprecated)
+
+ Please convert your alert filters to CEL expressions to ensure
+ stability and performance.
+
+
+
+ Add Filter
+
+
+ {properties.alert.filters &&
+ Object.keys(properties.alert.filters ?? {}).map((filter) => (
+
+
{filter}
+
+
+ updateAlertFilter(filter, e.target.value)
+ }
+ value={
+ (properties.alert.filters as any)[filter] ||
+ ("" as string)
+ }
+ />
+ deleteFilter(filter)}
+ />
+
-
- ))}
+ ))}
+
>
);
diff --git a/keep-ui/features/workflows/builder/ui/WorkflowEdge.tsx b/keep-ui/features/workflows/builder/ui/WorkflowEdge.tsx
index 01d2d1d327..a7ecb31376 100644
--- a/keep-ui/features/workflows/builder/ui/WorkflowEdge.tsx
+++ b/keep-ui/features/workflows/builder/ui/WorkflowEdge.tsx
@@ -6,8 +6,8 @@ import { Button } from "@tremor/react";
import "@xyflow/react/dist/style.css";
import { PlusIcon } from "@heroicons/react/24/outline";
import clsx from "clsx";
-import { WF_DEBUG_INFO } from "./debug-settings";
import { edgeCanHaveAddButton } from "../lib/utils";
+import { useConfig } from "@/utils/hooks/useConfig";
export function DebugEdgeInfo({
id,
@@ -21,7 +21,8 @@ export function DebugEdgeInfo({
labelY: number;
isLayouted: boolean;
}) {
- if (!WF_DEBUG_INFO) {
+ const { data: config } = useConfig();
+ if (!config?.KEEP_WORKFLOW_DEBUG) {
return null;
}
return (
diff --git a/keep-ui/features/workflows/builder/ui/WorkflowNode.tsx b/keep-ui/features/workflows/builder/ui/WorkflowNode.tsx
index c4a6c948c1..a57d985b77 100644
--- a/keep-ui/features/workflows/builder/ui/WorkflowNode.tsx
+++ b/keep-ui/features/workflows/builder/ui/WorkflowNode.tsx
@@ -11,7 +11,6 @@ import { toast } from "react-toastify";
import { FlowNode } from "@/entities/workflows/model/types";
import { DynamicImageProviderIcon } from "@/components/ui";
import clsx from "clsx";
-import { WF_DEBUG_INFO } from "./debug-settings";
import {
ExclamationCircleIcon,
ExclamationTriangleIcon,
@@ -21,9 +20,11 @@ import { NodeTriggerIcon } from "@/entities/workflows/ui/NodeTriggerIcon";
import { normalizeStepType, triggerTypes } from "../lib/utils";
import { getTriggerDescriptionFromStep } from "@/entities/workflows/lib/getTriggerDescription";
import { ValidationError } from "@/entities/workflows/lib/validation";
+import { useConfig } from "@/utils/hooks/useConfig";
export function DebugNodeInfo({ id, data }: Pick<FlowNode, "id" | "data">) {
- if (!WF_DEBUG_INFO) {
+ const { data: config } = useConfig();
+ if (!config?.KEEP_WORKFLOW_DEBUG) {
return null;
}
return (
@@ -190,7 +191,7 @@ function WorkflowNode({ id, data }: FlowNode) {
List[dict]:
return result
-def get_all_workflows(tenant_id: str):
+def get_all_workflows(tenant_id: str, exclude_disabled: bool = False) -> List[Workflow]:
with Session(engine) as session:
- workflows = session.exec(
+ query = (
select(Workflow)
.where(Workflow.tenant_id == tenant_id)
.where(Workflow.is_deleted == False)
.where(Workflow.is_test == False)
- ).all()
+ )
+
+ if exclude_disabled:
+ query = query.where(Workflow.is_disabled == False)
+
+ workflows = session.exec(query).all()
return workflows
diff --git a/keep/workflowmanager/workflowmanager.py b/keep/workflowmanager/workflowmanager.py
index c6fd85d2a0..193f38e476 100644
--- a/keep/workflowmanager/workflowmanager.py
+++ b/keep/workflowmanager/workflowmanager.py
@@ -5,6 +5,8 @@
import typing
import uuid
+import celpy
+
from keep.api.core.config import config
from keep.api.core.db import (
get_enrichment,
@@ -40,6 +42,7 @@ def __init__(self):
self.scheduler = WorkflowScheduler(self)
self.workflow_store = WorkflowStore()
self.started = False
+ self.cel_environment = celpy.Environment()
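+        # Shared CEL environment used to compile and evaluate alert-trigger `cel` expressions in insert_events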
# this is to enqueue the workflows in the REDIS queue
# SHAHAR: todo - finish the REDIS implementation
# self.loop = None
@@ -162,128 +165,204 @@ def insert_incident(self, tenant_id: str, incident: IncidentDto, trigger: str):
)
self.logger.info("Workflow added to run")
+ # @tb: should I move it to cel_utils.py?
+    # logging is easier here and I don't see other places that might use this >.<
+ def _convert_filters_to_cel(self, filters: list[dict[str, str]]):
+ # Convert filters ({"key": "key", "value": "value"}) and friends to CEL
+ self.logger.info(
+ "Converting filters to CEL",
+ extra={"original_filters": filters},
+ )
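+        # For illustration: [{"key": "severity", "value": "critical"}, {"key": "source", "value": "grafana"}]
+        # becomes: severity == "critical" && source.contains("grafana")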
+ try:
+ cel_filters = []
+ for filter in filters:
+ key = filter.get("key")
+ value = filter.get("value")
+ exclude = filter.get("exclude", False)
+
+ # malformed filter?
+ if not key or not value:
+ self.logger.warning(
+ "Filter is missing key or value",
+ extra={"filter": filter},
+ )
+ continue
+
+ if value.startswith('r"'):
+                    # Try to parse the regex into CEL
+ cel_regex = []
+ value = value[2:-1]
+
+                    # for example: value: r"error\\.[a-z]+\\..*" is too hard to convert to CEL,
+                    # so we'll just hit the last else and raise an exception saying it's deprecated
+ if "]^" in value or "]+" in value:
+ raise Exception(
+ f"Unsupported regex: {value}, move to new CEL filters"
+ )
+ elif "|" in value:
+ value_split = value.split("|")
+ for value_ in value_split:
+ value_ = value_.lstrip("(").rstrip(")").strip()
+ if key == "source":
+ if exclude:
+ cel_regex.append(f'!{key}.contains("{value_}")')
+ else:
+ cel_regex.append(f'{key}.contains("{value_}")')
+ else:
+ if exclude:
+ cel_regex.append(f'{key} != "{value_}"')
+ else:
+ cel_regex.append(f'{key} == "{value_}"')
+ elif value == ".*":
+ cel_regex.append(f"has({key})")
+ elif value == "^$":
+ # empty string
+ if exclude:
+ cel_regex.append(f'{key} != ""')
+ else:
+ cel_regex.append(f'{key} == ""')
+ elif value.startswith(".*") and value.endswith(".*"):
+ # for example: r".*prometheus.*"
+ if exclude:
+ cel_regex.append(f'!{key}.contains("{value[2:-2]}")')
+ else:
+ cel_regex.append(f'{key}.contains("{value[2:-2]}")')
+ elif value.endswith(".*"):
+ # for example: r"2025-01-30T09:.*"
+ if exclude:
+ cel_regex.append(f'!{key}.contains("{value[:-2]}")')
+ else:
+ cel_regex.append(f'{key}.contains("{value[:-2]}")')
+ else:
+ raise Exception(
+ f"Unsupported regex: {value}, move to new CEL filters"
+ )
+ # if we're talking about excluded, we need to do AND between the regexes
+ # for example:
+ # filters: [{"key": "source", "value": 'r"prometheus|grafana"', "exclude": true}]
+ # cel: !source.contains("prometheus") && !source.contains("grafana")
+ # otherwise, we do OR between the regexes
+ # for example:
+ # filters: [{"key": "source", "value": 'r"prometheus|grafana"'}]
+ # cel: source.contains("prometheus") || source.contains("grafana")
+ if exclude:
+ cel_filters.append(f"({' && '.join(cel_regex)})")
+ else:
+ cel_filters.append(f"({' || '.join(cel_regex)})")
+ else:
+ if key == "source":
+ # handle source, which is a list of sources
+ if exclude:
+ cel_filters.append(f'!{key}.contains("{value}")')
+ else:
+ cel_filters.append(f'{key}.contains("{value}")')
+ else:
+ if exclude:
+ cel_filters.append(f'{key} != "{value}"')
+ else:
+ cel_filters.append(f'{key} == "{value}"')
+
+ self.logger.info(
+ "Converted filters to CEL",
+ extra={"cel_filters": cel_filters, "original_filters": filters},
+ )
+
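+            # All filters must match: the per-filter expressions are joined with " && "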
+ return " && ".join(cel_filters)
+ except Exception as e:
+ self.logger.exception(
+ "Error converting filters to CEL", extra={"exception": e}
+ )
+ raise
+
def insert_events(self, tenant_id, events: typing.List[AlertDto | IncidentDto]):
for event in events:
- self.logger.info("Getting all workflows")
- all_workflow_models = self.workflow_store.get_all_workflows(tenant_id)
+ self.logger.info("Getting all workflows", extra={"tenant_id": tenant_id})
+ all_workflow_models = self.workflow_store.get_all_workflows(
+ tenant_id, exclude_disabled=True
+ )
self.logger.info(
"Got all workflows",
extra={
"num_of_workflows": len(all_workflow_models),
+ "tenant_id": tenant_id,
},
)
for workflow_model in all_workflow_models:
-
- if workflow_model.is_disabled:
- self.logger.debug(
- f"Skipping the workflow: id={workflow_model.id}, name={workflow_model.name}, "
- f"tenant_id={workflow_model.tenant_id} - Workflow is disabled."
- )
- continue
workflow = self._get_workflow_from_store(tenant_id, workflow_model)
- # FIX: this will fail silently if error in the workflow provider configuration
+
if workflow is None:
+                    # The error is already handled and logged in _get_workflow_from_store, so just continue.
continue
for trigger in workflow.workflow_triggers:
- # TODO: handle it better
+ # If the trigger is not an alert, it's not relevant for this event.
if not trigger.get("type") == "alert":
- self.logger.debug("trigger type is not alert, skipping")
- continue
- should_run = True
- # apply filters
- for filter in trigger.get("filters", []):
- # TODO: more sophisticated filtering/attributes/nested, etc
- self.logger.debug(f"Running filter {filter}")
- filter_key = filter.get("key")
- filter_val = filter.get("value")
- filter_exclude = filter.get("exclude", False)
- event_val = self._get_event_value(event, filter_key)
self.logger.debug(
- "Filtering",
+ "Trigger type is not alert, skipping",
extra={
- "filter_key": filter_key,
- "filter_val": filter_val,
- "event": event,
+ "trigger": trigger,
+ "workflow_id": workflow_model.id,
+ "tenant_id": tenant_id,
+ },
+ )
+ continue
+
+ if "filters" not in trigger and "cel" not in trigger:
+ self.logger.warning(
+ "Trigger is missing filters or cel",
+ extra={
+ "trigger": trigger,
+ "workflow_id": workflow_model.id,
+ "tenant_id": tenant_id,
},
)
- if event_val is None:
- self.logger.debug(
- "Failed to run filter, skipping the event. This may happen if the event does not have the filter_key as attribute.",
+ should_run = True
+ else:
+
+                        # By default, the workflow should not run; it only runs if the CEL expression evaluates to true.
+ should_run = False
+
+                    # backward compatibility for filters. should be removed in the future
+                    # if both filters and cel are set, we override the cel with the converted filters.
+ if "filters" in trigger:
+ # this is old format, so let's convert it to CEL
+ trigger["cel"] = self._convert_filters_to_cel(
+ trigger["filters"]
+ )
+
+ compiled_ast = self.cel_environment.compile(trigger["cel"])
+ program = self.cel_environment.program(compiled_ast)
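+                        # json_to_cel exposes the event's fields as top-level CEL variables, so the expression
+                        # (e.g. 'name.contains("abc") && severity == "critical"') can reference alert attributes directly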
+ activation = celpy.json_to_cel(event.dict())
+ try:
+ should_run = program.evaluate(activation)
+ except celpy.evaluation.CELEvalError as e:
+ self.logger.exception(
+ "Error evaluating CEL for event in insert_events",
extra={
- "tenant_id": tenant_id,
- "filter_key": filter_key,
- "filter_val": filter_val,
+ "exception": e,
+ "event": event,
+ "trigger": trigger,
"workflow_id": workflow_model.id,
+ "tenant_id": tenant_id,
+ "cel": trigger["cel"],
+ "deprecated_filters": trigger.get("filters"),
},
)
- should_run = False
continue
- # if its list, check if the filter is in the list
- if isinstance(event_val, list):
- for val in event_val:
- # if one filter applies, it should run
- if self._apply_filter(filter_val, val):
- self.logger.debug(
- "Filter matched, running",
- extra={
- "filter_key": filter_key,
- "filter_val": filter_val,
- "event": event,
- },
- )
- # depends on the exclude flag
- if filter_exclude:
- should_run = False
- else:
- should_run = True
- break
- self.logger.debug(
- "Filter didn't match, skipping",
- extra={
- "filter_key": filter_key,
- "filter_val": filter_val,
- "event": event,
- },
- )
- if not filter_exclude:
- should_run = False
- # elif the filter is string/int/float, compare them:
- elif type(event_val) in [int, str, float, bool]:
- filter_applied = self._apply_filter(filter_val, event_val)
- if not filter_applied and not filter_exclude:
- self.logger.debug(
- "Filter didn't match, skipping",
- extra={
- "filter_key": filter_key,
- "filter_val": filter_val,
- "event": event,
- },
- )
- should_run = False
- break
- # if the filter applies but its exclusion filter, don't run
- elif filter_applied and filter_exclude:
- self.logger.debug(
- "Filter matched but it's exclusion filter, skipping",
- extra={
- "filter_key": filter_key,
- "filter_val": filter_val,
- "event": event,
- },
- )
- should_run = False
- # other types currently does not supported
- else:
- self.logger.warning(
- "Could not run the filter on unsupported type, skipping the event. Probably misconfigured workflow."
- )
- should_run = False
- break
- if not should_run:
- self.logger.debug("Skipping the workflow")
+ if bool(should_run) is False:
+ self.logger.debug(
+ "Workflow should not run, skipping",
+ extra={
+ "triggers": workflow.workflow_triggers,
+ "workflow_id": workflow_model.id,
+ "tenant_id": tenant_id,
+ "cel": trigger["cel"],
+ "deprecated_filters": trigger.get("filters"),
+ },
+ )
continue
+
# enrich the alert with more data
self.logger.info("Found a workflow to run")
event.trigger = "alert"
diff --git a/keep/workflowmanager/workflowstore.py b/keep/workflowmanager/workflowstore.py
index d9a2e476e2..50696d117b 100644
--- a/keep/workflowmanager/workflowstore.py
+++ b/keep/workflowmanager/workflowstore.py
@@ -157,8 +157,12 @@ def get_workflow_from_dict(self, tenant_id: str, workflow_dict: dict) -> Workflo
detail="Unable to parse workflow from dict",
)
- def get_all_workflows(self, tenant_id: str) -> list[WorkflowModel]:
- return list(get_all_workflows(tenant_id))
+ def get_all_workflows(
+ self, tenant_id: str, exclude_disabled: bool = False
+ ) -> list[WorkflowModel]:
+ # list all tenant's workflows
+ workflows = get_all_workflows(tenant_id, exclude_disabled)
+ return workflows
def get_all_workflows_with_last_execution(
self,
diff --git a/pyproject.toml b/pyproject.toml
index 9d2c6ac3d5..fa4fc0e360 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "keep"
-version = "0.42.5"
+version = "0.43.0"
description = "Alerting. for developers, by developers."
authors = ["Keep Alerting LTD"]
packages = [{include = "keep"}]
diff --git a/tests/test_workflow_filters.py b/tests/test_workflow_filters.py
index 9c8cd94ac9..bc92d37f09 100644
--- a/tests/test_workflow_filters.py
+++ b/tests/test_workflow_filters.py
@@ -1,3 +1,5 @@
+import pytest
+
from keep.api.core.dependencies import SINGLE_TENANT_UUID
from keep.api.models.alert import AlertDto
from keep.api.models.db.workflow import Workflow as WorkflowDB
@@ -604,22 +606,10 @@ def test_nested_regex_patterns(db_session):
lastReceived="2025-01-30T09:19:02.519Z",
),
]
-
- workflow_manager.insert_events(
- SINGLE_TENANT_UUID, matching_alerts + non_matching_alerts
- )
- assert len(workflow_manager.scheduler.workflows_to_run) == 2
-
- triggered_alerts = [
- w.get("event") for w in workflow_manager.scheduler.workflows_to_run
- ]
- assert any(
- a.id == "alert-1" and a.name == "error.database.critical"
- for a in triggered_alerts
- )
- assert any(
- a.id == "alert-2" and a.name == "error.api.warning" for a in triggered_alerts
- )
+ with pytest.raises(Exception, match="Unsupported regex"):
+ workflow_manager.insert_events(
+ SINGLE_TENANT_UUID, matching_alerts + non_matching_alerts
+ )
def test_time_based_filters(db_session):
@@ -898,16 +888,12 @@ def test_regex_exclusion_patterns(db_session):
),
]
- workflow_manager.insert_events(
- SINGLE_TENANT_UUID, matching_alerts + excluded_alerts
- )
- assert len(workflow_manager.scheduler.workflows_to_run) == 2
-
- triggered_alerts = [
- w.get("event") for w in workflow_manager.scheduler.workflows_to_run
- ]
- assert any(a.id == "alert-1" and "database" not in a.name for a in triggered_alerts)
- assert any(a.id == "alert-2" and "database" not in a.name for a in triggered_alerts)
+ # Deprecated complex regex should raise an exception.
+ # We encourage users to use CEL instead.
+ with pytest.raises(Exception, match="Unsupported regex"):
+ workflow_manager.insert_events(
+ SINGLE_TENANT_UUID, matching_alerts + excluded_alerts
+ )
def test_exclusion_with_source_list(db_session):
@@ -992,3 +978,177 @@ def test_exclusion_with_source_list(db_session):
]
assert any(a.id == "alert-1" and a.source == ["grafana"] for a in triggered_alerts)
assert any(a.id == "alert-2" and a.source == ["custom"] for a in triggered_alerts)
+
+
+def test_regex_dotstar_substring_match(db_session):
+ """Test that r".*abc.*" matches any alert name containing 'abc' anywhere in the string."""
+ workflow_manager = WorkflowManager()
+ workflow_definition = """workflow:
+id: dotstar-substring-check
+triggers:
+- type: alert
+ filters:
+ - key: name
+ value: r".*abc.*"
+ - key: severity
+ value: critical
+"""
+ workflow = WorkflowDB(
+ id="dotstar-substring-check",
+ name="dotstar-substring-check",
+ tenant_id=SINGLE_TENANT_UUID,
+ description="Match alerts where name contains 'abc'",
+ created_by="test@keephq.dev",
+ interval=0,
+ workflow_raw=workflow_definition,
+ )
+ db_session.add(workflow)
+ db_session.commit()
+
+ matching_alerts = [
+ AlertDto(
+ id="alert-1",
+ source=["grafana"],
+ name="abc",
+ status="firing",
+ severity="critical",
+ fingerprint="fp1",
+ lastReceived="2025-01-30T09:19:02.519Z",
+ ),
+ AlertDto(
+ id="alert-2",
+ source=["grafana"],
+ name="prefix-abc-suffix",
+ status="firing",
+ severity="critical",
+ fingerprint="fp2",
+ lastReceived="2025-01-30T09:19:02.519Z",
+ ),
+ AlertDto(
+ id="alert-3",
+ source=["grafana"],
+ name="somethingabc",
+ status="firing",
+ severity="critical",
+ fingerprint="fp3",
+ lastReceived="2025-01-30T09:19:02.519Z",
+ ),
+ AlertDto(
+ id="alert-4",
+ source=["grafana"],
+ name="abc-something",
+ status="firing",
+ severity="critical",
+ fingerprint="fp4",
+ lastReceived="2025-01-30T09:19:02.519Z",
+ ),
+ ]
+
+ non_matching_alerts = [
+ AlertDto(
+ id="alert-5",
+ source=["grafana"],
+ name="def",
+ status="firing",
+ severity="critical",
+ fingerprint="fp5",
+ lastReceived="2025-01-30T09:19:02.519Z",
+ ),
+ AlertDto(
+ id="alert-6",
+ source=["grafana"],
+ name="prefix-abc-suffix",
+ status="firing",
+ severity="warning",
+ fingerprint="fp6",
+ lastReceived="2025-01-30T09:19:02.519Z",
+ ),
+ ]
+
+ workflow_manager.insert_events(
+ SINGLE_TENANT_UUID, matching_alerts + non_matching_alerts
+ )
+ assert len(workflow_manager.scheduler.workflows_to_run) == 4
+ triggered_alerts = [
+ w.get("event") for w in workflow_manager.scheduler.workflows_to_run
+ ]
+ assert all("abc" in a.name for a in triggered_alerts)
+ assert all(a.severity == "critical" for a in triggered_alerts)
+ assert not any(a.id == "alert-5" for a in triggered_alerts)
+ assert not any(a.id == "alert-6" for a in triggered_alerts)
+
+
+def test_cel_expression_filter(db_session):
+ """Test CEL expression filter for alert name and severity."""
+ workflow_manager = WorkflowManager()
+ workflow_definition = """workflow:
+id: cel-expression-check
+triggers:
+- type: alert
+ cel: 'name.contains("abc") && severity == "critical"'
+"""
+ workflow = WorkflowDB(
+ id="cel-expression-check",
+ name="cel-expression-check",
+ tenant_id=SINGLE_TENANT_UUID,
+ description="Match alerts using CEL expression",
+ created_by="test@keephq.dev",
+ interval=0,
+ workflow_raw=workflow_definition,
+ )
+ db_session.add(workflow)
+ db_session.commit()
+
+ matching_alerts = [
+ AlertDto(
+ id="alert-1",
+ source=["grafana"],
+ name="abc",
+ status="firing",
+ severity="critical",
+ fingerprint="fp1",
+ lastReceived="2025-01-30T09:19:02.519Z",
+ ),
+ AlertDto(
+ id="alert-2",
+ source=["grafana"],
+ name="prefix-abc-suffix",
+ status="firing",
+ severity="critical",
+ fingerprint="fp2",
+ lastReceived="2025-01-30T09:19:02.519Z",
+ ),
+ ]
+
+ non_matching_alerts = [
+ AlertDto(
+ id="alert-3",
+ source=["grafana"],
+ name="def",
+ status="firing",
+ severity="critical",
+ fingerprint="fp3",
+ lastReceived="2025-01-30T09:19:02.519Z",
+ ),
+ AlertDto(
+ id="alert-4",
+ source=["grafana"],
+ name="abc",
+ status="firing",
+ severity="warning",
+ fingerprint="fp4",
+ lastReceived="2025-01-30T09:19:02.519Z",
+ ),
+ ]
+
+ workflow_manager.insert_events(
+ SINGLE_TENANT_UUID, matching_alerts + non_matching_alerts
+ )
+ assert len(workflow_manager.scheduler.workflows_to_run) == 2
+ triggered_alerts = [
+ w.get("event") for w in workflow_manager.scheduler.workflows_to_run
+ ]
+ assert all("abc" in a.name for a in triggered_alerts)
+ assert all(a.severity == "critical" for a in triggered_alerts)
+ assert not any(a.id == "alert-3" for a in triggered_alerts)
+ assert not any(a.id == "alert-4" for a in triggered_alerts)