From 23bd7b5e29b78dc8fcbb507f7a83e018b32e571b Mon Sep 17 00:00:00 2001
From: GitHub Actions
Date: Mon, 3 Feb 2025 18:45:47 +0000
Subject: [PATCH 1/7] Release oncall Helm chart 1.14.3
---
helm/oncall/Chart.yaml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/helm/oncall/Chart.yaml b/helm/oncall/Chart.yaml
index 2f6a73219c..fc626a4973 100644
--- a/helm/oncall/Chart.yaml
+++ b/helm/oncall/Chart.yaml
@@ -2,8 +2,8 @@ apiVersion: v2
name: oncall
description: Developer-friendly incident response with brilliant Slack integration
type: application
-version: 1.14.1
-appVersion: v1.14.1
+version: 1.14.3
+appVersion: v1.14.3
dependencies:
- name: cert-manager
version: v1.8.0
From 226cae9afd4d0d27fb38ad162a26b7095ea0ab05 Mon Sep 17 00:00:00 2001
From: Michael Derynck
Date: Fri, 7 Feb 2025 12:34:10 -0700
Subject: [PATCH 2/7] chore: remove reference to recaptcha site (#5443)
---
engine/settings/base.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/engine/settings/base.py b/engine/settings/base.py
index d5be922b09..affb27fe8f 100644
--- a/engine/settings/base.py
+++ b/engine/settings/base.py
@@ -926,7 +926,7 @@ class BrokerTypes:
} # noqa
# RECAPTCHA_V3 settings
-RECAPTCHA_V3_SITE_KEY = os.environ.get("RECAPTCHA_SITE_KEY", default="6LeIPJ8kAAAAAJdUfjO3uUtQtVxsYf93y46mTec1")
+RECAPTCHA_V3_SITE_KEY = os.environ.get("RECAPTCHA_SITE_KEY", default=None)
RECAPTCHA_V3_SECRET_KEY = os.environ.get("RECAPTCHA_SECRET_KEY", default=None)
RECAPTCHA_V3_ENABLED = os.environ.get("RECAPTCHA_ENABLED", default=False)
RECAPTCHA_V3_HOSTNAME_VALIDATION = os.environ.get("RECAPTCHA_HOSTNAME_VALIDATION", default=False)
From 615e1521ce9ad71cacfaa95833984da2866e744a Mon Sep 17 00:00:00 2001
From: Vadim Stepanov
Date: Wed, 12 Feb 2025 17:48:37 +0000
Subject: [PATCH 3/7] Use a different GH secret to sign plugin (#5447)
related to https://github.com/grafana/irm/issues/455, the secret was
populated as part of
https://github.com/grafana/deployment_tools/pull/221022
---
.github/workflows/on-release-published.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/on-release-published.yml b/.github/workflows/on-release-published.yml
index 75661ba664..0a1ea456f6 100644
--- a/.github/workflows/on-release-published.yml
+++ b/.github/workflows/on-release-published.yml
@@ -36,7 +36,7 @@ jobs:
uses: grafana/shared-workflows/actions/get-vault-secrets@main
with:
repo_secrets: |
- GRAFANA_ACCESS_POLICY_TOKEN=github_actions:cloud-access-policy-token
+ GRAFANA_ACCESS_POLICY_TOKEN=grafana_cloud_access_policy_token:value
GCS_PLUGIN_PUBLISHER_SERVICE_ACCOUNT_JSON=github_actions:gcs-plugin-publisher
- name: Build, sign, and package plugin
id: build-sign-and-package-plugin
From cdb2946b7018e628c53dfd80b69ae822fbc2ec57 Mon Sep 17 00:00:00 2001
From: Michael Derynck
Date: Fri, 14 Feb 2025 07:30:08 -0700
Subject: [PATCH 4/7] fix: disable recaptcha when site key is not set (#5451)
# What this PR does
Although recaptcha verification is disabled by default on the backend
for OSS installs, the plugin was still making use of the site key and
trying to load recaptcha. As that recaptcha site was removed this no
longer works. The updated plugin code will skip recaptcha verification
if it does not have a site key set.
## Which issue(s) this PR closes
Related to #5449
## Checklist
- [ ] Unit, integration, and e2e (if applicable) tests updated
- [x] Documentation added (or `pr:no public docs` PR label added if not
required)
- [x] Added the relevant release notes label (see labels prefixed w/
`release:`). These labels dictate how your PR will
show up in the autogenerated release notes.
---------
Co-authored-by: GitHub Actions
---
.../PhoneVerification/PhoneVerification.tsx | 22 +++++++++++++------
1 file changed, 15 insertions(+), 7 deletions(-)
diff --git a/grafana-plugin/src/containers/UserSettings/parts/tabs/PhoneVerification/PhoneVerification.tsx b/grafana-plugin/src/containers/UserSettings/parts/tabs/PhoneVerification/PhoneVerification.tsx
index 87dcff14f4..3e0fbd2441 100644
--- a/grafana-plugin/src/containers/UserSettings/parts/tabs/PhoneVerification/PhoneVerification.tsx
+++ b/grafana-plugin/src/containers/UserSettings/parts/tabs/PhoneVerification/PhoneVerification.tsx
@@ -108,10 +108,7 @@ export const PhoneVerification = observer((props: PhoneVerificationProps) => {
await UserHelper.verifyPhone(userPk, code);
userStore.fetchItemById({ userPk });
} else {
- window.grecaptcha.ready(async function () {
- const token = await window.grecaptcha.execute(rootStore.recaptchaSiteKey, {
- action: 'mobile_verification_code',
- });
+ async function start_verification(token) {
await userStore.updateUser({
pk: userPk,
email: user.email,
@@ -121,20 +118,31 @@ export const PhoneVerification = observer((props: PhoneVerificationProps) => {
switch (type) {
case 'verification_call':
await UserHelper.fetchVerificationCall(userPk, token);
- setState({ isPhoneCallInitiated: true });
+ setState({isPhoneCallInitiated: true});
if (codeInputRef.current) {
codeInputRef.current.focus();
}
break;
case 'verification_sms':
await UserHelper.fetchVerificationCode(userPk, token);
- setState({ isCodeSent: true });
+ setState({isCodeSent: true});
if (codeInputRef.current) {
codeInputRef.current.focus();
}
break;
}
- });
+ }
+
+ if (!rootStore.recaptchaSiteKey?.trim()) {
+ await start_verification(null)
+ } else {
+ window.grecaptcha.ready(async function () {
+ const token = await window.grecaptcha.execute(rootStore.recaptchaSiteKey, {
+ action: 'mobile_verification_code',
+ });
+ await start_verification(token);
+ });
+ }
}
},
[
From 2e3fbf3bcba9c4b37543f0180d02cc31f27b7a9a Mon Sep 17 00:00:00 2001
From: GitHub Actions
Date: Fri, 14 Feb 2025 16:13:13 +0000
Subject: [PATCH 5/7] Release oncall Helm chart 1.14.4
---
helm/oncall/Chart.yaml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/helm/oncall/Chart.yaml b/helm/oncall/Chart.yaml
index fc626a4973..dc0a6dcb64 100644
--- a/helm/oncall/Chart.yaml
+++ b/helm/oncall/Chart.yaml
@@ -2,8 +2,8 @@ apiVersion: v2
name: oncall
description: Developer-friendly incident response with brilliant Slack integration
type: application
-version: 1.14.3
-appVersion: v1.14.3
+version: 1.14.4
+appVersion: v1.14.4
dependencies:
- name: cert-manager
version: v1.8.0
From aaae31a23256cbdde2397ff5e79d0f92e7e60dff Mon Sep 17 00:00:00 2001
From: Joey Orlando
Date: Tue, 18 Feb 2025 08:12:05 -0500
Subject: [PATCH 6/7] PagerDuty Migrator: Add filtering capabilities and fix
user notification rule preservation (#5454)
This PR adds filtering capabilities to the PagerDuty migrator tool and
fixes user notification rule preservation behavior.
Closes https://github.com/grafana/irm/issues/612
## Changes
### 1. Added Resource Filtering
Added the ability to filter PagerDuty resources during migration based
on:
- Team membership
- User association
- Name patterns (using regex)
New environment variables for filtering:
```
PAGERDUTY_FILTER_TEAM
PAGERDUTY_FILTER_USERS
PAGERDUTY_FILTER_SCHEDULE_REGEX
PAGERDUTY_FILTER_ESCALATION_POLICY_REGEX
PAGERDUTY_FILTER_INTEGRATION_REGEX
```
#### Example Usage
Filter by team:
```bash
docker run --rm \
-e MIGRATING_FROM="pagerduty" \
-e MODE="plan" \
-e ONCALL_API_URL="" \
-e ONCALL_API_TOKEN="" \
-e PAGERDUTY_API_TOKEN="" \
-e PAGERDUTY_FILTER_TEAM="SRE Team" \
oncall-migrator
```
Filter by specific users:
```bash
docker run --rm \
-e MIGRATING_FROM="pagerduty" \
-e MODE="plan" \
-e ONCALL_API_URL="" \
-e ONCALL_API_TOKEN="" \
-e PAGERDUTY_API_TOKEN="" \
-e PAGERDUTY_FILTER_USERS="P123ABC,P456DEF" \
oncall-migrator
```
Filter schedules by name pattern:
```bash
docker run --rm \
-e MIGRATING_FROM="pagerduty" \
-e MODE="plan" \
-e ONCALL_API_URL="" \
-e ONCALL_API_TOKEN="" \
-e PAGERDUTY_API_TOKEN="" \
-e PAGERDUTY_FILTER_SCHEDULE_REGEX="^(Primary|Secondary)" \
oncall-migrator
```
Filter escalation policies by name pattern:
```bash
docker run --rm \
-e MIGRATING_FROM="pagerduty" \
-e MODE="plan" \
-e ONCALL_API_URL="" \
-e ONCALL_API_TOKEN="" \
-e PAGERDUTY_API_TOKEN="" \
-e PAGERDUTY_FILTER_ESCALATION_POLICY_REGEX="^Prod" \
oncall-migrator
```
Filter integrations by name pattern:
```bash
docker run --rm \
-e MIGRATING_FROM="pagerduty" \
-e MODE="plan" \
-e ONCALL_API_URL="" \
-e ONCALL_API_TOKEN="" \
-e PAGERDUTY_API_TOKEN="" \
-e PAGERDUTY_FILTER_INTEGRATION_REGEX="Prometheus$" \
oncall-migrator
```
### 2. Fixed User Notification Rule Preservation
Introduces a `PRESERVE_EXISTING_USER_NOTIFICATION_RULES` config (default
of `true`). The migrator now:
- does not delete user notification rules in Grafana OnCall, if the
Grafana user already has some defined, AND
`PRESERVE_EXISTING_USER_NOTIFICATION_RULES` is True
- if the Grafana user has no personal notification rules defined in
OnCall, we will create them
- deletes existing user notification rules, and creates new ones, in
Grafana OnCall, if `PRESERVE_EXISTING_USER_NOTIFICATION_RULES` is False
- basically makes sure that the state in Grafana OnCall matches the
_latest_ state in PagerDuty
- Improves logging to clearly indicate when rules are being preserved
#### Example Usage
Preserve existing notification policies (default):
```bash
docker run --rm \
-e MIGRATING_FROM="pagerduty" \
-e MODE="migrate" \
-e ONCALL_API_URL="" \
-e ONCALL_API_TOKEN="" \
-e PAGERDUTY_API_TOKEN="" \
oncall-migrator
```
Replace existing notification policies:
```bash
docker run --rm \
-e MIGRATING_FROM="pagerduty" \
-e MODE="migrate" \
-e ONCALL_API_URL="" \
-e ONCALL_API_TOKEN="" \
-e PAGERDUTY_API_TOKEN="" \
-e PRESERVE_EXISTING_USER_NOTIFICATION_RULES="false" \
oncall-migrator
```
### 3. Improved Testing
Added comprehensive test coverage for filtering functionality and
updated user notification rule preservation tests
## Testing Done
- Manual testing of filtering capabilities in both plan and migrate
modes
- Verified notification policy preservation behavior
---
tools/migrators/README.md | 82 +++--
tools/migrators/lib/pagerduty/config.py | 23 ++
tools/migrators/lib/pagerduty/migrate.py | 157 +++++++++-
tools/migrators/lib/pagerduty/report.py | 19 +-
.../resources/escalation_policies.py | 4 +
.../pagerduty/resources/notification_rules.py | 12 +-
.../lib/tests/pagerduty/test_matching.py | 6 +-
.../lib/tests/pagerduty/test_migrate.py | 283 +++++++++++++++++-
.../test_migrate_notification_rules.py | 260 ++++++++++++----
9 files changed, 745 insertions(+), 101 deletions(-)
diff --git a/tools/migrators/README.md b/tools/migrators/README.md
index 841f80eb15..dbf0d5e81c 100644
--- a/tools/migrators/README.md
+++ b/tools/migrators/README.md
@@ -13,8 +13,8 @@ Currently the migration tool supports migrating from:
2. Build the docker image: `docker build -t oncall-migrator .`
3. Obtain a Grafana OnCall API token and API URL on the "Settings" page of your Grafana OnCall instance
4. Depending on which tool you are migrating from, see more specific instructions there:
- - [PagerDuty](#prerequisites)
- - [Splunk OnCall](#prerequisites-1)
+ - [PagerDuty](#prerequisites)
+ - [Splunk OnCall](#prerequisites-1)
5. Run a [migration plan](#migration-plan)
6. If you are pleased with the results of the migration plan, run the tool in [migrate mode](#migration)
@@ -47,12 +47,12 @@ docker run --rm \
oncall-migrator
```
-Please read the generated report carefully since depending on the content of the report, some resources
+Please read the generated report carefully since depending on the content of the report, some resources
could be not migrated and some existing Grafana OnCall resources could be deleted.
```text
User notification rules report:
- ✅ John Doe (john.doe@example.com) (existing notification rules will be deleted)
+ ✅ John Doe (john.doe@example.com) (existing notification rules will be preserved)
❌ Ben Thompson (ben@example.com) — no Grafana OnCall user found with this email
Schedule report:
@@ -223,18 +223,24 @@ oncall-migrator
Configuration is done via environment variables passed to the docker container.
-| Name | Description | Type | Default |
-| --------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ----------------------------------- | ------- |
-| `MIGRATING_FROM` | Set to `pagerduty` | String | N/A |
-| `PAGERDUTY_API_TOKEN` | PagerDuty API **user token**. To create a token, refer to [PagerDuty docs](https://support.pagerduty.com/docs/api-access-keys#generate-a-user-token-rest-api-key). | String | N/A |
-| `ONCALL_API_URL` | Grafana OnCall API URL. This can be found on the "Settings" page of your Grafana OnCall instance. | String | N/A |
-| `ONCALL_API_TOKEN` | Grafana OnCall API Token. To create a token, navigate to the "Settings" page of your Grafana OnCall instance. | String | N/A |
-| `MODE` | Migration mode (plan vs actual migration). | String (choices: `plan`, `migrate`) | `plan` |
-| `SCHEDULE_MIGRATION_MODE` | Determines how on-call schedules are migrated. | String (choices: `ical`, `web`) | `ical` |
-| `UNSUPPORTED_INTEGRATION_TO_WEBHOOKS` | When set to `true`, integrations with unsupported type will be migrated to Grafana OnCall integrations with type "webhook". When set to `false`, integrations with unsupported type won't be migrated. | Boolean | `false` |
-| `EXPERIMENTAL_MIGRATE_EVENT_RULES` | Migrate global event rulesets to Grafana OnCall integrations. | Boolean | `false` |
-| `EXPERIMENTAL_MIGRATE_EVENT_RULES_LONG_NAMES` | Include service & integrations names from PD in migrated integrations (only effective when `EXPERIMENTAL_MIGRATE_EVENT_RULES` is `true`). | Boolean | `false` |
-| `MIGRATE_USERS` | If `false`, will allow you to important all objects, while ignoring user references in schedules and escalation policies. In addition, if `false`, will also skip importing User notification rules. This may be helpful in cases where you are unable to import your list of Grafana users, but would like to experiment with OnCall using your existing PagerDuty setup as a starting point for experimentation. | Boolean | `true` |
+| Name | Description | Type | Default |
+| --------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ----------------------------------- | ------- |
+| `MIGRATING_FROM` | Set to `pagerduty` | String | N/A |
+| `PAGERDUTY_API_TOKEN` | PagerDuty API **user token**. To create a token, refer to [PagerDuty docs](https://support.pagerduty.com/docs/api-access-keys#generate-a-user-token-rest-api-key). | String | N/A |
+| `ONCALL_API_URL` | Grafana OnCall API URL. This can be found on the "Settings" page of your Grafana OnCall instance. | String | N/A |
+| `ONCALL_API_TOKEN` | Grafana OnCall API Token. To create a token, navigate to the "Settings" page of your Grafana OnCall instance. | String | N/A |
+| `MODE` | Migration mode (plan vs actual migration). | String (choices: `plan`, `migrate`) | `plan` |
+| `SCHEDULE_MIGRATION_MODE` | Determines how on-call schedules are migrated. | String (choices: `ical`, `web`) | `ical` |
+| `UNSUPPORTED_INTEGRATION_TO_WEBHOOKS` | When set to `true`, integrations with unsupported type will be migrated to Grafana OnCall integrations with type "webhook". When set to `false`, integrations with unsupported type won't be migrated. | Boolean | `false` |
+| `EXPERIMENTAL_MIGRATE_EVENT_RULES` | Migrate global event rulesets to Grafana OnCall integrations. | Boolean | `false` |
+| `EXPERIMENTAL_MIGRATE_EVENT_RULES_LONG_NAMES` | Include service & integrations names from PD in migrated integrations (only effective when `EXPERIMENTAL_MIGRATE_EVENT_RULES` is `true`). | Boolean | `false` |
+| `MIGRATE_USERS` | If `false`, will allow you to import all objects, while ignoring user references in schedules and escalation policies. In addition, if `false`, will also skip importing User notification rules. This may be helpful in cases where you are unable to import your list of Grafana users, but would like to experiment with OnCall using your existing PagerDuty setup as a starting point for experimentation. | Boolean | `true` |
+| `PAGERDUTY_FILTER_TEAM` | Filter resources by team name. Only resources associated with this team will be migrated. | String | N/A |
+| `PAGERDUTY_FILTER_USERS` | Filter resources by PagerDuty user IDs (comma-separated). Only resources associated with these users will be migrated. | String | N/A |
+| `PAGERDUTY_FILTER_SCHEDULE_REGEX` | Filter schedules by name using a regex pattern. Only schedules whose names match this pattern will be migrated. | String | N/A |
+| `PAGERDUTY_FILTER_ESCALATION_POLICY_REGEX` | Filter escalation policies by name using a regex pattern. Only policies whose names match this pattern will be migrated. | String | N/A |
+| `PAGERDUTY_FILTER_INTEGRATION_REGEX` | Filter integrations by name using a regex pattern. Only integrations whose names match this pattern will be migrated. | String | N/A |
+| `PRESERVE_EXISTING_USER_NOTIFICATION_RULES` | Whether to preserve existing notification rules when migrating users | Boolean | `true` |
### Resources
@@ -246,7 +252,11 @@ taken into account and will be migrated to both default and important notificati
for each user. Note that delays between notification rules may be slightly different in Grafana OnCall,
see [Limitations](#limitations) for more info.
-When running the migration, existing notification rules in Grafana OnCall will be deleted for every affected user.
+By default (when `PRESERVE_EXISTING_USER_NOTIFICATION_RULES` is `true`), existing notification rules in Grafana OnCall will
+be preserved and PagerDuty rules won't be imported for users who already have notification rules configured in Grafana OnCall.
+
+If you want to replace existing notification rules with ones from PagerDuty, set `PRESERVE_EXISTING_USER_NOTIFICATION_RULES`
+to `false`.
See [Migrating Users](#migrating-users) for some more information on how users are migrated.
@@ -290,6 +300,20 @@ For every service in PD, the tool will migrate all integrations to Grafana OnCal
Any services that reference escalation policies that cannot be migrated won't be migrated as well.
Any integrations with unsupported type won't be migrated unless `UNSUPPORTED_INTEGRATION_TO_WEBHOOKS` is set to `true`.
+The following integration types are supported:
+
+- Datadog
+- Pingdom
+- Prometheus
+- PRTG
+- Stackdriver
+- UptimeRobot
+- New Relic
+- Zabbix Webhook (for 5.0 and 5.2)
+- Elastic Alerts
+- Firebase
+- Amazon CloudWatch (maps to Amazon SNS integration in Grafana OnCall)
+
#### Event rules (global event rulesets)
The tool is capable of migrating global event rulesets from PagerDuty to Grafana OnCall integrations. This feature is
@@ -319,7 +343,7 @@ Resources that can be migrated using this tool:
- Escalation Policies
- On-Call Schedules (including Rotations + Scheduled Overrides)
-- Teams + team memberships
+
- User Paging Policies
### Limitations
@@ -337,14 +361,14 @@ Resources that can be migrated using this tool:
Configuration is done via environment variables passed to the docker container.
-| Name | Description | Type | Default |
-| --------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ----------------------------------- | ------- |
-| `MIGRATING_FROM` | Set to `splunk` | String | N/A |
-| `SPLUNK_API_KEY` | Splunk API **key**. To create an API Key, refer to [Splunk OnCall docs](https://help.victorops.com/knowledge-base/api/#:~:text=currently%20in%20place.-,API%20Configuration%20in%20Splunk%20On%2DCall,-To%20access%20the). | String | N/A |
-| `SPLUNK_API_ID` | Splunk API **ID**. To retrieve this ID, refer to [Splunk OnCall docs](https://help.victorops.com/knowledge-base/api/#:~:text=currently%20in%20place.-,API%20Configuration%20in%20Splunk%20On%2DCall,-To%20access%20the). | String | N/A |
-| `ONCALL_API_URL` | Grafana OnCall API URL. This can be found on the "Settings" page of your Grafana OnCall instance. | String | N/A |
-| `ONCALL_API_TOKEN` | Grafana OnCall API Token. To create a token, navigate to the "Settings" page of your Grafana OnCall instance. | String | N/A |
-| `MODE` | Migration mode (plan vs actual migration). | String (choices: `plan`, `migrate`) | `plan` |
+| Name | Description | Type | Default |
+| ------------------ | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------- | ------- |
+| `MIGRATING_FROM` | Set to `splunk` | String | N/A |
+| `SPLUNK_API_KEY` | Splunk API **key**. To create an API Key, refer to [Splunk OnCall docs](https://help.victorops.com/knowledge-base/api/#:~:text=currently%20in%20place.-,API%20Configuration%20in%20Splunk%20On%2DCall,-To%20access%20the). | String | N/A |
+| `SPLUNK_API_ID` | Splunk API **ID**. To retrieve this ID, refer to [Splunk OnCall docs](https://help.victorops.com/knowledge-base/api/#:~:text=currently%20in%20place.-,API%20Configuration%20in%20Splunk%20On%2DCall,-To%20access%20the). | String | N/A |
+| `ONCALL_API_URL` | Grafana OnCall API URL. This can be found on the "Settings" page of your Grafana OnCall instance. | String | N/A |
+| `ONCALL_API_TOKEN` | Grafana OnCall API Token. To create a token, navigate to the "Settings" page of your Grafana OnCall instance. | String | N/A |
+| `MODE` | Migration mode (plan vs actual migration). | String (choices: `plan`, `migrate`) | `plan` |
### Resources
@@ -359,7 +383,7 @@ unmatched users or schedules that cannot be migrated won't be migrated as well.
##### Caveats
- delays between escalation steps may be slightly different in Grafana OnCall, see [Limitations](#limitations-1) for
-more info.
+ more info.
- the following Splunk OnCall escalation step types are not supported and will not be migrated:
- "Notify the next user(s) in the current on-duty shift"
- "Notify the previous user(s) in the current on-duty shift"
@@ -391,9 +415,9 @@ See [Migrating Users](#migrating-users) for some more information on how users a
##### Caveats
- The WhatsApp escalation type is not supported and will not be migrated to the Grafana OnCall
-user's personal notification policy
+ user's personal notification policy
- Note that delays between escalation steps may be slightly different in Grafana OnCall,
-see [Limitations](#limitations-1) for more info.
+ see [Limitations](#limitations-1) for more info.
## Migrating Users
diff --git a/tools/migrators/lib/pagerduty/config.py b/tools/migrators/lib/pagerduty/config.py
index eabd0c4ae3..6e063d93c7 100644
--- a/tools/migrators/lib/pagerduty/config.py
+++ b/tools/migrators/lib/pagerduty/config.py
@@ -20,6 +20,7 @@
"Zabbix Webhook (for 5.0 and 5.2)": "zabbix",
"Elastic Alerts": "elastalert",
"Firebase": "fabric",
+ "Amazon CloudWatch": "amazon_sns",
}
# Experimental feature to migrate PD rulesets to OnCall integrations
@@ -38,3 +39,25 @@
)
MIGRATE_USERS = os.getenv("MIGRATE_USERS", "true").lower() == "true"
+
+# Filter resources by team
+PAGERDUTY_FILTER_TEAM = os.getenv("PAGERDUTY_FILTER_TEAM")
+
+# Filter resources by users (comma-separated list of PagerDuty user IDs)
+PAGERDUTY_FILTER_USERS = [
+ user_id.strip()
+ for user_id in os.getenv("PAGERDUTY_FILTER_USERS", "").split(",")
+ if user_id.strip()
+]
+
+# Filter resources by name regex patterns
+PAGERDUTY_FILTER_SCHEDULE_REGEX = os.getenv("PAGERDUTY_FILTER_SCHEDULE_REGEX")
+PAGERDUTY_FILTER_ESCALATION_POLICY_REGEX = os.getenv(
+ "PAGERDUTY_FILTER_ESCALATION_POLICY_REGEX"
+)
+PAGERDUTY_FILTER_INTEGRATION_REGEX = os.getenv("PAGERDUTY_FILTER_INTEGRATION_REGEX")
+
+# Whether to preserve existing notification rules when migrating users
+PRESERVE_EXISTING_USER_NOTIFICATION_RULES = (
+ os.getenv("PRESERVE_EXISTING_USER_NOTIFICATION_RULES", "true").lower() == "true"
+)
diff --git a/tools/migrators/lib/pagerduty/migrate.py b/tools/migrators/lib/pagerduty/migrate.py
index 68d5652ec5..938248f85c 100644
--- a/tools/migrators/lib/pagerduty/migrate.py
+++ b/tools/migrators/lib/pagerduty/migrate.py
@@ -1,4 +1,5 @@
import datetime
+import re
from pdpyras import APISession
@@ -11,6 +12,11 @@
MODE,
MODE_PLAN,
PAGERDUTY_API_TOKEN,
+ PAGERDUTY_FILTER_ESCALATION_POLICY_REGEX,
+ PAGERDUTY_FILTER_INTEGRATION_REGEX,
+ PAGERDUTY_FILTER_SCHEDULE_REGEX,
+ PAGERDUTY_FILTER_TEAM,
+ PAGERDUTY_FILTER_USERS,
)
from lib.pagerduty.report import (
escalation_policy_report,
@@ -43,6 +49,136 @@
)
+def filter_schedules(schedules):
+ """Filter schedules based on configured filters"""
+ filtered_schedules = []
+ filtered_out = 0
+
+ for schedule in schedules:
+ should_include = True
+ reason = None
+
+ # Filter by team
+ if PAGERDUTY_FILTER_TEAM:
+ teams = schedule.get("teams", [])
+ if not any(team["summary"] == PAGERDUTY_FILTER_TEAM for team in teams):
+ should_include = False
+ reason = f"No teams found for team filter: {PAGERDUTY_FILTER_TEAM}"
+
+ # Filter by users
+ if should_include and PAGERDUTY_FILTER_USERS:
+ schedule_users = set()
+ for layer in schedule.get("schedule_layers", []):
+ for user in layer.get("users", []):
+ schedule_users.add(user["user"]["id"])
+
+ if not any(user_id in schedule_users for user_id in PAGERDUTY_FILTER_USERS):
+ should_include = False
+ reason = f"No users found for user filter: {','.join(PAGERDUTY_FILTER_USERS)}"
+
+ # Filter by name regex
+ if should_include and PAGERDUTY_FILTER_SCHEDULE_REGEX:
+ if not re.match(PAGERDUTY_FILTER_SCHEDULE_REGEX, schedule["name"]):
+ should_include = False
+ reason = f"Schedule regex filter: {PAGERDUTY_FILTER_SCHEDULE_REGEX}"
+
+ if should_include:
+ filtered_schedules.append(schedule)
+ else:
+ filtered_out += 1
+ print(f"{TAB}Schedule {schedule['id']}: {reason}")
+
+ if filtered_out > 0:
+ print(f"Filtered out {filtered_out} schedules")
+
+ return filtered_schedules
+
+
+def filter_escalation_policies(policies):
+ """Filter escalation policies based on configured filters"""
+ filtered_policies = []
+ filtered_out = 0
+
+ for policy in policies:
+ should_include = True
+ reason = None
+
+ # Filter by team
+ if PAGERDUTY_FILTER_TEAM:
+ teams = policy.get("teams", [])
+ if not any(team["summary"] == PAGERDUTY_FILTER_TEAM for team in teams):
+ should_include = False
+ reason = f"No teams found for team filter: {PAGERDUTY_FILTER_TEAM}"
+
+ # Filter by users
+ if should_include and PAGERDUTY_FILTER_USERS:
+ policy_users = set()
+ for rule in policy.get("escalation_rules", []):
+ for target in rule.get("targets", []):
+ if target["type"] == "user":
+ policy_users.add(target["id"])
+
+ if not any(user_id in policy_users for user_id in PAGERDUTY_FILTER_USERS):
+ should_include = False
+ reason = f"No users found for user filter: {','.join(PAGERDUTY_FILTER_USERS)}"
+
+ # Filter by name regex
+ if should_include and PAGERDUTY_FILTER_ESCALATION_POLICY_REGEX:
+ if not re.match(PAGERDUTY_FILTER_ESCALATION_POLICY_REGEX, policy["name"]):
+ should_include = False
+ reason = f"Escalation policy regex filter: {PAGERDUTY_FILTER_ESCALATION_POLICY_REGEX}"
+
+ if should_include:
+ filtered_policies.append(policy)
+ else:
+ filtered_out += 1
+ print(f"{TAB}Policy {policy['id']}: {reason}")
+
+ if filtered_out > 0:
+ print(f"Filtered out {filtered_out} escalation policies")
+
+ return filtered_policies
+
+
+def filter_integrations(integrations):
+ """Filter integrations based on configured filters"""
+ filtered_integrations = []
+ filtered_out = 0
+
+ for integration in integrations:
+ should_include = True
+ reason = None
+
+ # Filter by team
+ if PAGERDUTY_FILTER_TEAM:
+ teams = integration["service"].get("teams", [])
+ if not any(team["summary"] == PAGERDUTY_FILTER_TEAM for team in teams):
+ should_include = False
+ reason = f"No teams found for team filter: {PAGERDUTY_FILTER_TEAM}"
+
+ # Filter by name regex
+ if should_include and PAGERDUTY_FILTER_INTEGRATION_REGEX:
+ integration_name = (
+ f"{integration['service']['name']} - {integration['name']}"
+ )
+ if not re.match(PAGERDUTY_FILTER_INTEGRATION_REGEX, integration_name):
+ should_include = False
+ reason = (
+ f"Integration regex filter: {PAGERDUTY_FILTER_INTEGRATION_REGEX}"
+ )
+
+ if should_include:
+ filtered_integrations.append(integration)
+ else:
+ filtered_out += 1
+ print(f"{TAB}Integration {integration['id']}: {reason}")
+
+ if filtered_out > 0:
+ print(f"Filtered out {filtered_out} integrations")
+
+ return filtered_integrations
+
+
def migrate() -> None:
session = APISession(PAGERDUTY_API_TOKEN)
session.timeout = 20
@@ -59,9 +195,13 @@ def migrate() -> None:
print("▶ Fetching schedules...")
# Fetch schedules from PagerDuty
schedules = session.list_all(
- "schedules", params={"include[]": "schedule_layers", "time_zone": "UTC"}
+ "schedules",
+ params={"include[]": ["schedule_layers", "teams"], "time_zone": "UTC"},
)
+ # Apply filters to schedules
+ schedules = filter_schedules(schedules)
+
# Fetch overrides from PagerDuty
since = datetime.datetime.now(datetime.timezone.utc)
until = since + datetime.timedelta(
@@ -78,11 +218,19 @@ def migrate() -> None:
oncall_schedules = OnCallAPIClient.list_all("schedules")
print("▶ Fetching escalation policies...")
- escalation_policies = session.list_all("escalation_policies")
+ escalation_policies = session.list_all(
+ "escalation_policies", params={"include[]": "teams"}
+ )
+
+ # Apply filters to escalation policies
+ escalation_policies = filter_escalation_policies(escalation_policies)
+
oncall_escalation_chains = OnCallAPIClient.list_all("escalation_chains")
print("▶ Fetching integrations...")
- services = session.list_all("services", params={"include[]": "integrations"})
+ services = session.list_all(
+ "services", params={"include[]": ["integrations", "teams"]}
+ )
vendors = session.list_all("vendors")
integrations = []
@@ -92,6 +240,9 @@ def migrate() -> None:
integration["service"] = service
integrations.append(integration)
+ # Apply filters to integrations
+ integrations = filter_integrations(integrations)
+
oncall_integrations = OnCallAPIClient.list_all("integrations")
rulesets = None
diff --git a/tools/migrators/lib/pagerduty/report.py b/tools/migrators/lib/pagerduty/report.py
index cf2fe779ac..82bf7aa375 100644
--- a/tools/migrators/lib/pagerduty/report.py
+++ b/tools/migrators/lib/pagerduty/report.py
@@ -1,4 +1,5 @@
from lib.common.report import ERROR_SIGN, SUCCESS_SIGN, TAB, WARNING_SIGN
+from lib.pagerduty.config import PRESERVE_EXISTING_USER_NOTIFICATION_RULES
def format_user(user: dict) -> str:
@@ -88,8 +89,22 @@ def user_report(users: list[dict]) -> str:
for user in sorted(users, key=lambda u: bool(u["oncall_user"]), reverse=True):
result += "\n" + TAB + format_user(user)
- if user["oncall_user"] and user["notification_rules"]:
- result += " (existing notification rules will be deleted)"
+ if user["oncall_user"]:
+ if (
+ user["oncall_user"]["notification_rules"]
+ and PRESERVE_EXISTING_USER_NOTIFICATION_RULES
+ ):
+ # already has user notification rules defined in OnCall.. we won't touch these
+ result += " (existing notification rules will be preserved due to the PRESERVE_EXISTING_USER_NOTIFICATION_RULES being set to True and this user already having notification rules defined in OnCall)"
+ elif (
+ user["oncall_user"]["notification_rules"]
+ and not PRESERVE_EXISTING_USER_NOTIFICATION_RULES
+ ):
+ # already has user notification rules defined in OnCall.. we will overwrite these
+ result += " (existing notification rules will be overwritten due to the PRESERVE_EXISTING_USER_NOTIFICATION_RULES being set to False)"
+ elif user["notification_rules"]:
+ # user has notification rules defined in PagerDuty, but none defined in OnCall, we will migrate these
+ result += " (existing PagerDuty notification rules will be migrated due to this user not having any notification rules defined in OnCall)"
return result
diff --git a/tools/migrators/lib/pagerduty/resources/escalation_policies.py b/tools/migrators/lib/pagerduty/resources/escalation_policies.py
index 198ab73ed4..0a8869db9f 100644
--- a/tools/migrators/lib/pagerduty/resources/escalation_policies.py
+++ b/tools/migrators/lib/pagerduty/resources/escalation_policies.py
@@ -17,6 +17,10 @@ def match_escalation_policy_for_integration(
policy_id = integration["service"]["escalation_policy"]["id"]
policy = find_by_id(escalation_policies, policy_id)
+ if policy is None:
+ integration["is_escalation_policy_flawed"] = True
+ return
+
integration["is_escalation_policy_flawed"] = bool(
policy["unmatched_users"] or policy["flawed_schedules"]
)
diff --git a/tools/migrators/lib/pagerduty/resources/notification_rules.py b/tools/migrators/lib/pagerduty/resources/notification_rules.py
index 7f712d7c55..b8bfda232a 100644
--- a/tools/migrators/lib/pagerduty/resources/notification_rules.py
+++ b/tools/migrators/lib/pagerduty/resources/notification_rules.py
@@ -1,7 +1,10 @@
import copy
from lib.oncall.api_client import OnCallAPIClient
-from lib.pagerduty.config import PAGERDUTY_TO_ONCALL_CONTACT_METHOD_MAP
+from lib.pagerduty.config import (
+ PAGERDUTY_TO_ONCALL_CONTACT_METHOD_MAP,
+ PRESERVE_EXISTING_USER_NOTIFICATION_RULES,
+)
from lib.utils import remove_duplicates, transform_wait_delay
@@ -23,6 +26,13 @@ def remove_duplicate_rules_between_waits(rules: list[dict]) -> list[dict]:
def migrate_notification_rules(user: dict) -> None:
+ if (
+ PRESERVE_EXISTING_USER_NOTIFICATION_RULES
+ and user["oncall_user"]["notification_rules"]
+ ):
+ print(f"Preserving existing notification rules for {user['email']}")
+ return
+
notification_rules = [
rule for rule in user["notification_rules"] if rule["urgency"] == "high"
]
diff --git a/tools/migrators/lib/tests/pagerduty/test_matching.py b/tools/migrators/lib/tests/pagerduty/test_matching.py
index 137be652bc..baa9bfea7c 100644
--- a/tools/migrators/lib/tests/pagerduty/test_matching.py
+++ b/tools/migrators/lib/tests/pagerduty/test_matching.py
@@ -1330,7 +1330,7 @@
"scheduled_actions": [],
},
"oncall_integration": None,
- "oncall_type": None,
+ "oncall_type": "amazon_sns",
"is_escalation_policy_flawed": False,
},
{
@@ -1420,7 +1420,7 @@
"scheduled_actions": [],
},
"oncall_integration": None,
- "oncall_type": None,
+ "oncall_type": "amazon_sns",
"is_escalation_policy_flawed": True,
},
{
@@ -1510,7 +1510,7 @@
"scheduled_actions": [],
},
"oncall_integration": None,
- "oncall_type": None,
+ "oncall_type": "amazon_sns",
"is_escalation_policy_flawed": True,
},
{
diff --git a/tools/migrators/lib/tests/pagerduty/test_migrate.py b/tools/migrators/lib/tests/pagerduty/test_migrate.py
index 6a7b42eddc..bc16efac8c 100644
--- a/tools/migrators/lib/tests/pagerduty/test_migrate.py
+++ b/tools/migrators/lib/tests/pagerduty/test_migrate.py
@@ -1,6 +1,11 @@
from unittest.mock import call, patch
-from lib.pagerduty.migrate import migrate
+from lib.pagerduty.migrate import (
+ filter_escalation_policies,
+ filter_integrations,
+ filter_schedules,
+ migrate,
+)
@patch("lib.pagerduty.migrate.MIGRATE_USERS", False)
@@ -17,11 +22,281 @@ def test_users_are_skipped_when_migrate_users_is_false(
# Assert no user-related fetching or migration occurs
assert mock_session.list_all.call_args_list == [
- call("schedules", params={"include[]": "schedule_layers", "time_zone": "UTC"}),
- call("escalation_policies"),
- call("services", params={"include[]": "integrations"}),
+ call(
+ "schedules",
+ params={"include[]": ["schedule_layers", "teams"], "time_zone": "UTC"},
+ ),
+ call("escalation_policies", params={"include[]": "teams"}),
+ call("services", params={"include[]": ["integrations", "teams"]}),
call("vendors"),
# no user notification rules fetching
]
mock_oncall_client.list_users_with_notification_rules.assert_not_called()
+
+
+class TestPagerDutyFiltering:
+ def setup_method(self):
+ self.mock_schedule = {
+ "id": "SCHEDULE1",
+ "name": "Test Schedule",
+ "teams": [{"summary": "Team 1"}],
+ "schedule_layers": [
+ {
+ "users": [
+ {"user": {"id": "USER1"}},
+ {"user": {"id": "USER2"}},
+ ]
+ }
+ ],
+ }
+
+ self.mock_policy = {
+ "id": "POLICY1",
+ "name": "Test Policy",
+ "teams": [{"summary": "Team 1"}],
+ "escalation_rules": [
+ {
+ "targets": [
+ {"type": "user", "id": "USER1"},
+ {"type": "user", "id": "USER2"},
+ ]
+ }
+ ],
+ }
+
+ self.mock_integration = {
+ "id": "INTEGRATION1",
+ "name": "Test Integration",
+ "service": {
+ "name": "Service 1",
+ "teams": [{"summary": "Team 1"}],
+ },
+ }
+
+ @patch("lib.pagerduty.migrate.PAGERDUTY_FILTER_TEAM", "Team 1")
+ def test_filter_schedules_by_team(self):
+ schedules = [
+ self.mock_schedule,
+ {**self.mock_schedule, "teams": [{"summary": "Team 2"}]},
+ ]
+ filtered = filter_schedules(schedules)
+ assert len(filtered) == 1
+ assert filtered[0]["id"] == "SCHEDULE1"
+
+ @patch("lib.pagerduty.migrate.PAGERDUTY_FILTER_USERS", ["USER1"])
+ def test_filter_schedules_by_users(self):
+ schedules = [
+ self.mock_schedule,
+ {
+ **self.mock_schedule,
+ "schedule_layers": [{"users": [{"user": {"id": "USER3"}}]}],
+ },
+ ]
+ filtered = filter_schedules(schedules)
+ assert len(filtered) == 1
+ assert filtered[0]["id"] == "SCHEDULE1"
+
+ @patch("lib.pagerduty.migrate.PAGERDUTY_FILTER_SCHEDULE_REGEX", "^Test")
+ def test_filter_schedules_by_regex(self):
+ schedules = [
+ self.mock_schedule,
+ {**self.mock_schedule, "name": "Production Schedule"},
+ ]
+ filtered = filter_schedules(schedules)
+ assert len(filtered) == 1
+ assert filtered[0]["id"] == "SCHEDULE1"
+
+ @patch("lib.pagerduty.migrate.PAGERDUTY_FILTER_TEAM", "Team 1")
+ def test_filter_escalation_policies_by_team(self):
+ policies = [
+ self.mock_policy,
+ {**self.mock_policy, "teams": [{"summary": "Team 2"}]},
+ ]
+ filtered = filter_escalation_policies(policies)
+ assert len(filtered) == 1
+ assert filtered[0]["id"] == "POLICY1"
+
+ @patch("lib.pagerduty.migrate.PAGERDUTY_FILTER_USERS", ["USER1"])
+ def test_filter_escalation_policies_by_users(self):
+ policies = [
+ self.mock_policy,
+ {
+ **self.mock_policy,
+ "escalation_rules": [{"targets": [{"type": "user", "id": "USER3"}]}],
+ },
+ ]
+ filtered = filter_escalation_policies(policies)
+ assert len(filtered) == 1
+ assert filtered[0]["id"] == "POLICY1"
+
+ @patch("lib.pagerduty.migrate.PAGERDUTY_FILTER_ESCALATION_POLICY_REGEX", "^Test")
+ def test_filter_escalation_policies_by_regex(self):
+ policies = [
+ self.mock_policy,
+ {**self.mock_policy, "name": "Production Policy"},
+ ]
+ filtered = filter_escalation_policies(policies)
+ assert len(filtered) == 1
+ assert filtered[0]["id"] == "POLICY1"
+
+ @patch("lib.pagerduty.migrate.PAGERDUTY_FILTER_TEAM", "Team 1")
+ def test_filter_integrations_by_team(self):
+ integrations = [
+ self.mock_integration,
+ {
+ **self.mock_integration,
+ "service": {"teams": [{"summary": "Team 2"}]},
+ },
+ ]
+ filtered = filter_integrations(integrations)
+ assert len(filtered) == 1
+ assert filtered[0]["id"] == "INTEGRATION1"
+
+ @patch(
+ "lib.pagerduty.migrate.PAGERDUTY_FILTER_INTEGRATION_REGEX", "^Service 1 - Test"
+ )
+ def test_filter_integrations_by_regex(self):
+ integrations = [
+ self.mock_integration,
+ {
+ **self.mock_integration,
+ "service": {"name": "Service 2"},
+ "name": "Production Integration",
+ },
+ ]
+ filtered = filter_integrations(integrations)
+ assert len(filtered) == 1
+ assert filtered[0]["id"] == "INTEGRATION1"
+
+
+class TestPagerDutyMigrationFiltering:
+ @patch("lib.pagerduty.migrate.filter_schedules")
+ @patch("lib.pagerduty.migrate.filter_escalation_policies")
+ @patch("lib.pagerduty.migrate.filter_integrations")
+ @patch("lib.pagerduty.migrate.APISession")
+ @patch("lib.pagerduty.migrate.OnCallAPIClient")
+ def test_migrate_calls_filters(
+ self,
+ MockOnCallAPIClient,
+ MockAPISession,
+ mock_filter_integrations,
+ mock_filter_policies,
+ mock_filter_schedules,
+ ):
+ # Setup mock returns
+ mock_session = MockAPISession.return_value
+ mock_session.list_all.side_effect = [
+ [{"id": "U1", "name": "Test User", "email": "test@example.com"}], # users
+ [{"id": "S1"}], # schedules
+ [{"id": "P1"}], # policies
+ [{"id": "SVC1", "integrations": []}], # services
+ [{"id": "V1"}], # vendors
+ ]
+ mock_session.jget.return_value = {"overrides": []} # Mock schedule overrides
+ mock_oncall_client = MockOnCallAPIClient.return_value
+ mock_oncall_client.list_all.return_value = []
+
+ # Run migration
+ migrate()
+
+ # Verify filters were called with correct data
+ mock_filter_schedules.assert_called_once_with([{"id": "S1"}])
+ mock_filter_policies.assert_called_once_with([{"id": "P1"}])
+ mock_filter_integrations.assert_called_once() # Service data is transformed, so just check it was called
+
+ @patch("lib.pagerduty.migrate.PAGERDUTY_FILTER_TEAM", "Team 1")
+ @patch("lib.pagerduty.migrate.filter_schedules")
+ @patch("lib.pagerduty.migrate.filter_escalation_policies")
+ @patch("lib.pagerduty.migrate.filter_integrations")
+ @patch("lib.pagerduty.migrate.APISession")
+ @patch("lib.pagerduty.migrate.OnCallAPIClient")
+ def test_migrate_with_team_filter(
+ self,
+ MockOnCallAPIClient,
+ MockAPISession,
+ mock_filter_integrations,
+ mock_filter_policies,
+ mock_filter_schedules,
+ ):
+ # Setup mock returns
+ mock_session = MockAPISession.return_value
+ mock_session.list_all.side_effect = [
+ [{"id": "U1", "name": "Test User", "email": "test@example.com"}], # users
+ [{"id": "S1", "teams": [{"summary": "Team 1"}]}], # schedules
+ [{"id": "P1", "teams": [{"summary": "Team 1"}]}], # policies
+ [
+ {"id": "SVC1", "teams": [{"summary": "Team 1"}], "integrations": []}
+ ], # services
+ [{"id": "V1"}], # vendors
+ ]
+ mock_session.jget.return_value = {"overrides": []} # Mock schedule overrides
+ mock_oncall_client = MockOnCallAPIClient.return_value
+ mock_oncall_client.list_all.return_value = []
+
+ # Run migration
+ migrate()
+
+ # Verify filters were called and filtered by team
+ mock_filter_schedules.assert_called_once()
+ mock_filter_policies.assert_called_once()
+ mock_filter_integrations.assert_called_once()
+
+ # Verify team parameter was included in API calls
+ assert mock_session.list_all.call_args_list == [
+ call("users", params={"include[]": "notification_rules"}),
+ call(
+ "schedules",
+ params={"include[]": ["schedule_layers", "teams"], "time_zone": "UTC"},
+ ),
+ call("escalation_policies", params={"include[]": "teams"}),
+ call("services", params={"include[]": ["integrations", "teams"]}),
+ call("vendors"),
+ ]
+
+ @patch("lib.pagerduty.migrate.PAGERDUTY_FILTER_USERS", ["USER1"])
+ @patch("lib.pagerduty.migrate.filter_schedules")
+ @patch("lib.pagerduty.migrate.filter_escalation_policies")
+ @patch("lib.pagerduty.migrate.filter_integrations")
+ @patch("lib.pagerduty.migrate.APISession")
+ @patch("lib.pagerduty.migrate.OnCallAPIClient")
+ def test_migrate_with_users_filter(
+ self,
+ MockOnCallAPIClient,
+ MockAPISession,
+ mock_filter_integrations,
+ mock_filter_policies,
+ mock_filter_schedules,
+ ):
+ # Setup mock returns
+ mock_session = MockAPISession.return_value
+ mock_session.list_all.side_effect = [
+ [{"id": "U1", "name": "Test User", "email": "test@example.com"}], # users
+ [
+ {
+ "id": "S1",
+ "schedule_layers": [{"users": [{"user": {"id": "USER1"}}]}],
+ }
+ ], # schedules
+ [
+ {
+ "id": "P1",
+ "escalation_rules": [
+ {"targets": [{"type": "user", "id": "USER1"}]}
+ ],
+ }
+ ], # policies
+ [{"id": "SVC1", "integrations": []}], # services
+ [{"id": "V1"}], # vendors
+ ]
+ mock_session.jget.return_value = {"overrides": []} # Mock schedule overrides
+ mock_oncall_client = MockOnCallAPIClient.return_value
+ mock_oncall_client.list_all.return_value = []
+
+ # Run migration
+ migrate()
+
+ # Verify filters were called and filtered by users
+ mock_filter_schedules.assert_called_once()
+ mock_filter_policies.assert_called_once()
+ mock_filter_integrations.assert_called_once()
diff --git a/tools/migrators/lib/tests/pagerduty/test_migrate_notification_rules.py b/tools/migrators/lib/tests/pagerduty/test_migrate_notification_rules.py
index 566d16149f..af4e28a4e1 100644
--- a/tools/migrators/lib/tests/pagerduty/test_migrate_notification_rules.py
+++ b/tools/migrators/lib/tests/pagerduty/test_migrate_notification_rules.py
@@ -1,14 +1,144 @@
from unittest.mock import call, patch
-from lib.oncall.api_client import OnCallAPIClient
from lib.pagerduty.resources.notification_rules import migrate_notification_rules
-@patch.object(OnCallAPIClient, "delete")
-@patch.object(OnCallAPIClient, "create")
-def test_migrate_notification_rules(api_client_create_mock, api_client_delete_mock):
- migrate_notification_rules(
- {
+class TestNotificationRulesPreservation:
+ def setup_method(self):
+ self.pd_user = {
+ "id": "U1",
+ "name": "Test User",
+ "email": "test@example.com",
+ "notification_rules": [
+ {
+ "id": "PD1",
+ "urgency": "high",
+ "start_delay_in_minutes": 0,
+ "contact_method": {"type": "email_contact_method"},
+ }
+ ],
+ }
+ self.oncall_user = {
+ "id": "OC1",
+ "email": "test@example.com",
+ "notification_rules": [],
+ }
+ self.pd_user["oncall_user"] = self.oncall_user
+
+ @patch(
+ "lib.pagerduty.resources.notification_rules.PRESERVE_EXISTING_USER_NOTIFICATION_RULES",
+ True,
+ )
+ @patch("lib.pagerduty.resources.notification_rules.OnCallAPIClient")
+ def test_existing_notification_rules_are_preserved(self, MockOnCallAPIClient):
+ # Setup user with existing notification rules
+ self.oncall_user["notification_rules"] = [{"id": "NR1"}]
+
+ # Run migration
+ migrate_notification_rules(self.pd_user)
+
+ # Verify no notification rules were migrated
+ MockOnCallAPIClient.create.assert_not_called()
+ MockOnCallAPIClient.delete.assert_not_called()
+
+ @patch(
+ "lib.pagerduty.resources.notification_rules.PRESERVE_EXISTING_USER_NOTIFICATION_RULES",
+ True,
+ )
+ @patch("lib.pagerduty.resources.notification_rules.OnCallAPIClient")
+ def test_notification_rules_migrated_when_none_exist(self, MockOnCallAPIClient):
+ # Run migration
+ migrate_notification_rules(self.pd_user)
+
+ # Verify notification rules were migrated for both important and non-important cases
+ expected_calls = [
+ call(
+ "personal_notification_rules",
+ {"user_id": "OC1", "type": "notify_by_email", "important": False},
+ ),
+ call(
+ "personal_notification_rules",
+ {"user_id": "OC1", "type": "notify_by_email", "important": True},
+ ),
+ ]
+ MockOnCallAPIClient.create.assert_has_calls(expected_calls)
+ MockOnCallAPIClient.delete.assert_not_called()
+
+ @patch(
+ "lib.pagerduty.resources.notification_rules.PRESERVE_EXISTING_USER_NOTIFICATION_RULES",
+ False,
+ )
+ @patch("lib.pagerduty.resources.notification_rules.OnCallAPIClient")
+ def test_existing_notification_rules_are_replaced_when_preserve_is_false(
+ self, MockOnCallAPIClient
+ ):
+ # Setup user with existing notification rules
+ self.oncall_user["notification_rules"] = [
+ {"id": "NR1", "important": False},
+ {"id": "NR2", "important": True},
+ ]
+
+ # Run migration
+ migrate_notification_rules(self.pd_user)
+
+ # Verify old rules were deleted
+ expected_delete_calls = [
+ call("personal_notification_rules/NR1"),
+ call("personal_notification_rules/NR2"),
+ ]
+ MockOnCallAPIClient.delete.assert_has_calls(
+ expected_delete_calls, any_order=True
+ )
+
+ # Verify new rules were created
+ expected_create_calls = [
+ call(
+ "personal_notification_rules",
+ {"user_id": "OC1", "type": "notify_by_email", "important": False},
+ ),
+ call(
+ "personal_notification_rules",
+ {"user_id": "OC1", "type": "notify_by_email", "important": True},
+ ),
+ ]
+ MockOnCallAPIClient.create.assert_has_calls(expected_create_calls)
+
+ @patch(
+ "lib.pagerduty.resources.notification_rules.PRESERVE_EXISTING_USER_NOTIFICATION_RULES",
+ False,
+ )
+ @patch("lib.pagerduty.resources.notification_rules.OnCallAPIClient")
+ def test_notification_rules_migrated_when_none_exist_and_preserve_is_false(
+ self, MockOnCallAPIClient
+ ):
+ # Run migration
+ migrate_notification_rules(self.pd_user)
+
+ # Verify no rules were deleted (since none existed)
+ MockOnCallAPIClient.delete.assert_not_called()
+
+ # Verify new rules were created
+ expected_create_calls = [
+ call(
+ "personal_notification_rules",
+ {"user_id": "OC1", "type": "notify_by_email", "important": False},
+ ),
+ call(
+ "personal_notification_rules",
+ {"user_id": "OC1", "type": "notify_by_email", "important": True},
+ ),
+ ]
+ MockOnCallAPIClient.create.assert_has_calls(expected_create_calls)
+
+ @patch(
+ "lib.pagerduty.resources.notification_rules.PRESERVE_EXISTING_USER_NOTIFICATION_RULES",
+ False,
+ )
+ @patch("lib.pagerduty.resources.notification_rules.OnCallAPIClient")
+ def test_complex_notification_rules_migration(self, MockOnCallAPIClient):
+ # Test a more complex case with multiple notification methods and delays
+ user = {
+ "email": "test@example.com",
"notification_rules": [
{
"contact_method": {"type": "sms_contact_method"},
@@ -29,57 +159,69 @@ def test_migrate_notification_rules(api_client_create_mock, api_client_delete_mo
],
},
}
- )
- assert api_client_create_mock.call_args_list == [
- call(
- "personal_notification_rules",
- {
- "user_id": "EXISTING_USER_ID",
- "type": "notify_by_sms",
- "important": False,
- },
- ),
- call(
- "personal_notification_rules",
- {
- "user_id": "EXISTING_USER_ID",
- "type": "wait",
- "duration": 300,
- "important": False,
- },
- ),
- call(
- "personal_notification_rules",
- {
- "user_id": "EXISTING_USER_ID",
- "type": "notify_by_mobile_app",
- "important": False,
- },
- ),
- call(
- "personal_notification_rules",
- {"user_id": "EXISTING_USER_ID", "type": "notify_by_sms", "important": True},
- ),
- call(
- "personal_notification_rules",
- {
- "user_id": "EXISTING_USER_ID",
- "type": "wait",
- "duration": 300,
- "important": True,
- },
- ),
- call(
- "personal_notification_rules",
- {
- "user_id": "EXISTING_USER_ID",
- "type": "notify_by_mobile_app",
- "important": True,
- },
- ),
- ]
- assert api_client_delete_mock.call_args_list == [
- call("personal_notification_rules/EXISTING_RULE_ID_1"),
- call("personal_notification_rules/EXISTING_RULE_ID_2"),
- ]
+ migrate_notification_rules(user)
+
+ # Verify old rules were deleted
+ expected_delete_calls = [
+ call("personal_notification_rules/EXISTING_RULE_ID_1"),
+ call("personal_notification_rules/EXISTING_RULE_ID_2"),
+ ]
+ MockOnCallAPIClient.delete.assert_has_calls(
+ expected_delete_calls, any_order=True
+ )
+
+ # Verify new rules were created in correct order with correct delays
+ expected_create_calls = [
+ call(
+ "personal_notification_rules",
+ {
+ "user_id": "EXISTING_USER_ID",
+ "type": "notify_by_sms",
+ "important": False,
+ },
+ ),
+ call(
+ "personal_notification_rules",
+ {
+ "user_id": "EXISTING_USER_ID",
+ "type": "wait",
+ "duration": 300,
+ "important": False,
+ },
+ ),
+ call(
+ "personal_notification_rules",
+ {
+ "user_id": "EXISTING_USER_ID",
+ "type": "notify_by_mobile_app",
+ "important": False,
+ },
+ ),
+ call(
+ "personal_notification_rules",
+ {
+ "user_id": "EXISTING_USER_ID",
+ "type": "notify_by_sms",
+ "important": True,
+ },
+ ),
+ call(
+ "personal_notification_rules",
+ {
+ "user_id": "EXISTING_USER_ID",
+ "type": "wait",
+ "duration": 300,
+ "important": True,
+ },
+ ),
+ call(
+ "personal_notification_rules",
+ {
+ "user_id": "EXISTING_USER_ID",
+ "type": "notify_by_mobile_app",
+ "important": True,
+ },
+ ),
+ ]
+ MockOnCallAPIClient.create.assert_has_calls(expected_create_calls)
From 576fecf6d30b21b6b5dc664b54d35103f8359004 Mon Sep 17 00:00:00 2001
From: Matias Bordese
Date: Tue, 18 Feb 2025 14:53:07 -0300
Subject: [PATCH 7/7] feat: add personal webhook notification backend (#5426)
Related to https://github.com/grafana/irm/issues/332
(see https://github.com/grafana/oncall/pull/5440 and
https://github.com/grafana/oncall/pull/5446 for the related UI changes)
---------
Co-authored-by: Matt Thorning
---
.../integrations/outgoing-webhooks/index.md | 13 +
docs/sources/manage/notify/webhook/index.md | 37 +++
.../oncall-api-reference/outgoing_webhooks.md | 1 +
.../personal_notification_rules.md | 2 +-
engine/apps/api/tests/test_webhooks.py | 97 +++++++
engine/apps/api/views/features.py | 4 +
engine/apps/api/views/webhooks.py | 81 +++++-
engine/apps/webhooks/backend.py | 41 +++
...018_alter_webhook_trigger_type_and_more.py | 35 +++
engine/apps/webhooks/models/__init__.py | 2 +-
engine/apps/webhooks/models/webhook.py | 29 ++-
engine/apps/webhooks/tasks/__init__.py | 1 +
engine/apps/webhooks/tasks/notify_user.py | 67 +++++
engine/apps/webhooks/tasks/trigger_webhook.py | 11 +-
engine/apps/webhooks/tests/factories.py | 7 +-
engine/apps/webhooks/tests/test_backend.py | 97 +++++++
.../apps/webhooks/tests/test_notify_user.py | 154 +++++++++++
.../webhooks/tests/test_trigger_webhook.py | 63 ++++-
engine/apps/webhooks/tests/test_webhook.py | 26 ++
engine/conftest.py | 15 +-
engine/settings/base.py | 5 +
engine/settings/celery_task_routes.py | 1 +
.../containers/UserSettings/UserSettings.tsx | 3 +
.../UserSettings/UserSettings.types.ts | 1 +
.../UserSettings/parts/UserSettingsParts.tsx | 31 ++-
.../parts/connectors/Connectors.tsx | 2 +
.../connectors/PersonalWebhookConnector.tsx | 60 +++++
.../PersonalWebhookInfo.tsx | 245 ++++++++++++++++++
.../outgoing_webhook.types.ts | 5 +
grafana-plugin/src/models/user/user.ts | 26 ++
grafana-plugin/src/state/features.ts | 1 +
31 files changed, 1145 insertions(+), 18 deletions(-)
create mode 100644 docs/sources/manage/notify/webhook/index.md
create mode 100644 engine/apps/webhooks/backend.py
create mode 100644 engine/apps/webhooks/migrations/0018_alter_webhook_trigger_type_and_more.py
create mode 100644 engine/apps/webhooks/tasks/notify_user.py
create mode 100644 engine/apps/webhooks/tests/test_backend.py
create mode 100644 engine/apps/webhooks/tests/test_notify_user.py
create mode 100644 grafana-plugin/src/containers/UserSettings/parts/connectors/PersonalWebhookConnector.tsx
create mode 100644 grafana-plugin/src/containers/UserSettings/parts/tabs/PersonalWebhookInfo/PersonalWebhookInfo.tsx
diff --git a/docs/sources/configure/integrations/outgoing-webhooks/index.md b/docs/sources/configure/integrations/outgoing-webhooks/index.md
index 0bd79870c4..a423450361 100644
--- a/docs/sources/configure/integrations/outgoing-webhooks/index.md
+++ b/docs/sources/configure/integrations/outgoing-webhooks/index.md
@@ -20,6 +20,11 @@ refs:
destination: /docs/oncall//configure/integrations/labels/#alert-group-labels
- pattern: /docs/grafana-cloud/
destination: /docs/grafana-cloud/alerting-and-irm/oncall/configure/integrations/labels/#alert-group-labels
+ personal-webhook:
+ - pattern: /docs/oncall/
+ destination: /docs/oncall//manage/notify/webhook
+ - pattern: /docs/grafana-cloud/
+ destination: /docs/grafana-cloud/alerting-and-irm/oncall/manage/notify/webhook
integration-labels:
- pattern: /docs/oncall/
destination: /docs/oncall//configure/integrations/labels/
@@ -109,6 +114,7 @@ This setting does not restrict outgoing webhook execution to events from the sel
The type of event that will cause this outgoing webhook to execute. The types of triggers are:
- [Manual or Escalation Step](#escalation-step)
+- [Personal Notification](#personal-notification)
- [Alert Group Created](#alert-group-created)
- [Acknowledged](#acknowledged)
- [Resolved](#resolved)
@@ -310,6 +316,7 @@ Context information about the event that triggered the outgoing webhook.
- `{{ event.type }}` - Lower case string matching [type of event](#event-types)
- `{{ event.time }}` - Time event was triggered
+- `{{ event.user.* }}` - Context data as provided by the user for [Personal Notification](ref:personal-webhook) webhooks
#### `user`
@@ -482,6 +489,12 @@ Now the result is correct:
This event will trigger when the outgoing webhook is included as a step in an escalation chain.
Webhooks with this trigger type can also be manually triggered in the context of an alert group in the web UI.
+### Personal Notification
+
+`event.type` `personal notification`
+
+This event will trigger when the outgoing webhook is included as a step in a user's personal notification rules.
+
### Alert Group Created
`event.type` `alert group created`
diff --git a/docs/sources/manage/notify/webhook/index.md b/docs/sources/manage/notify/webhook/index.md
new file mode 100644
index 0000000000..39ddc6e866
--- /dev/null
+++ b/docs/sources/manage/notify/webhook/index.md
@@ -0,0 +1,37 @@
+---
+title: Webhook as personal notification channel
+menuTitle: Webhook
+description: Learn more about using webhooks as a personal notification channel in Grafana OnCall.
+weight: 700
+keywords:
+ - OnCall
+ - Notifications
+ - ChatOps
+ - Webhook
+ - Channels
+canonical: https://grafana.com/docs/oncall/latest/manage/notify/webhook/
+aliases:
+ - /docs/grafana-cloud/alerting-and-irm/oncall/manage/notify/webhook/
+ - /docs/grafana-cloud/alerting-and-irm/oncall/notify/webhook/
+refs:
+ outgoing-webhooks:
+ - pattern: /docs/oncall/
+ destination: /docs/oncall//configure/integrations/outgoing-webhooks/
+ - pattern: /docs/grafana-cloud/
+ destination: /docs/grafana-cloud/alerting-and-irm/oncall/configure/integrations/outgoing-webhooks/
+---
+
+# Webhook as a personal notification channel
+
+It is possible to set up a webhook as a personal notification channel in your user profile.
+The webhook will be triggered as a personal notification rule according to your notification policy configuration.
+
+## Configure a webhook to be used as personal notification
+
+In the webhooks page, you (or a user with the right permissions) need to define a [webhook](ref:outgoing-webhooks) as usual,
+but with the `Personal Notification` trigger type.
+
+Each user will then be able to choose a webhook (from among those with the above trigger type) as a notification channel in
+their profile. Optionally, they can also provide additional context data (as a JSON dict, e.g. `{"user_ID": "some-id"}`)
+which will be available when evaluating the webhook templates. This data can be referenced via `{{ event.user. }}`
+(e.g. `{{ event.user.user_ID }}`).
diff --git a/docs/sources/oncall-api-reference/outgoing_webhooks.md b/docs/sources/oncall-api-reference/outgoing_webhooks.md
index 8b032db0be..eee13e5cd6 100644
--- a/docs/sources/oncall-api-reference/outgoing_webhooks.md
+++ b/docs/sources/oncall-api-reference/outgoing_webhooks.md
@@ -132,6 +132,7 @@ curl "{{API_URL}}/api/v1/webhooks/" \
For more detail, refer to [Event types](ref:event-types).
- `escalation`
+- `personal notification`
- `alert group created`
- `acknowledge`
- `resolve`
diff --git a/docs/sources/oncall-api-reference/personal_notification_rules.md b/docs/sources/oncall-api-reference/personal_notification_rules.md
index c2aa9006c5..8473fa03de 100644
--- a/docs/sources/oncall-api-reference/personal_notification_rules.md
+++ b/docs/sources/oncall-api-reference/personal_notification_rules.md
@@ -43,7 +43,7 @@ The above command returns JSON structured in the following way:
| ----------- | :------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `user_id` | Yes | User ID |
| `position` | Optional | Personal notification rules execute one after another starting from `position=0`. `Position=-1` will put the escalation policy to the end of the list. A new escalation policy created with a position of an existing escalation policy will move the old one (and all following) down on the list. |
-| `type` | Yes | One of: `wait`, `notify_by_slack`, `notify_by_sms`, `notify_by_phone_call`, `notify_by_telegram`, `notify_by_email`, `notify_by_mobile_app`, `notify_by_mobile_app_critical`, or `notify_by_msteams` (**NOTE** `notify_by_msteams` is only available on Grafana Cloud). |
+| `type` | Yes | One of: `wait`, `notify_by_slack`, `notify_by_sms`, `notify_by_phone_call`, `notify_by_telegram`, `notify_by_email`, `notify_by_mobile_app`, `notify_by_mobile_app_critical`, `notify_by_webhook` or `notify_by_msteams` (**NOTE** `notify_by_msteams` is only available on Grafana Cloud). |
| `duration` | Optional | A time in seconds to wait (when `type=wait`). Can be one of 60, 300, 900, 1800, or 3600. |
| `important` | Optional | Boolean value indicates if a rule is "important". Default is `false`. |
diff --git a/engine/apps/api/tests/test_webhooks.py b/engine/apps/api/tests/test_webhooks.py
index a3b9109638..f4edacdfb8 100644
--- a/engine/apps/api/tests/test_webhooks.py
+++ b/engine/apps/api/tests/test_webhooks.py
@@ -2,6 +2,7 @@
from unittest.mock import patch
import pytest
+from django.core.exceptions import ObjectDoesNotExist
from django.urls import reverse
from rest_framework import status
from rest_framework.response import Response
@@ -1253,3 +1254,99 @@ def test_webhook_trigger_manual(
)
assert response.status_code == status.HTTP_404_NOT_FOUND
assert mock_execute.apply_async.call_count == 0
+
+
+@pytest.mark.django_db
+def test_current_personal_notification(
+ make_organization_and_user_with_plugin_token,
+ make_custom_webhook,
+ make_user_auth_headers,
+ make_personal_notification_webhook,
+):
+ organization, user, token = make_organization_and_user_with_plugin_token()
+ with pytest.raises(ObjectDoesNotExist):
+ user.personal_webhook
+
+ webhook = make_custom_webhook(organization, trigger_type=Webhook.TRIGGER_PERSONAL_NOTIFICATION)
+
+ client = APIClient()
+ url = reverse("api-internal:webhooks-current-personal-notification")
+
+ # no webhook setup
+ response = client.get(url, **make_user_auth_headers(user, token))
+ assert response.status_code == status.HTTP_200_OK
+ assert response.json() == {"webhook": None, "context": None}
+
+ # setup personal webhook
+ personal_webhook = make_personal_notification_webhook(user, webhook)
+ response = client.get(url, **make_user_auth_headers(user, token))
+ assert response.status_code == status.HTTP_200_OK
+ assert response.json() == {"webhook": webhook.public_primary_key, "context": {}}
+
+ # update context data
+ personal_webhook.context_data = {"test": "test"}
+ response = client.get(url, **make_user_auth_headers(user, token))
+ assert response.status_code == status.HTTP_200_OK
+ assert response.json() == {"webhook": webhook.public_primary_key, "context": {"test": "test"}}
+
+
+@pytest.mark.django_db
+def test_set_personal_notification(
+ make_organization_and_user_with_plugin_token,
+ make_custom_webhook,
+ make_user_auth_headers,
+):
+ organization, user, token = make_organization_and_user_with_plugin_token()
+ with pytest.raises(ObjectDoesNotExist):
+ user.personal_webhook
+
+ webhook = make_custom_webhook(organization, trigger_type=Webhook.TRIGGER_PERSONAL_NOTIFICATION)
+ other_webhook = make_custom_webhook(organization, trigger_type=Webhook.TRIGGER_MANUAL)
+
+ client = APIClient()
+ url = reverse("api-internal:webhooks-set-personal-notification")
+
+ # webhook id is required
+ data = {}
+ response = client.post(
+ url, data=json.dumps(data), content_type="application/json", **make_user_auth_headers(user, token)
+ )
+ assert response.status_code == status.HTTP_400_BAD_REQUEST
+ assert response.json()["webhook"] == "This field is required."
+
+ # invalid webhook type
+ data = {"webhook": other_webhook.public_primary_key}
+ response = client.post(
+ url, data=json.dumps(data), content_type="application/json", **make_user_auth_headers(user, token)
+ )
+ assert response.status_code == status.HTTP_400_BAD_REQUEST
+ assert response.json()["webhook"] == "Webhook not found."
+
+ # check backend info
+ data = {"webhook": webhook.public_primary_key}
+ response = client.post(
+ url, data=json.dumps(data), content_type="application/json", **make_user_auth_headers(user, token)
+ )
+ assert response.status_code == status.HTTP_200_OK
+ user.refresh_from_db()
+ assert user.personal_webhook.webhook == webhook
+ assert user.personal_webhook.context_data == {}
+
+ # update context data
+ data = {"webhook": webhook.public_primary_key, "context": {"test": "test"}}
+ response = client.post(
+ url, data=json.dumps(data), content_type="application/json", **make_user_auth_headers(user, token)
+ )
+ assert response.status_code == status.HTTP_200_OK
+ user.refresh_from_db()
+ assert user.personal_webhook.context_data == {"test": "test"}
+
+ # invalid context
+ data = {"webhook": webhook.public_primary_key, "context": "not-json"}
+ response = client.post(
+ url, data=json.dumps(data), content_type="application/json", **make_user_auth_headers(user, token)
+ )
+ assert response.status_code == status.HTTP_400_BAD_REQUEST
+ assert response.json()["context"] == "Invalid context."
+ user.refresh_from_db()
+ assert user.personal_webhook.context_data == {"test": "test"}
diff --git a/engine/apps/api/views/features.py b/engine/apps/api/views/features.py
index 597fd92cb3..b134e03327 100644
--- a/engine/apps/api/views/features.py
+++ b/engine/apps/api/views/features.py
@@ -27,6 +27,7 @@ class Feature(enum.StrEnum):
LABELS = "labels"
GOOGLE_OAUTH2 = "google_oauth2"
SERVICE_DEPENDENCIES = "service_dependencies"
+ PERSONAL_WEBHOOK = "personal_webhook"
class FeaturesAPIView(APIView):
@@ -76,4 +77,7 @@ def _get_enabled_features(self, request):
if settings.FEATURE_SERVICE_DEPENDENCIES_ENABLED:
enabled_features.append(Feature.SERVICE_DEPENDENCIES)
+ if settings.FEATURE_PERSONAL_WEBHOOK_ENABLED:
+ enabled_features.append(Feature.PERSONAL_WEBHOOK)
+
return enabled_features
diff --git a/engine/apps/api/views/webhooks.py b/engine/apps/api/views/webhooks.py
index d249ef87e6..fcf8464ca3 100644
--- a/engine/apps/api/views/webhooks.py
+++ b/engine/apps/api/views/webhooks.py
@@ -19,7 +19,7 @@
from apps.api.views.labels import schedule_update_label_cache
from apps.auth_token.auth import PluginAuthentication
from apps.labels.utils import is_labels_feature_enabled
-from apps.webhooks.models import Webhook, WebhookResponse
+from apps.webhooks.models import PersonalNotificationWebhook, Webhook, WebhookResponse
from apps.webhooks.presets.preset_options import WebhookPresetOptions
from apps.webhooks.tasks import execute_webhook
from apps.webhooks.utils import apply_jinja_template_for_json
@@ -89,6 +89,8 @@ class WebhooksView(TeamFilteringMixin, PublicPrimaryKeyMixin[Webhook], ModelView
"preview_template": [RBACPermission.Permissions.OUTGOING_WEBHOOKS_WRITE],
"preset_options": [RBACPermission.Permissions.OUTGOING_WEBHOOKS_READ],
"trigger_manual": [RBACPermission.Permissions.OUTGOING_WEBHOOKS_READ],
+ "current_personal_notification": [RBACPermission.Permissions.USER_SETTINGS_READ],
+ "set_personal_notification": [RBACPermission.Permissions.USER_SETTINGS_WRITE],
}
model = Webhook
@@ -336,3 +338,80 @@ def trigger_manual(self, request, pk):
(webhook.pk, alert_group.pk, user.pk, None), kwargs={"trigger_type": Webhook.TRIGGER_MANUAL}
)
return Response(status=status.HTTP_200_OK)
+
+ @extend_schema(
+ responses={
+ status.HTTP_200_OK: inline_serializer(
+ name="PersonalNotificationWebhook",
+ fields={
+ "webhook": serializers.CharField(),
+ "context": serializers.DictField(required=False, allow_null=True),
+ },
+ )
+ },
+ )
+ @action(methods=["get"], detail=False)
+ def current_personal_notification(self, request):
+ user = self.request.user
+ notification_channel = {
+ "webhook": None,
+ "context": None,
+ }
+ try:
+ personal_webhook = PersonalNotificationWebhook.objects.get(user=user)
+ except PersonalNotificationWebhook.DoesNotExist:
+ personal_webhook = None
+
+ if personal_webhook is not None:
+ notification_channel["webhook"] = personal_webhook.webhook.public_primary_key
+ notification_channel["context"] = personal_webhook.context_data
+
+ return Response(notification_channel)
+
+ @extend_schema(
+ request=inline_serializer(
+ name="PersonalNotificationWebhookRequest",
+ fields={
+ "webhook": serializers.CharField(),
+ "context": serializers.DictField(required=False, allow_null=True),
+ },
+ ),
+ responses={status.HTTP_200_OK: None},
+ )
+ @action(methods=["post"], detail=False)
+ def set_personal_notification(self, request):
+ """Set up a webhook as personal notification channel for the user."""
+ user = self.request.user
+
+ webhook_id = request.data.get("webhook")
+ if not webhook_id:
+ raise BadRequest(detail={"webhook": "This field is required."})
+
+ try:
+ webhook = Webhook.objects.get(
+ organization=user.organization,
+ public_primary_key=webhook_id,
+ trigger_type=Webhook.TRIGGER_PERSONAL_NOTIFICATION,
+ )
+ except Webhook.DoesNotExist:
+ raise BadRequest(detail={"webhook": "Webhook not found."})
+
+ context = request.data.get("context", None)
+ if context is not None:
+ if not isinstance(context, dict):
+ raise BadRequest(detail={"context": "Invalid context."})
+
+ try:
+ context = json.dumps(context)
+ except TypeError:
+ raise BadRequest(detail={"context": "Invalid context."})
+
+ # set or update personal webhook for user
+ PersonalNotificationWebhook.objects.update_or_create(
+ user=user,
+ defaults={
+ "webhook": webhook,
+ "additional_context_data": context,
+ },
+ )
+ return Response(status=status.HTTP_200_OK)
diff --git a/engine/apps/webhooks/backend.py b/engine/apps/webhooks/backend.py
new file mode 100644
index 0000000000..4081279e67
--- /dev/null
+++ b/engine/apps/webhooks/backend.py
@@ -0,0 +1,41 @@
+import typing
+
+from django.core.exceptions import ObjectDoesNotExist
+
+from apps.base.messaging import BaseMessagingBackend
+
+if typing.TYPE_CHECKING:
+ from apps.alerts.models import AlertGroup
+ from apps.base.models import UserNotificationPolicy
+ from apps.user_management.models import User
+
+
+class PersonalWebhookBackend(BaseMessagingBackend):
+ backend_id = "WEBHOOK"
+ label = "Webhook"
+ short_label = "Webhook"
+ available_for_use = True
+
+ def serialize_user(self, user: "User"):
+ try:
+ personal_webhook = user.personal_webhook
+ except ObjectDoesNotExist:
+ return None
+ return {"id": personal_webhook.webhook.public_primary_key, "name": personal_webhook.webhook.name}
+
+ def unlink_user(self, user):
+ try:
+ user.personal_webhook.delete()
+ except ObjectDoesNotExist:
+ pass
+
+ def notify_user(
+ self, user: "User", alert_group: "AlertGroup", notification_policy: typing.Optional["UserNotificationPolicy"]
+ ):
+ from apps.webhooks.tasks import notify_user_async
+
+ notify_user_async.delay(
+ user_pk=user.pk,
+ alert_group_pk=alert_group.pk,
+ notification_policy_pk=notification_policy.pk if notification_policy else None,
+ )
diff --git a/engine/apps/webhooks/migrations/0018_alter_webhook_trigger_type_and_more.py b/engine/apps/webhooks/migrations/0018_alter_webhook_trigger_type_and_more.py
new file mode 100644
index 0000000000..9a9b80915d
--- /dev/null
+++ b/engine/apps/webhooks/migrations/0018_alter_webhook_trigger_type_and_more.py
@@ -0,0 +1,35 @@
+# Generated by Django 4.2.16 on 2025-01-27 18:46
+
+from django.db import migrations, models
+import django.db.models.deletion
+import mirage.fields
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('user_management', '0029_remove_organization_general_log_channel_id_db'),
+ ('webhooks', '0017_alter_webhook_trigger_type_and_more'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='webhook',
+ name='trigger_type',
+ field=models.IntegerField(choices=[(0, 'Manual or escalation step'), (1, 'Alert Group Created'), (2, 'Acknowledged'), (3, 'Resolved'), (4, 'Silenced'), (5, 'Unsilenced'), (6, 'Unresolved'), (7, 'Unacknowledged'), (8, 'Status change'), (9, 'Personal notification')], default=0, null=True),
+ ),
+ migrations.AlterField(
+ model_name='webhookresponse',
+ name='trigger_type',
+ field=models.IntegerField(choices=[(0, 'Manual or escalation step'), (1, 'Alert Group Created'), (2, 'Acknowledged'), (3, 'Resolved'), (4, 'Silenced'), (5, 'Unsilenced'), (6, 'Unresolved'), (7, 'Unacknowledged'), (8, 'Status change'), (9, 'Personal notification')]),
+ ),
+ migrations.CreateModel(
+ name='PersonalNotificationWebhook',
+ fields=[
+ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('additional_context_data', mirage.fields.EncryptedTextField(null=True)),
+ ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='personal_webhook', to='user_management.user')),
+ ('webhook', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='personal_channels', to='webhooks.webhook')),
+ ],
+ ),
+ ]
diff --git a/engine/apps/webhooks/models/__init__.py b/engine/apps/webhooks/models/__init__.py
index d276a7ccc2..76bd7c8d86 100644
--- a/engine/apps/webhooks/models/__init__.py
+++ b/engine/apps/webhooks/models/__init__.py
@@ -1 +1 @@
-from .webhook import Webhook, WebhookResponse # noqa: F401
+from .webhook import PersonalNotificationWebhook, Webhook, WebhookResponse # noqa: F401
diff --git a/engine/apps/webhooks/models/webhook.py b/engine/apps/webhooks/models/webhook.py
index 9d65a5d774..cb219257f0 100644
--- a/engine/apps/webhooks/models/webhook.py
+++ b/engine/apps/webhooks/models/webhook.py
@@ -88,7 +88,8 @@ class Webhook(models.Model):
TRIGGER_UNRESOLVE,
TRIGGER_UNACKNOWLEDGE,
TRIGGER_STATUS_CHANGE,
- ) = range(9)
+ TRIGGER_PERSONAL_NOTIFICATION,
+ ) = range(10)
# Must be the same order as previous
TRIGGER_TYPES = (
@@ -101,6 +102,7 @@ class Webhook(models.Model):
(TRIGGER_UNRESOLVE, "Unresolved"),
(TRIGGER_UNACKNOWLEDGE, "Unacknowledged"),
(TRIGGER_STATUS_CHANGE, "Status change"),
+ (TRIGGER_PERSONAL_NOTIFICATION, "Personal notification"),
)
ALL_TRIGGER_TYPES = [i[0] for i in TRIGGER_TYPES]
@@ -123,6 +125,7 @@ class Webhook(models.Model):
TRIGGER_UNRESOLVE: "unresolve",
TRIGGER_UNACKNOWLEDGE: "unacknowledge",
TRIGGER_STATUS_CHANGE: "status change",
+ TRIGGER_PERSONAL_NOTIFICATION: "personal notification",
}
PUBLIC_ALL_TRIGGER_TYPES = [i for i in PUBLIC_TRIGGER_TYPES_MAP.values()]
@@ -363,3 +366,27 @@ def webhook_response_post_save(sender, instance, created, *args, **kwargs):
source_alert_receive_channel = instance.webhook.get_source_alert_receive_channel()
if source_alert_receive_channel and hasattr(source_alert_receive_channel.config, "on_webhook_response_created"):
source_alert_receive_channel.config.on_webhook_response_created(instance, source_alert_receive_channel)
+
+
+class PersonalNotificationWebhook(models.Model):
+ user = models.OneToOneField(
+ "user_management.User",
+ on_delete=models.CASCADE,
+ related_name="personal_webhook",
+ )
+ webhook = models.ForeignKey(
+ "webhooks.Webhook",
+ on_delete=models.CASCADE,
+ related_name="personal_channels",
+ )
+ # only visible to owner
+ additional_context_data = mirage_fields.EncryptedTextField(null=True)
+
+ @property
+ def context_data(self):
+ return json.loads(self.additional_context_data) if self.additional_context_data else {}
+
+ @context_data.setter
+ def context_data(self, value):
+ self.additional_context_data = json.dumps(value) if value else None
+ self.save(update_fields=["additional_context_data"])
diff --git a/engine/apps/webhooks/tasks/__init__.py b/engine/apps/webhooks/tasks/__init__.py
index d35d009529..ad33b0bd79 100644
--- a/engine/apps/webhooks/tasks/__init__.py
+++ b/engine/apps/webhooks/tasks/__init__.py
@@ -1,2 +1,3 @@
from .alert_group_status import alert_group_created, alert_group_status_change # noqa: F401
+from .notify_user import notify_user_async # noqa: F401
from .trigger_webhook import execute_webhook, send_webhook_event # noqa: F401
diff --git a/engine/apps/webhooks/tasks/notify_user.py b/engine/apps/webhooks/tasks/notify_user.py
new file mode 100644
index 0000000000..71ace43c89
--- /dev/null
+++ b/engine/apps/webhooks/tasks/notify_user.py
@@ -0,0 +1,67 @@
+from celery.utils.log import get_task_logger
+from django.conf import settings
+from django.core.exceptions import ObjectDoesNotExist
+
+from apps.alerts.models import AlertGroup
+from apps.user_management.models import User
+from apps.webhooks.models import Webhook
+from common.custom_celery_tasks import shared_dedicated_queue_retry_task
+
+MAX_RETRIES = 1 if settings.DEBUG else 10
+logger = get_task_logger(__name__)
+
+
+@shared_dedicated_queue_retry_task(autoretry_for=(Exception,), retry_backoff=True, max_retries=MAX_RETRIES)
+def notify_user_async(user_pk, alert_group_pk, notification_policy_pk):
+ # imported here to avoid circular import error
+ from apps.base.models import UserNotificationPolicy, UserNotificationPolicyLogRecord
+ from apps.webhooks.tasks import execute_webhook
+
+ try:
+ user = User.objects.get(pk=user_pk)
+ except User.DoesNotExist:
+ logger.warning(f"User {user_pk} does not exist")
+ return
+
+ try:
+ alert_group = AlertGroup.objects.get(pk=alert_group_pk)
+ except AlertGroup.DoesNotExist:
+ logger.warning(f"Alert group {alert_group_pk} does not exist")
+ return
+
+ try:
+ notification_policy = UserNotificationPolicy.objects.get(pk=notification_policy_pk)
+ except UserNotificationPolicy.DoesNotExist:
+ logger.warning(f"User notification policy {notification_policy_pk} does not exist")
+ return
+
+ try:
+ personal_webhook = user.personal_webhook
+ except ObjectDoesNotExist:
+ logger.warning(f"Personal webhook is not set for user {user_pk}")
+ # record log notification error
+ UserNotificationPolicyLogRecord.objects.create(
+ author=user,
+ type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_FAILED,
+ notification_policy=notification_policy,
+ alert_group=alert_group,
+ notification_step=notification_policy.step,
+ notification_channel=notification_policy.notify_by,
+ )
+ return
+
+ # trigger webhook via task
+ execute_webhook.apply_async(
+ (personal_webhook.webhook.pk, alert_group.pk, user.pk, notification_policy.pk),
+ kwargs={"trigger_type": Webhook.TRIGGER_PERSONAL_NOTIFICATION},
+ )
+
+ # record log notification success
+ UserNotificationPolicyLogRecord.objects.create(
+ author=user,
+ type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_SUCCESS,
+ notification_policy=notification_policy,
+ alert_group=alert_group,
+ notification_step=notification_policy.step,
+ notification_channel=notification_policy.notify_by,
+ )
diff --git a/engine/apps/webhooks/tasks/trigger_webhook.py b/engine/apps/webhooks/tasks/trigger_webhook.py
index 803beb9f31..8d6b4c3895 100644
--- a/engine/apps/webhooks/tasks/trigger_webhook.py
+++ b/engine/apps/webhooks/tasks/trigger_webhook.py
@@ -46,6 +46,7 @@
Webhook.TRIGGER_MANUAL: "escalation",
Webhook.TRIGGER_UNACKNOWLEDGE: "unacknowledge",
Webhook.TRIGGER_STATUS_CHANGE: "status change",
+ Webhook.TRIGGER_PERSONAL_NOTIFICATION: "personal notification",
}
@@ -107,9 +108,14 @@ def _build_payload(
elif payload_trigger_type == Webhook.TRIGGER_SILENCE:
event["time"] = _isoformat_date(alert_group.silenced_at)
event["until"] = _isoformat_date(alert_group.silenced_until)
- elif payload_trigger_type == Webhook.TRIGGER_MANUAL:
+ elif payload_trigger_type in (Webhook.TRIGGER_MANUAL, Webhook.TRIGGER_PERSONAL_NOTIFICATION):
event["time"] = _isoformat_date(timezone.now())
+ # if this is a personal notification triggered webhook, event will include additional user data
+ if payload_trigger_type == Webhook.TRIGGER_PERSONAL_NOTIFICATION:
+ personal_webhook = getattr(user, "personal_webhook", None)
+ event["user"] = personal_webhook.context_data if personal_webhook else {}
+
# include latest response data per webhook in the event input data
# exclude past responses from webhook being executed
responses_data = {}
@@ -179,6 +185,9 @@ def make_request(
status["request_data"] = json.dumps(request_kwargs["json"])
else:
status["request_data"] = request_kwargs.get("data")
+ if webhook.trigger_type == Webhook.TRIGGER_PERSONAL_NOTIFICATION:
+ # mask data for personal webhooks
+ status["request_data"] = WEBHOOK_FIELD_PLACEHOLDER
response = webhook.make_request(status["url"], request_kwargs)
status["status_code"] = response.status_code
content_length = len(response.content)
diff --git a/engine/apps/webhooks/tests/factories.py b/engine/apps/webhooks/tests/factories.py
index 1490df204d..9c1d72bd44 100644
--- a/engine/apps/webhooks/tests/factories.py
+++ b/engine/apps/webhooks/tests/factories.py
@@ -1,7 +1,7 @@
import factory
import pytz
-from apps.webhooks.models import Webhook, WebhookResponse
+from apps.webhooks.models import PersonalNotificationWebhook, Webhook, WebhookResponse
from common.utils import UniqueFaker
@@ -13,6 +13,11 @@ class Meta:
model = Webhook
+class PersonalNotificationWebhookFactory(factory.DjangoModelFactory):
+ class Meta:
+ model = PersonalNotificationWebhook
+
+
class WebhookResponseFactory(factory.DjangoModelFactory):
timestamp = factory.Faker("date_time", tzinfo=pytz.UTC)
diff --git a/engine/apps/webhooks/tests/test_backend.py b/engine/apps/webhooks/tests/test_backend.py
new file mode 100644
index 0000000000..46e6574b25
--- /dev/null
+++ b/engine/apps/webhooks/tests/test_backend.py
@@ -0,0 +1,97 @@
+from unittest.mock import patch
+
+import pytest
+from django.conf import settings
+from django.core.exceptions import ObjectDoesNotExist
+
+from apps.base.models import UserNotificationPolicy
+from apps.webhooks.backend import PersonalWebhookBackend
+from apps.webhooks.models import Webhook
+
+
+@pytest.mark.django_db
+def test_serialize_user(
+ make_organization, make_user_for_organization, make_custom_webhook, make_personal_notification_webhook
+):
+ organization = make_organization()
+ user = make_user_for_organization(organization)
+
+ backend = PersonalWebhookBackend()
+
+ # by default, there is no personal webhook set
+ assert backend.serialize_user(user) is None
+
+ # set personal webhook
+ webhook = make_custom_webhook(
+ organization=organization,
+ trigger_type=Webhook.TRIGGER_PERSONAL_NOTIFICATION,
+ )
+ make_personal_notification_webhook(user=user, webhook=webhook)
+
+ assert backend.serialize_user(user) == {"id": webhook.public_primary_key, "name": webhook.name}
+
+
+@pytest.mark.django_db
+def test_unlink_webhook(
+ make_organization,
+ make_user_for_organization,
+ make_custom_webhook,
+ make_personal_notification_webhook,
+):
+ organization = make_organization()
+ user = make_user_for_organization(organization)
+
+ backend = PersonalWebhookBackend()
+ # set personal webhook
+ webhook = make_custom_webhook(
+ organization=organization,
+ trigger_type=Webhook.TRIGGER_PERSONAL_NOTIFICATION,
+ )
+ make_personal_notification_webhook(user=user, webhook=webhook)
+
+ assert user.personal_webhook is not None
+
+ backend.unlink_user(user)
+ user.refresh_from_db()
+ with pytest.raises(ObjectDoesNotExist):
+ user.personal_webhook
+
+
+@pytest.mark.django_db
+def test_notify_user_triggers_task(
+ make_organization,
+ make_user_for_organization,
+ make_user_notification_policy,
+ make_alert_receive_channel,
+ make_alert_group,
+ make_custom_webhook,
+ make_personal_notification_webhook,
+):
+ organization = make_organization()
+ user = make_user_for_organization(organization)
+ alert_receive_channel = make_alert_receive_channel(organization)
+ alert_group = make_alert_group(alert_receive_channel)
+
+ backend = PersonalWebhookBackend()
+ # set personal webhook
+ webhook = make_custom_webhook(
+ organization=organization,
+ trigger_type=Webhook.TRIGGER_PERSONAL_NOTIFICATION,
+ )
+ make_personal_notification_webhook(user=user, webhook=webhook)
+
+ notification_policy = make_user_notification_policy(
+ user,
+ UserNotificationPolicy.Step.NOTIFY,
+ notify_by=settings.PERSONAL_WEBHOOK_BACKEND_ID,
+ important=False,
+ )
+
+ with patch("apps.webhooks.tasks.notify_user_async") as mock_notify_user_async:
+ backend.notify_user(user, alert_group, notification_policy)
+
+ mock_notify_user_async.delay.assert_called_once_with(
+ user_pk=user.pk,
+ alert_group_pk=alert_group.pk,
+ notification_policy_pk=notification_policy.pk,
+ )
diff --git a/engine/apps/webhooks/tests/test_notify_user.py b/engine/apps/webhooks/tests/test_notify_user.py
new file mode 100644
index 0000000000..98d93e9e54
--- /dev/null
+++ b/engine/apps/webhooks/tests/test_notify_user.py
@@ -0,0 +1,154 @@
+from unittest.mock import patch
+
+import pytest
+from django.conf import settings
+
+from apps.base.models import UserNotificationPolicy, UserNotificationPolicyLogRecord
+from apps.webhooks.models import Webhook
+from apps.webhooks.tasks import notify_user_async
+
+
+@pytest.mark.django_db
+def test_notify_user_not_found(
+ make_organization,
+ make_user_for_organization,
+ make_alert_receive_channel,
+ make_alert_group,
+ make_user_notification_policy,
+ caplog,
+):
+ organization = make_organization()
+ user = make_user_for_organization(organization)
+ alert_receive_channel = make_alert_receive_channel(organization)
+ alert_group = make_alert_group(alert_receive_channel)
+
+ notification_policy = make_user_notification_policy(
+ user,
+ UserNotificationPolicy.Step.NOTIFY,
+ notify_by=settings.PERSONAL_WEBHOOK_BACKEND_ID,
+ important=False,
+ )
+
+ with patch("apps.webhooks.tasks.execute_webhook") as mock_execute_webhook:
+ notify_user_async(42, alert_group.pk, notification_policy.pk)
+
+ assert mock_execute_webhook.apply_async.called is False
+ assert "User 42 does not exist" in caplog.text
+
+
+@pytest.mark.django_db
+def test_notify_user_alert_group_not_found(
+ make_organization,
+ make_user_for_organization,
+ make_user_notification_policy,
+ caplog,
+):
+ organization = make_organization()
+ user = make_user_for_organization(organization)
+
+ notification_policy = make_user_notification_policy(
+ user,
+ UserNotificationPolicy.Step.NOTIFY,
+ notify_by=settings.PERSONAL_WEBHOOK_BACKEND_ID,
+ important=False,
+ )
+
+ with patch("apps.webhooks.tasks.execute_webhook") as mock_execute_webhook:
+ notify_user_async(user.pk, 42, notification_policy.pk)
+
+ assert mock_execute_webhook.apply_async.called is False
+ assert "Alert group 42 does not exist" in caplog.text
+
+
+@pytest.mark.django_db
+def test_notify_user_policy_not_found(
+ make_organization,
+ make_user_for_organization,
+ make_alert_receive_channel,
+ make_alert_group,
+ caplog,
+):
+ organization = make_organization()
+ user = make_user_for_organization(organization)
+ alert_receive_channel = make_alert_receive_channel(organization)
+ alert_group = make_alert_group(alert_receive_channel)
+
+ with patch("apps.webhooks.tasks.execute_webhook") as mock_execute_webhook:
+ notify_user_async(user.pk, alert_group.pk, 42)
+
+ assert mock_execute_webhook.apply_async.called is False
+ assert "User notification policy 42 does not exist" in caplog.text
+
+
+@pytest.mark.django_db
+def test_notify_user_personal_webhook_not_set(
+ make_organization,
+ make_user_for_organization,
+ make_alert_receive_channel,
+ make_alert_group,
+ make_user_notification_policy,
+ caplog,
+):
+ organization = make_organization()
+ user = make_user_for_organization(organization)
+ alert_receive_channel = make_alert_receive_channel(organization)
+ alert_group = make_alert_group(alert_receive_channel)
+
+ notification_policy = make_user_notification_policy(
+ user,
+ UserNotificationPolicy.Step.NOTIFY,
+ notify_by=settings.PERSONAL_WEBHOOK_BACKEND_ID,
+ important=False,
+ )
+
+ with patch("apps.webhooks.tasks.execute_webhook") as mock_execute_webhook:
+ notify_user_async(user.pk, alert_group.pk, notification_policy.pk)
+
+ assert mock_execute_webhook.apply_async.called is False
+ assert f"Personal webhook is not set for user {user.pk}" in caplog.text
+ log_record = notification_policy.personal_log_records.last()
+ assert log_record.type == UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_FAILED
+
+
+@pytest.mark.django_db
+def test_notify_user_ok(
+ make_organization,
+ make_user_for_organization,
+ make_alert_receive_channel,
+ make_alert_group,
+ make_user_notification_policy,
+ make_custom_webhook,
+ make_personal_notification_webhook,
+):
+ organization = make_organization()
+ user = make_user_for_organization(organization)
+ alert_receive_channel = make_alert_receive_channel(organization)
+ alert_group = make_alert_group(alert_receive_channel)
+
+ # set personal webhook
+ webhook = make_custom_webhook(
+ organization=organization,
+ trigger_type=Webhook.TRIGGER_PERSONAL_NOTIFICATION,
+ )
+ make_personal_notification_webhook(user=user, webhook=webhook)
+
+ notification_policy = make_user_notification_policy(
+ user,
+ UserNotificationPolicy.Step.NOTIFY,
+ notify_by=settings.PERSONAL_WEBHOOK_BACKEND_ID,
+ important=False,
+ )
+
+ with patch("apps.webhooks.tasks.execute_webhook") as mock_execute_webhook:
+ notify_user_async(user.pk, alert_group.pk, notification_policy.pk)
+
+ mock_execute_webhook.apply_async.assert_called_once_with(
+ (user.personal_webhook.webhook.pk, alert_group.pk, user.pk, notification_policy.pk),
+ kwargs={"trigger_type": Webhook.TRIGGER_PERSONAL_NOTIFICATION},
+ )
+
+ log_record = notification_policy.personal_log_records.last()
+ assert log_record.type == UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_SUCCESS
+
+
+# tests: user does not exist, ag does not exist, policy does not exist; no webhook; webhook triggered
diff --git a/engine/apps/webhooks/tests/test_trigger_webhook.py b/engine/apps/webhooks/tests/test_trigger_webhook.py
index 152020f6fa..9532375ddc 100644
--- a/engine/apps/webhooks/tests/test_trigger_webhook.py
+++ b/engine/apps/webhooks/tests/test_trigger_webhook.py
@@ -11,7 +11,7 @@
from apps.base.models import UserNotificationPolicyLogRecord
from apps.public_api.serializers import AlertGroupSerializer
from apps.webhooks.models import Webhook
-from apps.webhooks.models.webhook import WebhookSession
+from apps.webhooks.models.webhook import WEBHOOK_FIELD_PLACEHOLDER, WebhookSession
from apps.webhooks.tasks import execute_webhook, send_webhook_event
from apps.webhooks.tasks.trigger_webhook import NOT_FROM_SELECTED_INTEGRATION
from settings.base import WEBHOOK_RESPONSE_LIMIT
@@ -949,3 +949,64 @@ def test_execute_webhook_integration_config(
# check on_webhook_response_created is called
mock_on_webhook_response_created.assert_called_once_with(webhook.responses.all()[0], source_alert_receive_channel)
+
+
+@pytest.mark.django_db
+def test_execute_webhook_via_personal_notification(
+ make_organization,
+ make_user_for_organization,
+ make_alert_receive_channel,
+ make_alert_group,
+ make_custom_webhook,
+ make_personal_notification_webhook,
+):
+ organization = make_organization()
+ user = make_user_for_organization(organization)
+ alert_receive_channel = make_alert_receive_channel(organization)
+ alert_group = make_alert_group(alert_receive_channel)
+ webhook = make_custom_webhook(
+ organization=organization,
+ url="https://something/{{ alert_group_id }}/",
+ http_method="POST",
+ trigger_type=Webhook.TRIGGER_PERSONAL_NOTIFICATION,
+ data='{"id": "{{ event.user.id }}"}',
+ forward_all=False,
+ )
+ # setup personal webhook configuration
+ user_data = {"id": "some-specific-user-id"}
+ make_personal_notification_webhook(user=user, webhook=webhook, additional_context_data=json.dumps(user_data))
+
+ mock_response = MockResponse()
+ with patch("apps.webhooks.utils.socket.gethostbyname") as mock_gethostbyname:
+ mock_gethostbyname.return_value = "8.8.8.8"
+ with patch("apps.webhooks.models.webhook.WebhookSession.request", return_value=mock_response) as mock_request:
+ execute_webhook(webhook.pk, alert_group.pk, user.pk, None)
+
+ assert mock_request.called
+ expected_call = call(
+ "POST",
+ f"https://something/{alert_group.public_primary_key}/",
+ timeout=TIMEOUT,
+ headers={},
+ # user data is available in the context
+ json={"id": user_data["id"]},
+ )
+ assert mock_request.call_args == expected_call
+ response = webhook.responses.all()[0]
+ # check log record
+ log_record = alert_group.log_records.last()
+ assert log_record.type == AlertGroupLogRecord.TYPE_CUSTOM_WEBHOOK_TRIGGERED
+ expected_info = {
+ "trigger": "personal notification",
+ "webhook_id": webhook.public_primary_key,
+ "webhook_name": webhook.name,
+ "response_id": response.id,
+ }
+ assert log_record.step_specific_info == expected_info
+ assert log_record.escalation_policy is None
+ assert log_record.escalation_policy_step is None
+ assert (
+ log_record.rendered_log_line_action() == f"outgoing webhook `{webhook.name}` triggered by personal notification"
+ )
+ # check response masked the data (which may contain user personal data)
+ assert response.request_data == WEBHOOK_FIELD_PLACEHOLDER
diff --git a/engine/apps/webhooks/tests/test_webhook.py b/engine/apps/webhooks/tests/test_webhook.py
index 389c0702b8..57397f4511 100644
--- a/engine/apps/webhooks/tests/test_webhook.py
+++ b/engine/apps/webhooks/tests/test_webhook.py
@@ -389,3 +389,29 @@ def test_get_source_alert_receive_channel(make_organization, make_alert_receive_
assert w1.get_source_alert_receive_channel() == channel2
assert w2.get_source_alert_receive_channel() == channel1
+
+
+@pytest.mark.django_db
+def test_personal_notification_webhook(
+ make_organization, make_user_for_organization, make_custom_webhook, make_personal_notification_webhook
+):
+ organization = make_organization()
+ user = make_user_for_organization(organization=organization)
+ webhook = make_custom_webhook(organization=organization, trigger_type=Webhook.TRIGGER_PERSONAL_NOTIFICATION)
+
+ personal_webhook = make_personal_notification_webhook(user=user, webhook=webhook)
+
+ assert personal_webhook.user == user
+ assert personal_webhook.webhook == webhook
+
+ # default context data
+ assert personal_webhook.context_data == {}
+
+ # set context data
+ personal_webhook.context_data = {"foo": "bar"}
+ personal_webhook.refresh_from_db()
+ assert personal_webhook.context_data == {"foo": "bar"}
+
+ # set empty
+ personal_webhook.context_data = None
+ assert personal_webhook.context_data == {}
diff --git a/engine/conftest.py b/engine/conftest.py
index c7e7475cf4..e3440ba0a2 100644
--- a/engine/conftest.py
+++ b/engine/conftest.py
@@ -113,7 +113,11 @@
UserFactory,
)
from apps.webhooks.presets.preset_options import WebhookPresetOptions
-from apps.webhooks.tests.factories import CustomWebhookFactory, WebhookResponseFactory
+from apps.webhooks.tests.factories import (
+ CustomWebhookFactory,
+ PersonalNotificationWebhookFactory,
+ WebhookResponseFactory,
+)
from apps.webhooks.tests.test_webhook_presets import (
ADVANCED_WEBHOOK_PRESET_ID,
TEST_WEBHOOK_PRESET_ID,
@@ -794,6 +798,15 @@ def _make_custom_webhook(organization, **kwargs):
return _make_custom_webhook
+@pytest.fixture
+def make_personal_notification_webhook():
+ def _make_personal_notification_webhook(user, webhook, **kwargs):
+ personal_webhook = PersonalNotificationWebhookFactory(user=user, webhook=webhook, **kwargs)
+ return personal_webhook
+
+ return _make_personal_notification_webhook
+
+
@pytest.fixture
def make_webhook_response():
def _make_webhook_response(**kwargs):
diff --git a/engine/settings/base.py b/engine/settings/base.py
index affb27fe8f..fe8e8189ef 100644
--- a/engine/settings/base.py
+++ b/engine/settings/base.py
@@ -873,6 +873,11 @@ class BrokerTypes:
INBOUND_EMAIL_AWS_SECRET_ACCESS_KEY = os.getenv("INBOUND_EMAIL_AWS_SECRET_ACCESS_KEY")
INBOUND_EMAIL_AWS_REGION = os.getenv("INBOUND_EMAIL_AWS_REGION")
+PERSONAL_WEBHOOK_BACKEND_ID = 11
+FEATURE_PERSONAL_WEBHOOK_ENABLED = getenv_boolean("FEATURE_PERSONAL_WEBHOOK_ENABLED", default=True)
+if FEATURE_PERSONAL_WEBHOOK_ENABLED:
+ EXTRA_MESSAGING_BACKENDS += [("apps.webhooks.backend.PersonalWebhookBackend", PERSONAL_WEBHOOK_BACKEND_ID)]
+
INSTALLED_ONCALL_INTEGRATIONS = [
# Featured
"config_integrations.grafana_alerting",
diff --git a/engine/settings/celery_task_routes.py b/engine/settings/celery_task_routes.py
index de222f0aec..e3b9fdcf64 100644
--- a/engine/settings/celery_task_routes.py
+++ b/engine/settings/celery_task_routes.py
@@ -188,4 +188,5 @@
"apps.webhooks.tasks.trigger_webhook.send_webhook_event": {"queue": "webhook"},
"apps.webhooks.tasks.alert_group_status.alert_group_created": {"queue": "webhook"},
"apps.webhooks.tasks.alert_group_status.alert_group_status_change": {"queue": "webhook"},
+ "apps.webhooks.tasks.notify_user.notify_user_async": {"queue": "webhook"},
}
diff --git a/grafana-plugin/src/containers/UserSettings/UserSettings.tsx b/grafana-plugin/src/containers/UserSettings/UserSettings.tsx
index 0e77055972..c56cab83a3 100644
--- a/grafana-plugin/src/containers/UserSettings/UserSettings.tsx
+++ b/grafana-plugin/src/containers/UserSettings/UserSettings.tsx
@@ -106,6 +106,7 @@ export const UserSettings = observer(({ id, onHide, tab = UserSettingsTab.UserIn
showNotificationSettingsTab,
showSlackConnectionTab,
showTelegramConnectionTab,
+ showPersonalWebhookConnectionTab,
showMobileAppConnectionTab,
showMsTeamsConnectionTab,
showGoogleCalendarTab,
@@ -113,6 +114,7 @@ export const UserSettings = observer(({ id, onHide, tab = UserSettingsTab.UserIn
!isDesktopOrLaptop,
isCurrent && organizationStore.currentOrganization?.slack_team_identity && !storeUser.slack_user_identity,
isCurrent && store.hasFeature(AppFeature.Telegram) && !storeUser.telegram_configuration,
+ isCurrent && store.hasFeature(AppFeature.PersonalWebhook),
isCurrent,
store.hasFeature(AppFeature.MsTeams) && !storeUser.messaging_backends.MSTEAMS,
isCurrent && store.hasFeature(AppFeature.GoogleOauth2),
@@ -141,6 +143,7 @@ export const UserSettings = observer(({ id, onHide, tab = UserSettingsTab.UserIn
showNotificationSettingsTab={showNotificationSettingsTab}
showSlackConnectionTab={showSlackConnectionTab}
showTelegramConnectionTab={showTelegramConnectionTab}
+ showPersonalWebhookConnectionTab={showPersonalWebhookConnectionTab}
showMobileAppConnectionTab={showMobileAppConnectionTab}
showMsTeamsConnectionTab={showMsTeamsConnectionTab}
showGoogleCalendarTab={showGoogleCalendarTab}
diff --git a/grafana-plugin/src/containers/UserSettings/UserSettings.types.ts b/grafana-plugin/src/containers/UserSettings/UserSettings.types.ts
index 6dfd1d9a5f..52095b4f81 100644
--- a/grafana-plugin/src/containers/UserSettings/UserSettings.types.ts
+++ b/grafana-plugin/src/containers/UserSettings/UserSettings.types.ts
@@ -5,6 +5,7 @@ export enum UserSettingsTab {
PhoneVerification,
SlackInfo,
TelegramInfo,
+ PersonalWebhookInfo,
MSTeamsInfo,
MobileAppConnection,
}
diff --git a/grafana-plugin/src/containers/UserSettings/parts/UserSettingsParts.tsx b/grafana-plugin/src/containers/UserSettings/parts/UserSettingsParts.tsx
index 04ec6219d4..29fbafd0b1 100644
--- a/grafana-plugin/src/containers/UserSettings/parts/UserSettingsParts.tsx
+++ b/grafana-plugin/src/containers/UserSettings/parts/UserSettingsParts.tsx
@@ -14,6 +14,7 @@ import { CloudPhoneSettings } from 'containers/UserSettings/parts/tabs/CloudPhon
import { GoogleCalendar } from 'containers/UserSettings/parts/tabs/GoogleCalendar/GoogleCalendar';
import { MSTeamsInfo } from 'containers/UserSettings/parts/tabs/MSTeamsInfo/MSTeamsInfo';
import { NotificationSettingsTab } from 'containers/UserSettings/parts/tabs/NotificationSettingsTab';
+import { PersonalWebhookInfo } from 'containers/UserSettings/parts/tabs/PersonalWebhookInfo/PersonalWebhookInfo';
import { PhoneVerification } from 'containers/UserSettings/parts/tabs/PhoneVerification/PhoneVerification';
import { TelegramInfo } from 'containers/UserSettings/parts/tabs/TelegramInfo/TelegramInfo';
import { UserInfoTab } from 'containers/UserSettings/parts/tabs/UserInfoTab/UserInfoTab';
@@ -29,6 +30,7 @@ interface TabsProps {
showGoogleCalendarTab: boolean;
showSlackConnectionTab: boolean;
showTelegramConnectionTab: boolean;
+ showPersonalWebhookConnectionTab: boolean;
showMsTeamsConnectionTab: boolean;
}
@@ -40,6 +42,7 @@ export const Tabs = ({
showMobileAppConnectionTab,
showSlackConnectionTab,
showTelegramConnectionTab,
+ showPersonalWebhookConnectionTab,
showMsTeamsConnectionTab,
}: TabsProps) => {
const getTabClickHandler = useCallback(
@@ -51,13 +54,11 @@ export const Tabs = ({
[onTabChange]
);
- const styles = useStyles2(getUserSettingsPartsStyles);
-
return (
)}
+ {showPersonalWebhookConnectionTab && (
+
+ )}
{showMsTeamsConnectionTab && (
}
{activeTab === UserSettingsTab.TelegramInfo && }
+ {activeTab === UserSettingsTab.PersonalWebhookInfo && }
{activeTab === UserSettingsTab.MSTeamsInfo && }
);
diff --git a/grafana-plugin/src/containers/UserSettings/parts/connectors/Connectors.tsx b/grafana-plugin/src/containers/UserSettings/parts/connectors/Connectors.tsx
index 570c58dc0c..de5a44a360 100644
--- a/grafana-plugin/src/containers/UserSettings/parts/connectors/Connectors.tsx
+++ b/grafana-plugin/src/containers/UserSettings/parts/connectors/Connectors.tsx
@@ -11,6 +11,7 @@ import { useStore } from 'state/useStore';
import { ICalConnector } from './ICalConnector';
import { MSTeamsConnector } from './MSTeamsConnector';
import { MobileAppConnector } from './MobileAppConnector';
+import { PersonalWebhookConnector } from './PersonalWebhookConnector';
import { PhoneConnector } from './PhoneConnector';
import { SlackConnector } from './SlackConnector';
import { TelegramConnector } from './TelegramConnector';
@@ -28,6 +29,7 @@ export const Connectors: FC = observer((props) => {
{store.hasFeature(AppFeature.Telegram) && }
+ {store.hasFeature(AppFeature.PersonalWebhook) && }
{store.hasFeature(AppFeature.MsTeams) && }
diff --git a/grafana-plugin/src/containers/UserSettings/parts/connectors/PersonalWebhookConnector.tsx b/grafana-plugin/src/containers/UserSettings/parts/connectors/PersonalWebhookConnector.tsx
new file mode 100644
index 0000000000..788e439537
--- /dev/null
+++ b/grafana-plugin/src/containers/UserSettings/parts/connectors/PersonalWebhookConnector.tsx
@@ -0,0 +1,60 @@
+import React, { useCallback } from 'react';
+
+import { Button, InlineField, Input, Stack } from '@grafana/ui';
+import { StackSize } from 'helpers/consts';
+import { observer } from 'mobx-react';
+
+import { WithConfirm } from 'components/WithConfirm/WithConfirm';
+import { UserSettingsTab } from 'containers/UserSettings/UserSettings.types';
+import { ApiSchemas } from 'network/oncall-api/api.types';
+import { useStore } from 'state/useStore';
+
+interface PersonalWebhookConnectorProps {
+ id: ApiSchemas['User']['pk'];
+ onTabChange: (tab: UserSettingsTab) => void;
+}
+
+export const PersonalWebhookConnector = observer((props: PersonalWebhookConnectorProps) => {
+ const { id, onTabChange } = props;
+
+ const store = useStore();
+ const { userStore } = store;
+
+ const storeUser = userStore.items[id];
+ const isCurrentUser = id === store.userStore.currentUserPk;
+
+ const handleConnectButtonClick = useCallback(() => {
+ onTabChange(UserSettingsTab.PersonalWebhookInfo);
+ }, []);
+
+ const handleUnlinkPersonalWebhook = useCallback(() => {
+ userStore.removePersonalWebhook();
+ }, []);
+
+ return (
+
+ {storeUser.messaging_backends.WEBHOOK ? (
+
+
+
+
+
+
+
+
+ ) : (
+
+
+
+
+
+ )}
+
+ );
+});
diff --git a/grafana-plugin/src/containers/UserSettings/parts/tabs/PersonalWebhookInfo/PersonalWebhookInfo.tsx b/grafana-plugin/src/containers/UserSettings/parts/tabs/PersonalWebhookInfo/PersonalWebhookInfo.tsx
new file mode 100644
index 0000000000..8258078951
--- /dev/null
+++ b/grafana-plugin/src/containers/UserSettings/parts/tabs/PersonalWebhookInfo/PersonalWebhookInfo.tsx
@@ -0,0 +1,245 @@
+import React, { useEffect, useMemo, useState } from 'react';
+
+import { css } from '@emotion/css';
+import { LoadingPlaceholder, Button, Select, Stack, Field, TextArea, Icon } from '@grafana/ui';
+import { PLUGIN_ROOT, StackSize } from 'helpers/consts';
+import { observer } from 'mobx-react';
+import { Controller, useForm } from 'react-hook-form';
+
+import { Text } from 'components/Text/Text';
+import { WithConfirm } from 'components/WithConfirm/WithConfirm';
+import { ActionKey } from 'models/loader/action-keys';
+import { WebhookTriggerType } from 'models/outgoing_webhook/outgoing_webhook.types'
+import { useStore } from 'state/useStore';
+
+function useWebhooksOptions(): [boolean, Array<{ label: string, value: string }>] {
+ const { outgoingWebhookStore, loaderStore } = useStore();
+ const [isLoading, setIsLoading] = useState(true)
+ const isLoadingWebhooks = loaderStore.isLoading(ActionKey.FETCH_WEBHOOKS)
+ const [hasRegisteredLoadingWebhooks, setHasRegisteredLoadingWebhooks] = useState(false);
+
+ const webhookOptions = useMemo(() =>
+ Object.values(outgoingWebhookStore.items).map((item) => ({
+ label: item.name,
+ value: item.id,
+ })), [outgoingWebhookStore.items]);
+
+
+ useEffect(() => {
+ if (isLoadingWebhooks) {
+ setHasRegisteredLoadingWebhooks(true);
+ }
+ }, [isLoadingWebhooks]);
+
+ useEffect(() => {
+ if (!isLoadingWebhooks && hasRegisteredLoadingWebhooks) {
+ setIsLoading(false);
+ }
+ }, [isLoadingWebhooks])
+
+ return [isLoading, webhookOptions];
+}
+
+const contextRules = {
+ validate(value: string) {
+ let context: object;
+ try {
+ context = JSON.parse(value);
+ } catch (_) {
+ return 'Invalid JSON';
+ }
+ if (typeof context !== 'object' || context === null || Array.isArray(context)) {
+ return 'JSON must be an object';
+ }
+ return true;
+ }
+}
+
+interface FormFields {
+ webhook: string;
+ context: string;
+}
+
+const defaultValues = { webhook: null, context: '{}' }
+
+export const PersonalWebhookInfo = observer(() => {
+ const { userStore, outgoingWebhookStore } = useStore();
+
+ const {
+ formState: {
+ isDirty,
+ isValid,
+ errors,
+ },
+ watch,
+ control,
+ getValues,
+ handleSubmit,
+ reset,
+ } = useForm({
+ mode: 'onChange',
+ defaultValues
+ });
+
+ const user = userStore.items[userStore.currentUserPk];
+ const selectedWebhook = watch('webhook');
+ const [isLoadingOptions, webhookOptions] = useWebhooksOptions()
+
+ const hasConnectedWebhook = user.messaging_backends?.WEBHOOK != null;
+ const hasSelectedValidWebhook = webhookOptions.some(option => option.value === selectedWebhook)
+
+ useEffect(() => {
+ (async () => {
+ await userStore.updatePersonalWebhook();
+ await outgoingWebhookStore.updateItems(
+ {
+ trigger_type: WebhookTriggerType.PersonalNotification.key,
+ },
+ true
+ );
+ })();
+ }, []);
+
+ useEffect(() => {
+ const { webhook, context } = userStore.personalWebhook ?? {};
+ reset({
+ webhook: webhook ?? null,
+ context: context ? JSON.stringify(context, null, 2) : '{}',
+ })
+ }, [userStore.personalWebhook]);
+
+ async function onFormSubmit() {
+ const values = getValues();
+ const webhook = values.webhook === '' ? null : values.webhook;
+ const context = JSON.parse(values.context);
+ userStore.addPersonalWebook({ webhook, context });
+ }
+
+ async function handleDisconnectPersonalWebhook() {
+ await userStore.removePersonalWebhook();
+ reset(defaultValues);
+ };
+
+ if (isLoadingOptions) {
+ return (
+
+
+
+ )
+ }
+
+ return (
+
+
+ }
+ >
+
+