Clarify test example, make more general
This commit is contained in:
parent
1b802d5852
commit
8fb5f7c057
17
README.md
17
README.md
|
@@ -83,26 +83,21 @@ curl --header "Content-Type: application/json" \
|
|||
|
||||
# Local testing Setup
|
||||
|
||||
Also you might want to test the bot on your local machine
|
||||
Use a domain e.g. webbhook.hyteck.de and configure nginx as
|
||||
reverse proxy for port 4242 for this domain.
|
||||
Also you might want to test the bot on your local machine but send webhooks to a public server. To do that use a domain
|
||||
e.g. webhook.example.com and configure nginx as reverse proxy for port 4242 for this domain.
|
||||
|
||||
## Connect
|
||||
|
||||
Run the local server and connect via (29316 is the local maubot port)
|
||||
`ssh -N -R 4242:localhost:29316 s`
|
||||
`ssh -N -R 4242:localhost:29316 webhook.example.com`
|
||||
|
||||
## Send some data with
|
||||
|
||||
Put the following in `data.json`
|
||||
```json
|
||||
{"receiver":"matrix","status":"firing","alerts":[{"status":"firing","labels":{"alertname":"InstanceDown","environment":"h2916641.stratoserver.net","instance":"localhost:9100","job":"node_exporter","severity":"critical"},"annotations":{"description":"localhost:9100 of job node_exporter has been down for more than 5 minutes.","summary":"Instance localhost:9100 down"},"startsAt":"2022-06-23T11:53:14.318Z","endsAt":"0001-01-01T00:00:00Z","generatorURL":"http://h2916641.stratoserver.net:9090/graph?g0.expr=up+%3D%3D+0\u0026g0.tab=1","fingerprint":"9cd7837114d58797"}],"groupLabels":{"alertname":"InstanceDown"},"commonLabels":{"alertname":"InstanceDown","environment":"h2916641.stratoserver.net","instance":"localhost:9100","job":"node_exporter","severity":"critical"},"commonAnnotations":{"description":"localhost:9100 of job node_exporter has been down for more than 5 minutes.","summary":"Instance localhost:9100 down"},"externalURL":"https://alert.hyteck.de","version":"4","groupKey":"{}:{alertname=\"InstanceDown\"}","truncatedAlerts":0}
|
||||
```
|
||||
and then
|
||||
Use an example from the `alert_examples/` to test your setup
|
||||
```shell
|
||||
curl --header "Content-Type: application/json" \
|
||||
--request POST \
|
||||
--data "@data.json" \
|
||||
https://webhook.hyteck.de/_matrix/maubot/plugin/maubot/webhook
|
||||
--data "@alert_examples/prometheus.json" \
|
||||
https://webhook.example.com/_matrix/maubot/plugin/maubot/webhook/!zOcbWjsWzdREnihreC:example.com
|
||||
```
|
||||
|
||||
|
|
|
@@ -0,0 +1,27 @@
|
|||
{"receiver": "Alertbot",
|
||||
"status": "firing",
|
||||
"alerts": [{"status": "firing",
|
||||
"labels": {"alertname": "HTTP response not OK",
|
||||
"grafana_folder": "General Alerting"},
|
||||
"annotations": {},
|
||||
"startsAt": "2022-08-24T18:45:00+02:00",
|
||||
"endsAt": "0001-01-01T00:00:00Z",
|
||||
"generatorURL": "http://0.0.0.0:3000/alerting/grafana/t7R1ZdZ4k/view",
|
||||
"fingerprint": "54827aa3e6b3dbbe",
|
||||
"silenceURL": "http://0.0.0.0:3000/alerting/silence/new?alertmanager=grafana&matcher=alertname%3DHTTP+response+not+OK&matcher=grafana_folder%3DGeneral+Alerting",
|
||||
"dashboardURL": "",
|
||||
"panelURL": "",
|
||||
"valueString": "[ var='B0' metric='Value' labels={server=http://s1.example.com} value=503 ]"}],
|
||||
"groupLabels": {},
|
||||
"commonLabels": {"alertname": "HTTP response not OK",
|
||||
"grafana_folder": "General Alerting"},
|
||||
"commonAnnotations": {},
|
||||
"externalURL": "http://0.0.0.0:3000/",
|
||||
"version": "1",
|
||||
"groupKey": "{}:{}",
|
||||
"truncatedAlerts": 0,
|
||||
"orgId": 1,
|
||||
"title": "[FIRING:1] (HTTP response not OK General Alerting)",
|
||||
"state": "alerting",
|
||||
"message": "**Firing**\n\nValue: [ var='B0' metric='Value' labels={server=http://s1.example.com} value=503 ]\nLabels:\n - alertname = HTTP response not OK\n - grafana_folder = General Alerting\nAnnotations:\nSource: http://0.0.0.0:3000/alerting/grafana/t7R1ZdZ4k/view\nSilence: http://0.0.0.0:3000/alerting/silence/new?alertmanager=grafana&matcher=alertname%3DHTTP+response+not+OK&matcher=grafana_folder%3DGeneral+Alerting\n"}
|
||||
|
|
@@ -0,0 +1,27 @@
|
|||
{"receiver": "Alertbot",
|
||||
"status": "resolved",
|
||||
"alerts": [{"status": "resolved",
|
||||
"labels": {"alertname": "HTTP response not OK",
|
||||
"grafana_folder": "General Alerting"},
|
||||
"annotations": {},
|
||||
"startsAt": "2022-08-24T18:45:00+02:00",
|
||||
"endsAt": "2022-08-24T19:29:00+02:00",
|
||||
"generatorURL": "http://0.0.0.0:3000/alerting/grafana/t7R1ZdZ4k/view",
|
||||
"fingerprint": "54827aa3e6b3dbbe",
|
||||
"silenceURL": "http://0.0.0.0:3000/alerting/silence/new?alertmanager=grafana&matcher=alertname%3DHTTP+response+not+OK&matcher=grafana_folder%3DGeneral+Alerting",
|
||||
"dashboardURL": "",
|
||||
"panelURL": "",
|
||||
"valueString": ""}],
|
||||
"groupLabels": {},
|
||||
"commonLabels": {"alertname": "HTTP response not OK",
|
||||
"grafana_folder": "General Alerting"},
|
||||
"commonAnnotations": {},
|
||||
"externalURL": "http://0.0.0.0:3000/",
|
||||
"version": "1",
|
||||
"groupKey": "{}:{}",
|
||||
"truncatedAlerts": 0,
|
||||
"orgId": 1,
|
||||
"title": "[RESOLVED] (HTTP response not OK General Alerting)",
|
||||
"state": "ok",
|
||||
"message": "**Resolved**\n\nValue: [no value]\nLabels:\n - alertname = HTTP response not OK\n - grafana_folder = General Alerting\nAnnotations:\nSource: http://0.0.0.0:3000/alerting/grafana/t7R1ZdZ4k/view\nSilence: http://0.0.0.0:3000/alerting/silence/new?alertmanager=grafana&matcher=alertname%3DHTTP+response+not+OK&matcher=grafana_folder%3DGeneral+Alerting\n"}
|
||||
|
|
@@ -0,0 +1,22 @@
|
|||
{"receiver":"matrix",
|
||||
"status":"firing",
|
||||
"alerts":[
|
||||
{"status":"firing",
|
||||
"labels":
|
||||
{"alertname":"InstanceDown",
|
||||
"environment":"monitoring.example.com",
|
||||
"instance":"webserver.example.com",
|
||||
"job":"node_exporter","severity":"critical"
|
||||
},
|
||||
"annotations":{"description":"webserver.example.com of job node_exporter has been down for more than 5 minutes.",
|
||||
"summary":"Instance webserver.example.com down"},
|
||||
"startsAt":"2022-06-23T11:53:14.318Z",
|
||||
"endsAt":"0001-01-01T00:00:00Z",
|
||||
"generatorURL":"http://monitoring.example.com:9090/graph?g0.expr=up+%3D%3D+0\u0026g0.tab=1",
|
||||
"fingerprint":"9cd7837114d58797"}],
|
||||
"groupLabels":{"alertname":"InstanceDown"},
|
||||
"commonLabels":{"alertname":"InstanceDown",
|
||||
"environment":"monitoring.example.com","instance":"webserver.example.com",
|
||||
"job":"node_exporter","severity":"critical"},
|
||||
"commonAnnotations":{"description":"webserver.example.com of job node_exporter has been down for more than 5 minutes.","summary":"Instance webserver.example.com down"},"externalURL":"https://alert.example","version":"4","groupKey":"{}:{alertname=\"InstanceDown\"}","truncatedAlerts":0}
|
||||
|
Loading…
Reference in New Issue