2015-12-09 10:51:34 -05:00
|
|
|
# -*- coding: utf-8 -*-
|
2016-01-06 23:26:29 -05:00
|
|
|
# Copyright 2015, 2016 OpenMarket Ltd
|
2017-10-10 06:21:41 -04:00
|
|
|
# Copyright 2017 New Vector Ltd
|
2015-12-09 10:51:34 -05:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
|
|
|
import logging
|
|
|
|
import re
|
2020-04-16 10:52:55 -04:00
|
|
|
from typing import Pattern
|
2015-12-09 10:51:34 -05:00
|
|
|
|
2018-07-09 02:09:20 -04:00
|
|
|
from six import string_types
|
|
|
|
|
2020-04-16 10:52:55 -04:00
|
|
|
from synapse.events import EventBase
|
2016-01-18 09:09:47 -05:00
|
|
|
from synapse.types import UserID
|
2020-05-11 13:45:23 -04:00
|
|
|
from synapse.util.caches import register_cache
|
2016-01-19 11:01:05 -05:00
|
|
|
from synapse.util.caches.lrucache import LruCache
|
2016-01-18 09:09:47 -05:00
|
|
|
|
2015-12-09 10:51:34 -05:00
|
|
|
# Module-level logger, named after this module per the standard convention.
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
2019-06-20 05:32:02 -04:00
|
|
|
# Matches [...] / [!...] character-class ranges inside a glob that has
# already been passed through re.escape() (hence the doubled backslashes).
# Group 1 captures the optional negation "!", group 2 the class contents.
GLOB_REGEX = re.compile(r"\\\[(\\\!|)(.*)\\\]")
# Quick test for whether a string contains any glob metacharacters at all.
IS_GLOB = re.compile(r"[\?\*\[\]]")
# Parses the "is" field of a room_member_count condition, e.g. ">=2" ->
# groups ("=>"-style operator, digits). Either part may be empty.
INEQUALITY_EXPR = re.compile("^([=<>]*)([0-9]*)$")
|
|
|
|
|
|
|
|
|
|
|
|
def _room_member_count(ev, condition, room_member_count):
    """Test a 'room_member_count' push rule condition.

    Args:
        ev: The event being evaluated (unused here).
        condition (dict): The condition dict; its "is" entry (e.g. ">=2")
            is compared against the member count.
        room_member_count (int): Number of joined members in the room.

    Returns:
        bool: Whether the condition matches.
    """
    return _test_ineq_condition(condition, room_member_count)
|
|
|
|
|
2017-10-05 08:08:02 -04:00
|
|
|
|
2017-10-10 10:23:00 -04:00
|
|
|
def _sender_notification_permission(ev, condition, sender_power_level, power_levels):
|
2019-06-20 05:32:02 -04:00
|
|
|
notif_level_key = condition.get("key")
|
2017-10-10 10:23:00 -04:00
|
|
|
if notif_level_key is None:
|
|
|
|
return False
|
|
|
|
|
2019-06-20 05:32:02 -04:00
|
|
|
notif_levels = power_levels.get("notifications", {})
|
2017-10-10 10:23:00 -04:00
|
|
|
room_notif_level = notif_levels.get(notif_level_key, 50)
|
|
|
|
|
2017-10-10 10:53:34 -04:00
|
|
|
return sender_power_level >= room_notif_level
|
2017-10-05 07:39:18 -04:00
|
|
|
|
2017-10-05 08:08:02 -04:00
|
|
|
|
2017-10-05 07:39:18 -04:00
|
|
|
def _test_ineq_condition(condition, number):
    """Compare a number against the condition's "is" specification.

    The "is" entry is of the form "<op><digits>", e.g. ">=2", "<10" or a
    bare "3" (treated as equality).

    Args:
        condition (dict): The condition dict, possibly containing an "is" entry.
        number (int): The value to compare (e.g. a room member count).

    Returns:
        bool: Whether the comparison holds; False for any malformed input.
    """
    if "is" not in condition:
        return False

    parsed = INEQUALITY_EXPR.match(condition["is"])
    if not parsed:
        return False

    op = parsed.group(1)
    digits = parsed.group(2)
    if not digits.isdigit():
        return False

    threshold = int(digits)

    # An empty operator means plain equality.
    if op in ("", "=="):
        return number == threshold
    if op == "<":
        return number < threshold
    if op == ">":
        return number > threshold
    if op == ">=":
        return number >= threshold
    if op == "<=":
        return number <= threshold

    # Anything else (e.g. "=<", "><") is rejected.
    return False
|
|
|
|
|
2017-10-05 08:08:02 -04:00
|
|
|
|
2016-04-07 11:31:38 -04:00
|
|
|
def tweaks_for_actions(actions):
    """Extract the tweaks from a push rule's action list.

    Non-dict actions (e.g. the plain string "notify") and dicts missing
    either "set_tweak" or "value" are ignored.

    Args:
        actions (list): The rule's actions.

    Returns:
        dict: Mapping of tweak name to tweak value.
    """
    return {
        action["set_tweak"]: action["value"]
        for action in actions
        if isinstance(action, dict) and "set_tweak" in action and "value" in action
    }
|
2015-12-09 10:51:34 -05:00
|
|
|
|
|
|
|
|
2016-01-18 09:09:47 -05:00
|
|
|
class PushRuleEvaluatorForEvent(object):
    """Evaluates individual push rule conditions against a single event.

    One instance is built per event; per-event state (the flattened event
    fields, member count, and power levels) is cached on the instance so
    that many rules can be checked cheaply against the same event.
    """

    def __init__(
        self,
        event: EventBase,
        room_member_count: int,
        sender_power_level: int,
        power_levels: dict,
    ):
        self._event = event
        self._room_member_count = room_member_count
        self._sender_power_level = sender_power_level
        self._power_levels = power_levels

        # Maps strings of e.g. 'content.body' -> event["content"]["body"]
        self._value_cache = _flatten_dict(event)

    def matches(self, condition: dict, user_id: str, display_name: str) -> bool:
        """Test a single push rule condition against this event.

        Args:
            condition: The condition dict; must contain a "kind" entry.
            user_id: The Matrix ID of the user the rules belong to.
            display_name: The user's display name in the room, if any.

        Returns:
            Whether the condition matches. Unknown condition kinds match
            unconditionally (returns True).
        """
        if condition["kind"] == "event_match":
            return self._event_match(condition, user_id)
        elif condition["kind"] == "contains_display_name":
            return self._contains_display_name(display_name)
        elif condition["kind"] == "room_member_count":
            return _room_member_count(self._event, condition, self._room_member_count)
        elif condition["kind"] == "sender_notification_permission":
            return _sender_notification_permission(
                self._event, condition, self._sender_power_level, self._power_levels
            )
        else:
            return True

    def _event_match(self, condition: dict, user_id: str) -> bool:
        """Test an "event_match" condition.

        The glob pattern comes from the condition's "pattern" entry or,
        via "pattern_type", from the user's ID or localpart.
        """
        pattern = condition.get("pattern", None)

        if not pattern:
            pattern_type = condition.get("pattern_type", None)
            if pattern_type == "user_id":
                pattern = user_id
            elif pattern_type == "user_localpart":
                pattern = UserID.from_string(user_id).localpart

        if not pattern:
            logger.warning("event_match condition with no pattern")
            return False

        # XXX: optimisation: cache our pattern regexps
        if condition["key"] == "content.body":
            # content.body is matched at word boundaries rather than
            # against the entire string.
            body = self._event.content.get("body", None)
            if not body or not isinstance(body, str):
                return False

            return _glob_matches(pattern, body, word_boundary=True)
        else:
            haystack = self._get_value(condition["key"])
            if haystack is None:
                return False

            return _glob_matches(pattern, haystack)

    def _contains_display_name(self, display_name: str) -> bool:
        """Test whether the event body mentions display_name as a whole word."""
        if not display_name:
            return False

        body = self._event.content.get("body", None)
        if not body or not isinstance(body, str):
            return False

        # Similar to _glob_matches, but do not treat display_name as a glob.
        r = regex_cache.get((display_name, False, True), None)
        if not r:
            r = re.escape(display_name)
            r = _re_word_boundary(r)
            r = re.compile(r, flags=re.IGNORECASE)
            regex_cache[(display_name, False, True)] = r

        # Coerce re.search's Match-or-None into a real bool so the declared
        # return type holds (previously the Match object leaked out).
        return bool(r.search(body))

    def _get_value(self, dotted_key: str) -> str:
        # NOTE: returns None (despite the annotation) when the key is absent.
        return self._value_cache.get(dotted_key, None)
|
|
|
|
|
2015-12-09 10:51:34 -05:00
|
|
|
|
2020-04-16 10:52:55 -04:00
|
|
|
# Caches (string, is_glob, word_boundary) -> regex for push. See _glob_matches
regex_cache = LruCache(50000)
# Expose the cache to Synapse's cache metrics under the name
# "regex_push_cache".
register_cache("cache", "regex_push_cache", regex_cache)
|
2017-03-29 10:53:14 -04:00
|
|
|
|
|
|
|
|
2020-04-16 10:52:55 -04:00
|
|
|
def _glob_matches(glob: str, value: str, word_boundary: bool = False) -> bool:
    """Tests if value matches glob.

    Compiled regexes are cached in regex_cache, keyed on
    (glob, is_glob=True, word_boundary).

    Args:
        glob: Glob pattern to match against.
        value: String to test against glob.
        word_boundary: Whether to match against word boundaries or entire
            string. Defaults to False.

    Returns:
        Whether the value matches; False if the glob cannot be compiled
        to a valid regex.
    """
    try:
        r = regex_cache.get((glob, True, word_boundary), None)
        if not r:
            r = _glob_to_re(glob, word_boundary)
            regex_cache[(glob, True, word_boundary)] = r
        # Coerce re.search's Match-or-None into a real bool so the declared
        # return type holds (previously the Match object leaked out).
        return bool(r.search(value))
    except re.error:
        logger.warning("Failed to parse glob to regex: %r", glob)
        return False
|
2016-01-18 09:09:47 -05:00
|
|
|
|
|
|
|
|
2020-04-16 10:52:55 -04:00
|
|
|
def _glob_to_re(glob: str, word_boundary: bool) -> Pattern:
    """Generates a compiled, case-insensitive regex for a given glob.

    Args:
        glob: The glob pattern to translate.
        word_boundary: Whether to match against word boundaries or entire string.
    """
    if IS_GLOB.search(glob):
        pattern = re.escape(glob)

        # Translate the (escaped) glob metacharacters into regex syntax.
        pattern = pattern.replace(r"\*", ".*?")
        pattern = pattern.replace(r"\?", ".")

        # handle [abc], [a-z] and [!a-z] style ranges.
        pattern = GLOB_REGEX.sub(
            lambda m: (
                "[%s%s]" % (m.group(1) and "^" or "", m.group(2).replace(r"\\\-", "-"))
            ),
            pattern,
        )
        if word_boundary:
            pattern = _re_word_boundary(pattern)
        else:
            pattern = "^" + pattern + "$"
    elif word_boundary:
        # Not a glob: match the escaped literal at word boundaries.
        pattern = _re_word_boundary(re.escape(glob))
    else:
        # Not a glob: match the escaped literal against the whole string.
        pattern = "^" + re.escape(glob) + "$"

    return re.compile(pattern, flags=re.IGNORECASE)
|
|
|
|
|
2017-10-05 06:43:10 -04:00
|
|
|
|
2020-04-16 10:52:55 -04:00
|
|
|
def _re_word_boundary(r: str) -> str:
|
2017-10-05 06:33:30 -04:00
|
|
|
"""
|
|
|
|
Adds word boundary characters to the start and end of an
|
|
|
|
expression to require that the match occur as a whole word,
|
|
|
|
but do so respecting the fact that strings starting or ending
|
|
|
|
with non-word characters will change word boundaries.
|
|
|
|
"""
|
2017-10-05 06:57:43 -04:00
|
|
|
# we can't use \b as it chokes on unicode. however \W seems to be okay
|
|
|
|
# as shorthand for [^0-9A-Za-z_].
|
|
|
|
return r"(^|\W)%s(\W|$)" % (r,)
|
2017-10-05 06:33:30 -04:00
|
|
|
|
2017-03-29 10:53:14 -04:00
|
|
|
|
2017-07-04 19:28:43 -04:00
|
|
|
def _flatten_dict(d, prefix=[], result=None):
|
|
|
|
if result is None:
|
|
|
|
result = {}
|
2016-01-18 09:09:47 -05:00
|
|
|
for key, value in d.items():
|
2018-04-15 15:43:35 -04:00
|
|
|
if isinstance(value, string_types):
|
2016-01-18 09:09:47 -05:00
|
|
|
result[".".join(prefix + [key])] = value.lower()
|
|
|
|
elif hasattr(value, "items"):
|
2016-02-02 12:18:50 -05:00
|
|
|
_flatten_dict(value, prefix=(prefix + [key]), result=result)
|
2016-01-18 09:09:47 -05:00
|
|
|
|
|
|
|
return result
|