2016-01-06 23:26:29 -05:00
|
|
|
# Copyright 2015, 2016 OpenMarket Ltd
|
2017-10-10 06:21:41 -04:00
|
|
|
# Copyright 2017 New Vector Ltd
|
2015-12-09 10:51:34 -05:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
|
|
|
import logging
|
|
|
|
import re
|
2022-05-24 09:23:23 -04:00
|
|
|
from typing import Any, Dict, List, Mapping, Optional, Pattern, Set, Tuple, Union
|
2015-12-09 10:51:34 -05:00
|
|
|
|
2022-01-05 06:41:49 -05:00
|
|
|
from matrix_common.regex import glob_to_regex, to_word_pattern
|
|
|
|
|
2020-04-16 10:52:55 -04:00
|
|
|
from synapse.events import EventBase
|
2022-02-28 12:40:24 -05:00
|
|
|
from synapse.types import UserID
|
2016-01-19 11:01:05 -05:00
|
|
|
from synapse.util.caches.lrucache import LruCache
|
2016-01-18 09:09:47 -05:00
|
|
|
|
2015-12-09 10:51:34 -05:00
|
|
|
logger = logging.getLogger(__name__)


# Matches a (possibly negated) character class inside an already
# re.escape()d glob, i.e. the escaped forms of "[!...]" / "[...]".
# NOTE(review): not referenced anywhere in this module's visible code --
# possibly kept for external users; confirm before removing.
GLOB_REGEX = re.compile(r"\\\[(\\\!|)(.*)\\\]")

# Quick test for whether a string contains any glob metacharacters.
# NOTE(review): also unreferenced in the visible code.
IS_GLOB = re.compile(r"[\?\*\[\]]")

# Parses the "is" field of a push rule condition: an optional run of
# comparison characters ("=", "<", ">") followed by a decimal integer,
# e.g. ">=2", "<10", "3". See _test_ineq_condition.
INEQUALITY_EXPR = re.compile("^([=<>]*)([0-9]*)$")
|
|
|
|
|
|
|
|
|
2020-12-11 11:43:53 -05:00
|
|
|
def _room_member_count(
    ev: EventBase, condition: Dict[str, Any], room_member_count: int
) -> bool:
    """Check a "room_member_count" push rule condition.

    Args:
        ev: The event being checked (unused here; presumably kept for a
            uniform signature with the other condition checkers).
        condition: The push rule condition; its "is" field (e.g. ">=2")
            is compared against the room's member count.
        room_member_count: The number of members in the room.

    Returns:
        True if the member count satisfies the condition, False otherwise.
    """
    return _test_ineq_condition(condition, room_member_count)
|
|
|
|
|
2017-10-05 08:08:02 -04:00
|
|
|
|
2020-12-11 11:43:53 -05:00
|
|
|
def _sender_notification_permission(
    ev: EventBase,
    condition: Dict[str, Any],
    sender_power_level: int,
    power_levels: Dict[str, Union[int, Dict[str, int]]],
) -> bool:
    """Check a "sender_notification_permission" push rule condition.

    The condition matches when the sender's power level is at least the
    notification power level named by the condition's "key" field.

    Args:
        ev: The event being checked (unused here; kept for a uniform
            signature with the other condition checkers).
        condition: The push rule condition; its "key" field names the
            entry of the room's "notifications" power levels to check.
        sender_power_level: The power level of the event's sender.
        power_levels: The room's power levels content.

    Returns:
        True if the sender is privileged enough, False otherwise.
    """
    key = condition.get("key")
    if key is None:
        # A condition without a key can never match.
        return False

    levels = power_levels.get("notifications", {})
    assert isinstance(levels, dict)

    # Unknown notification keys default to a required level of 50.
    required_level = levels.get(key, 50)
    return sender_power_level >= required_level
|
2017-10-05 07:39:18 -04:00
|
|
|
|
2017-10-05 08:08:02 -04:00
|
|
|
|
2020-12-11 11:43:53 -05:00
|
|
|
def _test_ineq_condition(condition: Dict[str, Any], number: int) -> bool:
    """Test an integer push rule condition such as "==5", "<3" or ">=2".

    The condition's "is" field holds an optional comparison operator
    followed by a non-negative decimal integer; an empty operator means
    equality. A missing or malformed "is" field never matches.

    Args:
        condition: The push rule condition containing the "is" field.
        number: The integer to compare (e.g. a room's member count).

    Returns:
        True if the comparison holds, False otherwise.
    """
    if "is" not in condition:
        return False

    match = INEQUALITY_EXPR.match(condition["is"])
    if match is None:
        return False

    op, digits = match.groups()
    # The regex permits an empty numeric part; reject it here.
    if not digits.isdigit():
        return False
    threshold = int(digits)

    comparators = {
        "": lambda lhs, rhs: lhs == rhs,
        "==": lambda lhs, rhs: lhs == rhs,
        "<": lambda lhs, rhs: lhs < rhs,
        ">": lambda lhs, rhs: lhs > rhs,
        ">=": lambda lhs, rhs: lhs >= rhs,
        "<=": lambda lhs, rhs: lhs <= rhs,
    }
    compare = comparators.get(op)
    if compare is None:
        # Unrecognised operator (e.g. "=<" or "<>"): never matches.
        return False
    return compare(number, threshold)
|
|
|
|
|
2017-10-05 08:08:02 -04:00
|
|
|
|
2020-07-06 06:43:41 -04:00
|
|
|
def tweaks_for_actions(actions: List[Union[str, Dict]]) -> Dict[str, Any]:
    """
    Converts a list of actions into a `tweaks` dict (which can then be passed to
    the push gateway).

    Only `set_tweak` actions contribute to the result; string actions such
    as "notify", and dicts without a `set_tweak` key, are skipped. An absent
    `value` is treated as `True`, which agrees with the only spec-defined
    treatment of absent `value`s (namely, for `highlight` tweaks).

    Args:
        actions: list of actions
            e.g. [
                {"set_tweak": "a", "value": "AAA"},
                {"set_tweak": "b", "value": "BBB"},
                {"set_tweak": "highlight"},
                "notify"
            ]

    Returns:
        dictionary of tweaks for those actions
        e.g. {"a": "AAA", "b": "BBB", "highlight": True}
    """
    # Later duplicates overwrite earlier ones, matching the behaviour of
    # assigning into a dict one action at a time.
    return {
        action["set_tweak"]: action.get("value", True)
        for action in actions
        if isinstance(action, dict) and "set_tweak" in action
    }
|
2015-12-09 10:51:34 -05:00
|
|
|
|
|
|
|
|
2020-09-04 06:54:56 -04:00
|
|
|
class PushRuleEvaluatorForEvent:
    """Evaluates push rule conditions against a single event.

    An instance is constructed once per event, together with the room
    state it needs (member count, sender power level, power levels
    content, and bundled relations), and can then check the conditions of
    many users' push rules against that event.
    """

    def __init__(
        self,
        event: EventBase,
        room_member_count: int,
        sender_power_level: int,
        power_levels: Dict[str, Union[int, Dict[str, int]]],
        relations: Dict[str, Set[Tuple[str, str]]],
        relations_match_enabled: bool,
    ):
        """
        Args:
            event: The event to evaluate conditions against.
            room_member_count: The number of members in the room.
            sender_power_level: The power level of the event's sender.
            power_levels: The room's power levels content.
            relations: Maps relation type -> set of (sender, event type)
                pairs of events relating to this one.
            relations_match_enabled: Whether the experimental
                "org.matrix.msc3772.relation_match" condition is enabled.
        """
        self._event = event
        self._room_member_count = room_member_count
        self._sender_power_level = sender_power_level
        self._power_levels = power_levels
        self._relations = relations
        self._relations_match_enabled = relations_match_enabled

        # Maps strings of e.g. 'content.body' -> event["content"]["body"]
        self._value_cache = _flatten_dict(event)

        # Maps cache keys to final values.
        self._condition_cache: Dict[str, bool] = {}

    def check_conditions(
        self, conditions: List[dict], uid: str, display_name: Optional[str]
    ) -> bool:
        """
        Returns true if a user's conditions/user ID/display name match the event.

        Args:
            conditions: The user's conditions to match.
            uid: The user's MXID.
            display_name: The display name.

        Returns:
            True if all conditions match the event, False otherwise.
        """
        for cond in conditions:
            # Conditions carrying a "_cache_key" share their result with
            # identical conditions in other users' rules.
            _cache_key = cond.get("_cache_key", None)
            if _cache_key:
                res = self._condition_cache.get(_cache_key, None)
                if res is False:
                    return False
                elif res is True:
                    continue

            res = self.matches(cond, uid, display_name)
            if _cache_key:
                self._condition_cache[_cache_key] = bool(res)

            # All conditions must match for the rule to apply.
            if not res:
                return False

        return True

    def matches(
        self, condition: Dict[str, Any], user_id: str, display_name: Optional[str]
    ) -> bool:
        """
        Returns true if a user's condition/user ID/display name match the event.

        Args:
            condition: The user's condition to match.
            user_id: The user's MXID.
            display_name: The display name, or None if there is not one.

        Returns:
            True if the condition matches the event, False otherwise.
        """
        if condition["kind"] == "event_match":
            return self._event_match(condition, user_id)
        elif condition["kind"] == "contains_display_name":
            return self._contains_display_name(display_name)
        elif condition["kind"] == "room_member_count":
            return _room_member_count(self._event, condition, self._room_member_count)
        elif condition["kind"] == "sender_notification_permission":
            return _sender_notification_permission(
                self._event, condition, self._sender_power_level, self._power_levels
            )
        elif (
            condition["kind"] == "org.matrix.msc3772.relation_match"
            and self._relations_match_enabled
        ):
            return self._relation_match(condition, user_id)
        else:
            # XXX This looks incorrect -- we have reached an unknown condition
            # kind and are unconditionally returning that it matches. Note
            # that it seems possible to provide a condition to the /pushrules
            # endpoint with an unknown kind, see _rule_tuple_from_request_object.
            return True

    def _event_match(self, condition: dict, user_id: str) -> bool:
        """
        Check an "event_match" push rule condition.

        Args:
            condition: The "event_match" push rule condition to match.
            user_id: The user's MXID.

        Returns:
            True if the condition matches the event, False otherwise.
        """
        pattern = condition.get("pattern", None)

        # With no explicit pattern, a "pattern_type" can derive the pattern
        # from the user's MXID instead.
        if not pattern:
            pattern_type = condition.get("pattern_type", None)
            if pattern_type == "user_id":
                pattern = user_id
            elif pattern_type == "user_localpart":
                pattern = UserID.from_string(user_id).localpart

        if not pattern:
            logger.warning("event_match condition with no pattern")
            return False

        # XXX: optimisation: cache our pattern regexps
        if condition["key"] == "content.body":
            # The message body is matched on word boundaries rather than
            # against the whole (lower-cased) flattened value.
            body = self._event.content.get("body", None)
            if not body or not isinstance(body, str):
                return False

            return _glob_matches(pattern, body, word_boundary=True)
        else:
            haystack = self._value_cache.get(condition["key"], None)
            if haystack is None:
                return False

            return _glob_matches(pattern, haystack)

    def _contains_display_name(self, display_name: Optional[str]) -> bool:
        """
        Check a "contains_display_name" push rule condition.

        Args:
            display_name: The display name, or None if there is not one.

        Returns:
            True if the display name is found in the event body, False otherwise.
        """
        if not display_name:
            return False

        body = self._event.content.get("body", None)
        if not body or not isinstance(body, str):
            return False

        # Similar to _glob_matches, but do not treat display_name as a glob.
        r = regex_cache.get((display_name, False, True), None)
        if not r:
            r1 = re.escape(display_name)
            r1 = to_word_pattern(r1)
            r = re.compile(r1, flags=re.IGNORECASE)
            regex_cache[(display_name, False, True)] = r

        return bool(r.search(body))

    def _relation_match(self, condition: dict, user_id: str) -> bool:
        """
        Check an "org.matrix.msc3772.relation_match" push rule condition.

        Args:
            condition: The "relation_match" push rule condition to match.
            user_id: The user's MXID.

        Returns:
            True if the condition matches the event, False otherwise.
        """
        rel_type = condition.get("rel_type")
        if not rel_type:
            logger.warning("relation_match condition missing rel_type")
            return False

        sender_pattern = condition.get("sender")
        if sender_pattern is None:
            sender_type = condition.get("sender_type")
            if sender_type == "user_id":
                sender_pattern = user_id
        type_pattern = condition.get("type")

        # If any relation of the requested type matches both the sender and
        # the event type patterns (absent patterns match anything), the
        # condition matches.
        for sender, event_type in self._relations.get(rel_type, ()):
            if sender_pattern and not _glob_matches(sender_pattern, sender):
                continue
            if type_pattern and not _glob_matches(type_pattern, event_type):
                continue
            # All values must have matched.
            return True

        # No relations matched.
        return False
|
|
|
|
|
2015-12-09 10:51:34 -05:00
|
|
|
|
2020-04-16 10:52:55 -04:00
|
|
|
# Caches (string, is_glob, word_boundary) -> regex for push. See _glob_matches
# (which stores glob-derived patterns under is_glob=True) and
# PushRuleEvaluatorForEvent._contains_display_name (which stores escaped
# display-name patterns under is_glob=False).
regex_cache: LruCache[Tuple[str, bool, bool], Pattern] = LruCache(
    50000, "regex_push_cache"
)
|
2017-03-29 10:53:14 -04:00
|
|
|
|
|
|
|
|
2020-04-16 10:52:55 -04:00
|
|
|
def _glob_matches(glob: str, value: str, word_boundary: bool = False) -> bool:
    """Tests if value matches glob.

    Compiled patterns are cached in the module-level ``regex_cache``.

    Args:
        glob: The glob pattern to match with.
        value: String to test against glob.
        word_boundary: Whether to match against word boundaries or entire
            string. Defaults to False.

    Returns:
        True if the value matches; False if it does not, or if the glob
        could not be turned into a valid regular expression.
    """
    cache_key = (glob, True, word_boundary)
    try:
        compiled = regex_cache.get(cache_key, None)
        if not compiled:
            compiled = glob_to_regex(glob, word_boundary=word_boundary)
            regex_cache[cache_key] = compiled
        return bool(compiled.search(value))
    except re.error:
        logger.warning("Failed to parse glob to regex: %r", glob)
        return False
|
2016-01-18 09:09:47 -05:00
|
|
|
|
|
|
|
|
2020-12-11 11:43:53 -05:00
|
|
|
def _flatten_dict(
    d: Union[EventBase, Mapping[str, Any]],
    prefix: Optional[List[str]] = None,
    result: Optional[Dict[str, str]] = None,
) -> Dict[str, str]:
    """Recursively flatten a nested mapping into dotted-key form.

    e.g. {"content": {"body": "Hi"}} becomes {"content.body": "hi"}.
    String values are lower-cased; values that are neither strings nor
    mappings are dropped.

    Args:
        d: The event or mapping to flatten.
        prefix: Key components accumulated from enclosing mappings.
        result: The dict that results are accumulated into across the
            recursion.

    Returns:
        The flattened dict.
    """
    if prefix is None:
        prefix = []
    if result is None:
        result = {}

    for key, value in d.items():
        path = prefix + [key]
        if isinstance(value, str):
            # Lower-cased, presumably so later matching can be
            # case-insensitive -- see the users of _value_cache.
            result[".".join(path)] = value.lower()
        elif isinstance(value, Mapping):
            _flatten_dict(value, prefix=path, result=result)

    return result
|