# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2017 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import re

from six import string_types

from synapse.types import UserID
from synapse.util.caches import CACHE_SIZE_FACTOR, register_cache
from synapse.util.caches.lrucache import LruCache

logger = logging.getLogger(__name__)


# Matches a (re.escape-d) "[...]" or "[!...]" character class within a glob.
GLOB_REGEX = re.compile(r"\\\[(\\\!|)(.*)\\\]")
# Does the string contain any glob metacharacters?
IS_GLOB = re.compile(r"[\?\*\[\]]")
# Parses the "is" field of a room_member_count condition, e.g. ">=2".
INEQUALITY_EXPR = re.compile("^([=<>]*)([0-9]*)$")


def _room_member_count(ev, condition, room_member_count):
    return _test_ineq_condition(condition, room_member_count)


def _sender_notification_permission(ev, condition, sender_power_level, power_levels):
    notif_level_key = condition.get("key")
    if notif_level_key is None:
        return False

    notif_levels = power_levels.get("notifications", {})
    room_notif_level = notif_levels.get(notif_level_key, 50)

    return sender_power_level >= room_notif_level
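
# Illustrative example (not part of the original module): a condition such as
#   {"kind": "sender_notification_permission", "key": "room"}
# is checked against the room's m.room.power_levels content. With
# power_levels = {"notifications": {"room": 50}}, a sender at power level 50
# or above passes (50 >= 50), while a sender at the default level of 0 does
# not. A condition without a "key" never matches.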


def _test_ineq_condition(condition, number):
    if "is" not in condition:
        return False
    m = INEQUALITY_EXPR.match(condition["is"])
    if not m:
        return False
    ineq = m.group(1)
    rhs = m.group(2)
    if not rhs.isdigit():
        return False
    rhs = int(rhs)

    if ineq == "" or ineq == "==":
        return number == rhs
    elif ineq == "<":
        return number < rhs
    elif ineq == ">":
        return number > rhs
    elif ineq == ">=":
        return number >= rhs
    elif ineq == "<=":
        return number <= rhs
    else:
        return False
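
# Illustrative example (not part of the original module): for a
# room_member_count condition of {"is": ">=2"}, INEQUALITY_EXPR splits the
# string into ineq=">=" and rhs="2", so a room with 3 joined members matches
# (3 >= 2) and a room with 1 does not. A bare number such as {"is": "2"} means
# equality, and anything that fails to parse (e.g. {"is": "lots"}) evaluates
# to False.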


def tweaks_for_actions(actions):
    tweaks = {}
    for a in actions:
        if not isinstance(a, dict):
            continue
        if "set_tweak" in a and "value" in a:
            tweaks[a["set_tweak"]] = a["value"]
    return tweaks
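
# Illustrative example (not part of the original module): given a rule's
# actions list of
#   ["notify", {"set_tweak": "sound", "value": "default"},
#    {"set_tweak": "highlight", "value": False}]
# this returns {"sound": "default", "highlight": False}. Plain string actions
# such as "notify" are skipped, as is any set_tweak dict without an explicit
# "value".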


class PushRuleEvaluatorForEvent(object):
    def __init__(self, event, room_member_count, sender_power_level, power_levels):
        self._event = event
        self._room_member_count = room_member_count
        self._sender_power_level = sender_power_level
        self._power_levels = power_levels

        # Maps strings of e.g. 'content.body' -> event["content"]["body"]
        self._value_cache = _flatten_dict(event)

    def matches(self, condition, user_id, display_name):
        if condition["kind"] == "event_match":
            return self._event_match(condition, user_id)
        elif condition["kind"] == "contains_display_name":
            return self._contains_display_name(display_name)
        elif condition["kind"] == "room_member_count":
            return _room_member_count(self._event, condition, self._room_member_count)
        elif condition["kind"] == "sender_notification_permission":
            return _sender_notification_permission(
                self._event, condition, self._sender_power_level, self._power_levels
            )
        else:
            return True

    def _event_match(self, condition, user_id):
        pattern = condition.get("pattern", None)

        if not pattern:
            pattern_type = condition.get("pattern_type", None)
            if pattern_type == "user_id":
                pattern = user_id
            elif pattern_type == "user_localpart":
                pattern = UserID.from_string(user_id).localpart

        if not pattern:
            logger.warning("event_match condition with no pattern")
            return False

        # XXX: optimisation: cache our pattern regexps
        if condition["key"] == "content.body":
            body = self._event.content.get("body", None)
            if not body:
                return False

            return _glob_matches(pattern, body, word_boundary=True)
        else:
            haystack = self._get_value(condition["key"])
            if haystack is None:
                return False

            return _glob_matches(pattern, haystack)

    def _contains_display_name(self, display_name):
        if not display_name:
            return False

        body = self._event.content.get("body", None)
        if not body:
            return False

        return _glob_matches(display_name, body, word_boundary=True)

    def _get_value(self, dotted_key):
        return self._value_cache.get(dotted_key, None)
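
# Illustrative usage sketch (not part of the original module), assuming
# `event` is a dict-like Synapse event whose content includes a "body" and
# `pl` is the content of the room's m.room.power_levels event:
#
#   evaluator = PushRuleEvaluatorForEvent(
#       event, room_member_count=10, sender_power_level=50, power_levels=pl,
#   )
#   evaluator.matches(
#       {"kind": "event_match", "key": "content.body", "pattern": "hello*"},
#       user_id="@alice:example.com", display_name="Alice",
#   )
#
# A "content.body" match is run against the live event body with word
# boundaries; any other key is looked up in the flattened, lower-cased value
# cache built in __init__.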


# Caches (glob, word_boundary) -> regex for push. See _glob_matches
regex_cache = LruCache(50000 * CACHE_SIZE_FACTOR)
register_cache("cache", "regex_push_cache", regex_cache)


def _glob_matches(glob, value, word_boundary=False):
    """Tests if value matches glob.

    Args:
        glob (string)
        value (string): String to test against glob.
        word_boundary (bool): Whether to match against word boundaries or entire
            string. Defaults to False.

    Returns:
        bool
    """

    try:
        r = regex_cache.get((glob, word_boundary), None)
        if not r:
            r = _glob_to_re(glob, word_boundary)
            regex_cache[(glob, word_boundary)] = r
        return r.search(value)
    except re.error:
        logger.warning("Failed to parse glob to regex: %r", glob)
        return False
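
# Illustrative examples (not part of the original module):
#
#   _glob_matches("cake*", "the cake is a lie", word_boundary=True)  # truthy
#   _glob_matches("cake", "cupcake", word_boundary=True)             # falsy
#   _glob_matches("cake", "CAKE")                                    # truthy (case-insensitive)
#
# The value returned is the underlying re match object (or None/False), so
# callers should treat it as a boolean.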


def _glob_to_re(glob, word_boundary):
    """Generates regex for a given glob.

    Args:
        glob (string)
        word_boundary (bool): Whether to match against word boundaries or entire
            string. Defaults to False.

    Returns:
        regex object
    """
    if IS_GLOB.search(glob):
        r = re.escape(glob)

        r = r.replace(r"\*", ".*?")
        r = r.replace(r"\?", ".")

        # handle [abc], [a-z] and [!a-z] style ranges.
        r = GLOB_REGEX.sub(
            lambda x: (
                "[%s%s]" % (x.group(1) and "^" or "", x.group(2).replace(r"\\\-", "-"))
            ),
            r,
        )
        if word_boundary:
            r = _re_word_boundary(r)

            return re.compile(r, flags=re.IGNORECASE)
        else:
            r = "^" + r + "$"

            return re.compile(r, flags=re.IGNORECASE)
    elif word_boundary:
        r = re.escape(glob)
        r = _re_word_boundary(r)

        return re.compile(r, flags=re.IGNORECASE)
    else:
        r = "^" + re.escape(glob) + "$"
        return re.compile(r, flags=re.IGNORECASE)
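
# Illustrative examples of the translation (not part of the original module):
#
#   _glob_to_re("d?g", word_boundary=False).pattern   == "^d.g$"
#   _glob_to_re("cake*", word_boundary=True).pattern  == r"(^|\W)cake.*?(\W|$)"
#   _glob_to_re("cake", word_boundary=False).pattern  == "^cake$"
#
# "*" becomes a non-greedy ".*?", "?" becomes ".", and patterns with no glob
# metacharacters are simply escaped and anchored.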


def _re_word_boundary(r):
    """
    Adds word boundary characters to the start and end of an
    expression to require that the match occur as a whole word,
    but do so respecting the fact that strings starting or ending
    with non-word characters will change word boundaries.
    """
    # we can't use \b as it chokes on unicode. however \W seems to be okay
    # as shorthand for [^0-9A-Za-z_].
    return r"(^|\W)%s(\W|$)" % (r,)


def _flatten_dict(d, prefix=[], result=None):
    if result is None:
        result = {}
    for key, value in d.items():
        if isinstance(value, string_types):
            result[".".join(prefix + [key])] = value.lower()
        elif hasattr(value, "items"):
            _flatten_dict(value, prefix=(prefix + [key]), result=result)

    return result
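
# Illustrative example (not part of the original module):
#
#   _flatten_dict({"type": "m.room.message",
#                  "content": {"msgtype": "m.text", "body": "Hello World"}})
#
# returns
#
#   {"type": "m.room.message", "content.msgtype": "m.text",
#    "content.body": "hello world"}
#
# Nested keys are joined with "." and string values are lower-cased before
# being cached.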