Merge branch 'develop' into push_badge_counts

David Baker 2016-01-19 18:17:23 +00:00
commit afb7b377f2
35 changed files with 1043 additions and 1246 deletions

View file

@ -37,7 +37,7 @@ class Pusher(object):
MAX_BACKOFF = 60 * 60 * 1000
GIVE_UP_AFTER = 24 * 60 * 60 * 1000
def __init__(self, _hs, profile_tag, user_name, app_id,
def __init__(self, _hs, profile_tag, user_id, app_id,
app_display_name, device_display_name, pushkey, pushkey_ts,
data, last_token, last_success, failing_since):
self.hs = _hs
@ -45,7 +45,7 @@ class Pusher(object):
self.store = self.hs.get_datastore()
self.clock = self.hs.get_clock()
self.profile_tag = profile_tag
self.user_name = user_name
self.user_id = user_id
self.app_id = app_id
self.app_display_name = app_display_name
self.device_display_name = device_display_name
@ -95,14 +95,14 @@ class Pusher(object):
# we fail to dispatch the push)
config = PaginationConfig(from_token=None, limit='1')
chunk = yield self.evStreamHandler.get_stream(
self.user_name, config, timeout=0, affect_presence=False
self.user_id, config, timeout=0, affect_presence=False
)
self.last_token = chunk['end']
self.store.update_pusher_last_token(
self.app_id, self.pushkey, self.user_name, self.last_token
self.app_id, self.pushkey, self.user_id, self.last_token
)
logger.info("Pusher %s for user %s starting from token %s",
self.pushkey, self.user_name, self.last_token)
self.pushkey, self.user_id, self.last_token)
wait = 0
while self.alive:
@ -127,7 +127,7 @@ class Pusher(object):
config = PaginationConfig(from_token=from_tok, limit='1')
timeout = (300 + random.randint(-60, 60)) * 1000
chunk = yield self.evStreamHandler.get_stream(
self.user_name, config, timeout=timeout, affect_presence=False
self.user_id, config, timeout=timeout, affect_presence=False
)
# limiting to 1 may get 1 event plus 1 presence event, so
@ -144,7 +144,7 @@ class Pusher(object):
if read_receipt:
for receipt_part in read_receipt['content'].values():
if 'm.read' in receipt_part:
if self.user_name in receipt_part['m.read'].keys():
if self.user_id in receipt_part['m.read'].keys():
have_updated_badge = True
if not single_event:
@ -154,7 +154,7 @@ class Pusher(object):
yield self.store.update_pusher_last_token(
self.app_id,
self.pushkey,
self.user_name,
self.user_id,
self.last_token
)
return
@ -165,8 +165,8 @@ class Pusher(object):
processed = False
rule_evaluator = yield \
push_rule_evaluator.evaluator_for_user_name_and_profile_tag(
self.user_name, self.profile_tag, single_event['room_id'], self.store
push_rule_evaluator.evaluator_for_user_id_and_profile_tag(
self.user_id, self.profile_tag, single_event['room_id'], self.store
)
actions = yield rule_evaluator.actions_for_event(single_event)
@ -192,7 +192,7 @@ class Pusher(object):
pk
)
yield self.hs.get_pusherpool().remove_pusher(
self.app_id, pk, self.user_name
self.app_id, pk, self.user_id
)
else:
if have_updated_badge:
@ -208,7 +208,7 @@ class Pusher(object):
yield self.store.update_pusher_last_token_and_success(
self.app_id,
self.pushkey,
self.user_name,
self.user_id,
self.last_token,
self.clock.time_msec()
)
@ -217,7 +217,7 @@ class Pusher(object):
yield self.store.update_pusher_failing_since(
self.app_id,
self.pushkey,
self.user_name,
self.user_id,
self.failing_since)
else:
if not self.failing_since:
@ -225,7 +225,7 @@ class Pusher(object):
yield self.store.update_pusher_failing_since(
self.app_id,
self.pushkey,
self.user_name,
self.user_id,
self.failing_since
)
@ -237,13 +237,13 @@ class Pusher(object):
# of old notifications.
logger.warn("Giving up on a notification to user %s, "
"pushkey %s",
self.user_name, self.pushkey)
self.user_id, self.pushkey)
self.backoff_delay = Pusher.INITIAL_BACKOFF
self.last_token = chunk['end']
yield self.store.update_pusher_last_token(
self.app_id,
self.pushkey,
self.user_name,
self.user_id,
self.last_token
)
@ -251,14 +251,14 @@ class Pusher(object):
yield self.store.update_pusher_failing_since(
self.app_id,
self.pushkey,
self.user_name,
self.user_id,
self.failing_since
)
else:
logger.warn("Failed to dispatch push for user %s "
"(failing for %dms)."
"Trying again in %dms",
self.user_name,
self.user_id,
self.clock.time_msec() - self.failing_since,
self.backoff_delay)
yield synapse.util.async.sleep(self.backoff_delay / 1000.0)
@ -299,11 +299,11 @@ class Pusher(object):
membership_list = (Membership.INVITE, Membership.JOIN)
room_list = yield self.store.get_rooms_for_user_where_membership_is(
user_id=self.user_name,
user_id=self.user_id,
membership_list=membership_list
)
user_is_guest = yield self.store.is_guest(UserID.from_string(self.user_name))
user_is_guest = yield self.store.is_guest(self.user_id)
# XXX: importing inside method to break circular dependency.
# should sort out the mess by moving all this logic out of
@ -311,7 +311,7 @@ class Pusher(object):
# handler to somewhere more amenable to re-use.
from synapse.handlers.sync import SyncConfig
sync_config = SyncConfig(
user=UserID.from_string(self.user_name),
user=UserID.from_string(self.user_id),
filter=FilterCollection({}),
is_guest=user_is_guest,
)
@ -328,13 +328,13 @@ class Pusher(object):
badge += 1
else:
last_unread_event_id = sync_handler.last_read_event_id_for_room_and_user(
r.room_id, self.user_name, ephemeral_by_room
r.room_id, self.user_id, ephemeral_by_room
)
if last_unread_event_id:
notifs = yield (
self.store.get_unread_event_push_actions_by_room_for_user(
r.room_id, self.user_name, last_unread_event_id
r.room_id, self.user_id, last_unread_event_id
)
)
badge += len(notifs)
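
The hunk above ends inside the badge-counting helper: a pending invite contributes one to the badge, while a joined room contributes the number of unread push actions since the user's last read receipt. Below is a rough standalone sketch of that arithmetic, with plain dicts standing in for the store and sync-handler results (the real code uses membership constants and deferreds):

# Illustrative only: the dict inputs stand in for
# get_rooms_for_user_where_membership_is() and
# get_unread_event_push_actions_by_room_for_user().
def count_badge(rooms_for_user, unread_notifs_by_room):
    badge = 0
    for r in rooms_for_user:
        if r["membership"] == "invite":
            badge += 1          # an invite always bumps the badge by one
        else:
            # a joined room adds its unread notification count
            badge += len(unread_notifs_by_room.get(r["room_id"], []))
    return badge

rooms = [
    {"room_id": "!a:hs", "membership": "invite"},
    {"room_id": "!b:hs", "membership": "join"},
]
unread = {"!b:hs": ["$ev1:hs", "$ev2:hs"]}
print(count_badge(rooms, unread))  # -> 3: one invite plus two unread events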

View file

@ -25,8 +25,9 @@ logger = logging.getLogger(__name__)
class ActionGenerator:
def __init__(self, store):
self.store = store
def __init__(self, hs):
self.hs = hs
self.store = hs.get_datastore()
# really we want to get all user ids and all profile tags too,
# since we want the actions for each profile tag for every user and
# also actions for a client with no profile tag for each user.
@ -42,7 +43,7 @@ class ActionGenerator:
)
bulk_evaluator = yield bulk_push_rule_evaluator.evaluator_for_room_id(
event.room_id, self.store
event.room_id, self.hs, self.store
)
actions_by_user = yield bulk_evaluator.action_for_event_by_user(event, handler)

View file

@ -15,27 +15,25 @@
from synapse.push.rulekinds import PRIORITY_CLASS_MAP, PRIORITY_CLASS_INVERSE_MAP
def list_with_base_rules(rawrules, user_name):
def list_with_base_rules(rawrules):
ruleslist = []
# shove the server default rules for each kind onto the end of each
current_prio_class = PRIORITY_CLASS_INVERSE_MAP.keys()[-1]
ruleslist.extend(make_base_prepend_rules(
user_name, PRIORITY_CLASS_INVERSE_MAP[current_prio_class]
PRIORITY_CLASS_INVERSE_MAP[current_prio_class]
))
for r in rawrules:
if r['priority_class'] < current_prio_class:
while r['priority_class'] < current_prio_class:
ruleslist.extend(make_base_append_rules(
user_name,
PRIORITY_CLASS_INVERSE_MAP[current_prio_class]
))
current_prio_class -= 1
if current_prio_class > 0:
ruleslist.extend(make_base_prepend_rules(
user_name,
PRIORITY_CLASS_INVERSE_MAP[current_prio_class]
))
@ -43,223 +41,232 @@ def list_with_base_rules(rawrules, user_name):
while current_prio_class > 0:
ruleslist.extend(make_base_append_rules(
user_name,
PRIORITY_CLASS_INVERSE_MAP[current_prio_class]
))
current_prio_class -= 1
if current_prio_class > 0:
ruleslist.extend(make_base_prepend_rules(
user_name,
PRIORITY_CLASS_INVERSE_MAP[current_prio_class]
))
return ruleslist
def make_base_append_rules(user, kind):
def make_base_append_rules(kind):
rules = []
if kind == 'override':
rules = make_base_append_override_rules()
rules = BASE_APPEND_OVRRIDE_RULES
elif kind == 'underride':
rules = make_base_append_underride_rules(user)
rules = BASE_APPEND_UNDERRIDE_RULES
elif kind == 'content':
rules = make_base_append_content_rules(user)
for r in rules:
r['priority_class'] = PRIORITY_CLASS_MAP[kind]
r['default'] = True # Deprecated, left for backwards compat
rules = BASE_APPEND_CONTENT_RULES
return rules
def make_base_prepend_rules(user, kind):
def make_base_prepend_rules(kind):
rules = []
if kind == 'override':
rules = make_base_prepend_override_rules()
for r in rules:
r['priority_class'] = PRIORITY_CLASS_MAP[kind]
r['default'] = True # Deprecated, left for backwards compat
rules = BASE_PREPEND_OVERRIDE_RULES
return rules
def make_base_append_content_rules(user):
return [
{
'rule_id': 'global/content/.m.rule.contains_user_name',
'conditions': [
{
'kind': 'event_match',
'key': 'content.body',
'pattern': user.localpart, # Matrix ID match
}
],
'actions': [
'notify',
{
'set_tweak': 'sound',
'value': 'default',
}, {
'set_tweak': 'highlight'
}
]
},
]
BASE_APPEND_CONTENT_RULES = [
{
'rule_id': 'global/content/.m.rule.contains_user_name',
'conditions': [
{
'kind': 'event_match',
'key': 'content.body',
'pattern_type': 'user_localpart'
}
],
'actions': [
'notify',
{
'set_tweak': 'sound',
'value': 'default',
}, {
'set_tweak': 'highlight'
}
]
},
]
def make_base_prepend_override_rules():
return [
{
'rule_id': 'global/override/.m.rule.master',
'enabled': False,
'conditions': [],
'actions': [
"dont_notify"
]
}
]
BASE_PREPEND_OVERRIDE_RULES = [
{
'rule_id': 'global/override/.m.rule.master',
'enabled': False,
'conditions': [],
'actions': [
"dont_notify"
]
}
]
def make_base_append_override_rules():
return [
{
'rule_id': 'global/override/.m.rule.suppress_notices',
'conditions': [
{
'kind': 'event_match',
'key': 'content.msgtype',
'pattern': 'm.notice',
}
],
'actions': [
'dont_notify',
]
}
]
BASE_APPEND_OVRRIDE_RULES = [
{
'rule_id': 'global/override/.m.rule.suppress_notices',
'conditions': [
{
'kind': 'event_match',
'key': 'content.msgtype',
'pattern': 'm.notice',
'_id': '_suppress_notices',
}
],
'actions': [
'dont_notify',
]
}
]
def make_base_append_underride_rules(user):
return [
{
'rule_id': 'global/underride/.m.rule.call',
'conditions': [
{
'kind': 'event_match',
'key': 'type',
'pattern': 'm.call.invite',
}
],
'actions': [
'notify',
{
'set_tweak': 'sound',
'value': 'ring'
}, {
'set_tweak': 'highlight',
'value': False
}
]
},
{
'rule_id': 'global/underride/.m.rule.contains_display_name',
'conditions': [
{
'kind': 'contains_display_name'
}
],
'actions': [
'notify',
{
'set_tweak': 'sound',
'value': 'default'
}, {
'set_tweak': 'highlight'
}
]
},
{
'rule_id': 'global/underride/.m.rule.room_one_to_one',
'conditions': [
{
'kind': 'room_member_count',
'is': '2'
}
],
'actions': [
'notify',
{
'set_tweak': 'sound',
'value': 'default'
}, {
'set_tweak': 'highlight',
'value': False
}
]
},
{
'rule_id': 'global/underride/.m.rule.invite_for_me',
'conditions': [
{
'kind': 'event_match',
'key': 'type',
'pattern': 'm.room.member',
},
{
'kind': 'event_match',
'key': 'content.membership',
'pattern': 'invite',
},
{
'kind': 'event_match',
'key': 'state_key',
'pattern': user.to_string(),
},
],
'actions': [
'notify',
{
'set_tweak': 'sound',
'value': 'default'
}, {
'set_tweak': 'highlight',
'value': False
}
]
},
{
'rule_id': 'global/underride/.m.rule.member_event',
'conditions': [
{
'kind': 'event_match',
'key': 'type',
'pattern': 'm.room.member',
}
],
'actions': [
'notify', {
'set_tweak': 'highlight',
'value': False
}
]
},
{
'rule_id': 'global/underride/.m.rule.message',
'enabled': False,
'conditions': [
{
'kind': 'event_match',
'key': 'type',
'pattern': 'm.room.message',
}
],
'actions': [
'notify', {
'set_tweak': 'highlight',
'value': False
}
]
}
]
BASE_APPEND_UNDERRIDE_RULES = [
{
'rule_id': 'global/underride/.m.rule.call',
'conditions': [
{
'kind': 'event_match',
'key': 'type',
'pattern': 'm.call.invite',
'_id': '_call',
}
],
'actions': [
'notify',
{
'set_tweak': 'sound',
'value': 'ring'
}, {
'set_tweak': 'highlight',
'value': False
}
]
},
{
'rule_id': 'global/underride/.m.rule.contains_display_name',
'conditions': [
{
'kind': 'contains_display_name'
}
],
'actions': [
'notify',
{
'set_tweak': 'sound',
'value': 'default'
}, {
'set_tweak': 'highlight'
}
]
},
{
'rule_id': 'global/underride/.m.rule.room_one_to_one',
'conditions': [
{
'kind': 'room_member_count',
'is': '2',
'_id': 'member_count',
}
],
'actions': [
'notify',
{
'set_tweak': 'sound',
'value': 'default'
}, {
'set_tweak': 'highlight',
'value': False
}
]
},
{
'rule_id': 'global/underride/.m.rule.invite_for_me',
'conditions': [
{
'kind': 'event_match',
'key': 'type',
'pattern': 'm.room.member',
'_id': '_member',
},
{
'kind': 'event_match',
'key': 'content.membership',
'pattern': 'invite',
'_id': '_invite_member',
},
{
'kind': 'event_match',
'key': 'state_key',
'pattern_type': 'user_id'
},
],
'actions': [
'notify',
{
'set_tweak': 'sound',
'value': 'default'
}, {
'set_tweak': 'highlight',
'value': False
}
]
},
{
'rule_id': 'global/underride/.m.rule.member_event',
'conditions': [
{
'kind': 'event_match',
'key': 'type',
'pattern': 'm.room.member',
'_id': '_member',
}
],
'actions': [
'notify', {
'set_tweak': 'highlight',
'value': False
}
]
},
{
'rule_id': 'global/underride/.m.rule.message',
'conditions': [
{
'kind': 'event_match',
'key': 'type',
'pattern': 'm.room.message',
'_id': '_message',
}
],
'actions': [
'notify', {
'set_tweak': 'highlight',
'value': False
}
]
}
]
for r in BASE_APPEND_CONTENT_RULES:
r['priority_class'] = PRIORITY_CLASS_MAP['content']
r['default'] = True
for r in BASE_PREPEND_OVERRIDE_RULES:
r['priority_class'] = PRIORITY_CLASS_MAP['override']
r['default'] = True
for r in BASE_APPEND_OVRRIDE_RULES:
r['priority_class'] = PRIORITY_CLASS_MAP['override']
r['default'] = True
for r in BASE_APPEND_UNDERRIDE_RULES:
r['priority_class'] = PRIORITY_CLASS_MAP['underride']
r['default'] = True
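
The per-user make_base_*_rules() functions above are replaced by module-level constants shared by every user: anything user-specific is expressed through a 'pattern_type' placeholder ('user_id' or 'user_localpart') that the evaluator resolves at match time, and priority_class/default are stamped on once by the loops at the end. A minimal sketch of how such a placeholder can be resolved; resolve_pattern is a hypothetical helper that mirrors what _event_match does later in this diff:

def resolve_pattern(condition, user_id):
    # explicit patterns win; otherwise fill in the per-user placeholder
    pattern = condition.get('pattern')
    if pattern:
        return pattern
    pattern_type = condition.get('pattern_type')
    if pattern_type == 'user_id':
        return user_id
    if pattern_type == 'user_localpart':
        # "@alice:example.com" -> "alice"
        return user_id[1:].split(':', 1)[0]
    return None

# e.g. the .m.rule.contains_user_name content rule above:
cond = {'kind': 'event_match', 'key': 'content.body',
        'pattern_type': 'user_localpart'}
print(resolve_pattern(cond, "@alice:example.com"))  # -> alice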

View file

@ -14,16 +14,15 @@
# limitations under the License.
import logging
import simplejson as json
import ujson as json
from twisted.internet import defer
from synapse.types import UserID
import baserules
from push_rule_evaluator import PushRuleEvaluator
from push_rule_evaluator import PushRuleEvaluatorForEvent
from synapse.api.constants import EventTypes
from synapse.events.utils import serialize_event
logger = logging.getLogger(__name__)
@ -35,28 +34,30 @@ def decode_rule_json(rule):
@defer.inlineCallbacks
def evaluator_for_room_id(room_id, store):
users = yield store.get_users_in_room(room_id)
rules_by_user = yield store.bulk_get_push_rules(users)
def _get_rules(room_id, user_ids, store):
rules_by_user = yield store.bulk_get_push_rules(user_ids)
rules_by_user = {
uid: baserules.list_with_base_rules(
[decode_rule_json(rule_list) for rule_list in rules_by_user[uid]]
if uid in rules_by_user else [],
UserID.from_string(uid),
)
for uid in users
uid: baserules.list_with_base_rules([
decode_rule_json(rule_list)
for rule_list in rules_by_user.get(uid, [])
])
for uid in user_ids
}
member_events = yield store.get_current_state(
room_id=room_id,
event_type='m.room.member',
)
display_names = {}
for ev in member_events:
if ev.content.get("displayname"):
display_names[ev.state_key] = ev.content.get("displayname")
defer.returnValue(rules_by_user)
@defer.inlineCallbacks
def evaluator_for_room_id(room_id, hs, store):
results = yield store.get_receipts_for_room(room_id, "m.read")
user_ids = [
row["user_id"] for row in results
if hs.is_mine_id(row["user_id"])
]
rules_by_user = yield _get_rules(room_id, user_ids, store)
defer.returnValue(BulkPushRuleEvaluator(
room_id, rules_by_user, display_names, users, store
room_id, rules_by_user, user_ids, store
))
@ -69,10 +70,9 @@ class BulkPushRuleEvaluator:
the same logic to run the actual rules, but could be optimised further
(see https://matrix.org/jira/browse/SYN-562)
"""
def __init__(self, room_id, rules_by_user, display_names, users_in_room, store):
def __init__(self, room_id, rules_by_user, users_in_room, store):
self.room_id = room_id
self.rules_by_user = rules_by_user
self.display_names = display_names
self.users_in_room = users_in_room
self.store = store
@ -80,15 +80,30 @@ class BulkPushRuleEvaluator:
def action_for_event_by_user(self, event, handler):
actions_by_user = {}
for uid, rules in self.rules_by_user.items():
display_name = None
if uid in self.display_names:
display_name = self.display_names[uid]
users_dict = yield self.store.are_guests(self.rules_by_user.keys())
is_guest = yield self.store.is_guest(UserID.from_string(uid))
filtered = yield handler._filter_events_for_client(
uid, [event], is_guest=is_guest
)
filtered_by_user = yield handler._filter_events_for_clients(
users_dict.items(), [event]
)
evaluator = PushRuleEvaluatorForEvent(event, len(self.users_in_room))
condition_cache = {}
member_state = yield self.store.get_state_for_event(
event.event_id,
)
display_names = {}
for ev in member_state.values():
nm = ev.content.get("displayname", None)
if nm and ev.type == EventTypes.Member:
display_names[ev.state_key] = nm
for uid, rules in self.rules_by_user.items():
display_name = display_names.get(uid, None)
filtered = filtered_by_user[uid]
if len(filtered) == 0:
continue
@ -96,29 +111,32 @@ class BulkPushRuleEvaluator:
if 'enabled' in rule and not rule['enabled']:
continue
# XXX: profile tags
if BulkPushRuleEvaluator.event_matches_rule(
event, rule,
display_name, len(self.users_in_room), None
):
matches = _condition_checker(
evaluator, rule['conditions'], uid, display_name, condition_cache
)
if matches:
actions = [x for x in rule['actions'] if x != 'dont_notify']
if len(actions) > 0:
if actions:
actions_by_user[uid] = actions
break
defer.returnValue(actions_by_user)
@staticmethod
def event_matches_rule(event, rule,
display_name, room_member_count, profile_tag):
matches = True
# passing the clock all the way into here is extremely awkward and push
# rules do not care about any of the relative timestamps, so we just
# pass 0 for the current time.
client_event = serialize_event(event, 0)
def _condition_checker(evaluator, conditions, uid, display_name, cache):
for cond in conditions:
_id = cond.get("_id", None)
if _id:
res = cache.get(_id, None)
if res is False:
return False
elif res is True:
continue
for cond in rule['conditions']:
matches &= PushRuleEvaluator._event_fulfills_condition(
client_event, cond, display_name, room_member_count, profile_tag
)
return matches
res = evaluator.matches(cond, uid, display_name, None)
if _id:
cache[_id] = bool(res)
if not res:
return False
return True
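
Two things stand out in this file: evaluator_for_room_id() now seeds the evaluator from local users who have sent read receipts in the room rather than from the full member list, and the '_id' values added to the base-rule conditions let _condition_checker() cache results that are identical for every user. A small self-contained illustration of that caching effect; CountingEvaluator is a stand-in for PushRuleEvaluatorForEvent and check() follows the same shape as _condition_checker() above (display_name handling omitted):

class CountingEvaluator(object):
    def __init__(self):
        self.calls = 0

    def matches(self, cond, uid, display_name, profile_tag):
        self.calls += 1
        return True

def check(evaluator, conditions, uid, cache):
    for cond in conditions:
        _id = cond.get("_id")
        if _id is not None and _id in cache:
            if not cache[_id]:
                return False
            continue
        res = evaluator.matches(cond, uid, None, None)
        if _id is not None:
            cache[_id] = bool(res)
        if not res:
            return False
    return True

ev = CountingEvaluator()
cache = {}
conds = [{'kind': 'event_match', 'key': 'content.msgtype',
          'pattern': 'm.notice', '_id': '_suppress_notices'}]
for user in ("@a:hs", "@b:hs", "@c:hs"):
    check(ev, conds, user, cache)
print(ev.calls)  # -> 1: evaluated once, the cached result serves the other users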

View file

@ -23,13 +23,13 @@ logger = logging.getLogger(__name__)
class HttpPusher(Pusher):
def __init__(self, _hs, profile_tag, user_name, app_id,
def __init__(self, _hs, profile_tag, user_id, app_id,
app_display_name, device_display_name, pushkey, pushkey_ts,
data, last_token, last_success, failing_since):
super(HttpPusher, self).__init__(
_hs,
profile_tag,
user_name,
user_id,
app_id,
app_display_name,
device_display_name,
@ -87,7 +87,7 @@ class HttpPusher(Pusher):
}
if event['type'] == 'm.room.member':
d['notification']['membership'] = event['content']['membership']
d['notification']['user_is_target'] = event['state_key'] == self.user_name
d['notification']['user_is_target'] = event['state_key'] == self.user_id
if 'content' in event:
d['notification']['content'] = event['content']
@ -117,7 +117,7 @@ class HttpPusher(Pusher):
@defer.inlineCallbacks
def send_badge(self, badge):
logger.info("Sending updated badge count %d to %r", badge, self.user_name)
logger.info("Sending updated badge count %d to %r", badge, self.user_id)
d = {
'notification': {
'id': '',
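
The hunk stops just inside the payload that send_badge() builds. For orientation, a badge-only notification carries the count in a 'counts' block of the push gateway notification body; the dict below is an approximation for illustration, not the literal continuation of the code above, and every field except counts.unread is an assumption:

# Assumed shape of a badge-only notification body: the event fields stay
# empty and only counts.unread carries information, while 'devices' names
# the app/pushkey being poked (values here are made up).
badge_only_notification = {
    'notification': {
        'id': '',
        'type': None,
        'sender': '',
        'counts': {
            'unread': 3,
        },
        'devices': [{
            'app_id': 'com.example.app',
            'pushkey': 'abc123',
            'data': {},
        }],
    }
}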

View file

@ -15,40 +15,71 @@
from twisted.internet import defer
from synapse.types import UserID
import baserules
import logging
import simplejson as json
import re
from synapse.types import UserID
from synapse.util.caches.lrucache import LruCache
logger = logging.getLogger(__name__)
GLOB_REGEX = re.compile(r'\\\[(\\\!|)(.*)\\\]')
IS_GLOB = re.compile(r'[\?\*\[\]]')
INEQUALITY_EXPR = re.compile("^([=<>]*)([0-9]*)$")
@defer.inlineCallbacks
def evaluator_for_user_name_and_profile_tag(user_name, profile_tag, room_id, store):
rawrules = yield store.get_push_rules_for_user(user_name)
enabled_map = yield store.get_push_rules_enabled_for_user(user_name)
def evaluator_for_user_id_and_profile_tag(user_id, profile_tag, room_id, store):
rawrules = yield store.get_push_rules_for_user(user_id)
enabled_map = yield store.get_push_rules_enabled_for_user(user_id)
our_member_event = yield store.get_current_state(
room_id=room_id,
event_type='m.room.member',
state_key=user_name,
state_key=user_id,
)
defer.returnValue(PushRuleEvaluator(
user_name, profile_tag, rawrules, enabled_map,
user_id, profile_tag, rawrules, enabled_map,
room_id, our_member_event, store
))
def _room_member_count(ev, condition, room_member_count):
if 'is' not in condition:
return False
m = INEQUALITY_EXPR.match(condition['is'])
if not m:
return False
ineq = m.group(1)
rhs = m.group(2)
if not rhs.isdigit():
return False
rhs = int(rhs)
if ineq == '' or ineq == '==':
return room_member_count == rhs
elif ineq == '<':
return room_member_count < rhs
elif ineq == '>':
return room_member_count > rhs
elif ineq == '>=':
return room_member_count >= rhs
elif ineq == '<=':
return room_member_count <= rhs
else:
return False
class PushRuleEvaluator:
DEFAULT_ACTIONS = []
INEQUALITY_EXPR = re.compile("^([=<>]*)([0-9]*)$")
def __init__(self, user_name, profile_tag, raw_rules, enabled_map, room_id,
def __init__(self, user_id, profile_tag, raw_rules, enabled_map, room_id,
our_member_event, store):
self.user_name = user_name
self.user_id = user_id
self.profile_tag = profile_tag
self.room_id = room_id
self.our_member_event = our_member_event
@ -61,8 +92,7 @@ class PushRuleEvaluator:
rule['actions'] = json.loads(raw_rule['actions'])
rules.append(rule)
user = UserID.from_string(self.user_name)
self.rules = baserules.list_with_base_rules(rules, user)
self.rules = baserules.list_with_base_rules(rules)
self.enabled_map = enabled_map
@ -83,7 +113,7 @@ class PushRuleEvaluator:
has configured both globally and per-room when we have the ability
to do such things.
"""
if ev['user_id'] == self.user_name:
if ev['user_id'] == self.user_id:
# let's assume you probably know about messages you sent yourself
defer.returnValue([])
@ -98,39 +128,44 @@ class PushRuleEvaluator:
room_members = yield self.store.get_users_in_room(room_id)
room_member_count = len(room_members)
evaluator = PushRuleEvaluatorForEvent(ev, room_member_count)
for r in self.rules:
if r['rule_id'] in self.enabled_map:
r['enabled'] = self.enabled_map[r['rule_id']]
elif 'enabled' not in r:
r['enabled'] = True
if not r['enabled']:
enabled = self.enabled_map.get(r['rule_id'], None)
if enabled is not None and not enabled:
continue
if not r.get("enabled", True):
continue
matches = True
conditions = r['conditions']
actions = r['actions']
for c in conditions:
matches &= self._event_fulfills_condition(
ev, c, display_name=my_display_name,
room_member_count=room_member_count,
profile_tag=self.profile_tag
)
logger.debug(
"Rule %s %s",
r['rule_id'], "matches" if matches else "doesn't match"
)
# ignore rules with no actions (we have an explicit 'dont_notify')
if len(actions) == 0:
logger.warn(
"Ignoring rule id %s with no actions for user %s",
r['rule_id'], self.user_name
r['rule_id'], self.user_id
)
continue
matches = True
for c in conditions:
matches = evaluator.matches(
c, self.user_id, my_display_name, self.profile_tag
)
if not matches:
break
logger.debug(
"Rule %s %s",
r['rule_id'], "matches" if matches else "doesn't match"
)
if matches:
logger.info(
logger.debug(
"%s matches for user %s, event %s",
r['rule_id'], self.user_name, ev['event_id']
r['rule_id'], self.user_id, ev['event_id']
)
# filter out dont_notify as we treat an empty actions list
@ -139,94 +174,149 @@ class PushRuleEvaluator:
defer.returnValue(actions)
logger.info(
logger.debug(
"No rules match for user %s, event %s",
self.user_name, ev['event_id']
self.user_id, ev['event_id']
)
defer.returnValue(PushRuleEvaluator.DEFAULT_ACTIONS)
@staticmethod
def _glob_to_regexp(glob):
r = re.escape(glob)
r = re.sub(r'\\\*', r'.*?', r)
r = re.sub(r'\\\?', r'.', r)
# handle [abc], [a-z] and [!a-z] style ranges.
r = re.sub(r'\\\[(\\\!|)(.*)\\\]',
lambda x: ('[%s%s]' % (x.group(1) and '^' or '',
re.sub(r'\\\-', '-', x.group(2)))), r)
return r
class PushRuleEvaluatorForEvent(object):
def __init__(self, event, room_member_count):
self._event = event
self._room_member_count = room_member_count
@staticmethod
def _event_fulfills_condition(ev, condition,
display_name, room_member_count, profile_tag):
# Maps strings of e.g. 'content.body' -> event["content"]["body"]
self._value_cache = _flatten_dict(event)
def matches(self, condition, user_id, display_name, profile_tag):
if condition['kind'] == 'event_match':
if 'pattern' not in condition:
logger.warn("event_match condition with no pattern")
return False
# XXX: optimisation: cache our pattern regexps
if condition['key'] == 'content.body':
r = r'\b%s\b' % PushRuleEvaluator._glob_to_regexp(condition['pattern'])
else:
r = r'^%s$' % PushRuleEvaluator._glob_to_regexp(condition['pattern'])
val = _value_for_dotted_key(condition['key'], ev)
if val is None:
return False
return re.search(r, val, flags=re.IGNORECASE) is not None
return self._event_match(condition, user_id)
elif condition['kind'] == 'device':
if 'profile_tag' not in condition:
return True
return condition['profile_tag'] == profile_tag
elif condition['kind'] == 'contains_display_name':
# This is special because display names can be different
# between rooms and so you can't really hard code it in a rule.
# Optimisation: we should cache these names and update them from
# the event stream.
if 'content' not in ev or 'body' not in ev['content']:
return False
if not display_name:
return False
return re.search(
r"\b%s\b" % re.escape(display_name), ev['content']['body'],
flags=re.IGNORECASE
) is not None
return self._contains_display_name(display_name)
elif condition['kind'] == 'room_member_count':
if 'is' not in condition:
return False
m = PushRuleEvaluator.INEQUALITY_EXPR.match(condition['is'])
if not m:
return False
ineq = m.group(1)
rhs = m.group(2)
if not rhs.isdigit():
return False
rhs = int(rhs)
if ineq == '' or ineq == '==':
return room_member_count == rhs
elif ineq == '<':
return room_member_count < rhs
elif ineq == '>':
return room_member_count > rhs
elif ineq == '>=':
return room_member_count >= rhs
elif ineq == '<=':
return room_member_count <= rhs
else:
return False
return _room_member_count(
self._event, condition, self._room_member_count
)
else:
return True
def _event_match(self, condition, user_id):
pattern = condition.get('pattern', None)
def _value_for_dotted_key(dotted_key, event):
parts = dotted_key.split(".")
val = event
while len(parts) > 0:
if parts[0] not in val:
return None
val = val[parts[0]]
parts = parts[1:]
return val
if not pattern:
pattern_type = condition.get('pattern_type', None)
if pattern_type == "user_id":
pattern = user_id
elif pattern_type == "user_localpart":
pattern = UserID.from_string(user_id).localpart
if not pattern:
logger.warn("event_match condition with no pattern")
return False
# XXX: optimisation: cache our pattern regexps
if condition['key'] == 'content.body':
body = self._event["content"].get("body", None)
if not body:
return False
return _glob_matches(pattern, body, word_boundary=True)
else:
haystack = self._get_value(condition['key'])
if haystack is None:
return False
return _glob_matches(pattern, haystack)
def _contains_display_name(self, display_name):
if not display_name:
return False
body = self._event["content"].get("body", None)
if not body:
return False
return _glob_matches(display_name, body, word_boundary=True)
def _get_value(self, dotted_key):
return self._value_cache.get(dotted_key, None)
def _glob_matches(glob, value, word_boundary=False):
"""Tests if value matches glob.
Args:
glob (string)
value (string): String to test against glob.
word_boundary (bool): Whether to match against word boundaries or entire
string. Defaults to False.
Returns:
bool
"""
try:
if IS_GLOB.search(glob):
r = re.escape(glob)
r = r.replace(r'\*', '.*?')
r = r.replace(r'\?', '.')
# handle [abc], [a-z] and [!a-z] style ranges.
r = GLOB_REGEX.sub(
lambda x: (
'[%s%s]' % (
x.group(1) and '^' or '',
x.group(2).replace(r'\\\-', '-')
)
),
r,
)
if word_boundary:
r = r"\b%s\b" % (r,)
r = _compile_regex(r)
return r.search(value)
else:
r = r + "$"
r = _compile_regex(r)
return r.match(value)
elif word_boundary:
r = re.escape(glob)
r = r"\b%s\b" % (r,)
r = _compile_regex(r)
return r.search(value)
else:
return value.lower() == glob.lower()
except re.error:
logger.warn("Failed to parse glob to regex: %r", glob)
return False
def _flatten_dict(d, prefix=[], result={}):
for key, value in d.items():
if isinstance(value, basestring):
result[".".join(prefix + [key])] = value.lower()
elif hasattr(value, "items"):
_flatten_dict(value, prefix=(prefix+[key]), result=result)
return result
regex_cache = LruCache(5000)
def _compile_regex(regex_str):
r = regex_cache.get(regex_str, None)
if r:
return r
r = re.compile(regex_str, flags=re.IGNORECASE)
regex_cache[regex_str] = r
return r
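
For reference, this is what the value cache built by _flatten_dict() holds: nested event content flattened into lower-cased, dot-joined keys that _get_value() can look up directly. The sketch below is a Python 3 re-spelling of the helper above (str instead of basestring, and without the shared mutable default arguments), plus a tiny usage example:

def flatten_dict(d, prefix=None, result=None):
    prefix = prefix or []
    result = result if result is not None else {}
    for key, value in d.items():
        if isinstance(value, str):
            result[".".join(prefix + [key])] = value.lower()
        elif hasattr(value, "items"):
            flatten_dict(value, prefix=prefix + [key], result=result)
    return result

event = {
    "type": "m.room.message",
    "content": {"msgtype": "m.text", "body": "Hello World"},
}
print(flatten_dict(event))
# {'type': 'm.room.message', 'content.msgtype': 'm.text',
#  'content.body': 'hello world'}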

View file

@ -37,14 +37,14 @@ class PusherPool:
self._start_pushers(pushers)
@defer.inlineCallbacks
def add_pusher(self, user_name, access_token, profile_tag, kind, app_id,
def add_pusher(self, user_id, access_token, profile_tag, kind, app_id,
app_display_name, device_display_name, pushkey, lang, data):
# we try to create the pusher just to validate the config: it
# will then get pulled out of the database,
# recreated, added and started: this means we have only one
# code path adding pushers.
self._create_pusher({
"user_name": user_name,
"user_name": user_id,
"kind": kind,
"profile_tag": profile_tag,
"app_id": app_id,
@ -59,7 +59,7 @@ class PusherPool:
"failing_since": None
})
yield self._add_pusher_to_store(
user_name, access_token, profile_tag, kind, app_id,
user_id, access_token, profile_tag, kind, app_id,
app_display_name, device_display_name,
pushkey, lang, data
)
@ -94,11 +94,11 @@ class PusherPool:
self.remove_pusher(p['app_id'], p['pushkey'], p['user_name'])
@defer.inlineCallbacks
def _add_pusher_to_store(self, user_name, access_token, profile_tag, kind,
def _add_pusher_to_store(self, user_id, access_token, profile_tag, kind,
app_id, app_display_name, device_display_name,
pushkey, lang, data):
yield self.store.add_pusher(
user_name=user_name,
user_id=user_id,
access_token=access_token,
profile_tag=profile_tag,
kind=kind,
@ -110,14 +110,14 @@ class PusherPool:
lang=lang,
data=data,
)
self._refresh_pusher(app_id, pushkey, user_name)
self._refresh_pusher(app_id, pushkey, user_id)
def _create_pusher(self, pusherdict):
if pusherdict['kind'] == 'http':
return HttpPusher(
self.hs,
profile_tag=pusherdict['profile_tag'],
user_name=pusherdict['user_name'],
user_id=pusherdict['user_name'],
app_id=pusherdict['app_id'],
app_display_name=pusherdict['app_display_name'],
device_display_name=pusherdict['device_display_name'],
@ -135,14 +135,14 @@ class PusherPool:
)
@defer.inlineCallbacks
def _refresh_pusher(self, app_id, pushkey, user_name):
def _refresh_pusher(self, app_id, pushkey, user_id):
resultlist = yield self.store.get_pushers_by_app_id_and_pushkey(
app_id, pushkey
)
p = None
for r in resultlist:
if r['user_name'] == user_name:
if r['user_name'] == user_id:
p = r
if p:
@ -171,12 +171,12 @@ class PusherPool:
logger.info("Started pushers")
@defer.inlineCallbacks
def remove_pusher(self, app_id, pushkey, user_name):
fullid = "%s:%s:%s" % (app_id, pushkey, user_name)
def remove_pusher(self, app_id, pushkey, user_id):
fullid = "%s:%s:%s" % (app_id, pushkey, user_id)
if fullid in self.pushers:
logger.info("Stopping pusher %s", fullid)
self.pushers[fullid].stop()
del self.pushers[fullid]
yield self.store.delete_pusher_by_app_id_pushkey_user_name(
app_id, pushkey, user_name
yield self.store.delete_pusher_by_app_id_pushkey_user_id(
app_id, pushkey, user_id
)
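
The comment near the top of this file's diff describes the single-code-path approach: a pusher is constructed once purely to validate its config, then persisted and re-created from the store, so startup and runtime additions go through the same path. A toy model of that flow follows; everything in it is illustrative, and note that the stored dict keeps the 'user_name' key even though the argument is now user_id, as the hunks above show:

class ToyPusherPool(object):
    def __init__(self):
        self.store = []       # stands in for the pushers table
        self.pushers = {}

    def _create_pusher(self, pusherdict):
        # validation happens here; only 'http' pushers are understood
        if pusherdict["kind"] != "http":
            raise ValueError("Unknown pusher kind")
        return dict(pusherdict)

    def add_pusher(self, pusherdict):
        self._create_pusher(pusherdict)   # create just to validate, discard
        self.store.append(dict(pusherdict))
        self._refresh_pusher(pusherdict["app_id"], pusherdict["pushkey"])

    def _refresh_pusher(self, app_id, pushkey):
        # pull the row back out of the "database" and start a real pusher
        for row in self.store:
            if row["app_id"] == app_id and row["pushkey"] == pushkey:
                fullid = "%s:%s:%s" % (app_id, pushkey, row["user_name"])
                self.pushers[fullid] = self._create_pusher(row)

pool = ToyPusherPool()
pool.add_pusher({
    "kind": "http", "app_id": "com.example.app",
    "pushkey": "abc123", "user_name": "@alice:example.com",
})
print(list(pool.pushers))  # ['com.example.app:abc123:@alice:example.com']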