2014-11-19 13:20:59 -05:00
|
|
|
# -*- coding: utf-8 -*-
|
2015-01-28 08:17:55 -05:00
|
|
|
# Copyright 2015 OpenMarket Ltd
|
2014-11-19 13:20:59 -05:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
|
|
|
from twisted.internet import defer
|
|
|
|
|
|
|
|
from synapse.streams.config import PaginationConfig
|
2015-01-29 13:38:22 -05:00
|
|
|
from synapse.types import StreamToken, UserID
|
2014-11-19 13:20:59 -05:00
|
|
|
|
|
|
|
import synapse.util.async
|
2015-01-29 13:38:22 -05:00
|
|
|
import baserules
|
2014-11-19 13:20:59 -05:00
|
|
|
|
|
|
|
import logging
|
2015-01-23 12:07:06 -05:00
|
|
|
import fnmatch
|
|
|
|
import json
|
2014-11-19 13:20:59 -05:00
|
|
|
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
2014-12-03 08:37:02 -05:00
|
|
|
|
2014-11-19 13:20:59 -05:00
|
|
|
class Pusher(object):
    """Base class for a single push-notification delivery agent.

    A Pusher follows the event stream for one user and delivers a
    notification per event via dispatch_push(), which subclasses
    override.  It remembers how far through the stream it has got
    (last_token) and retries failed deliveries with exponential backoff.
    """
    # Initial retry delay after a failed dispatch, in milliseconds.
    INITIAL_BACKOFF = 1000
    # Upper bound on the retry delay, in milliseconds (1 hour).
    MAX_BACKOFF = 60 * 60 * 1000
    # Stop retrying a single notification after this long, in
    # milliseconds (24 hours).
    GIVE_UP_AFTER = 24 * 60 * 60 * 1000
    # Actions used when no push rule produces a usable set of actions.
    DEFAULT_ACTIONS = ['notify']
|
2014-11-19 13:20:59 -05:00
|
|
|
|
2015-01-20 06:49:48 -05:00
|
|
|
    def __init__(self, _hs, instance_handle, user_name, app_id,
                 app_display_name, device_display_name, pushkey, pushkey_ts,
                 data, last_token, last_success, failing_since):
        """Record pusher configuration and grab homeserver service handles.

        Args:
            _hs: the HomeServer object
            instance_handle: identifier for this pusher instance on a device
            user_name: Matrix user ID this pusher notifies for
            app_id: identifier of the application being notified
            app_display_name: human-readable application name
            device_display_name: human-readable device name
            pushkey: opaque per-device key passed to the push gateway
            pushkey_ts: timestamp the pushkey was registered at
            data: pusher-type-specific configuration
            last_token: event-stream token already processed up to, or
                falsy for a brand-new pusher (see start())
            last_success: time of last successful dispatch (stored only)
            failing_since: time dispatches started failing, or None
        """
        self.hs = _hs
        self.evStreamHandler = self.hs.get_handlers().event_stream_handler
        self.store = self.hs.get_datastore()
        self.clock = self.hs.get_clock()
        self.instance_handle = instance_handle
        self.user_name = user_name
        self.app_id = app_id
        self.app_display_name = app_display_name
        self.device_display_name = device_display_name
        self.pushkey = pushkey
        self.pushkey_ts = pushkey_ts
        self.data = data
        self.last_token = last_token
        self.last_success = last_success  # not actually used
        self.backoff_delay = Pusher.INITIAL_BACKOFF
        self.failing_since = failing_since
        # Cleared by stop() to make the start() loop exit.
        self.alive = True

        # The last value of last_active_time that we saw
        self.last_last_active_time = 0
        # Whether we may have delivered a notification the user has not
        # seen yet; used by presence_changed() to reset the badge count.
        self.has_unread = True
|
2015-01-28 06:55:49 -05:00
|
|
|
|
2015-01-23 12:07:06 -05:00
|
|
|
@defer.inlineCallbacks
|
|
|
|
def _actions_for_event(self, ev):
|
2015-01-13 08:14:41 -05:00
|
|
|
"""
|
|
|
|
This should take into account notification settings that the user
|
|
|
|
has configured both globally and per-room when we have the ability
|
|
|
|
to do such things.
|
|
|
|
"""
|
|
|
|
if ev['user_id'] == self.user_name:
|
|
|
|
# let's assume you probably know about messages you sent yourself
|
2015-01-23 12:07:06 -05:00
|
|
|
defer.returnValue(['dont_notify'])
|
|
|
|
|
2015-01-26 12:27:28 -05:00
|
|
|
if ev['type'] == 'm.room.member':
|
|
|
|
if ev['state_key'] != self.user_name:
|
|
|
|
defer.returnValue(['dont_notify'])
|
|
|
|
|
2015-01-23 12:07:06 -05:00
|
|
|
rules = yield self.store.get_push_rules_for_user_name(self.user_name)
|
|
|
|
|
2015-01-29 13:38:22 -05:00
|
|
|
for r in rules:
|
|
|
|
r['conditions'] = json.loads(r['conditions'])
|
|
|
|
r['actions'] = json.loads(r['actions'])
|
|
|
|
|
|
|
|
user_name_localpart = UserID.from_string(self.user_name).localpart
|
|
|
|
|
|
|
|
rules.extend(baserules.make_base_rules(user_name_localpart))
|
|
|
|
|
|
|
|
# get *our* member event for display name matching
|
|
|
|
member_events_for_room = yield self.store.get_current_state(
|
|
|
|
room_id=ev['room_id'],
|
|
|
|
event_type='m.room.member',
|
|
|
|
state_key=self.user_name
|
|
|
|
)
|
|
|
|
my_display_name = None
|
|
|
|
if len(member_events_for_room) > 0:
|
|
|
|
my_display_name = member_events_for_room[0].content['displayname']
|
|
|
|
|
2015-01-23 12:07:06 -05:00
|
|
|
for r in rules:
|
|
|
|
matches = True
|
|
|
|
|
2015-01-29 13:38:22 -05:00
|
|
|
conditions = r['conditions']
|
|
|
|
actions = r['actions']
|
2015-01-23 12:07:06 -05:00
|
|
|
|
|
|
|
for c in conditions:
|
2015-01-29 13:38:22 -05:00
|
|
|
matches &= self._event_fulfills_condition(
|
|
|
|
ev, c, display_name=my_display_name
|
|
|
|
)
|
2015-01-23 12:07:06 -05:00
|
|
|
# ignore rules with no actions (we have an explict 'dont_notify'
|
|
|
|
if len(actions) == 0:
|
|
|
|
logger.warn(
|
|
|
|
"Ignoring rule id %s with no actions for user %s" %
|
|
|
|
(r['rule_id'], r['user_name'])
|
|
|
|
)
|
|
|
|
continue
|
|
|
|
if matches:
|
|
|
|
defer.returnValue(actions)
|
|
|
|
|
|
|
|
defer.returnValue(Pusher.DEFAULT_ACTIONS)
|
|
|
|
|
2015-01-29 13:38:22 -05:00
|
|
|
def _event_fulfills_condition(self, ev, condition, display_name):
|
2015-01-23 12:07:06 -05:00
|
|
|
if condition['kind'] == 'event_match':
|
|
|
|
if 'pattern' not in condition:
|
|
|
|
logger.warn("event_match condition with no pattern")
|
|
|
|
return False
|
|
|
|
pat = condition['pattern']
|
|
|
|
|
|
|
|
val = _value_for_dotted_key(condition['key'], ev)
|
2015-01-29 13:38:22 -05:00
|
|
|
if val is None:
|
|
|
|
return False
|
|
|
|
return fnmatch.fnmatch(val.upper(), pat.upper())
|
2015-01-23 12:07:06 -05:00
|
|
|
elif condition['kind'] == 'device':
|
|
|
|
if 'instance_handle' not in condition:
|
|
|
|
return True
|
|
|
|
return condition['instance_handle'] == self.instance_handle
|
2015-01-29 13:38:22 -05:00
|
|
|
elif condition['kind'] == 'contains_display_name':
|
|
|
|
# This is special because display names can be different
|
|
|
|
# between rooms and so you can't really hard code it in a rule.
|
|
|
|
# Optimisation: we should cache these names and update them from
|
|
|
|
# the event stream.
|
|
|
|
if 'content' not in ev or 'body' not in ev['content']:
|
|
|
|
return False
|
|
|
|
return fnmatch.fnmatch(
|
|
|
|
ev['content']['body'].upper(), "*%s*" % (display_name.upper(),)
|
|
|
|
)
|
2015-01-23 12:07:06 -05:00
|
|
|
else:
|
|
|
|
return True
|
2015-01-13 08:14:41 -05:00
|
|
|
|
2015-01-15 11:56:18 -05:00
|
|
|
@defer.inlineCallbacks
|
|
|
|
def get_context_for_event(self, ev):
|
|
|
|
name_aliases = yield self.store.get_room_name_and_aliases(
|
|
|
|
ev['room_id']
|
|
|
|
)
|
|
|
|
|
|
|
|
ctx = {'aliases': name_aliases[1]}
|
|
|
|
if name_aliases[0] is not None:
|
|
|
|
ctx['name'] = name_aliases[0]
|
|
|
|
|
2015-01-29 13:51:22 -05:00
|
|
|
their_member_events_for_room = yield self.store.get_current_state(
|
|
|
|
room_id=ev['room_id'],
|
|
|
|
event_type='m.room.member',
|
|
|
|
state_key=ev['user_id']
|
|
|
|
)
|
|
|
|
if len(their_member_events_for_room) > 0:
|
|
|
|
dn = their_member_events_for_room[0].content['displayname']
|
|
|
|
if dn is not None:
|
|
|
|
ctx['sender_display_name'] = dn
|
|
|
|
|
2015-01-15 11:56:18 -05:00
|
|
|
defer.returnValue(ctx)
|
|
|
|
|
2014-11-19 13:20:59 -05:00
|
|
|
    @defer.inlineCallbacks
    def start(self):
        """Main loop: follow the event stream and push each event.

        Runs until stop() is called.  Progress through the stream is
        tracked in self.last_token and persisted via the datastore so a
        restart resumes (and retries) from the last unprocessed event.
        Failed dispatches are retried with exponential backoff, and a
        notification is abandoned after GIVE_UP_AFTER ms so that a
        newly-fixed endpoint is not flooded with stale pushes.
        """
        if not self.last_token:
            # First-time setup: get a token to start from (we can't
            # just start from no token, ie. 'now'
            # because we need the result to be reproduceable in case
            # we fail to dispatch the push)
            config = PaginationConfig(from_token=None, limit='1')
            chunk = yield self.evStreamHandler.get_stream(
                self.user_name, config, timeout=0)
            self.last_token = chunk['end']
            self.store.update_pusher_last_token(
                self.user_name, self.pushkey, self.last_token)
            logger.info("Pusher %s for user %s starting from token %s",
                        self.pushkey, self.user_name, self.last_token)

        while self.alive:
            from_tok = StreamToken.from_string(self.last_token)
            config = PaginationConfig(from_token=from_tok, limit='1')
            # Effectively-infinite timeout: we rely on stop() to end the loop.
            chunk = yield self.evStreamHandler.get_stream(
                self.user_name, config,
                timeout=100*365*24*60*60*1000, affect_presence=False
            )

            # limiting to 1 may get 1 event plus 1 presence event, so
            # pick out the actual event
            single_event = None
            for c in chunk['chunk']:
                if 'event_id' in c:  # Hmmm...
                    single_event = c
                    break
            if not single_event:
                # Nothing pushable in this chunk: just advance the token.
                self.last_token = chunk['end']
                continue

            if not self.alive:
                continue

            processed = False
            actions = yield self._actions_for_event(single_event)
            tweaks = _tweaks_for_actions(actions)

            if len(actions) == 0:
                logger.warn("Empty actions! Using default action.")
                actions = Pusher.DEFAULT_ACTIONS
            if 'notify' not in actions and 'dont_notify' not in actions:
                logger.warn("Neither notify nor dont_notify in actions: adding default")
                # NOTE(review): this mutates the actions list in place, which
                # may be the decoded rule's own list -- confirm harmless.
                actions.extend(Pusher.DEFAULT_ACTIONS)
            if 'dont_notify' in actions:
                logger.debug(
                    "%s for %s: dont_notify",
                    single_event['event_id'], self.user_name
                )
                processed = True
            else:
                # dispatch_push returns a list/tuple of rejected pushkeys on
                # success, or False if the push should be retried.
                rejected = yield self.dispatch_push(single_event, tweaks)
                self.has_unread = True
                if isinstance(rejected, list) or isinstance(rejected, tuple):
                    processed = True
                    for pk in rejected:
                        if pk != self.pushkey:
                            # for sanity, we only remove the pushkey if it
                            # was the one we actually sent...
                            logger.warn(
                                ("Ignoring rejected pushkey %s because we"
                                 " didn't send it"), pk
                            )
                        else:
                            logger.info(
                                "Pushkey %s was rejected: removing",
                                pk
                            )
                            yield self.hs.get_pusherpool().remove_pusher(
                                self.app_id, pk
                            )

            if not self.alive:
                continue

            if processed:
                # Success: reset backoff, advance and persist the token.
                self.backoff_delay = Pusher.INITIAL_BACKOFF
                self.last_token = chunk['end']
                self.store.update_pusher_last_token_and_success(
                    self.user_name,
                    self.pushkey,
                    self.last_token,
                    self.clock.time_msec()
                )
                if self.failing_since:
                    self.failing_since = None
                    self.store.update_pusher_failing_since(
                        self.user_name,
                        self.pushkey,
                        self.failing_since)
            else:
                if not self.failing_since:
                    self.failing_since = self.clock.time_msec()
                    self.store.update_pusher_failing_since(
                        self.user_name,
                        self.pushkey,
                        self.failing_since
                    )

                if (self.failing_since and
                        self.failing_since <
                        self.clock.time_msec() - Pusher.GIVE_UP_AFTER):
                    # we really only give up so that if the URL gets
                    # fixed, we don't suddenly deliver a load
                    # of old notifications.
                    logger.warn("Giving up on a notification to user %s, "
                                "pushkey %s",
                                self.user_name, self.pushkey)
                    self.backoff_delay = Pusher.INITIAL_BACKOFF
                    self.last_token = chunk['end']
                    self.store.update_pusher_last_token(
                        self.user_name,
                        self.pushkey,
                        self.last_token
                    )

                    self.failing_since = None
                    self.store.update_pusher_failing_since(
                        self.user_name,
                        self.pushkey,
                        self.failing_since
                    )
                else:
                    logger.warn("Failed to dispatch push for user %s "
                                "(failing for %dms)."
                                "Trying again in %dms",
                                self.user_name,
                                self.clock.time_msec() - self.failing_since,
                                self.backoff_delay)
                    yield synapse.util.async.sleep(self.backoff_delay / 1000.0)
                    self.backoff_delay *= 2
                    if self.backoff_delay > Pusher.MAX_BACKOFF:
                        self.backoff_delay = Pusher.MAX_BACKOFF
|
2014-11-19 13:20:59 -05:00
|
|
|
|
2014-12-18 09:49:22 -05:00
|
|
|
    def stop(self):
        """Ask the start() loop to exit after its current iteration."""
        self.alive = False
|
|
|
|
|
2015-01-23 12:07:06 -05:00
|
|
|
    def dispatch_push(self, p, tweaks):
        """
        Overridden by implementing classes to actually deliver the notification
        Args:
            p: The event to notify for as a single event from the event stream
            tweaks: dict of notification tweaks derived from the matched
                rule's actions (e.g. 'sound'), see _tweaks_for_actions()
        Returns: If the notification was delivered, an array containing any
            pushkeys that were rejected by the push gateway.
            False if the notification could not be delivered (ie.
            should be retried).
        """
        pass
|
|
|
|
|
2015-01-28 06:55:49 -05:00
|
|
|
    def reset_badge_count(self):
        """Zero the unread badge on the client.

        No-op here; overridden by implementing classes that support it.
        """
        pass
|
|
|
|
|
|
|
|
def presence_changed(self, state):
|
|
|
|
"""
|
|
|
|
We clear badge counts whenever a user's last_active time is bumped
|
|
|
|
This is by no means perfect but I think it's the best we can do
|
|
|
|
without read receipts.
|
|
|
|
"""
|
|
|
|
if 'last_active' in state.state:
|
|
|
|
last_active = state.state['last_active']
|
|
|
|
if last_active > self.last_last_active_time:
|
|
|
|
self.last_last_active_time = last_active
|
2015-01-29 11:10:01 -05:00
|
|
|
if self.has_unread:
|
|
|
|
logger.info("Resetting badge count for %s", self.user_name)
|
|
|
|
self.reset_badge_count()
|
|
|
|
self.has_unread = False
|
|
|
|
|
2015-01-28 06:55:49 -05:00
|
|
|
|
2015-01-23 12:07:06 -05:00
|
|
|
def _value_for_dotted_key(dotted_key, event):
|
|
|
|
parts = dotted_key.split(".")
|
|
|
|
val = event
|
|
|
|
while len(parts) > 0:
|
|
|
|
if parts[0] not in val:
|
|
|
|
return None
|
|
|
|
val = val[parts[0]]
|
|
|
|
parts = parts[1:]
|
|
|
|
return val
|
|
|
|
|
2015-01-29 11:10:35 -05:00
|
|
|
|
2015-01-23 12:07:06 -05:00
|
|
|
def _tweaks_for_actions(actions):
|
|
|
|
tweaks = {}
|
|
|
|
for a in actions:
|
|
|
|
if not isinstance(a, dict):
|
|
|
|
continue
|
|
|
|
if 'set_sound' in a:
|
|
|
|
tweaks['sound'] = a['set_sound']
|
|
|
|
return tweaks
|
|
|
|
|
2015-01-29 11:10:35 -05:00
|
|
|
|
2014-11-19 13:20:59 -05:00
|
|
|
class PusherConfigException(Exception):
    """Raised when a pusher is configured with invalid parameters."""

    def __init__(self, msg):
        # Single inheritance from Exception: a direct base-class call is
        # equivalent to the super() form.
        Exception.__init__(self, msg)
|