Merge branch 'client_v2_filter' into client_v2_sync
Commit 8e571cbed8
@@ -111,6 +111,7 @@ class NotFoundError(SynapseError):
             **kwargs
         )
 
+
 class AuthError(SynapseError):
     """An error raised when there was a problem authorising an event."""
 
@@ -74,7 +74,7 @@ class Filtering(object):
             defer.returnValue(self._filter_with_definition(events, definition))
         except KeyError:
             # return all events if definition isn't specified.
             defer.returnValue(events)
 
     def _filter_with_definition(self, events, definition):
         return [e for e in events if self._passes_definition(definition, e)]
@@ -94,14 +94,12 @@ class Filtering(object):
         # * For senders/rooms: Literal match only
         # * "not_" checks take presedence (e.g. if "m.*" is in both 'types'
         # and 'not_types' then it is treated as only being in 'not_types')
 
         # room checks
         if hasattr(event, "room_id"):
             room_id = event.room_id
-            allow_rooms = definition["rooms"] if "rooms" in definition else None
-            reject_rooms = (
-                definition["not_rooms"] if "not_rooms" in definition else None
-            )
+            allow_rooms = definition.get("rooms", None)
+            reject_rooms = definition.get("not_rooms", None)
             if reject_rooms and room_id in reject_rooms:
                 return False
             if allow_rooms and room_id not in allow_rooms:
@@ -111,12 +109,8 @@ class Filtering(object):
         if hasattr(event, "sender"):
             # Should we be including event.state_key for some event types?
             sender = event.sender
-            allow_senders = (
-                definition["senders"] if "senders" in definition else None
-            )
-            reject_senders = (
-                definition["not_senders"] if "not_senders" in definition else None
-            )
+            allow_senders = definition.get("senders", None)
+            reject_senders = definition.get("not_senders", None)
             if reject_senders and sender in reject_senders:
                 return False
             if allow_senders and sender not in allow_senders:
@@ -176,7 +170,6 @@ class Filtering(object):
             if key in user_filter["room"]:
                 self._check_definition(user_filter["room"][key])
 
-
     def _check_definition(self, definition):
         """Check if the provided definition is valid.
 
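The Filtering hunks above swap the verbose definition["x"]-if-present conditional for dict.get(). As a rough standalone sketch of the allow/reject logic these lines implement (illustrative only: plain dicts stand in for Synapse's event objects and the Filtering class):

    def passes_definition(definition, event):
        # "not_" lists take precedence over the corresponding allow lists
        room_id = event.get("room_id")
        if room_id is not None:
            allow_rooms = definition.get("rooms", None)
            reject_rooms = definition.get("not_rooms", None)
            if reject_rooms and room_id in reject_rooms:
                return False
            if allow_rooms and room_id not in allow_rooms:
                return False

        sender = event.get("sender")
        if sender is not None:
            allow_senders = definition.get("senders", None)
            reject_senders = definition.get("not_senders", None)
            if reject_senders and sender in reject_senders:
                return False
            if allow_senders and sender not in allow_senders:
                return False

        return True

    # "not_rooms" wins even when the room is also listed in "rooms"
    definition = {"rooms": ["!a:example.org"], "not_rooms": ["!a:example.org"]}
    event = {"room_id": "!a:example.org", "sender": "@user:example.org"}
    assert passes_definition(definition, event) is False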
@@ -56,6 +56,7 @@ class Pusher(object):
 
         # The last value of last_active_time that we saw
         self.last_last_active_time = 0
+        self.has_unread = True
 
     @defer.inlineCallbacks
     def _actions_for_event(self, ev):
@@ -180,6 +181,7 @@ class Pusher(object):
                     processed = True
                 else:
                     rejected = yield self.dispatch_push(single_event, tweaks)
+                    self.has_unread = True
                     if isinstance(rejected, list) or isinstance(rejected, tuple):
                         processed = True
                         for pk in rejected:
@@ -187,8 +189,8 @@ class Pusher(object):
                                 # for sanity, we only remove the pushkey if it
                                 # was the one we actually sent...
                                 logger.warn(
-                                    ("Ignoring rejected pushkey %s because we "
-                                     "didn't send it"), pk
+                                    ("Ignoring rejected pushkey %s because we"
+                                     " didn't send it"), pk
                                 )
                             else:
                                 logger.info(
@@ -234,8 +236,7 @@ class Pusher(object):
                     # of old notifications.
                     logger.warn("Giving up on a notification to user %s, "
                                 "pushkey %s",
-                                self.user_name, self.pushkey
-                    )
+                                self.user_name, self.pushkey)
                     self.backoff_delay = Pusher.INITIAL_BACKOFF
                     self.last_token = chunk['end']
                     self.store.update_pusher_last_token(
@@ -256,8 +257,7 @@ class Pusher(object):
                                 "Trying again in %dms",
                                 self.user_name,
                                 self.clock.time_msec() - self.failing_since,
-                                self.backoff_delay
-                    )
+                                self.backoff_delay)
                     yield synapse.util.async.sleep(self.backoff_delay / 1000.0)
                     self.backoff_delay *= 2
                     if self.backoff_delay > Pusher.MAX_BACKOFF:
@@ -290,9 +290,11 @@ class Pusher(object):
         if 'last_active' in state.state:
             last_active = state.state['last_active']
             if last_active > self.last_last_active_time:
-                logger.info("Resetting badge count for %s", self.user_name)
-                self.reset_badge_count()
                 self.last_last_active_time = last_active
+                if self.has_unread:
+                    logger.info("Resetting badge count for %s", self.user_name)
+                    self.reset_badge_count()
+                    self.has_unread = False
 
 
 def _value_for_dotted_key(dotted_key, event):
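Taken together, the Pusher hunks add a has_unread flag: it is set whenever a push is dispatched, and the badge is now reset on new user activity only if something was pushed since the last reset. A rough standalone sketch of that flow (BadgeTracker is a hypothetical name, not the real Pusher class):

    class BadgeTracker(object):
        def __init__(self):
            self.last_last_active_time = 0
            self.has_unread = True

        def on_push_dispatched(self):
            # a notification went out, so there is something to clear later
            self.has_unread = True

        def on_presence(self, last_active):
            # reset at most once per new activity timestamp, and only if a
            # push actually happened since the previous reset
            if last_active > self.last_last_active_time:
                self.last_last_active_time = last_active
                if self.has_unread:
                    print("resetting badge count")
                    self.has_unread = False

    tracker = BadgeTracker()
    tracker.on_presence(100)    # resets (has_unread starts True)
    tracker.on_presence(100)    # same timestamp: nothing happens
    tracker.on_push_dispatched()
    tracker.on_presence(200)    # new activity after a push: resets again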
@@ -305,6 +307,7 @@ def _value_for_dotted_key(dotted_key, event):
         parts = parts[1:]
     return val
 
+
 def _tweaks_for_actions(actions):
     tweaks = {}
     for a in actions:
@@ -314,6 +317,7 @@ def _tweaks_for_actions(actions):
             tweaks['sound'] = a['set_sound']
     return tweaks
 
+
 class PusherConfigException(Exception):
     def __init__(self, msg):
         super(PusherConfigException, self).__init__(msg)
@@ -71,11 +71,11 @@ class HttpPusher(Pusher):
                 # we may have to fetch this over federation and we
                 # can't trust it anyway: is it worth it?
                 #'from_display_name': 'Steve Stevington'
-                'counts': { #-- we don't mark messages as read yet so
+                'counts': {  # -- we don't mark messages as read yet so
                              # we have no way of knowing
                     # Just set the badge to 1 until we have read receipts
                     'unread': 1,
                     # 'missed_calls': 2
                 },
                 'devices': [
                     {
@@ -142,4 +142,4 @@ class HttpPusher(Pusher):
         rejected = []
         if 'rejected' in resp:
             rejected = resp['rejected']
         defer.returnValue(rejected)
@@ -149,4 +149,4 @@ class PusherPool:
             logger.info("Stopping pusher %s", fullid)
             self.pushers[fullid].stop()
             del self.pushers[fullid]
         yield self.store.delete_pusher_by_app_id_pushkey(app_id, pushkey)
@@ -11,4 +11,4 @@
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
@@ -30,9 +30,9 @@ class PushRuleRestServlet(ClientV1RestServlet):
         'sender': 1,
         'room': 2,
         'content': 3,
-        'override': 4
+        'override': 4,
     }
-    PRIORITY_CLASS_INVERSE_MAP = {v: k for k,v in PRIORITY_CLASS_MAP.items()}
+    PRIORITY_CLASS_INVERSE_MAP = {v: k for k, v in PRIORITY_CLASS_MAP.items()}
     SLIGHTLY_PEDANTIC_TRAILING_SLASH_ERROR = (
         "Unrecognised request: You probably wanted a trailing slash")
 
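The trailing comma and the space after the comma in the dict comprehension are style-only; the inverse mapping is unchanged. A quick worked example using just the keys visible in this hunk:

    PRIORITY_CLASS_MAP = {
        'sender': 1,
        'room': 2,
        'content': 3,
        'override': 4,
    }
    PRIORITY_CLASS_INVERSE_MAP = {v: k for k, v in PRIORITY_CLASS_MAP.items()}
    assert PRIORITY_CLASS_INVERSE_MAP == {1: 'sender', 2: 'room', 3: 'content', 4: 'override'}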
@@ -260,7 +260,9 @@ class PushRuleRestServlet(ClientV1RestServlet):
 
         if path == []:
             # we're a reference impl: pedantry is our job.
-            raise UnrecognizedRequestError(PushRuleRestServlet.SLIGHTLY_PEDANTIC_TRAILING_SLASH_ERROR)
+            raise UnrecognizedRequestError(
+                PushRuleRestServlet.SLIGHTLY_PEDANTIC_TRAILING_SLASH_ERROR
+            )
 
         if path[0] == '':
             defer.returnValue((200, rules))
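The re-wrapped raise only changes line length, but the two checks around it cover distinct cases: an empty path list means the URL had no trailing slash, while a first element of '' means it ended with one. A small illustration of how both shapes arise (assuming the servlet splits the remainder of the URL on '/'; the actual routing lives elsewhere):

    def split_path(url, prefix="/pushrules"):
        rest = url[len(prefix):]
        # keep everything after the prefix, split on '/', dropping the leading ''
        return rest.split('/')[1:] if rest.startswith('/') else []

    print(split_path("/pushrules"))          # [] -> "you probably wanted a trailing slash"
    print(split_path("/pushrules/"))         # [''] -> whole ruleset
    print(split_path("/pushrules/device/"))  # ['device', '']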
@@ -271,7 +273,9 @@ class PushRuleRestServlet(ClientV1RestServlet):
         elif path[0] == 'device':
             path = path[1:]
             if path == []:
-                raise UnrecognizedRequestError(PushRuleRestServlet.SLIGHTLY_PEDANTIC_TRAILING_SLASH_ERROR)
+                raise UnrecognizedRequestError(
+                    PushRuleRestServlet.SLIGHTLY_PEDANTIC_TRAILING_SLASH_ERROR
+                )
             if path[0] == '':
                 defer.returnValue((200, rules['device']))
 
@@ -290,11 +294,13 @@ class PushRuleRestServlet(ClientV1RestServlet):
     def on_OPTIONS(self, _):
         return 200, {}
 
+
 def _add_empty_priority_class_arrays(d):
     for pc in PushRuleRestServlet.PRIORITY_CLASS_MAP.keys():
         d[pc] = []
     return d
 
+
 def _instance_handle_from_conditions(conditions):
     """
     Given a list of conditions, return the instance handle of the
@@ -305,9 +311,12 @@ def _instance_handle_from_conditions(conditions):
             return c['instance_handle']
     return None
 
+
 def _filter_ruleset_with_path(ruleset, path):
     if path == []:
-        raise UnrecognizedRequestError(PushRuleRestServlet.SLIGHTLY_PEDANTIC_TRAILING_SLASH_ERROR)
+        raise UnrecognizedRequestError(
+            PushRuleRestServlet.SLIGHTLY_PEDANTIC_TRAILING_SLASH_ERROR
+        )
 
     if path[0] == '':
         return ruleset
@@ -316,7 +325,9 @@ def _filter_ruleset_with_path(ruleset, path):
         raise UnrecognizedRequestError()
     path = path[1:]
     if path == []:
-        raise UnrecognizedRequestError(PushRuleRestServlet.SLIGHTLY_PEDANTIC_TRAILING_SLASH_ERROR)
+        raise UnrecognizedRequestError(
+            PushRuleRestServlet.SLIGHTLY_PEDANTIC_TRAILING_SLASH_ERROR
+        )
     if path[0] == '':
         return ruleset[template_kind]
     rule_id = path[0]
@@ -325,6 +336,7 @@ def _filter_ruleset_with_path(ruleset, path):
             return r
     raise NotFoundError
 
+
 def _priority_class_from_spec(spec):
     if spec['template'] not in PushRuleRestServlet.PRIORITY_CLASS_MAP.keys():
         raise InvalidRuleException("Unknown template: %s" % (spec['kind']))
@@ -335,6 +347,7 @@ def _priority_class_from_spec(spec):
 
     return pc
 
+
 def _priority_class_to_template_name(pc):
     if pc > PushRuleRestServlet.PRIORITY_CLASS_MAP['override']:
         # per-device
@@ -343,6 +356,7 @@ def _priority_class_to_template_name(pc):
     else:
         return PushRuleRestServlet.PRIORITY_CLASS_INVERSE_MAP[pc]
 
+
 def _rule_to_template(rule):
     template_name = _priority_class_to_template_name(rule['priority_class'])
     if template_name in ['override', 'underride']:
@@ -359,8 +373,9 @@ def _rule_to_template(rule):
         ret["pattern"] = thecond["pattern"]
     return ret
 
+
 def _strip_device_condition(rule):
-    for i,c in enumerate(rule['conditions']):
+    for i, c in enumerate(rule['conditions']):
         if c['kind'] == 'device':
             del rule['conditions'][i]
     return rule
@@ -117,7 +117,7 @@ class PushRuleStore(SQLBaseStore):
             new_rule['priority'] = new_rule_priority
 
         sql = (
-            "SELECT COUNT(*) FROM "+PushRuleTable.table_name+
+            "SELECT COUNT(*) FROM " + PushRuleTable.table_name +
             " WHERE user_name = ? AND priority_class = ? AND priority = ?"
         )
         txn.execute(sql, (user_name, priority_class, new_rule_priority))
@@ -146,10 +146,11 @@ class PushRuleStore(SQLBaseStore):
 
         txn.execute(sql, new_rule.values())
 
-    def _add_push_rule_highest_priority_txn(self, txn, user_name, priority_class, **kwargs):
+    def _add_push_rule_highest_priority_txn(self, txn, user_name,
+                                            priority_class, **kwargs):
         # find the highest priority rule in that class
         sql = (
-            "SELECT COUNT(*), MAX(priority) FROM "+PushRuleTable.table_name+
+            "SELECT COUNT(*), MAX(priority) FROM " + PushRuleTable.table_name +
             " WHERE user_name = ? and priority_class = ?"
         )
         txn.execute(sql, (user_name, priority_class))
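The SQL lines above only gain spaces around the string concatenation; the query itself is unchanged. For illustration, the general shape of running that COUNT/MAX query against a cursor, shown with sqlite3 rather than Synapse's SQLBaseStore transaction wrapper (the table and values here are made up):

    import sqlite3

    conn = sqlite3.connect(":memory:")
    txn = conn.cursor()
    txn.execute(
        "CREATE TABLE push_rules ("
        "user_name TEXT, priority_class INTEGER, priority INTEGER)"
    )

    table_name = "push_rules"  # stands in for PushRuleTable.table_name
    sql = (
        "SELECT COUNT(*), MAX(priority) FROM " + table_name +
        " WHERE user_name = ? and priority_class = ?"
    )
    txn.execute(sql, ("@user:example.org", 1))
    count, max_priority = txn.fetchone()
    # with no matching rows, COUNT(*) is 0 and MAX(priority) is NULL (None)
    print(count, max_priority)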
@@ -209,4 +210,4 @@ class PushRuleTable(Table):
         "actions",
     ]
 
     EntryType = collections.namedtuple("PushRuleEntry", fields)
@@ -170,4 +170,4 @@ class PushersTable(Table):
         "failing_since"
     ]
 
     EntryType = collections.namedtuple("PusherEntry", fields)
|