mirror of
https://git.anonymousland.org/anonymousland/synapse-product.git
synced 2024-10-01 08:25:44 -04:00
WIP to announce deleted devices over federation
Previously we queued up the poke correctly when the device was deleted, but then the actual EDU wouldn't get sent, as the device was no longer known. Instead, we now send EDUs for deleted devices too if there's a poke for them.
This commit is contained in:
parent
129ffd7b88
commit
5797f5542b
@ -274,7 +274,7 @@ class Notifier(object):
|
|||||||
logger.exception("Error notifying application services of event")
|
logger.exception("Error notifying application services of event")
|
||||||
|
|
||||||
def on_new_event(self, stream_key, new_token, users=[], rooms=[]):
|
def on_new_event(self, stream_key, new_token, users=[], rooms=[]):
|
||||||
""" Used to inform listeners that something has happend event wise.
|
""" Used to inform listeners that something has happened event wise.
|
||||||
|
|
||||||
Will wake up all listeners for the given users and rooms.
|
Will wake up all listeners for the given users and rooms.
|
||||||
"""
|
"""
|
||||||
|
@ -239,6 +239,7 @@ class DeviceStore(SQLBaseStore):
|
|||||||
def update_remote_device_list_cache_entry(self, user_id, device_id, content,
|
def update_remote_device_list_cache_entry(self, user_id, device_id, content,
|
||||||
stream_id):
|
stream_id):
|
||||||
"""Updates a single user's device in the cache.
|
"""Updates a single user's device in the cache.
|
||||||
|
If the content is null, delete the device from the cache.
|
||||||
"""
|
"""
|
||||||
return self.runInteraction(
|
return self.runInteraction(
|
||||||
"update_remote_device_list_cache_entry",
|
"update_remote_device_list_cache_entry",
|
||||||
@ -248,17 +249,32 @@ class DeviceStore(SQLBaseStore):
|
|||||||
|
|
||||||
def _update_remote_device_list_cache_entry_txn(self, txn, user_id, device_id,
|
def _update_remote_device_list_cache_entry_txn(self, txn, user_id, device_id,
|
||||||
content, stream_id):
|
content, stream_id):
|
||||||
self._simple_upsert_txn(
|
if content is None:
|
||||||
txn,
|
self._simple_delete_txn(
|
||||||
table="device_lists_remote_cache",
|
txn,
|
||||||
keyvalues={
|
table="device_lists_remote_cache",
|
||||||
"user_id": user_id,
|
keyvalues={
|
||||||
"device_id": device_id,
|
"user_id": user_id,
|
||||||
},
|
"device_id": device_id,
|
||||||
values={
|
},
|
||||||
"content": json.dumps(content),
|
)
|
||||||
}
|
|
||||||
)
|
# Do we need this?
|
||||||
|
txn.call_after(
|
||||||
|
self.device_id_exists_cache.invalidate, (user_id, device_id,)
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
self._simple_upsert_txn(
|
||||||
|
txn,
|
||||||
|
table="device_lists_remote_cache",
|
||||||
|
keyvalues={
|
||||||
|
"user_id": user_id,
|
||||||
|
"device_id": device_id,
|
||||||
|
},
|
||||||
|
values={
|
||||||
|
"content": json.dumps(content),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
txn.call_after(self._get_cached_user_device.invalidate, (user_id, device_id,))
|
txn.call_after(self._get_cached_user_device.invalidate, (user_id, device_id,))
|
||||||
txn.call_after(self._get_cached_devices_for_user.invalidate, (user_id,))
|
txn.call_after(self._get_cached_devices_for_user.invalidate, (user_id,))
|
||||||
@ -366,7 +382,7 @@ class DeviceStore(SQLBaseStore):
|
|||||||
now_stream_id = max(stream_id for stream_id in itervalues(query_map))
|
now_stream_id = max(stream_id for stream_id in itervalues(query_map))
|
||||||
|
|
||||||
devices = self._get_e2e_device_keys_txn(
|
devices = self._get_e2e_device_keys_txn(
|
||||||
txn, query_map.keys(), include_all_devices=True
|
txn, query_map.keys(), include_all_devices=True, include_deleted_devices=True
|
||||||
)
|
)
|
||||||
|
|
||||||
prev_sent_id_sql = """
|
prev_sent_id_sql = """
|
||||||
|
@ -64,12 +64,17 @@ class EndToEndKeyStore(SQLBaseStore):
|
|||||||
)
|
)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def get_e2e_device_keys(self, query_list, include_all_devices=False):
|
def get_e2e_device_keys(
|
||||||
|
self, query_list, include_all_devices=False,
|
||||||
|
include_deleted_devices=False
|
||||||
|
):
|
||||||
"""Fetch a list of device keys.
|
"""Fetch a list of device keys.
|
||||||
Args:
|
Args:
|
||||||
query_list(list): List of pairs of user_ids and device_ids.
|
query_list(list): List of pairs of user_ids and device_ids.
|
||||||
include_all_devices (bool): whether to include entries for devices
|
include_all_devices (bool): whether to include entries for devices
|
||||||
that don't have device keys
|
that don't have device keys
|
||||||
|
include_deleted_devices (bool): whether to include null entries for
|
||||||
|
devices which no longer exist (but were in the query_list)
|
||||||
Returns:
|
Returns:
|
||||||
Dict mapping from user-id to dict mapping from device_id to
|
Dict mapping from user-id to dict mapping from device_id to
|
||||||
dict containing "key_json", "device_display_name".
|
dict containing "key_json", "device_display_name".
|
||||||
@ -82,10 +87,19 @@ class EndToEndKeyStore(SQLBaseStore):
|
|||||||
query_list, include_all_devices,
|
query_list, include_all_devices,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
if include_deleted_devices:
|
||||||
|
deleted_devices = set(query_list)
|
||||||
|
|
||||||
for user_id, device_keys in iteritems(results):
|
for user_id, device_keys in iteritems(results):
|
||||||
for device_id, device_info in iteritems(device_keys):
|
for device_id, device_info in iteritems(device_keys):
|
||||||
|
if include_deleted_devices:
|
||||||
|
deleted_devices -= (user_id, device_id)
|
||||||
device_info["keys"] = json.loads(device_info.pop("key_json"))
|
device_info["keys"] = json.loads(device_info.pop("key_json"))
|
||||||
|
|
||||||
|
if include_deleted_devices:
|
||||||
|
for user_id, device_id in deleted_devices:
|
||||||
|
results.setdefault(user_id, {})[device_id] = None
|
||||||
|
|
||||||
defer.returnValue(results)
|
defer.returnValue(results)
|
||||||
|
|
||||||
def _get_e2e_device_keys_txn(self, txn, query_list, include_all_devices):
|
def _get_e2e_device_keys_txn(self, txn, query_list, include_all_devices):
|
||||||
|
Loading…
Reference in New Issue
Block a user