Mirror of https://git.anonymousland.org/anonymousland/synapse.git (synced 2025-05-02 22:14:55 -04:00)
Replace returnValue with return (#5736)

parent 18a466b84e
commit 4806651744

177 changed files with 1359 additions and 1513 deletions
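
The change itself is mechanical: under Twisted's @defer.inlineCallbacks, Python 2 generators could not return a value, so results had to be handed back via defer.returnValue(); on Python 3 a plain return statement does the same job. A minimal sketch of the before/after pattern, where the store and get_devices_by_user names are illustrative stand-ins rather than the exact Synapse API:

from twisted.internet import defer

# Old style (Python 2 compatible): a generator cannot "return value",
# so Twisted provides defer.returnValue() to set the Deferred's result.
@defer.inlineCallbacks
def count_devices_old(store, user_id):
    devices = yield store.get_devices_by_user(user_id)
    defer.returnValue(len(devices))

# New style (what this commit switches to): the generator simply
# returns the value, and inlineCallbacks uses it as the result.
@defer.inlineCallbacks
def count_devices_new(store, user_id):
    devices = yield store.get_devices_by_user(user_id)
    return len(devices)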
@@ -71,7 +71,7 @@ class DeviceWorkerStore(SQLBaseStore):
             desc="get_devices_by_user",
         )
 
-        defer.returnValue({d["device_id"]: d for d in devices})
+        return {d["device_id"]: d for d in devices}
 
     @defer.inlineCallbacks
     def get_devices_by_remote(self, destination, from_stream_id, limit):
@@ -88,7 +88,7 @@ class DeviceWorkerStore(SQLBaseStore):
             destination, int(from_stream_id)
         )
         if not has_changed:
-            defer.returnValue((now_stream_id, []))
+            return (now_stream_id, [])
 
         # We retrieve n+1 devices from the list of outbound pokes where n is
         # our outbound device update limit. We then check if the very last
@@ -111,7 +111,7 @@ class DeviceWorkerStore(SQLBaseStore):
 
         # Return an empty list if there are no updates
         if not updates:
-            defer.returnValue((now_stream_id, []))
+            return (now_stream_id, [])
 
         # if we have exceeded the limit, we need to exclude any results with the
         # same stream_id as the last row.
@@ -147,13 +147,13 @@ class DeviceWorkerStore(SQLBaseStore):
         # skip that stream_id and return an empty list, and continue with the next
         # stream_id next time.
         if not query_map:
-            defer.returnValue((stream_id_cutoff, []))
+            return (stream_id_cutoff, [])
 
         results = yield self._get_device_update_edus_by_remote(
             destination, from_stream_id, query_map
         )
 
-        defer.returnValue((now_stream_id, results))
+        return (now_stream_id, results)
 
     def _get_devices_by_remote_txn(
         self, txn, destination, from_stream_id, now_stream_id, limit
@@ -232,7 +232,7 @@ class DeviceWorkerStore(SQLBaseStore):
 
             results.append(result)
 
-        defer.returnValue(results)
+        return results
 
     def _get_last_device_update_for_remote_user(
         self, destination, user_id, from_stream_id
@@ -330,7 +330,7 @@ class DeviceWorkerStore(SQLBaseStore):
             else:
                 results[user_id] = yield self._get_cached_devices_for_user(user_id)
 
-        defer.returnValue((user_ids_not_in_cache, results))
+        return (user_ids_not_in_cache, results)
 
     @cachedInlineCallbacks(num_args=2, tree=True)
     def _get_cached_user_device(self, user_id, device_id):
@@ -340,7 +340,7 @@ class DeviceWorkerStore(SQLBaseStore):
             retcol="content",
             desc="_get_cached_user_device",
         )
-        defer.returnValue(db_to_json(content))
+        return db_to_json(content)
 
     @cachedInlineCallbacks()
     def _get_cached_devices_for_user(self, user_id):
@@ -350,9 +350,9 @@ class DeviceWorkerStore(SQLBaseStore):
             retcols=("device_id", "content"),
             desc="_get_cached_devices_for_user",
         )
-        defer.returnValue(
-            {device["device_id"]: db_to_json(device["content"]) for device in devices}
-        )
+        return {
+            device["device_id"]: db_to_json(device["content"]) for device in devices
+        }
 
     def get_devices_with_keys_by_user(self, user_id):
         """Get all devices (with any device keys) for a user
@@ -482,7 +482,7 @@ class DeviceWorkerStore(SQLBaseStore):
         results = {user_id: None for user_id in user_ids}
         results.update({row["user_id"]: row["stream_id"] for row in rows})
 
-        defer.returnValue(results)
+        return results
 
 
 class DeviceStore(DeviceWorkerStore, BackgroundUpdateStore):
@@ -543,7 +543,7 @@ class DeviceStore(DeviceWorkerStore, BackgroundUpdateStore):
         """
         key = (user_id, device_id)
         if self.device_id_exists_cache.get(key, None):
-            defer.returnValue(False)
+            return False
 
         try:
            inserted = yield self._simple_insert(
@@ -557,7 +557,7 @@ class DeviceStore(DeviceWorkerStore, BackgroundUpdateStore):
                 or_ignore=True,
             )
             self.device_id_exists_cache.prefill(key, True)
-            defer.returnValue(inserted)
+            return inserted
         except Exception as e:
             logger.error(
                 "store_device with device_id=%s(%r) user_id=%s(%r)"
@@ -780,7 +780,7 @@ class DeviceStore(DeviceWorkerStore, BackgroundUpdateStore):
                 hosts,
                 stream_id,
             )
-        defer.returnValue(stream_id)
+        return stream_id
 
     def _add_device_change_txn(self, txn, user_id, device_ids, hosts, stream_id):
         now = self._clock.time_msec()
@@ -889,4 +889,4 @@ class DeviceStore(DeviceWorkerStore, BackgroundUpdateStore):
 
         yield self.runWithConnection(f)
         yield self._end_background_update(DROP_DEVICE_LIST_STREAMS_NON_UNIQUE_INDEXES)
-        defer.returnValue(1)
+        return 1
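
Why the plain return works: in Python 3, return x inside a generator attaches x to the StopIteration the generator raises, and the inlineCallbacks trampoline reads that value back off StopIteration.value; defer.returnValue() achieved the same effect by raising a special exception. A small sketch of that underlying mechanism, using a plain generator with no Twisted involved:

def gen():
    yield "intermediate"
    return "final result"  # stored on StopIteration.value in Python 3

g = gen()
print(next(g))          # "intermediate"
try:
    next(g)
except StopIteration as e:
    print(e.value)      # "final result" - this is what inlineCallbacks picks up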