Remove unnecessary parentheses around tuples returned from methods (#10889)
parent 26f2bfedbf
commit aa2c027792
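The change is purely syntactic: in Python the comma, not the parentheses, is what builds a tuple, so return (a, b) and return a, b produce exactly the same value (one hunk below similarly drops redundant parentheses around a plain string literal in __repr__). A minimal sketch of the equivalence, using illustrative function names that are not taken from the Synapse codebase:

from typing import Tuple


def with_parens() -> Tuple[int, str]:
    # Parenthesised form, the style this commit removes.
    return (200, "ok")


def without_parens() -> Tuple[int, str]:
    # Bare form: the comma alone creates the tuple.
    return 200, "ok"


# Both spellings evaluate to the same tuple.
assert with_parens() == without_parens() == (200, "ok")

Parentheses around a returned tuple remain useful only when the expression spans multiple lines or sits somewhere a bare comma would be ambiguous, such as inside a function call.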
changelog.d/10889.misc (new file, 1 line)
@@ -0,0 +1 @@
+Clean up some unnecessary parentheses in places around the codebase.
@@ -1447,7 +1447,7 @@ def read_gc_thresholds(thresholds):
         return None
     try:
         assert len(thresholds) == 3
-        return (int(thresholds[0]), int(thresholds[1]), int(thresholds[2]))
+        return int(thresholds[0]), int(thresholds[1]), int(thresholds[2])
     except Exception:
         raise ConfigError(
             "Value of `gc_threshold` must be a list of three integers if set"
@@ -560,7 +560,7 @@ class PerDestinationQueue:

         assert len(edus) <= limit, "get_device_updates_by_remote returned too many EDUs"

-        return (edus, now_stream_id)
+        return edus, now_stream_id

     async def _get_to_device_message_edus(self, limit: int) -> Tuple[List[Edu], int]:
         last_device_stream_id = self._last_device_stream_id
@@ -593,7 +593,7 @@ class PerDestinationQueue:
             stream_id,
         )

-        return (edus, stream_id)
+        return edus, stream_id

     def _start_catching_up(self) -> None:
         """
@@ -1369,7 +1369,7 @@ class FederationHandler(BaseHandler):
             builder=builder
         )
         EventValidator().validate_new(event, self.config)
-        return (event, context)
+        return event, context

     async def _check_signature(self, event: EventBase, context: EventContext) -> None:
         """
@@ -666,7 +666,7 @@ class EventCreationHandler:

         self.validator.validate_new(event, self.config)

-        return (event, context)
+        return event, context

     async def _is_exempt_from_privacy_policy(
         self, builder: EventBuilder, requester: Requester
@@ -1004,7 +1004,7 @@ class EventCreationHandler:

         logger.debug("Created event %s", event.event_id)

-        return (event, context)
+        return event, context

     @measure_func("handle_new_client_event")
     async def handle_new_client_event(
@@ -238,7 +238,7 @@ class ReceiptEventSource(EventSource[int, JsonDict]):
         if self.config.experimental.msc2285_enabled:
             events = ReceiptEventSource.filter_out_hidden(events, user.to_string())

-        return (events, to_key)
+        return events, to_key

     async def get_new_events_as(
         self, from_key: int, service: ApplicationService
@@ -270,7 +270,7 @@ class ReceiptEventSource(EventSource[int, JsonDict]):

             events.append(event)

-        return (events, to_key)
+        return events, to_key

     def get_current_key(self, direction: str = "f") -> int:
         return self.store.get_max_receipt_stream_id()
@@ -1235,7 +1235,7 @@ class RoomEventSource(EventSource[RoomStreamToken, EventBase]):
         else:
             end_key = to_key

-        return (events, end_key)
+        return events, end_key

     def get_current_key(self) -> RoomStreamToken:
         return self.store.get_room_max_token()
@@ -1179,4 +1179,4 @@ def _child_events_comparison_key(
         order = None

     # Items without an order come last.
-    return (order is None, order, child.origin_server_ts, child.room_id)
+    return order is None, order, child.origin_server_ts, child.room_id
@@ -483,7 +483,7 @@ class TypingNotificationEventSource(EventSource[int, JsonDict]):

             events.append(self._make_event_for(room_id))

-        return (events, handler._latest_room_serial)
+        return events, handler._latest_room_serial

     async def get_new_events(
         self,
@@ -507,7 +507,7 @@ class TypingNotificationEventSource(EventSource[int, JsonDict]):

             events.append(self._make_event_for(room_id))

-        return (events, handler._latest_room_serial)
+        return events, handler._latest_room_serial

     def get_current_key(self) -> int:
         return self.get_typing_handler()._latest_room_serial
@@ -1186,7 +1186,7 @@ class MatrixFederationHttpClient:
             request.method,
             request.uri.decode("ascii"),
         )
-        return (length, headers)
+        return length, headers


 def _flatten_response_never_received(e):
@@ -213,7 +213,7 @@ class RoomRestServlet(RestServlet):
         members = await self.store.get_users_in_room(room_id)
         ret["joined_local_devices"] = await self.store.count_devices_by_users(members)

-        return (200, ret)
+        return 200, ret

     async def on_DELETE(
         self, request: SynapseRequest, room_id: str
@@ -668,4 +668,4 @@ async def _delete_room(
     if purge:
         await pagination_handler.purge_room(room_id, force=force_purge)

-    return (200, ret)
+    return 200, ret
@@ -211,7 +211,7 @@ class DehydratedDeviceServlet(RestServlet):
         if dehydrated_device is not None:
             (device_id, device_data) = dehydrated_device
             result = {"device_id": device_id, "device_data": device_data}
-            return (200, result)
+            return 200, result
         else:
             raise errors.NotFoundError("No dehydrated device available")

@@ -293,7 +293,7 @@ class ClaimDehydratedDeviceServlet(RestServlet):
             submission["device_id"],
         )

-        return (200, result)
+        return 200, result


 def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
@@ -40,7 +40,7 @@ class PasswordPolicyServlet(RestServlet):

     def on_GET(self, request: Request) -> Tuple[int, JsonDict]:
         if not self.enabled or not self.policy:
-            return (200, {})
+            return 200, {}

         policy = {}

@@ -54,7 +54,7 @@ class PasswordPolicyServlet(RestServlet):
             if param in self.policy:
                 policy["m.%s" % param] = self.policy[param]

-        return (200, policy)
+        return 200, policy


 def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
@@ -324,7 +324,7 @@ class AccountDataWorkerStore(SQLBaseStore):
             user_id, int(stream_id)
         )
         if not changed:
-            return ({}, {})
+            return {}, {}

         return await self.db_pool.runInteraction(
             "get_updated_account_data_for_user", get_updated_account_data_for_user_txn
@@ -136,7 +136,7 @@ class DeviceInboxWorkerStore(SQLBaseStore):
             user_id, last_stream_id
         )
         if not has_changed:
-            return ([], current_stream_id)
+            return [], current_stream_id

         def get_new_messages_for_device_txn(txn):
             sql = (
@@ -240,11 +240,11 @@ class DeviceInboxWorkerStore(SQLBaseStore):
         )
         if not has_changed or last_stream_id == current_stream_id:
             log_kv({"message": "No new messages in stream"})
-            return ([], current_stream_id)
+            return [], current_stream_id

         if limit <= 0:
             # This can happen if we run out of room for EDUs in the transaction.
-            return ([], last_stream_id)
+            return [], last_stream_id

         @trace
         def get_new_messages_for_remote_destination_txn(txn):
@@ -1495,7 +1495,7 @@ class EventsWorkerStore(SQLBaseStore):
         if not res:
             raise SynapseError(404, "Could not find event %s" % (event_id,))

-        return (int(res["topological_ordering"]), int(res["stream_ordering"]))
+        return int(res["topological_ordering"]), int(res["stream_ordering"])

     async def get_next_event_to_expire(self) -> Optional[Tuple[str, int]]:
         """Retrieve the entry with the lowest expiry timestamp in the event_expiry
@@ -58,7 +58,7 @@ class StateDeltasStore(SQLBaseStore):
             # if the CSDs haven't changed between prev_stream_id and now, we
             # know for certain that they haven't changed between prev_stream_id and
             # max_stream_id.
-            return (max_stream_id, [])
+            return max_stream_id, []

         def get_current_state_deltas_txn(txn):
             # First we calculate the max stream id that will give us less than
@@ -624,7 +624,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore, metaclass=abc.ABCMeta):

         self._set_before_and_after(events, rows)

-        return (events, token)
+        return events, token

     async def get_recent_event_ids_for_room(
         self, room_id: str, limit: int, end_token: RoomStreamToken
@@ -1242,7 +1242,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore, metaclass=abc.ABCMeta):

         self._set_before_and_after(events, rows)

-        return (events, token)
+        return events, token

     @cached()
     async def get_id_for_instance(self, instance_name: str) -> int:
@@ -81,7 +81,7 @@ class PaginationConfig:
             raise SynapseError(400, "Invalid request.")

     def __repr__(self) -> str:
-        return ("PaginationConfig(from_tok=%r, to_tok=%r, direction=%r, limit=%r)") % (
+        return "PaginationConfig(from_tok=%r, to_tok=%r, direction=%r, limit=%r)" % (
             self.from_token,
             self.to_token,
             self.direction,
@@ -556,7 +556,7 @@ class RoomStreamToken:
                 "Cannot call `RoomStreamToken.as_historical_tuple` on live token"
             )

-        return (self.topological, self.stream)
+        return self.topological, self.stream

     def get_stream_pos_for_instance(self, instance_name: str) -> int:
         """Get the stream position that the given writer was at at this token.
@@ -766,7 +766,7 @@ def get_verify_key_from_cross_signing_key(key_info):
         raise ValueError("Invalid key")
     # and return that one key
     for key_id, key_data in keys.items():
-        return (key_id, decode_verify_key_bytes(key_id, decode_base64(key_data)))
+        return key_id, decode_verify_key_bytes(key_id, decode_base64(key_data))


 @attr.s(auto_attribs=True, frozen=True, slots=True)
@@ -106,7 +106,7 @@ class StateGroupStore:
         }

     async def get_state_group_delta(self, name):
-        return (None, None)
+        return None, None

     def register_events(self, events):
         for e in events:
@@ -434,7 +434,7 @@ class MockHttpResource:
                 )
                 return code, response
             except CodeMessageException as e:
-                return (e.code, cs_error(e.msg, code=e.errcode))
+                return e.code, cs_error(e.msg, code=e.errcode)

         raise KeyError("No event can handle %s" % path)
