Mirror of https://git.anonymousland.org/anonymousland/synapse.git (synced 2025-11-11 17:46:38 -05:00)
Enable mypy checking for unreachable code and fix instances. (#8432)
commit 4ff0201e62
parent c1ef579b63

17 changed files with 38 additions and 53 deletions
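For context: mypy's unreachable-code analysis is driven by its warn_unreachable option (--warn-unreachable on the command line), which this commit presumably switches on in the project's mypy configuration; that configuration change is not part of the hunks shown below. The snippet here is a minimal illustrative sketch, not code from the Synapse tree, of the kind of statement the check reports:

def describe(count: int) -> str:
    if count is None:
        # `count` is annotated as a plain int, so this branch can never be
        # taken; with warn_unreachable enabled, mypy reports
        # "Statement is unreachable" for the line below.
        return "missing"
    return str(count)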
@@ -21,8 +21,8 @@ from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.storage._base import SQLBaseStore
 from synapse.storage.database import DatabasePool
 from synapse.storage.databases.main.cache import CacheInvalidationWorkerStore
-from synapse.storage.databases.main.events import encode_json
 from synapse.storage.databases.main.events_worker import EventsWorkerStore
+from synapse.util.frozenutils import frozendict_json_encoder
 
 if TYPE_CHECKING:
     from synapse.server import HomeServer
@@ -105,7 +105,7 @@ class CensorEventsStore(EventsWorkerStore, CacheInvalidationWorkerStore, SQLBase
                 and original_event.internal_metadata.is_redacted()
             ):
                 # Redaction was allowed
-                pruned_json = encode_json(
+                pruned_json = frozendict_json_encoder.encode(
                     prune_event_dict(
                         original_event.room_version, original_event.get_dict()
                     )
@@ -171,7 +171,7 @@ class CensorEventsStore(EventsWorkerStore, CacheInvalidationWorkerStore, SQLBase
             return
 
         # Prune the event's dict then convert it to JSON.
-        pruned_json = encode_json(
+        pruned_json = frozendict_json_encoder.encode(
             prune_event_dict(event.room_version, event.get_dict())
         )
 
@@ -52,16 +52,6 @@ event_counter = Counter(
 )
 
 
-def encode_json(json_object):
-    """
-    Encode a Python object as JSON and return it in a Unicode string.
-    """
-    out = frozendict_json_encoder.encode(json_object)
-    if isinstance(out, bytes):
-        out = out.decode("utf8")
-    return out
-
-
 _EventCacheEntry = namedtuple("_EventCacheEntry", ("event", "redacted_event"))
 
 
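The removed helper is itself an instance of what the new check catches: json.JSONEncoder.encode() (and therefore frozendict_json_encoder.encode()) is typed as returning str, so the isinstance(out, bytes) fallback can never run, and call sites can invoke the encoder directly. A rough sketch of the before/after shape, using a plain json.JSONEncoder as a stand-in for frozendict_json_encoder and hypothetical function names:

import json

# Stand-in for synapse.util.frozenutils.frozendict_json_encoder, which is not
# defined in these hunks; it exposes the same encode() -> str signature.
_encoder = json.JSONEncoder()


def encode_json_old(json_object: dict) -> str:
    out = _encoder.encode(json_object)
    if isinstance(out, bytes):
        # encode() is annotated as returning str, so mypy's warn_unreachable
        # check reports this statement as unreachable.
        out = out.decode("utf8")
    return out


def encode_json_new(json_object: dict) -> str:
    # Equivalent without the dead branch: call the encoder directly, as the
    # updated call sites in this commit now do.
    return _encoder.encode(json_object)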
@@ -743,7 +733,9 @@ class PersistEventsStore:
                 logger.exception("")
                 raise
 
-            metadata_json = encode_json(event.internal_metadata.get_dict())
+            metadata_json = frozendict_json_encoder.encode(
+                event.internal_metadata.get_dict()
+            )
 
             sql = "UPDATE event_json SET internal_metadata = ? WHERE event_id = ?"
             txn.execute(sql, (metadata_json, event.event_id))
@@ -797,10 +789,10 @@ class PersistEventsStore:
                 {
                     "event_id": event.event_id,
                     "room_id": event.room_id,
-                    "internal_metadata": encode_json(
+                    "internal_metadata": frozendict_json_encoder.encode(
                         event.internal_metadata.get_dict()
                     ),
-                    "json": encode_json(event_dict(event)),
+                    "json": frozendict_json_encoder.encode(event_dict(event)),
                     "format_version": event.format_version,
                 }
                 for event, _ in events_and_contexts
@@ -546,7 +546,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore, metaclass=abc.ABCMeta):
 
     async def get_room_event_before_stream_ordering(
         self, room_id: str, stream_ordering: int
-    ) -> Tuple[int, int, str]:
+    ) -> Optional[Tuple[int, int, str]]:
         """Gets details of the first event in a room at or before a stream ordering
 
         Args:
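The annotation change reflects that the method can also return None when no matching event exists (the None-returning path itself is outside this hunk). Under the old non-Optional signature, the new check would flag a caller's None handling as dead code. A hedged sketch of the caller-side pattern, with hypothetical names:

from typing import Optional


async def event_id_before(store, room_id: str, stream_ordering: int) -> Optional[str]:
    # `store` is assumed to be a StreamWorkerStore. With the corrected
    # Optional[Tuple[int, int, str]] return type, mypy accepts the None check
    # below; under the old annotation, warn_unreachable would have reported
    # the body of the `if` as unreachable.
    row = await store.get_room_event_before_stream_ordering(room_id, stream_ordering)
    if row is None:
        return None
    _stream_ordering, _topological_ordering, event_id = row
    return event_id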
@@ -421,7 +421,7 @@ class MultiWriterIdGenerator:
             self._unfinished_ids.discard(next_id)
             self._finished_ids.add(next_id)
 
-            new_cur = None
+            new_cur = None  # type: Optional[int]
 
             if self._unfinished_ids:
                 # If there are unfinished IDs then the new position will be the
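The type comment pins new_cur to Optional[int] up front; without it, mypy's inference from the bare None initializer is too narrow for the integer bookkeeping that follows, which is presumably what the new check tripped over. A minimal sketch of the annotated pattern (the surrounding loop is illustrative, not Synapse code):

from typing import Optional

finished_ids = {3, 7, 5}

new_cur = None  # type: Optional[int]
# The comment-style annotation (rather than `new_cur: Optional[int] = None`)
# mirrors the style used in the diff above, presumably for compatibility with
# older Python versions supported at the time.
for candidate in finished_ids:
    if new_cur is None or candidate > new_cur:
        new_cur = candidate

if new_cur is not None:
    print("new position:", new_cur)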