Make token serializing/deserializing async (#8427)

The idea is that in future tokens will encode a mapping of instance to position. However, we don't want to include the full instance name in the string representation, so instead we'll have a mapping between instance name and an immutable integer ID in the DB that we can use instead. We'll then do the lookup when we serialize/deserialize the token (we could alternatively pass around an `Instance` type that includes both the name and ID, but that turns out to be a lot more invasive).
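As a rough illustration of why serialization has to become async: a token that carries per-instance positions can only be encoded compactly if instance names are swapped for their integer IDs, and that swap needs a database lookup. The sketch below is purely illustrative and assumes a hypothetical `ExampleStreamToken` class plus hypothetical store helpers `get_instance_id` / `get_instance_name`; it is not the API this commit adds.

```python
# Hypothetical sketch only: ExampleStreamToken, get_instance_id and
# get_instance_name are illustrative names, not part of this commit.
from dataclasses import dataclass, field
from typing import Dict


@dataclass(frozen=True)
class ExampleStreamToken:
    stream: int
    # Possible future field: per-writer positions keyed by instance name.
    instance_map: Dict[str, int] = field(default_factory=dict)

    async def to_string(self, store) -> str:
        # Serializing needs the DB: each instance name is replaced by its
        # immutable integer ID before being written into the string form.
        parts = [str(self.stream)]
        for name, pos in self.instance_map.items():
            instance_id = await store.get_instance_id(name)  # assumed helper
            parts.append("%d.%d" % (instance_id, pos))
        return "~".join(parts)

    @classmethod
    async def parse(cls, store, string: str) -> "ExampleStreamToken":
        # Deserializing does the reverse lookup, mapping integer IDs back
        # to instance names, which again requires hitting the DB.
        head, *rest = string.split("~")
        instance_map = {}
        for part in rest:
            instance_id, pos = part.split(".")
            name = await store.get_instance_name(int(instance_id))  # assumed helper
            instance_map[name] = int(pos)
        return cls(stream=int(head), instance_map=instance_map)
```

Callers that used to build the string form with `str(token)` therefore have to await `token.to_string(store)` instead, which is the mechanical change running through the test updates below.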
Erik Johnston 2020-09-30 20:29:19 +01:00 committed by GitHub
parent a0a1ba6973
commit 7941372ec8
17 changed files with 115 additions and 59 deletions


@@ -902,16 +902,18 @@ class RoomMessageListTestCase(RoomBase):
         # Send a first message in the room, which will be removed by the purge.
         first_event_id = self.helper.send(self.room_id, "message 1")["event_id"]
-        first_token = str(
-            self.get_success(store.get_topological_token_for_event(first_event_id))
+        first_token = self.get_success(
+            store.get_topological_token_for_event(first_event_id)
         )
+        first_token_str = self.get_success(first_token.to_string(store))
         # Send a second message in the room, which won't be removed, and which we'll
         # use as the marker to purge events before.
         second_event_id = self.helper.send(self.room_id, "message 2")["event_id"]
-        second_token = str(
-            self.get_success(store.get_topological_token_for_event(second_event_id))
+        second_token = self.get_success(
+            store.get_topological_token_for_event(second_event_id)
         )
+        second_token_str = self.get_success(second_token.to_string(store))
         # Send a third event in the room to ensure we don't fall under any edge case
         # due to our marker being the latest forward extremity in the room.
@@ -921,7 +923,11 @@ class RoomMessageListTestCase(RoomBase):
         request, channel = self.make_request(
             "GET",
             "/rooms/%s/messages?access_token=x&from=%s&dir=b&filter=%s"
-            % (self.room_id, second_token, json.dumps({"types": [EventTypes.Message]})),
+            % (
+                self.room_id,
+                second_token_str,
+                json.dumps({"types": [EventTypes.Message]}),
+            ),
         )
         self.render(request)
         self.assertEqual(channel.code, 200, channel.json_body)
@@ -936,7 +942,7 @@ class RoomMessageListTestCase(RoomBase):
             pagination_handler._purge_history(
                 purge_id=purge_id,
                 room_id=self.room_id,
-                token=second_token,
+                token=second_token_str,
                 delete_local_events=True,
             )
         )
@@ -946,7 +952,11 @@ class RoomMessageListTestCase(RoomBase):
         request, channel = self.make_request(
             "GET",
             "/rooms/%s/messages?access_token=x&from=%s&dir=b&filter=%s"
-            % (self.room_id, second_token, json.dumps({"types": [EventTypes.Message]})),
+            % (
+                self.room_id,
+                second_token_str,
+                json.dumps({"types": [EventTypes.Message]}),
+            ),
         )
         self.render(request)
         self.assertEqual(channel.code, 200, channel.json_body)
@@ -960,7 +970,11 @@ class RoomMessageListTestCase(RoomBase):
         request, channel = self.make_request(
             "GET",
             "/rooms/%s/messages?access_token=x&from=%s&dir=b&filter=%s"
-            % (self.room_id, first_token, json.dumps({"types": [EventTypes.Message]})),
+            % (
+                self.room_id,
+                first_token_str,
+                json.dumps({"types": [EventTypes.Message]}),
+            ),
         )
         self.render(request)
         self.assertEqual(channel.code, 200, channel.json_body)


@@ -47,12 +47,15 @@ class PurgeTests(HomeserverTestCase):
         storage = self.hs.get_storage()
         # Get the topological token
-        event = str(
-            self.get_success(store.get_topological_token_for_event(last["event_id"]))
+        token = self.get_success(
+            store.get_topological_token_for_event(last["event_id"])
         )
+        token_str = self.get_success(token.to_string(self.hs.get_datastore()))
         # Purge everything before this topological token
-        self.get_success(storage.purge_events.purge_history(self.room_id, event, True))
+        self.get_success(
+            storage.purge_events.purge_history(self.room_id, token_str, True)
+        )
         # 1-3 should fail and last will succeed, meaning that 1-3 are deleted
         # and last is not.