# -*- coding: utf-8 -*-
# Copyright 2017 Vector Creations Ltd
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from collections import namedtuple
from typing import Any, Awaitable, Callable, List, Optional, Tuple

import attr

from synapse.replication.http.streams import ReplicationGetStreamUpdates
from synapse.types import JsonDict

logger = logging.getLogger(__name__)


MAX_EVENTS_BEHIND = 500000


# Some type aliases to make things a bit easier.

# A stream position token
Token = int

# A pair of (position in stream, args) used to create an instance of `ROW_TYPE`.
StreamRow = Tuple[Token, tuple]
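
# For illustration only: a single update for a hypothetical stream whose
# ROW_TYPE takes (room_id, user_ids) would travel as a StreamRow like:
#
#     (510, ("!room:example.com", ["@alice:example.com"]))
#
# where 510 is the stream position and the inner tuple is splatted into
# ROW_TYPE by `Stream.parse_row`.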


class Stream(object):
    """Base class for the streams.

    Provides a `get_updates()` function that returns new updates since the last
    time it was called.
    """

    NAME = None  # type: str  # The name of the stream
    # The type of the row. Used by the default impl of parse_row.
    ROW_TYPE = None  # type: Any

    @classmethod
    def parse_row(cls, row):
        """Parse a row received over replication.

        By default, assumes that the row data is an array object and passes its
        contents to the constructor of the ROW_TYPE for this stream.

        Args:
            row: row data from the incoming RDATA command, after json decoding

        Returns:
            ROW_TYPE object for this stream
        """
        return cls.ROW_TYPE(*row)
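
    # A rough usage sketch (illustrative values only): for the typing stream
    # defined below, an incoming RDATA row could be parsed as
    #
    #     row = TypingStream.parse_row(["!room:example.com", ["@alice:example.com"]])
    #     assert row.room_id == "!room:example.com"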

    def __init__(self, hs):
        # The token from which we last asked for updates
        self.last_token = self.current_token()

    def discard_updates_and_advance(self):
        """Called when the stream should advance but the updates would be discarded,
        e.g. when there are no currently connected workers.
        """
        self.last_token = self.current_token()

    async def get_updates(self) -> Tuple[List[Tuple[Token, JsonDict]], Token, bool]:
        """Gets all updates since the last time this function was called (or
        since the stream was constructed if it hadn't been called before).

        Returns:
            A triplet `(updates, new_last_token, limited)`, where `updates` is
            a list of `(token, row)` entries, `new_last_token` is the new
            position in the stream, and `limited` is whether there are more
            updates to fetch.
        """
        current_token = self.current_token()
        updates, current_token, limited = await self.get_updates_since(
            self.last_token, current_token
        )
        self.last_token = current_token

        return updates, current_token, limited
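
    # A rough polling sketch (hypothetical caller and `handle_row` helper, for
    # illustration only; the real consumer is the replication command handler):
    #
    #     updates, token, limited = await stream.get_updates()
    #     for position, row in updates:
    #         handle_row(stream.NAME, position, stream.parse_row(row))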

    async def get_updates_since(
        self, from_token: Token, upto_token: Token, limit: int = 100
    ) -> Tuple[List[Tuple[Token, JsonDict]], Token, bool]:
        """Like get_updates except allows specifying from when we should
        stream updates.

        Returns:
            A triplet `(updates, new_last_token, limited)`, where `updates` is
            a list of `(token, row)` entries, `new_last_token` is the new
            position in the stream, and `limited` is whether there are more
            updates to fetch.
        """
        from_token = int(from_token)

        if from_token == upto_token:
            return [], upto_token, False

        updates, upto_token, limited = await self.update_function(
            from_token, upto_token, limit=limit,
        )
        return updates, upto_token, limited

    def current_token(self):
        """Gets the current token of the underlying streams. Should be
        provided by the subclasses.

        Returns:
            int
        """
        raise NotImplementedError()

    def update_function(self, from_token, current_token, limit):
        """Get updates between from_token and current_token.

        Returns:
            An awaitable that resolves to a triplet `(updates, new_last_token,
            limited)`: the first entry in each update tuple is the token for
            that update, and the rest of the tuple gets used to construct
            a ``ROW_TYPE`` instance.
        """
        raise NotImplementedError()


def db_query_to_update_function(
    query_function: Callable[[Token, Token, int], Awaitable[List[tuple]]]
) -> Callable[[Token, Token, int], Awaitable[Tuple[List[StreamRow], Token, bool]]]:
    """Wraps a db query function which returns a list of rows to make it
    suitable for use as an `update_function` for the Stream class.
    """

    async def update_function(from_token, upto_token, limit):
        rows = await query_function(from_token, upto_token, limit)
        updates = [(row[0], row[1:]) for row in rows]
        limited = False
        if len(updates) == limit:
            # We hit the limit, so there may be more updates to fetch: clamp
            # the upper bound to the last row we actually returned so that
            # callers resume from there.
            upto_token = rows[-1][0]
            limited = True

        return updates, upto_token, limited

    return update_function
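

# For illustration only: wrapping a hypothetical query function. Any coroutine
# returning `(stream_id, *row_args)` tuples ordered by stream_id fits the shape
# expected by `db_query_to_update_function`; `get_widget_rows` is not a real
# Synapse method.
#
#     async def get_widget_rows(from_token, upto_token, limit):
#         return [(from_token + 1, "widget_id", {"some": "data"})]
#
#     update_function = db_query_to_update_function(get_widget_rows)
#     updates, new_token, limited = await update_function(5, 10, 100)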


def make_http_update_function(
    hs, stream_name: str
) -> Callable[[Token, Token, int], Awaitable[Tuple[List[StreamRow], Token, bool]]]:
    """Makes a suitable function for use as an `update_function` that queries
    the master process for updates.
    """

    client = ReplicationGetStreamUpdates.make_client(hs)

    async def update_function(
        from_token: int, upto_token: int, limit: int
    ) -> Tuple[List[Tuple[int, tuple]], int, bool]:
        result = await client(
            stream_name=stream_name,
            from_token=from_token,
            upto_token=upto_token,
            limit=limit,
        )
        return result["updates"], result["upto_token"], result["limited"]

    return update_function
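

# Worker processes use this for streams whose source data lives only in the
# master process (see PresenceStream and TypingStream below):
#
#     self.update_function = make_http_update_function(hs, self.NAME)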


class BackfillStream(Stream):
    """We fetched some old events and either we had never seen that event before
    or it went from being an outlier to not.
    """

    BackfillStreamRow = namedtuple(
        "BackfillStreamRow",
        (
            "event_id",  # str
            "room_id",  # str
            "type",  # str
            "state_key",  # str, optional
            "redacts",  # str, optional
            "relates_to",  # str, optional
        ),
    )

    NAME = "backfill"
    ROW_TYPE = BackfillStreamRow

    def __init__(self, hs):
        store = hs.get_datastore()
        self.current_token = store.get_current_backfill_token  # type: ignore
        self.update_function = db_query_to_update_function(store.get_all_new_backfill_event_rows)  # type: ignore

        super(BackfillStream, self).__init__(hs)


class PresenceStream(Stream):
    PresenceStreamRow = namedtuple(
        "PresenceStreamRow",
        (
            "user_id",  # str
            "state",  # str
            "last_active_ts",  # int
            "last_federation_update_ts",  # int
            "last_user_sync_ts",  # int
            "status_msg",  # str
            "currently_active",  # bool
        ),
    )

    NAME = "presence"
    ROW_TYPE = PresenceStreamRow

    def __init__(self, hs):
        store = hs.get_datastore()
        presence_handler = hs.get_presence_handler()

        self._is_worker = hs.config.worker_app is not None

        self.current_token = store.get_current_presence_token  # type: ignore

        if not self._is_worker:
            self.update_function = db_query_to_update_function(presence_handler.get_all_presence_updates)  # type: ignore
        else:
            # Query master process
            self.update_function = make_http_update_function(hs, self.NAME)  # type: ignore

        super(PresenceStream, self).__init__(hs)


class TypingStream(Stream):
    TypingStreamRow = namedtuple(
        "TypingStreamRow",
        (
            "room_id",  # str
            "user_ids",  # list(str)
        ),
    )

    NAME = "typing"
    ROW_TYPE = TypingStreamRow

    def __init__(self, hs):
        typing_handler = hs.get_typing_handler()

        self.current_token = typing_handler.get_current_token  # type: ignore

        if hs.config.worker_app is None:
            self.update_function = db_query_to_update_function(typing_handler.get_all_typing_updates)  # type: ignore
        else:
            # Query master process
            self.update_function = make_http_update_function(hs, self.NAME)  # type: ignore

        super(TypingStream, self).__init__(hs)


class ReceiptsStream(Stream):
    ReceiptsStreamRow = namedtuple(
        "ReceiptsStreamRow",
        (
            "room_id",  # str
            "receipt_type",  # str
            "user_id",  # str
            "event_id",  # str
            "data",  # dict
        ),
    )

    NAME = "receipts"
    ROW_TYPE = ReceiptsStreamRow

    def __init__(self, hs):
        store = hs.get_datastore()

        self.current_token = store.get_max_receipt_stream_id  # type: ignore
        self.update_function = db_query_to_update_function(store.get_all_updated_receipts)  # type: ignore

        super(ReceiptsStream, self).__init__(hs)


class PushRulesStream(Stream):
    """A user has changed their push rules
    """

    PushRulesStreamRow = namedtuple("PushRulesStreamRow", ("user_id",))  # str

    NAME = "push_rules"
    ROW_TYPE = PushRulesStreamRow

    def __init__(self, hs):
        self.store = hs.get_datastore()
        super(PushRulesStream, self).__init__(hs)

    def current_token(self):
        push_rules_token, _ = self.store.get_push_rules_stream_token()
        return push_rules_token

    async def update_function(self, from_token, to_token, limit):
        rows = await self.store.get_all_push_rule_updates(from_token, to_token, limit)

        limited = False
        if len(rows) == limit:
            # We hit the limit, so clamp the upper bound to the last row we
            # actually fetched and flag that there may be more to come.
            to_token = rows[-1][0]
            limited = True

        return [(row[0], (row[2],)) for row in rows], to_token, limited


class PushersStream(Stream):
    """A user has added/changed/removed a pusher
    """

    PushersStreamRow = namedtuple(
        "PushersStreamRow",
        (
            "user_id",  # str
            "app_id",  # str
            "pushkey",  # str
            "deleted",  # bool
        ),
    )

    NAME = "pushers"
    ROW_TYPE = PushersStreamRow

    def __init__(self, hs):
        store = hs.get_datastore()

        self.current_token = store.get_pushers_stream_token  # type: ignore
        self.update_function = db_query_to_update_function(store.get_all_updated_pushers_rows)  # type: ignore

        super(PushersStream, self).__init__(hs)


class CachesStream(Stream):
    """A cache was invalidated on the master and no other stream would invalidate
    the cache on the workers
    """

    @attr.s
    class CachesStreamRow:
        """Stream row to inform workers they should invalidate their cache.

        Attributes:
            cache_func: Name of the cached function.
            keys: The entry in the cache to invalidate. If None then will
                invalidate all.
            invalidation_ts: Timestamp of when the invalidation took place.
        """

        cache_func = attr.ib(type=str)
        keys = attr.ib(type=Optional[List[Any]])
        invalidation_ts = attr.ib(type=int)

    NAME = "caches"
    ROW_TYPE = CachesStreamRow

    def __init__(self, hs):
        store = hs.get_datastore()

        self.current_token = store.get_cache_stream_token  # type: ignore
        self.update_function = db_query_to_update_function(store.get_all_updated_caches)  # type: ignore

        super(CachesStream, self).__init__(hs)


class PublicRoomsStream(Stream):
    """The public rooms list changed
    """

    PublicRoomsStreamRow = namedtuple(
        "PublicRoomsStreamRow",
        (
            "room_id",  # str
            "visibility",  # str
            "appservice_id",  # str, optional
            "network_id",  # str, optional
        ),
    )

    NAME = "public_rooms"
    ROW_TYPE = PublicRoomsStreamRow

    def __init__(self, hs):
        store = hs.get_datastore()

        self.current_token = store.get_current_public_room_stream_id  # type: ignore
        self.update_function = db_query_to_update_function(store.get_all_new_public_rooms)  # type: ignore

        super(PublicRoomsStream, self).__init__(hs)


class DeviceListsStream(Stream):
    """Either a user has updated their devices or a remote server needs to be
    told about a device update.
    """

    @attr.s
    class DeviceListsStreamRow:
        entity = attr.ib(type=str)

    NAME = "device_lists"
    ROW_TYPE = DeviceListsStreamRow

    def __init__(self, hs):
        store = hs.get_datastore()

        self.current_token = store.get_device_stream_token  # type: ignore
        self.update_function = db_query_to_update_function(store.get_all_device_list_changes_for_remotes)  # type: ignore

        super(DeviceListsStream, self).__init__(hs)


class ToDeviceStream(Stream):
    """New to_device messages for a client
    """

    ToDeviceStreamRow = namedtuple("ToDeviceStreamRow", ("entity",))  # str

    NAME = "to_device"
    ROW_TYPE = ToDeviceStreamRow

    def __init__(self, hs):
        store = hs.get_datastore()

        self.current_token = store.get_to_device_stream_token  # type: ignore
        self.update_function = db_query_to_update_function(store.get_all_new_device_messages)  # type: ignore

        super(ToDeviceStream, self).__init__(hs)


class TagAccountDataStream(Stream):
    """Someone added/removed a tag for a room
    """

    TagAccountDataStreamRow = namedtuple(
        "TagAccountDataStreamRow",
        (
            "user_id",  # str
            "room_id",  # str
            "data",  # dict
        ),
    )

    NAME = "tag_account_data"
    ROW_TYPE = TagAccountDataStreamRow

    def __init__(self, hs):
        store = hs.get_datastore()

        self.current_token = store.get_max_account_data_stream_id  # type: ignore
        self.update_function = db_query_to_update_function(store.get_all_updated_tags)  # type: ignore

        super(TagAccountDataStream, self).__init__(hs)


class AccountDataStream(Stream):
    """Global or per room account data was changed
    """

    AccountDataStreamRow = namedtuple(
        "AccountDataStreamRow",
        (
            "user_id",  # str
            "room_id",  # str
            "data_type",  # str
        ),
    )

    NAME = "account_data"
    ROW_TYPE = AccountDataStreamRow

    def __init__(self, hs):
        self.store = hs.get_datastore()

        self.current_token = self.store.get_max_account_data_stream_id  # type: ignore
        self.update_function = db_query_to_update_function(self._update_function)  # type: ignore

        super(AccountDataStream, self).__init__(hs)

    async def _update_function(self, from_token, to_token, limit):
        global_results, room_results = await self.store.get_all_updated_account_data(
            from_token, from_token, to_token, limit
        )

        results = list(room_results)
        # Global account data rows have no room, so mark them with a room_id
        # of None to fit the (stream_id, user_id, room_id, data_type) shape.
        results.extend(
            (stream_id, user_id, None, account_data_type)
            for stream_id, user_id, account_data_type in global_results
        )

        return results


class GroupServerStream(Stream):
    GroupsStreamRow = namedtuple(
        "GroupsStreamRow",
        (
            "group_id",  # str
            "user_id",  # str
            "type",  # str
            "content",  # dict
        ),
    )

    NAME = "groups"
    ROW_TYPE = GroupsStreamRow

    def __init__(self, hs):
        store = hs.get_datastore()

        self.current_token = store.get_group_stream_token  # type: ignore
        self.update_function = db_query_to_update_function(store.get_all_groups_changes)  # type: ignore

        super(GroupServerStream, self).__init__(hs)


class UserSignatureStream(Stream):
    """A user has signed their own device with their user-signing key
    """

    UserSignatureStreamRow = namedtuple("UserSignatureStreamRow", ("user_id",))  # str

    NAME = "user_signature"
    ROW_TYPE = UserSignatureStreamRow

    def __init__(self, hs):
        store = hs.get_datastore()

        self.current_token = store.get_device_stream_token  # type: ignore
        self.update_function = db_query_to_update_function(store.get_all_user_signature_changes_for_remotes)  # type: ignore

        super(UserSignatureStream, self).__init__(hs)