Make EventStream rows have a type
... as a precursor to combining it with the CurrentStateDelta stream.
parent a65763a5d6
commit 1f6d6f918a
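
In practice, the change wraps each events-stream row in a short type identifier so that further row types can share the stream later. A minimal standalone sketch of the new payload shape (the JSON literal below is illustrative, not taken from a real server):

    import json

    # Old shape: the data part of an RDATA command was the event fields directly.
    # New shape: a two-element list of [type id, type-specific fields].
    raw = '["ev", ["$event:id", "!room:id", "m.room.message", null, null]]'

    typ, fields = json.loads(raw)
    assert typ == "ev"  # "ev" marks an ordinary new-event row
    event_id, room_id, event_type, state_key, redacts = fields
    print(event_id, room_id, event_type, state_key, redacts)
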
@@ -48,6 +48,7 @@ from synapse.replication.slave.storage.receipts import SlavedReceiptsStore
 from synapse.replication.slave.storage.registration import SlavedRegistrationStore
 from synapse.replication.slave.storage.room import RoomStore
 from synapse.replication.tcp.client import ReplicationClientHandler
+from synapse.replication.tcp.streams.events import EventsStreamEventRow
 from synapse.rest.client.v1 import events
 from synapse.rest.client.v1.initial_sync import InitialSyncRestServlet
 from synapse.rest.client.v1.room import RoomInitialSyncRestServlet

@@ -369,7 +370,9 @@ class SyncReplicationHandler(ReplicationClientHandler):
         # We shouldn't get multiple rows per token for events stream, so
         # we don't need to optimise this for multiple rows.
         for row in rows:
-            event = yield self.store.get_event(row.event_id)
+            if row.type != EventsStreamEventRow.TypeId:
+                continue
+            event = yield self.store.get_event(row.data.event_id)
             extra_users = ()
             if event.type == EventTypes.Member:
                 extra_users = (event.state_key,)
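
The handler change above follows the pattern every consumer of the stream now needs: skip row types it does not recognise and read the event fields from row.data. A self-contained sketch of that pattern, using stand-in namedtuples rather than the real Synapse classes:

    from collections import namedtuple

    # Stand-ins for EventsStreamRow / EventsStreamEventRow from the diff.
    StreamRow = namedtuple("StreamRow", ("type", "data"))
    EventData = namedtuple(
        "EventData", ("event_id", "room_id", "type", "state_key", "redacts")
    )

    EVENT_TYPE_ID = "ev"  # corresponds to EventsStreamEventRow.TypeId

    def handle_rows(rows):
        for row in rows:
            if row.type != EVENT_TYPE_ID:
                continue  # skip row types this consumer doesn't understand
            print("new event:", row.data.event_id)

    handle_rows([StreamRow("ev", EventData("$e:hs", "!r:hs", "m.room.member", "@u:hs", None))])
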
@@ -16,6 +16,7 @@
 import logging
 
 from synapse.api.constants import EventTypes
+from synapse.replication.tcp.streams.events import EventsStreamEventRow
 from synapse.storage.event_federation import EventFederationWorkerStore
 from synapse.storage.event_push_actions import EventPushActionsWorkerStore
 from synapse.storage.events_worker import EventsWorkerStore

@@ -79,9 +80,12 @@ class SlavedEventStore(EventFederationWorkerStore,
         if stream_name == "events":
             self._stream_id_gen.advance(token)
             for row in rows:
+                if row.type != EventsStreamEventRow.TypeId:
+                    continue
+                data = row.data
                 self.invalidate_caches_for_event(
-                    token, row.event_id, row.room_id, row.type, row.state_key,
-                    row.redacts,
+                    token, data.event_id, data.room_id, data.type, data.state_key,
+                    data.redacts,
                     backfilled=False,
                 )
         elif stream_name == "backfill":

@@ -42,8 +42,8 @@ indicate which side is sending, these are *not* included on the wire::
     > POSITION backfill 1
     > POSITION caches 1
     > RDATA caches 2 ["get_user_by_id",["@01register-user:localhost:8823"],1490197670513]
-    > RDATA events 14 ["$149019767112vOHxz:localhost:8823",
-        "!AFDCvgApUmpdfVjIXm:localhost:8823","m.room.guest_access","",null]
+    > RDATA events 14 ["ev", ["$149019767112vOHxz:localhost:8823",
+        "!AFDCvgApUmpdfVjIXm:localhost:8823","m.room.guest_access","",null]]
     < PING 1490197675618
     > ERROR server stopping
     * connection closed by server *

@@ -13,28 +13,102 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from collections import namedtuple
+import attr
 
 from twisted.internet import defer
 
 from ._base import Stream
 
-EventStreamRow = namedtuple("EventStreamRow", (
-    "event_id",  # str
-    "room_id",  # str
-    "type",  # str
-    "state_key",  # str, optional
-    "redacts",  # str, optional
-))
+
+"""Handling of the 'events' replication stream
+
+This stream contains rows of various types. Each row therefore contains a 'type'
+identifier before the real data. For example::
+
+    RDATA events 12345 ["ev", ["$event:id", "!room:id", "m.type", null, null]]
+
+An "ev" row is sent for each new event. The fields in the data part are:
+
+ * The new event id
+ * The room id for the event
+ * The type of the new event
+ * The state key of the event, for state events
+ * The event id of an event which is redacted by this event.
+
+"""
+
+
+@attr.s(slots=True, frozen=True)
+class EventsStreamRow(object):
+    """A parsed row from the events replication stream"""
+    type = attr.ib()  # str: the TypeId of one of the *EventsStreamRows
+    data = attr.ib()  # BaseEventsStreamRow
+
+
+class BaseEventsStreamRow(object):
+    """Base class for rows to be sent in the events stream.
+
+    Specifies how to identify, serialize and deserialize the different types.
+    """
+
+    TypeId = None  # Unique string that ids the type. Must be overriden in sub classes.
+
+    @classmethod
+    def from_data(cls, data):
+        """Parse the data from the replication stream into a row.
+
+        By default we just call the constructor with the data list as arguments
+
+        Args:
+            data: The value of the data object from the replication stream
+        """
+        return cls(*data)
+
+
+@attr.s(slots=True, frozen=True)
+class EventsStreamEventRow(BaseEventsStreamRow):
+    TypeId = "ev"
+
+    event_id = attr.ib()  # str
+    room_id = attr.ib()  # str
+    type = attr.ib()  # str
+    state_key = attr.ib()  # str, optional
+    redacts = attr.ib()  # str, optional
+
+
+TypeToRow = {
+    Row.TypeId: Row
+    for Row in (
+        EventsStreamEventRow,
+    )
+}
 
 
 class EventsStream(Stream):
     """We received a new event, or an event went from being an outlier to not
     """
     NAME = "events"
-    ROW_TYPE = EventStreamRow
 
     def __init__(self, hs):
-        store = hs.get_datastore()
-        self.current_token = store.get_current_events_token
-        self.update_function = store.get_all_new_forward_event_rows
+        self._store = hs.get_datastore()
+        self.current_token = self._store.get_current_events_token
 
         super(EventsStream, self).__init__(hs)
+
+    @defer.inlineCallbacks
+    def update_function(self, from_token, current_token, limit=None):
+        event_rows = yield self._store.get_all_new_forward_event_rows(
+            from_token, current_token, limit,
+        )
+        event_updates = (
+            (row[0], EventsStreamEventRow.TypeId, row[1:])
+            for row in event_rows
+        )
+        defer.returnValue(event_updates)
+
+    @classmethod
+    def parse_row(cls, row):
+        (typ, data) = row
+        data = TypeToRow[typ].from_data(data)
+        return EventsStreamRow(typ, data)
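
Putting the new pieces together, parse_row looks up the row class by its type id and hands the data list to from_data, which by default just calls the constructor. A runnable sketch of that path (it mirrors, but is not, the Synapse code; it assumes the attrs package is installed):

    import attr

    @attr.s(slots=True, frozen=True)
    class EventsStreamEventRow(object):
        TypeId = "ev"  # plain class attribute, not an attr.ib() field
        event_id = attr.ib()
        room_id = attr.ib()
        type = attr.ib()
        state_key = attr.ib()
        redacts = attr.ib()

    TypeToRow = {EventsStreamEventRow.TypeId: EventsStreamEventRow}

    def parse_row(row):
        typ, data = row
        return typ, TypeToRow[typ](*data)  # from_data() just calls the constructor

    typ, parsed = parse_row(("ev", ["$e:hs", "!r:hs", "m.room.name", "", None]))
    print(typ, parsed.event_id, parsed.state_key)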