# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from twisted.internet import defer

from synapse.util.logutils import log_function
from synapse.util.async import run_on_reactor

from collections import namedtuple

import copy
import logging
import hashlib


logger = logging.getLogger(__name__)


def _get_state_key_from_event(event):
    return event.state_key


KeyStateTuple = namedtuple("KeyStateTuple", ("context", "type", "state_key"))


class StateHandler(object):
    """ Responsible for doing state conflict resolution.
    """

    def __init__(self, hs):
        self.store = hs.get_datastore()
        self._replication = hs.get_replication_layer()
        self.server_name = hs.hostname
        self.hs = hs

    @defer.inlineCallbacks
    @log_function
    def handle_new_event(self, event, snapshot):
        """ Given an event, works out a) whether we have sufficient power
        level to update the state and b) what the prev_state should be.

        Returns:
            Deferred: Resolved with a boolean indicating if we successfully
            updated the state.

        Raises:
            AuthError
        """
        # This needs to be done in a transaction.

        if not hasattr(event, "state_key"):
            return

        # Now we need to fill out the prev state and work out if it has auth
        # (w.r.t. power levels).

        snapshot.fill_out_prev_events(event)
        yield self.annotate_state_groups(event)

        if event.old_state_events:
            current_state = event.old_state_events.get(
                (event.type, event.state_key)
            )

            if current_state:
                event.prev_state = current_state.event_id

        defer.returnValue(True)

    @defer.inlineCallbacks
    @log_function
    def annotate_state_groups(self, event, old_state=None):
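        """ Annotate the event with the room state before it: sets
        event.old_state_events, event.state_events and event.state_group.

        If ``old_state`` is given it is used as the state; otherwise the
        state is resolved from the event's prev_events, and outliers get an
        empty state.

        Returns:
            Deferred: Resolved with True only when the state was resolved
            from prev_events and the event is itself a state event, False
            otherwise.
        """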
        yield run_on_reactor()

        if old_state:
            event.state_group = None
            event.old_state_events = {
                (s.type, s.state_key): s for s in old_state
            }
            event.state_events = event.old_state_events

            if hasattr(event, "state_key"):
                event.state_events[(event.type, event.state_key)] = event

            defer.returnValue(False)
            return

        if hasattr(event, "outlier") and event.outlier:
            event.state_group = None
            event.old_state_events = None
            event.state_events = {}
            defer.returnValue(False)
            return

        new_state = yield self.resolve_state_groups(
            [e for e, _ in event.prev_events]
        )

        event.old_state_events = copy.deepcopy(new_state)

        if hasattr(event, "state_key"):
            new_state[(event.type, event.state_key)] = event

        event.state_group = None
        event.state_events = new_state

        defer.returnValue(hasattr(event, "state_key"))

    @defer.inlineCallbacks
    def get_current_state(self, room_id, event_type=None, state_key=""):
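        """ Get the current state of a room by resolving the state at its
        latest events.

        Args:
            room_id (str): The room to look up the state in.
            event_type (str): If given, only the state event with this type
                and the given state_key is returned (or None if there is no
                such event).
            state_key (str): Used together with event_type.

        Returns:
            Deferred: Resolved with the list of current state events, or
            with a single event (or None) if event_type was given.
        """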
        events = yield self.store.get_latest_events_in_room(room_id)

        event_ids = [
            e_id
            for e_id, _, _ in events
        ]

        res = yield self.resolve_state_groups(event_ids)

        if event_type:
            defer.returnValue(res.get((event_type, state_key)))
            return

        defer.returnValue(res.values())

    @defer.inlineCallbacks
    @log_function
    def resolve_state_groups(self, event_ids):
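        """ Fetch the state groups for the given events and merge them into
        a single state mapping, resolving any conflicting entries with
        _resolve_state_events.

        Returns:
            Deferred: Resolved with a dict mapping (type, state_key) to the
            winning state event for that key.
        """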
        state_groups = yield self.store.get_state_groups(
            event_ids
        )

        state = {}
        for group in state_groups:
            for s in group.state:
                state.setdefault(
                    (s.type, s.state_key),
                    {}
                )[s.event_id] = s

        unconflicted_state = {
            k: v.values()[0] for k, v in state.items()
            if len(v.values()) == 1
        }

        conflicted_state = {
            k: v.values()
            for k, v in state.items()
            if len(v.values()) > 1
        }

        try:
            new_state = {}
            new_state.update(unconflicted_state)
            for key, events in conflicted_state.items():
                new_state[key] = yield self._resolve_state_events(events)
        except:
            logger.exception("Failed to resolve state")
            raise

        defer.returnValue(new_state)

    @defer.inlineCallbacks
    @log_function
    def _resolve_state_events(self, events):
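        """ Pick a single winner from a list of conflicting state events for
        the same (type, state_key): keep only the events sent with the
        highest power level, then tie-break deterministically by hashing
        their ids.

        Returns:
            Deferred: Resolved with the winning event.

        Raises:
            RuntimeError: If the list of candidates unexpectedly ends up
                empty.
        """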
        curr_events = events

        new_powers_deferreds = []
        for e in curr_events:
            new_powers_deferreds.append(
                self.store.get_power_level(e.room_id, e.user_id)
            )

        new_powers = yield defer.gatherResults(
            new_powers_deferreds,
            consumeErrors=True
        )

        max_power = max([int(p) for p in new_powers])

        curr_events = [
            z[0] for z in zip(curr_events, new_powers)
            if int(z[1]) == max_power
        ]

        if not curr_events:
            raise RuntimeError("Max didn't get a max?")
        elif len(curr_events) == 1:
            defer.returnValue(curr_events[0])

        # TODO: For now, tie-break by picking the event with the smallest
        # SHA-1 hash of its ids. This is stable but essentially arbitrary.
        defer.returnValue(
            sorted(
                curr_events,
                key=lambda e: hashlib.sha1(
                    e.event_id + e.user_id + e.room_id + e.type
                ).hexdigest()
            )[0]
        )