Merge branch 'event_signing' of github.com:matrix-org/synapse into federation_authorization

Conflicts:
	synapse/storage/__init__.py
Erik Johnston 2014-10-27 11:58:32 +00:00
commit ad9226eeec
24 changed files with 580 additions and 77 deletions

View file

@@ -14,6 +14,9 @@
# limitations under the License.
from .units import Pdu
from synapse.crypto.event_signing import (
    add_event_pdu_content_hash, sign_event_pdu
)
import copy
@@ -33,6 +36,7 @@ def encode_event_id(pdu_id, origin):
class PduCodec(object):

    def __init__(self, hs):
        self.signing_key = hs.config.signing_key[0]
        self.server_name = hs.hostname
        self.event_factory = hs.get_event_factory()
        self.clock = hs.get_clock()
@@ -43,9 +47,7 @@ class PduCodec(object):
        kwargs["event_id"] = encode_event_id(pdu.pdu_id, pdu.origin)
        kwargs["room_id"] = pdu.context
        kwargs["etype"] = pdu.pdu_type
        kwargs["prev_events"] = [
            encode_event_id(p[0], p[1]) for p in pdu.prev_pdus
        ]
        kwargs["prev_pdus"] = pdu.prev_pdus

        if hasattr(pdu, "prev_state_id") and hasattr(pdu, "prev_state_origin"):
            kwargs["prev_state"] = encode_event_id(
@@ -76,11 +78,8 @@ class PduCodec(object):
        d["context"] = event.room_id
        d["pdu_type"] = event.type

        if hasattr(event, "prev_events"):
            d["prev_pdus"] = [
                decode_event_id(e, self.server_name)
                for e in event.prev_events
            ]
        if hasattr(event, "prev_pdus"):
            d["prev_pdus"] = event.prev_pdus

        if hasattr(event, "prev_state"):
            d["prev_state_id"], d["prev_state_origin"] = (
@@ -93,10 +92,12 @@ class PduCodec(object):
        kwargs = copy.deepcopy(event.unrecognized_keys)
        kwargs.update({
            k: v for k, v in d.items()
            if k not in ["event_id", "room_id", "type", "prev_events"]
            if k not in ["event_id", "room_id", "type"]
        })

        if "origin_server_ts" not in kwargs:
            kwargs["origin_server_ts"] = int(self.clock.time_msec())

        return Pdu(**kwargs)
        pdu = Pdu(**kwargs)
        pdu = add_event_pdu_content_hash(pdu)
        return sign_event_pdu(pdu, self.server_name, self.signing_key)
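
Note on the new signing step above: add_event_pdu_content_hash and sign_event_pdu (from synapse.crypto.event_signing) broadly record a hash of the PDU and attach a signature under the server's name, computed over canonical JSON. A rough, self-contained sketch of that idea follows; it works on plain dicts, the helper names are illustrative rather than the real implementation, and sign_bytes stands in for the server's signing key.

import hashlib
import json
from base64 import b64encode


def canonical_json(value):
    # Stand-in for canonical JSON: sorted keys, no extra whitespace.
    return json.dumps(value, sort_keys=True, separators=(",", ":")).encode("utf-8")


def add_content_hash(pdu_dict):
    # Hash the PDU with signatures/hashes/unsigned stripped out, then
    # record the digest under hashes["sha256"].
    to_hash = {
        k: v for k, v in pdu_dict.items()
        if k not in ("signatures", "hashes", "unsigned")
    }
    digest = hashlib.sha256(canonical_json(to_hash)).digest()
    pdu_dict.setdefault("hashes", {})["sha256"] = b64encode(digest).decode("ascii")
    return pdu_dict


def sign_pdu(pdu_dict, server_name, key_id, sign_bytes):
    # sign_bytes: any callable taking bytes and returning raw signature
    # bytes (e.g. a wrapper around the key from hs.config.signing_key[0]).
    to_sign = {k: v for k, v in pdu_dict.items() if k != "signatures"}
    signature = sign_bytes(canonical_json(to_sign))
    sigs = pdu_dict.setdefault("signatures", {}).setdefault(server_name, {})
    sigs[key_id] = b64encode(signature).decode("ascii")
    return pdu_dict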

View file

@@ -297,6 +297,10 @@ class ReplicationLayer(object):
        transaction = Transaction(**transaction_data)

        for p in transaction.pdus:
            if "unsigned" in p:
                unsigned = p["unsigned"]
                if "age" in unsigned:
                    p["age"] = unsigned["age"]
            if "age" in p:
                p["age_ts"] = int(self._clock.time_msec()) - int(p["age"])
                del p["age"]
@@ -467,14 +471,16 @@ class ReplicationLayer(object):
        transmission.
        """
        pdus = [p.get_dict() for p in pdu_list]
        time_now = self._clock.time_msec()
        for p in pdus:
            if "age_ts" in pdus:
                p["age"] = int(self.clock.time_msec()) - p["age_ts"]
            if "age_ts" in p:
                age = time_now - p["age_ts"]
                p.setdefault("unsigned", {})["age"] = int(age)
                del p["age_ts"]

        return Transaction(
            origin=self.server_name,
            pdus=pdus,
            origin_server_ts=int(self._clock.time_msec()),
            origin_server_ts=int(time_now),
            destination=None,
        )
@@ -498,7 +504,7 @@ class ReplicationLayer(object):
        min_depth = yield self.store.get_min_depth_for_context(pdu.context)

        if min_depth and pdu.depth > min_depth:
            for pdu_id, origin in pdu.prev_pdus:
            for pdu_id, origin, hashes in pdu.prev_pdus:
                exists = yield self._get_persisted_pdu(pdu_id, origin)

                if not exists:
@@ -654,7 +660,7 @@ class _TransactionQueue(object):
        logger.debug("TX [%s] Persisting transaction...", destination)

        transaction = Transaction.create_new(
            origin_server_ts=self._clock.time_msec(),
            origin_server_ts=int(self._clock.time_msec()),
            transaction_id=str(self._next_txn_id),
            origin=self.server_name,
            destination=destination,
@@ -679,7 +685,9 @@ class _TransactionQueue(object):
            if "pdus" in data:
                for p in data["pdus"]:
                    if "age_ts" in p:
                        p["age"] = now - int(p["age_ts"])
                        unsigned = p.setdefault("unsigned", {})
                        unsigned["age"] = now - int(p["age_ts"])
                        del p["age_ts"]
            return data

        code, response = yield self.transport_layer.send_transaction(
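
The hunks above are two sides of the same bookkeeping: a timestamp relative to the sender ("age") is carried under "unsigned" so it stays outside any hashes and signatures, is converted to an absolute local "age_ts" on receipt, and is converted back just before transmission. A minimal sketch with plain dicts (function names are illustrative, not the ReplicationLayer API):

import time


def now_ms():
    # Millisecond clock, comparable to self._clock.time_msec().
    return int(time.time() * 1000)


def on_receive(pdu):
    # Inbound: lift "age" out of "unsigned" and turn it into a local
    # absolute timestamp, since the sender's relative value goes stale.
    unsigned = pdu.get("unsigned", {})
    if "age" in unsigned:
        pdu["age"] = unsigned["age"]
    if "age" in pdu:
        pdu["age_ts"] = now_ms() - int(pdu["age"])
        del pdu["age"]
    return pdu


def on_send(pdu):
    # Outbound: recompute a relative "age" from "age_ts" and tuck it back
    # under "unsigned" so it is excluded from hashing and signing.
    if "age_ts" in pdu:
        pdu.setdefault("unsigned", {})["age"] = now_ms() - int(pdu["age_ts"])
        del pdu["age_ts"]
    return pdu


pdu = on_send(on_receive({"unsigned": {"age": 500}}))
# pdu["unsigned"]["age"] is now roughly 500 plus local queueing time.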

View file

@@ -18,6 +18,7 @@ server protocol.
"""

from synapse.util.jsonobject import JsonEncodedObject
from syutil.base64util import encode_base64

import logging
import json
@@ -63,9 +64,10 @@ class Pdu(JsonEncodedObject):
        "depth",
        "content",
        "outlier",
        "hashes",
        "signatures",
        "is_state",  # Below this are keys valid only for State Pdus.
        "state_key",
        "power_level",
        "prev_state_id",
        "prev_state_origin",
        "required_power_level",
@@ -91,7 +93,7 @@ class Pdu(JsonEncodedObject):
    # just leaving it as a dict. (OR DO WE?!)
    def __init__(self, destinations=[], is_state=False, prev_pdus=[],
                 outlier=False, **kwargs):
                 outlier=False, hashes={}, signatures={}, **kwargs):
        if is_state:
            for required_key in ["state_key"]:
                if required_key not in kwargs:
@@ -99,9 +101,11 @@ class Pdu(JsonEncodedObject):
        super(Pdu, self).__init__(
            destinations=destinations,
            is_state=is_state,
            is_state=bool(is_state),
            prev_pdus=prev_pdus,
            outlier=outlier,
            hashes=hashes,
            signatures=signatures,
            **kwargs
        )
@@ -120,6 +124,10 @@ class Pdu(JsonEncodedObject):
            d = copy.copy(pdu_tuple.pdu_entry._asdict())
            d["origin_server_ts"] = d.pop("ts")

            for k in d.keys():
                if d[k] is None:
                    del d[k]

            d["content"] = json.loads(d["content_json"])
            del d["content_json"]
@@ -127,8 +135,28 @@ class Pdu(JsonEncodedObject):
            if "unrecognized_keys" in d and d["unrecognized_keys"]:
                args.update(json.loads(d["unrecognized_keys"]))

            hashes = {
                alg: encode_base64(hsh)
                for alg, hsh in pdu_tuple.hashes.items()
            }

            signatures = {
                kid: encode_base64(sig)
                for kid, sig in pdu_tuple.signatures.items()
            }

            prev_pdus = []
            for prev_pdu in pdu_tuple.prev_pdu_list:
                prev_hashes = pdu_tuple.edge_hashes.get(prev_pdu, {})
                prev_hashes = {
                    alg: encode_base64(hsh) for alg, hsh in prev_hashes.items()
                }
                prev_pdus.append((prev_pdu[0], prev_pdu[1], prev_hashes))

            return Pdu(
                prev_pdus=pdu_tuple.prev_pdu_list,
                prev_pdus=prev_pdus,
                hashes=hashes,
                signatures=signatures,
                **args
            )
        else:
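
For the from_pdu_tuple changes above: hashes and signatures come out of the database as raw bytes and are base64-encoded for the wire, and each entry of prev_pdu_list is widened from (pdu_id, origin) to (pdu_id, origin, hashes) using the per-edge hashes. A small sketch with made-up row data; the field names mirror the pdu_tuple attributes used above, and b64encode stands in for syutil's encode_base64.

from base64 import b64encode


def encode_hash_dict(raw):
    # Raw digests/signatures (bytes) -> base64 strings for the wire.
    return {name: b64encode(value).decode("ascii") for name, value in raw.items()}


# Hypothetical values shaped like the pdu_tuple fields referenced above.
hashes = encode_hash_dict({"sha256": b"\x01" * 32})
signatures = encode_hash_dict({"ed25519:auto": b"\x02" * 64})
prev_pdu_list = [("pdu_a", "example.org")]
edge_hashes = {("pdu_a", "example.org"): {"sha256": b"\x03" * 32}}

prev_pdus = [
    (pdu_id, origin, encode_hash_dict(edge_hashes.get((pdu_id, origin), {})))
    for (pdu_id, origin) in prev_pdu_list
]
# prev_pdus is now a list of (pdu_id, origin, hashes) triples, matching the
# three-way unpacking added in ReplicationLayer above.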