Merge branch 'event_signing' of github.com:matrix-org/synapse into federation_authorization
Conflicts: synapse/storage/__init__.py
commit ad9226eeec
24 changed files with 580 additions and 77 deletions
synapse/storage/__init__.py
@@ -40,7 +40,14 @@ from .stream import StreamStore
 from .pdu import StatePduStore, PduStore, PdusTable
 from .transactions import TransactionStore
 from .keys import KeyStore
 
 from .state import StateStore
+from .signatures import SignatureStore
+
+from syutil.base64util import decode_base64
+
+from synapse.crypto.event_signing import compute_pdu_event_reference_hash
+
+
 import json
 import logging
@@ -61,6 +68,7 @@ SCHEMAS = [
     "keys",
     "redactions",
     "state",
+    "signatures",
 ]
 
 
@@ -78,7 +86,7 @@ class _RollbackButIsFineException(Exception):
 class DataStore(RoomMemberStore, RoomStore,
                 RegistrationStore, StreamStore, ProfileStore, FeedbackStore,
                 PresenceStore, PduStore, StatePduStore, TransactionStore,
-                DirectoryStore, KeyStore, StateStore):
+                DirectoryStore, KeyStore, StateStore, SignatureStore):
 
     def __init__(self, hs):
         super(DataStore, self).__init__(hs)
@@ -146,6 +154,8 @@ class DataStore(RoomMemberStore, RoomStore,
     def _persist_event_pdu_txn(self, txn, pdu):
        cols = dict(pdu.__dict__)
        unrec_keys = dict(pdu.unrecognized_keys)
+        del cols["hashes"]
+        del cols["signatures"]
        del cols["content"]
        del cols["prev_pdus"]
        cols["content_json"] = json.dumps(pdu.content)
@@ -161,6 +171,33 @@ class DataStore(RoomMemberStore, RoomStore,
 
        logger.debug("Persisting: %s", repr(cols))
 
+        for hash_alg, hash_base64 in pdu.hashes.items():
+            hash_bytes = decode_base64(hash_base64)
+            self._store_pdu_content_hash_txn(
+                txn, pdu.pdu_id, pdu.origin, hash_alg, hash_bytes,
+            )
+
+        signatures = pdu.signatures.get(pdu.origin, {})
+
+        for key_id, signature_base64 in signatures.items():
+            signature_bytes = decode_base64(signature_base64)
+            self._store_pdu_origin_signature_txn(
+                txn, pdu.pdu_id, pdu.origin, key_id, signature_bytes,
+            )
+
+        for prev_pdu_id, prev_origin, prev_hashes in pdu.prev_pdus:
+            for alg, hash_base64 in prev_hashes.items():
+                hash_bytes = decode_base64(hash_base64)
+                self._store_prev_pdu_hash_txn(
+                    txn, pdu.pdu_id, pdu.origin, prev_pdu_id, prev_origin, alg,
+                    hash_bytes
+                )
+
+        (ref_alg, ref_hash_bytes) = compute_pdu_event_reference_hash(pdu)
+        self._store_pdu_reference_hash_txn(
+            txn, pdu.pdu_id, pdu.origin, ref_alg, ref_hash_bytes
+        )
+
        if pdu.is_state:
            self._persist_state_txn(txn, pdu.prev_pdus, cols)
        else:
@@ -338,6 +375,7 @@ class DataStore(RoomMemberStore, RoomStore,
             prev_pdus = self._get_latest_pdus_in_context(
                 txn, room_id
             )
+
             if state_type is not None and state_key is not None:
                 prev_state_pdu = self._get_current_state_pdu(
                     txn, room_id, state_type, state_key
@@ -387,17 +425,16 @@ class Snapshot(object):
         self.prev_state_pdu = prev_state_pdu
 
     def fill_out_prev_events(self, event):
-        if hasattr(event, "prev_events"):
+        if hasattr(event, "prev_pdus"):
             return
 
-        es = [
-            "%s@%s" % (p_id, origin) for p_id, origin, _ in self.prev_pdus
+        event.prev_pdus = [
+            (p_id, origin, hashes)
+            for p_id, origin, hashes, _ in self.prev_pdus
         ]
 
-        event.prev_events = [e for e in es if e != event.event_id]
-
         if self.prev_pdus:
-            event.depth = max([int(v) for _, _, v in self.prev_pdus]) + 1
+            event.depth = max([int(v) for _, _, _, v in self.prev_pdus]) + 1
         else:
             event.depth = 0
 
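For orientation, a minimal sketch (not part of the diff) of the field shapes that _persist_event_pdu_txn above assumes on an incoming PDU: hashes and signatures hold base64-encoded values (hence the decode_base64 calls), and each prev_pdus entry now carries the hashes of the PDU it references. All identifiers and values here are invented for illustration.

# Hypothetical example data, shaped the way the new persistence loops expect it.
example_pdu_fields = {
    # algorithm -> base64-encoded hash of the PDU content
    "hashes": {"sha256": "aGFzaG9mY29udGVudA"},
    # origin server -> {key id -> base64-encoded signature}
    "signatures": {
        "example.com": {"ed25519:auto": "c2lnbmF0dXJlYnl0ZXM"},
    },
    # list of (prev_pdu_id, prev_origin, {algorithm: base64-encoded hash})
    "prev_pdus": [
        ("prior_pdu_id", "example.com", {"sha256": "cHJldmhhc2g"}),
    ],
}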
synapse/storage/keys.py
@@ -104,7 +104,6 @@ class KeyStore(SQLBaseStore):
             ts_now_ms (int): The time now in milliseconds
             verification_key (VerifyKey): The NACL verify key.
         """
-        verify_key_bytes = verify_key.encode()
         return self._simple_insert(
             table="server_signature_keys",
             values={
synapse/storage/pdu.py
@@ -20,10 +20,13 @@ from ._base import SQLBaseStore, Table, JoinHelper
 from synapse.federation.units import Pdu
 from synapse.util.logutils import log_function
 
+from syutil.base64util import encode_base64
+
 from collections import namedtuple
 
 import logging
 
 
 logger = logging.getLogger(__name__)
 
+
@@ -64,6 +67,13 @@ class PduStore(SQLBaseStore):
                 for r in PduEdgesTable.decode_results(txn.fetchall())
             ]
 
+            edge_hashes = self._get_prev_pdu_hashes_txn(txn, pdu_id, origin)
+
+            hashes = self._get_pdu_content_hashes_txn(txn, pdu_id, origin)
+            signatures = self._get_pdu_origin_signatures_txn(
+                txn, pdu_id, origin
+            )
+
             query = (
                 "SELECT %(fields)s FROM %(pdus)s as p "
                 "LEFT JOIN %(state)s as s "
@@ -80,7 +90,9 @@ class PduStore(SQLBaseStore):
 
             row = txn.fetchone()
             if row:
-                results.append(PduTuple(PduEntry(*row), edges))
+                results.append(PduTuple(
+                    PduEntry(*row), edges, hashes, signatures, edge_hashes
+                ))
 
         return results
 
@@ -309,9 +321,14 @@ class PduStore(SQLBaseStore):
             (context, )
         )
 
-        results = txn.fetchall()
+        results = []
+        for pdu_id, origin, depth in txn.fetchall():
+            hashes = self._get_pdu_reference_hashes_txn(txn, pdu_id, origin)
+            sha256_bytes = hashes["sha256"]
+            prev_hashes = {"sha256": encode_base64(sha256_bytes)}
+            results.append((pdu_id, origin, prev_hashes, depth))
 
-        return [(row[0], row[1], row[2]) for row in results]
+        return results
 
     @defer.inlineCallbacks
     def get_oldest_pdus_in_context(self, context):
@@ -430,7 +447,7 @@ class PduStore(SQLBaseStore):
             "DELETE FROM %s WHERE pdu_id = ? AND origin = ?"
             % PduForwardExtremitiesTable.table_name
         )
-        txn.executemany(query, prev_pdus)
+        txn.executemany(query, list(p[:2] for p in prev_pdus))
 
         # We only insert as a forward extremety the new pdu if there are no
         # other pdus that reference it as a prev pdu
@@ -453,7 +470,7 @@ class PduStore(SQLBaseStore):
         # deleted in a second if they're incorrect anyway.
         txn.executemany(
             PduBackwardExtremitiesTable.insert_statement(),
-            [(i, o, context) for i, o in prev_pdus]
+            [(i, o, context) for i, o, _ in prev_pdus]
         )
 
         # Also delete from the backwards extremities table all ones that
@@ -914,7 +931,7 @@ This does not include a prev_pdus key.
 
 PduTuple = namedtuple(
     "PduTuple",
-    ("pdu_entry", "prev_pdu_list")
+    ("pdu_entry", "prev_pdu_list", "hashes", "signatures", "edge_hashes")
 )
 """ This is a tuple of a `PduEntry` and a list of `PduIdTuple` that represent
 the `prev_pdus` key of a PDU.
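The reworked _get_latest_pdus_in_context now yields (pdu_id, origin, prev_hashes, depth) four-tuples, which Snapshot.fill_out_prev_events in synapse/storage/__init__.py reshapes into the event's prev_pdus and depth. A short self-contained sketch (not part of the diff; the PDU ids, origins and hashes are made up):

# Hypothetical latest-PDU tuples, in the shape the reworked query loop builds.
prev_pdus = [
    ("pdu_one", "example.com", {"sha256": "Zmlyc3RoYXNo"}, 5),
    ("pdu_two", "other.example.org", {"sha256": "c2Vjb25kaGFzaA"}, 7),
]

# The same reshaping that Snapshot.fill_out_prev_events performs:
event_prev_pdus = [
    (p_id, origin, hashes)
    for p_id, origin, hashes, _ in prev_pdus
]
event_depth = max([int(v) for _, _, _, v in prev_pdus]) + 1 if prev_pdus else 0

print(event_prev_pdus)  # [('pdu_one', 'example.com', {'sha256': ...}), ...]
print(event_depth)      # 8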
synapse/storage/schema/signatures.sql (new file, 66 lines)
@@ -0,0 +1,66 @@
+/* Copyright 2014 OpenMarket Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+CREATE TABLE IF NOT EXISTS pdu_content_hashes (
+    pdu_id TEXT,
+    origin TEXT,
+    algorithm TEXT,
+    hash BLOB,
+    CONSTRAINT uniqueness UNIQUE (pdu_id, origin, algorithm)
+);
+
+CREATE INDEX IF NOT EXISTS pdu_content_hashes_id ON pdu_content_hashes (
+    pdu_id, origin
+);
+
+CREATE TABLE IF NOT EXISTS pdu_reference_hashes (
+    pdu_id TEXT,
+    origin TEXT,
+    algorithm TEXT,
+    hash BLOB,
+    CONSTRAINT uniqueness UNIQUE (pdu_id, origin, algorithm)
+);
+
+CREATE INDEX IF NOT EXISTS pdu_reference_hashes_id ON pdu_reference_hashes (
+    pdu_id, origin
+);
+
+CREATE TABLE IF NOT EXISTS pdu_origin_signatures (
+    pdu_id TEXT,
+    origin TEXT,
+    key_id TEXT,
+    signature BLOB,
+    CONSTRAINT uniqueness UNIQUE (pdu_id, origin, key_id)
+);
+
+CREATE INDEX IF NOT EXISTS pdu_origin_signatures_id ON pdu_origin_signatures (
+    pdu_id, origin
+);
+
+CREATE TABLE IF NOT EXISTS pdu_edge_hashes(
+    pdu_id TEXT,
+    origin TEXT,
+    prev_pdu_id TEXT,
+    prev_origin TEXT,
+    algorithm TEXT,
+    hash BLOB,
+    CONSTRAINT uniqueness UNIQUE (
+        pdu_id, origin, prev_pdu_id, prev_origin, algorithm
+    )
+);
+
+CREATE INDEX IF NOT EXISTS pdu_edge_hashes_id ON pdu_edge_hashes(
+    pdu_id, origin
+);
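A standalone sqlite3 session (not part of the diff) exercising the pdu_content_hashes table above, with the same insert and lookup shape used by the SignatureStore helpers added below. The PDU id, origin and content are invented, and sqlite3.Binary stands in for the buffer() wrapper used by the Python 2 storage code.

import hashlib
import sqlite3

conn = sqlite3.connect(":memory:")

# Same DDL as pdu_content_hashes above, condensed into one statement.
conn.execute(
    "CREATE TABLE IF NOT EXISTS pdu_content_hashes ("
    " pdu_id TEXT, origin TEXT, algorithm TEXT, hash BLOB,"
    " CONSTRAINT uniqueness UNIQUE (pdu_id, origin, algorithm))"
)

# Store a sha256 content hash for a made-up PDU.
hash_bytes = hashlib.sha256(b"example pdu content").digest()
conn.execute(
    "INSERT INTO pdu_content_hashes (pdu_id, origin, algorithm, hash)"
    " VALUES (?, ?, ?, ?)",
    ("made_up_pdu", "example.com", "sha256", sqlite3.Binary(hash_bytes)),
)

# The same SELECT used by _get_pdu_content_hashes_txn, folded into a dict.
rows = conn.execute(
    "SELECT algorithm, hash FROM pdu_content_hashes"
    " WHERE pdu_id = ? and origin = ?",
    ("made_up_pdu", "example.com"),
).fetchall()
print(dict(rows))  # {'sha256': <32 bytes of digest>}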
synapse/storage/signatures.py (new file, 155 lines)
@@ -0,0 +1,155 @@
+# -*- coding: utf-8 -*-
+# Copyright 2014 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from _base import SQLBaseStore
+
+
+class SignatureStore(SQLBaseStore):
+    """Persistence for PDU signatures and hashes"""
+
+    def _get_pdu_content_hashes_txn(self, txn, pdu_id, origin):
+        """Get all the hashes for a given PDU.
+        Args:
+            txn (cursor):
+            pdu_id (str): Id for the PDU.
+            origin (str): origin of the PDU.
+        Returns:
+            A dict of algorithm -> hash.
+        """
+        query = (
+            "SELECT algorithm, hash"
+            " FROM pdu_content_hashes"
+            " WHERE pdu_id = ? and origin = ?"
+        )
+        txn.execute(query, (pdu_id, origin))
+        return dict(txn.fetchall())
+
+    def _store_pdu_content_hash_txn(self, txn, pdu_id, origin, algorithm,
+                                    hash_bytes):
+        """Store a hash for a PDU
+        Args:
+            txn (cursor):
+            pdu_id (str): Id for the PDU.
+            origin (str): origin of the PDU.
+            algorithm (str): Hashing algorithm.
+            hash_bytes (bytes): Hash function output bytes.
+        """
+        self._simple_insert_txn(txn, "pdu_content_hashes", {
+            "pdu_id": pdu_id,
+            "origin": origin,
+            "algorithm": algorithm,
+            "hash": buffer(hash_bytes),
+        })
+
+    def _get_pdu_reference_hashes_txn(self, txn, pdu_id, origin):
+        """Get all the hashes for a given PDU.
+        Args:
+            txn (cursor):
+            pdu_id (str): Id for the PDU.
+            origin (str): origin of the PDU.
+        Returns:
+            A dict of algorithm -> hash.
+        """
+        query = (
+            "SELECT algorithm, hash"
+            " FROM pdu_reference_hashes"
+            " WHERE pdu_id = ? and origin = ?"
+        )
+        txn.execute(query, (pdu_id, origin))
+        return dict(txn.fetchall())
+
+    def _store_pdu_reference_hash_txn(self, txn, pdu_id, origin, algorithm,
+                                      hash_bytes):
+        """Store a hash for a PDU
+        Args:
+            txn (cursor):
+            pdu_id (str): Id for the PDU.
+            origin (str): origin of the PDU.
+            algorithm (str): Hashing algorithm.
+            hash_bytes (bytes): Hash function output bytes.
+        """
+        self._simple_insert_txn(txn, "pdu_reference_hashes", {
+            "pdu_id": pdu_id,
+            "origin": origin,
+            "algorithm": algorithm,
+            "hash": buffer(hash_bytes),
+        })
+
+
+    def _get_pdu_origin_signatures_txn(self, txn, pdu_id, origin):
+        """Get all the signatures for a given PDU.
+        Args:
+            txn (cursor):
+            pdu_id (str): Id for the PDU.
+            origin (str): origin of the PDU.
+        Returns:
+            A dict of key_id -> signature_bytes.
+        """
+        query = (
+            "SELECT key_id, signature"
+            " FROM pdu_origin_signatures"
+            " WHERE pdu_id = ? and origin = ?"
+        )
+        txn.execute(query, (pdu_id, origin))
+        return dict(txn.fetchall())
+
+    def _store_pdu_origin_signature_txn(self, txn, pdu_id, origin, key_id,
+                                        signature_bytes):
+        """Store a signature from the origin server for a PDU.
+        Args:
+            txn (cursor):
+            pdu_id (str): Id for the PDU.
+            origin (str): origin of the PDU.
+            key_id (str): Id for the signing key.
+            signature (bytes): The signature.
+        """
+        self._simple_insert_txn(txn, "pdu_origin_signatures", {
+            "pdu_id": pdu_id,
+            "origin": origin,
+            "key_id": key_id,
+            "signature": buffer(signature_bytes),
+        })
+
+    def _get_prev_pdu_hashes_txn(self, txn, pdu_id, origin):
+        """Get all the hashes for previous PDUs of a PDU
+        Args:
+            txn (cursor):
+            pdu_id (str): Id of the PDU.
+            origin (str): Origin of the PDU.
+        Returns:
+            dict of (pdu_id, origin) -> dict of algorithm -> hash_bytes.
+        """
+        query = (
+            "SELECT prev_pdu_id, prev_origin, algorithm, hash"
+            " FROM pdu_edge_hashes"
+            " WHERE pdu_id = ? and origin = ?"
+        )
+        txn.execute(query, (pdu_id, origin))
+        results = {}
+        for prev_pdu_id, prev_origin, algorithm, hash_bytes in txn.fetchall():
+            hashes = results.setdefault((prev_pdu_id, prev_origin), {})
+            hashes[algorithm] = hash_bytes
+        return results
+
+    def _store_prev_pdu_hash_txn(self, txn, pdu_id, origin, prev_pdu_id,
+                                 prev_origin, algorithm, hash_bytes):
+        self._simple_insert_txn(txn, "pdu_edge_hashes", {
+            "pdu_id": pdu_id,
+            "origin": origin,
+            "prev_pdu_id": prev_pdu_id,
+            "prev_origin": prev_origin,
+            "algorithm": algorithm,
+            "hash": buffer(hash_bytes),
+        })
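A small standalone illustration (not part of the diff) of the grouping _get_prev_pdu_hashes_txn performs on rows from pdu_edge_hashes; the row values are invented.

# Rows as they would come back from the pdu_edge_hashes query:
# (prev_pdu_id, prev_origin, algorithm, hash_bytes)
rows = [
    ("pdu_one", "example.com", "sha256", b"\x01" * 32),
    ("pdu_one", "example.com", "sha512", b"\x02" * 64),
    ("pdu_two", "other.example.org", "sha256", b"\x03" * 32),
]

# The same grouping loop as in _get_prev_pdu_hashes_txn:
results = {}
for prev_pdu_id, prev_origin, algorithm, hash_bytes in rows:
    hashes = results.setdefault((prev_pdu_id, prev_origin), {})
    hashes[algorithm] = hash_bytes

# results: {('pdu_one', 'example.com'): {'sha256': ..., 'sha512': ...},
#           ('pdu_two', 'other.example.org'): {'sha256': ...}}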