# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2019 New Vector Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import itertools
import logging
from typing import Any, Dict, Iterable, List, Optional, Tuple

from signedjson.key import decode_verify_key_bytes

from synapse.storage._base import SQLBaseStore
from synapse.storage.database import LoggingTransaction
from synapse.storage.keys import FetchKeyResult
from synapse.storage.types import Cursor
from synapse.util.caches.descriptors import cached, cachedList
from synapse.util.iterutils import batch_iter

logger = logging.getLogger(__name__)

db_binary_type = memoryview
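# Binary values (the NACL verify keys and the raw key JSON) are wrapped in
# `db_binary_type` before being written, so that the database drivers treat
# them as binary blobs rather than text.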


class KeyStore(SQLBaseStore):
    """Persistence for signature verification keys"""

    @cached()
    def _get_server_verify_key(
        self, server_name_and_key_id: Tuple[str, str]
    ) -> FetchKeyResult:
        raise NotImplementedError()
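
    # `_get_server_verify_key` above is never called directly: it exists so
    # that the `@cachedList` lookup below, and the invalidations in
    # `store_server_verify_keys`, have a per-(server_name, key_id) cache
    # entry to work against.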

    @cachedList(
        cached_method_name="_get_server_verify_key", list_name="server_name_and_key_ids"
    )
    async def get_server_verify_keys(
        self, server_name_and_key_ids: Iterable[Tuple[str, str]]
    ) -> Dict[Tuple[str, str], FetchKeyResult]:
        """
        Args:
            server_name_and_key_ids:
                iterable of (server_name, key_id) tuples to fetch keys for

        Returns:
            A map from (server_name, key_id) -> FetchKeyResult, or None if the
            key is unknown
        """
        keys = {}

        def _get_keys(txn: Cursor, batch: Tuple[Tuple[str, str], ...]) -> None:
            """Processes a batch of keys to fetch, and adds the result to `keys`."""

            # batch_iter always returns tuples so it's safe to do len(batch)
            sql = (
                "SELECT server_name, key_id, verify_key, ts_valid_until_ms "
                "FROM server_signature_keys WHERE 1=0"
            ) + " OR (server_name=? AND key_id=?)" * len(batch)

            txn.execute(sql, tuple(itertools.chain.from_iterable(batch)))

            for row in txn:
                server_name, key_id, key_bytes, ts_valid_until_ms = row

                if ts_valid_until_ms is None:
                    # Old keys may be stored with a ts_valid_until_ms of null,
                    # in which case we treat this as if it was set to `0`, i.e.
                    # it won't match key requests that define a minimum
                    # `ts_valid_until_ms`.
                    ts_valid_until_ms = 0

                keys[(server_name, key_id)] = FetchKeyResult(
                    verify_key=decode_verify_key_bytes(key_id, bytes(key_bytes)),
                    valid_until_ts=ts_valid_until_ms,
                )

        def _txn(txn: Cursor) -> Dict[Tuple[str, str], FetchKeyResult]:
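            # Look the keys up in batches of 50, so that a single statement
            # never has to bind an unbounded number of parameters.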
            for batch in batch_iter(server_name_and_key_ids, 50):
                _get_keys(txn, batch)
            return keys

        return await self.db_pool.runInteraction("get_server_verify_keys", _txn)

    async def store_server_verify_keys(
        self,
        from_server: str,
        ts_added_ms: int,
        verify_keys: Iterable[Tuple[str, str, FetchKeyResult]],
    ) -> None:
        """Stores NACL verification keys for remote servers.

        Args:
            from_server: Where the verification keys were looked up
            ts_added_ms: The time to record that the key was added
            verify_keys:
                keys to be stored. Each entry is a triplet of
                (server_name, key_id, key).
        """
        key_values = []
        value_values = []
        invalidations = []
        for server_name, key_id, fetch_result in verify_keys:
            key_values.append((server_name, key_id))
            value_values.append(
                (
                    from_server,
                    ts_added_ms,
                    fetch_result.valid_until_ts,
                    db_binary_type(fetch_result.verify_key.encode()),
                )
            )

            # invalidate takes a tuple corresponding to the params of
            # _get_server_verify_key. _get_server_verify_key only takes one
            # param, which is itself the 2-tuple (server_name, key_id).
            invalidations.append((server_name, key_id))

        await self.db_pool.simple_upsert_many(
            table="server_signature_keys",
            key_names=("server_name", "key_id"),
            key_values=key_values,
            value_names=(
                "from_server",
                "ts_added_ms",
                "ts_valid_until_ms",
                "verify_key",
            ),
            value_values=value_values,
            desc="store_server_verify_keys",
        )

        invalidate = self._get_server_verify_key.invalidate
        for i in invalidations:
            invalidate((i,))

    async def store_server_keys_json(
        self,
        server_name: str,
        key_id: str,
        from_server: str,
        ts_now_ms: int,
        ts_expires_ms: int,
        key_json_bytes: bytes,
    ) -> None:
        """Stores the JSON bytes for a set of keys from a server.

        The JSON should be signed by the originating server, the intermediate
        server, and by this server. Updates the value for the
        (server_name, key_id, from_server) triplet if one already existed.

        Args:
            server_name: The name of the server.
            key_id: The identifier of the key this JSON is for.
            from_server: The server this JSON was fetched from.
            ts_now_ms: The time now in milliseconds.
            ts_expires_ms: The time when this JSON stops being valid.
            key_json_bytes: The encoded JSON.
        """
        await self.db_pool.simple_upsert(
            table="server_keys_json",
            keyvalues={
                "server_name": server_name,
                "key_id": key_id,
                "from_server": from_server,
            },
            values={
                "server_name": server_name,
                "key_id": key_id,
                "from_server": from_server,
                "ts_added_ms": ts_now_ms,
                "ts_valid_until_ms": ts_expires_ms,
                "key_json": db_binary_type(key_json_bytes),
            },
            desc="store_server_keys_json",
        )

    async def get_server_keys_json(
        self, server_keys: Iterable[Tuple[str, Optional[str], Optional[str]]]
    ) -> Dict[Tuple[str, Optional[str], Optional[str]], List[Dict[str, Any]]]:
        """Retrieve the key JSON for a list of server names and key ids.

        If no keys are found for a given (server, key_id, source) triplet,
        the entry for that triplet will be an empty list.

        The JSON is returned as a byte array so that it can be efficiently
        used in an HTTP response.

        Args:
            server_keys: List of (server_name, key_id, source) triplets.

        Returns:
            A mapping from each (server_name, key_id, source) triplet to a list
            of rows, each a dict with `key_id`, `from_server`, `ts_added_ms`,
            `ts_valid_until_ms` and `key_json` columns.
        """
        def _get_server_keys_json_txn(
            txn: LoggingTransaction,
        ) -> Dict[Tuple[str, Optional[str], Optional[str]], List[Dict[str, Any]]]:
            results = {}
            for server_name, key_id, from_server in server_keys:
                keyvalues = {"server_name": server_name}
                if key_id is not None:
                    keyvalues["key_id"] = key_id
                if from_server is not None:
                    keyvalues["from_server"] = from_server
                rows = self.db_pool.simple_select_list_txn(
                    txn,
                    "server_keys_json",
                    keyvalues=keyvalues,
                    retcols=(
                        "key_id",
                        "from_server",
                        "ts_added_ms",
                        "ts_valid_until_ms",
                        "key_json",
                    ),
                )
                results[(server_name, key_id, from_server)] = rows
            return results

        return await self.db_pool.runInteraction(
            "get_server_keys_json", _get_server_keys_json_txn
        )