# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
from typing import TYPE_CHECKING, Dict, Set

from signedjson.sign import sign_json

from synapse.api.errors import Codes, SynapseError
from synapse.crypto.keyring import ServerKeyFetcher
from synapse.http.server import DirectServeJsonResource, respond_with_json
from synapse.http.servlet import parse_integer, parse_json_object_from_request
from synapse.http.site import SynapseRequest
from synapse.types import JsonDict
from synapse.util import json_decoder
from synapse.util.async_helpers import yieldable_gather_results

if TYPE_CHECKING:
    from synapse.server import HomeServer

logger = logging.getLogger(__name__)


class RemoteKey(DirectServeJsonResource):
    """HTTP resource for retrieving the TLS certificate and NACL signature
    verification keys for a collection of servers. Checks that the reported
    X.509 TLS certificate matches the one used in the HTTPS connection. Checks
    that the NACL signature for the remote server is valid. Returns a dict of
    JSON signed by both the remote server and by this server.

    Supports individual GET APIs and a bulk query POST API.

    Requests:

    GET /_matrix/key/v2/query/remote.server.example.com HTTP/1.1

    GET /_matrix/key/v2/query/remote.server.example.com/a.key.id HTTP/1.1

    POST /_matrix/key/v2/query HTTP/1.1
    Content-Type: application/json
    {
        "server_keys": {
            "remote.server.example.com": {
                "a.key.id": {
                    "minimum_valid_until_ts": 1234567890123
                }
            }
        }
    }

    Response:

    HTTP/1.1 200 OK
    Content-Type: application/json
    {
        "server_keys": [
            {
                "server_name": "remote.server.example.com"
                "valid_until_ts": # posix timestamp
                "verify_keys": {
                    "a.key.id": { # The identifier for a key.
                        key: "" # base64 encoded verification key.
                    }
                }
                "old_verify_keys": {
                    "an.old.key.id": { # The identifier for an old key.
                        key: "", # base64 encoded key
                        "expired_ts": 0, # when the key stopped being used.
                    }
                }
                "signatures": {
                    "remote.server.example.com": {...}
                    "this.server.example.com": {...}
                }
            }
        ]
    }
    """

    isLeaf = True

    def __init__(self, hs: "HomeServer"):
        super().__init__()

        self.fetcher = ServerKeyFetcher(hs)
        self.store = hs.get_datastores().main
        self.clock = hs.get_clock()
        self.federation_domain_whitelist = (
            hs.config.federation.federation_domain_whitelist
        )
        self.config = hs.config

    async def _async_render_GET(self, request: SynapseRequest) -> None:
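        """Handle GET /_matrix/key/v2/query/<server> and
        GET /_matrix/key/v2/query/<server>/<key_id>.

        With one path component we ask for every key the named server has;
        with two we ask for a single key, optionally constrained by the
        ``minimum_valid_until_ts`` query parameter.
        """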
        assert request.postpath is not None
        if len(request.postpath) == 1:
            (server,) = request.postpath
            query: dict = {server.decode("ascii"): {}}
        elif len(request.postpath) == 2:
            server, key_id = request.postpath
            minimum_valid_until_ts = parse_integer(request, "minimum_valid_until_ts")
            arguments = {}
            if minimum_valid_until_ts is not None:
                arguments["minimum_valid_until_ts"] = minimum_valid_until_ts
            query = {server.decode("ascii"): {key_id.decode("ascii"): arguments}}
        else:
            raise SynapseError(404, "Not found %r" % request.postpath, Codes.NOT_FOUND)

        await self.query_keys(request, query, query_remote_on_cache_miss=True)

    async def _async_render_POST(self, request: SynapseRequest) -> None:
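        """Handle POST /_matrix/key/v2/query.

        The JSON body maps each server name to the key ids (and optional
        ``minimum_valid_until_ts``) being requested, as described in the class
        docstring above.
        """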
        content = parse_json_object_from_request(request)

        query = content["server_keys"]

        await self.query_keys(request, query, query_remote_on_cache_miss=True)

    async def query_keys(
        self,
        request: SynapseRequest,
        query: JsonDict,
        query_remote_on_cache_miss: bool = False,
    ) -> None:
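        """Resolve a key query against the local store, optionally fetching
        missing keys from the origin servers.

        Args:
            request: the request to respond to.
            query: maps server names to key ids to query arguments (such as
                ``minimum_valid_until_ts``); an empty dict of key ids means
                "all known keys for that server".
            query_remote_on_cache_miss: if True, keys that are absent or too
                stale locally are fetched from the remote server and the query
                is retried once.
        """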
        logger.info("Handling query for keys %r", query)

        store_queries = []
        for server_name, key_ids in query.items():
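            # An empty set of key ids means "give me everything you have for
            # this server"; we represent that as a single query for key_id
            # None below.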
            if not key_ids:
                key_ids = (None,)
            for key_id in key_ids:
                store_queries.append((server_name, key_id, None))

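        # Fetch whatever we already have stored for these queries. The result
        # maps each query tuple from store_queries to the rows we hold
        # locally, where each row carries the raw key_json along with
        # ts_added_ms and ts_valid_until_ms.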
        cached = await self.store.get_server_keys_json(store_queries)

        json_results: Set[bytes] = set()

        time_now_ms = self.clock.time_msec()

        # Map server_name->key_id->int. Note that the value of the int is unused.
        # XXX: why don't we just use a set?
        cache_misses: Dict[str, Dict[str, int]] = {}
        for (server_name, key_id, _), key_results in cached.items():
            results = [(result["ts_added_ms"], result) for result in key_results]

            if key_id is None:
                # all keys were requested. Just return what we have without worrying
                # about validity
                for _, result in results:
                    # Cast to bytes since postgresql returns a memoryview.
                    json_results.add(bytes(result["key_json"]))

                continue

            miss = False
            if not results:
                miss = True
            else:
                ts_added_ms, most_recent_result = max(results)
                ts_valid_until_ms = most_recent_result["ts_valid_until_ms"]
                req_key = query.get(server_name, {}).get(key_id, {})
                req_valid_until = req_key.get("minimum_valid_until_ts")
                if req_valid_until is not None:
                    if ts_valid_until_ms < req_valid_until:
                        logger.debug(
                            "Cached response for %r/%r is older than requested"
                            ": valid_until (%r) < minimum_valid_until (%r)",
                            server_name,
                            key_id,
                            ts_valid_until_ms,
                            req_valid_until,
                        )
                        miss = True
                    else:
                        logger.debug(
                            "Cached response for %r/%r is newer than requested"
                            ": valid_until (%r) >= minimum_valid_until (%r)",
                            server_name,
                            key_id,
                            ts_valid_until_ms,
                            req_valid_until,
                        )
                elif (ts_added_ms + ts_valid_until_ms) / 2 < time_now_ms:
                    logger.debug(
                        "Cached response for %r/%r is too old"
                        ": (added (%r) + valid_until (%r)) / 2 < now (%r)",
                        server_name,
                        key_id,
                        ts_added_ms,
                        ts_valid_until_ms,
                        time_now_ms,
                    )
                    # We are more than halfway through the lifetime of the
                    # response. We should fetch a fresh copy.
                    miss = True
                else:
                    logger.debug(
                        "Cached response for %r/%r is still valid"
                        ": (added (%r) + valid_until (%r)) / 2 >= now (%r)",
                        server_name,
                        key_id,
                        ts_added_ms,
                        ts_valid_until_ms,
                        time_now_ms,
                    )

                # Cast to bytes since postgresql returns a memoryview.
                json_results.add(bytes(most_recent_result["key_json"]))

            if miss and query_remote_on_cache_miss:
                # only bother attempting to fetch keys from servers on our whitelist
                if (
                    self.federation_domain_whitelist is None
                    or server_name in self.federation_domain_whitelist
                ):
                    cache_misses.setdefault(server_name, {})[key_id] = 0

        # If there is a cache miss, request the missing keys, then recurse (and
        # ensure the result is sent).
        if cache_misses:
            await yieldable_gather_results(
                lambda t: self.fetcher.get_keys(*t),
                (
                    (server_name, list(keys), 0)
                    for server_name, keys in cache_misses.items()
                ),
            )
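            # Now that the missing keys have (hopefully) been fetched into the
            # store, run the query again so the response is built from the
            # store. Passing query_remote_on_cache_miss=False means a second
            # miss is simply returned as-is rather than triggering another
            # fetch.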
            await self.query_keys(request, query, query_remote_on_cache_miss=False)
        else:
            signed_keys = []
            for key_json_raw in json_results:
                key_json = json_decoder.decode(key_json_raw.decode("utf-8"))
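                # Sign each response with our own signing key(s) as well, so
                # that callers using this server as a notary can verify the
                # result against our key.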
                for signing_key in self.config.key.key_server_signing_keys:
                    key_json = sign_json(
                        key_json, self.config.server.server_name, signing_key
                    )

                signed_keys.append(key_json)

            response = {"server_keys": signed_keys}

            respond_with_json(request, 200, response, canonical_json=True)