2014-09-30 10:15:10 -04:00
|
|
|
# -*- coding: utf-8 -*-
|
2016-01-06 23:26:29 -05:00
|
|
|
# Copyright 2014-2016 OpenMarket Ltd
|
2019-04-11 12:08:13 -04:00
|
|
|
# Copyright 2017, 2018 New Vector Ltd
|
2014-09-30 10:15:10 -04:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
2021-01-04 10:04:50 -05:00
|
|
|
import abc
|
2018-07-09 02:09:20 -04:00
|
|
|
import logging
|
2020-06-16 08:51:47 -04:00
|
|
|
import urllib
|
2019-06-03 17:59:51 -04:00
|
|
|
from collections import defaultdict
|
2021-01-04 10:04:50 -05:00
|
|
|
from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Set, Tuple
|
2015-08-24 11:17:38 -04:00
|
|
|
|
2019-04-01 07:28:40 -04:00
|
|
|
import attr
|
2015-08-24 11:17:38 -04:00
|
|
|
from signedjson.key import (
|
2018-07-09 02:09:20 -04:00
|
|
|
decode_verify_key_bytes,
|
2018-06-08 07:01:36 -04:00
|
|
|
encode_verify_key_base64,
|
2018-07-09 02:09:20 -04:00
|
|
|
is_signing_algorithm_supported,
|
|
|
|
)
|
|
|
|
from signedjson.sign import (
|
|
|
|
SignatureVerifyException,
|
|
|
|
encode_canonical_json,
|
|
|
|
signature_ids,
|
|
|
|
verify_signed_json,
|
2014-09-30 10:15:10 -04:00
|
|
|
)
|
2019-01-22 06:04:20 -05:00
|
|
|
from unpaddedbase64 import decode_base64
|
2015-06-26 06:25:00 -04:00
|
|
|
|
2018-07-09 02:09:20 -04:00
|
|
|
from twisted.internet import defer
|
2014-09-30 10:15:10 -04:00
|
|
|
|
2019-02-23 10:06:02 -05:00
|
|
|
from synapse.api.errors import (
|
|
|
|
Codes,
|
|
|
|
HttpResponseException,
|
|
|
|
RequestSendFailed,
|
|
|
|
SynapseError,
|
|
|
|
)
|
2021-01-04 10:04:50 -05:00
|
|
|
from synapse.config.key import TrustedKeyServer
|
2019-07-03 10:07:04 -04:00
|
|
|
from synapse.logging.context import (
|
2018-07-09 02:09:20 -04:00
|
|
|
PreserveLoggingContext,
|
2019-07-03 10:07:04 -04:00
|
|
|
make_deferred_yieldable,
|
2018-07-09 02:09:20 -04:00
|
|
|
preserve_fn,
|
|
|
|
run_in_background,
|
|
|
|
)
|
2019-07-03 10:07:04 -04:00
|
|
|
from synapse.storage.keys import FetchKeyResult
|
2021-01-04 10:04:50 -05:00
|
|
|
from synapse.types import JsonDict
|
2019-07-03 10:07:04 -04:00
|
|
|
from synapse.util import unwrapFirstError
|
|
|
|
from synapse.util.async_helpers import yieldable_gather_results
|
2018-07-09 02:09:20 -04:00
|
|
|
from synapse.util.metrics import Measure
|
2019-02-23 10:06:02 -05:00
|
|
|
from synapse.util.retryutils import NotRetryingDestination
|
2014-09-30 10:15:10 -04:00
|
|
|
|
2021-01-04 10:04:50 -05:00
|
|
|
if TYPE_CHECKING:
|
|
|
|
from synapse.app.homeserver import HomeServer
|
|
|
|
|
2014-09-30 10:15:10 -04:00
|
|
|
# Module-level logger, named after this module per the standard logging convention.
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
2019-04-01 07:28:40 -04:00
|
|
|
@attr.s(slots=True, cmp=False)
class VerifyJsonRequest:
    """
    A request to verify a JSON object.

    Attributes:
        server_name: The name of the server to verify against.

        json_object: The JSON object to verify.

        minimum_valid_until_ts: time at which we require the signing key to
            be valid. (0 implies we don't care)

        request_name: The name of the request (eg, an event id), used for logging.

        key_ids: The set of key_ids that could be used to verify the JSON object.
            Derived from json_object's signatures in __attrs_post_init__, so it is
            not settable by callers.

        key_ready (Deferred[str, str, nacl.signing.VerifyKey]):
            A deferred (server_name, key_id, verify_key) tuple that resolves when
            a verify key has been fetched. The deferreds' callbacks are run with no
            logcontext.

            If we are unable to find a key which satisfies the request, the deferred
            errbacks with an M_UNAUTHORIZED SynapseError.
    """

    server_name = attr.ib(type=str)
    json_object = attr.ib(type=JsonDict)
    minimum_valid_until_ts = attr.ib(type=int)
    request_name = attr.ib(type=str)
    # init=False: computed from json_object/server_name after construction.
    key_ids = attr.ib(init=False, type=List[str])
    key_ready = attr.ib(default=attr.Factory(defer.Deferred), type=defer.Deferred)

    def __attrs_post_init__(self):
        # Extract the ids of the keys which signed json_object on behalf of
        # server_name; these are the candidate keys we need to fetch.
        self.key_ids = signature_ids(self.json_object, self.server_name)
|
|
|
|
|
2015-06-26 04:52:24 -04:00
|
|
|
|
2016-08-10 05:44:37 -04:00
|
|
|
class KeyLookupError(ValueError):
    """Raised when fetching or validating a remote server's signing key fails."""
|
|
|
|
|
|
|
|
|
2020-09-04 06:54:56 -04:00
|
|
|
class Keyring:
    """Verifies signatures on JSON objects signed by remote homeservers.

    Signing keys are obtained via a configurable list of KeyFetchers (by
    default: the local datastore, then trusted notary servers, then the
    origin server directly), with per-server locking so that concurrent
    requests for the same server's keys are coalesced.
    """

    def __init__(
        self, hs: "HomeServer", key_fetchers: "Optional[Iterable[KeyFetcher]]" = None
    ):
        self.clock = hs.get_clock()

        if key_fetchers is None:
            # Default fetcher order: cheapest first (local store), then the
            # notary ("perspectives") servers, then the origin server itself.
            key_fetchers = (
                StoreKeyFetcher(hs),
                PerspectivesKeyFetcher(hs),
                ServerKeyFetcher(hs),
            )
        self._key_fetchers = key_fetchers

        # map from server name to Deferred. Has an entry for each server with
        # an ongoing key download; the Deferred completes once the download
        # completes.
        #
        # These are regular, logcontext-agnostic Deferreds.
        self.key_downloads = {}  # type: Dict[str, defer.Deferred]

    def verify_json_for_server(
        self,
        server_name: str,
        json_object: JsonDict,
        validity_time: int,
        request_name: str,
    ) -> defer.Deferred:
        """Verify that a JSON object has been signed by a given server

        Args:
            server_name: name of the server which must have signed this object

            json_object: object to be checked

            validity_time: timestamp at which we require the signing key to
                be valid. (0 implies we don't care)

            request_name: an identifier for this json object (eg, an event id)
                for logging.

        Returns:
            Deferred[None]: completes if the object was correctly signed, otherwise
                errbacks with an error
        """
        req = VerifyJsonRequest(server_name, json_object, validity_time, request_name)
        requests = (req,)
        return make_deferred_yieldable(self._verify_objects(requests)[0])

    def verify_json_objects_for_server(
        self, server_and_json: Iterable[Tuple[str, dict, int, str]]
    ) -> List[defer.Deferred]:
        """Bulk verifies signatures of json objects, bulk fetching keys as
        necessary.

        Args:
            server_and_json:
                Iterable of (server_name, json_object, validity_time, request_name)
                tuples.

                validity_time is a timestamp at which the signing key must be
                valid.

                request_name is an identifier for this json object (eg, an event id)
                for logging.

        Returns:
            List<Deferred[None]>: for each input triplet, a deferred indicating success
                or failure to verify each json object's signature for the given
                server_name. The deferreds run their callbacks in the sentinel
                logcontext.
        """
        return self._verify_objects(
            VerifyJsonRequest(server_name, json_object, validity_time, request_name)
            for server_name, json_object, validity_time, request_name in server_and_json
        )

    def _verify_objects(
        self, verify_requests: Iterable[VerifyJsonRequest]
    ) -> List[defer.Deferred]:
        """Does the work of verify_json_[objects_]for_server

        Args:
            verify_requests: Iterable of verification requests.

        Returns:
            List<Deferred[None]>: for each input item, a deferred indicating success
                or failure to verify each json object's signature for the given
                server_name. The deferreds run their callbacks in the sentinel
                logcontext.
        """
        # a list of VerifyJsonRequests which are awaiting a key lookup
        key_lookups = []
        handle = preserve_fn(_handle_key_deferred)

        def process(verify_request: VerifyJsonRequest) -> defer.Deferred:
            """Process an entry in the request list

            Adds a key request to key_lookups, and returns a deferred which
            will complete or fail (in the sentinel context) when verification completes.
            """
            # An object with no signatures from the expected server can never
            # verify, so fail it immediately without queueing a key fetch.
            if not verify_request.key_ids:
                return defer.fail(
                    SynapseError(
                        400,
                        "Not signed by %s" % (verify_request.server_name,),
                        Codes.UNAUTHORIZED,
                    )
                )

            logger.debug(
                "Verifying %s for %s with key_ids %s, min_validity %i",
                verify_request.request_name,
                verify_request.server_name,
                verify_request.key_ids,
                verify_request.minimum_valid_until_ts,
            )

            # add the key request to the queue, but don't start it off yet.
            key_lookups.append(verify_request)

            # now run _handle_key_deferred, which will wait for the key request
            # to complete and then do the verification.
            #
            # We want _handle_key_request to log to the right context, so we
            # wrap it with preserve_fn (aka run_in_background)
            return handle(verify_request)

        results = [process(r) for r in verify_requests]

        if key_lookups:
            run_in_background(self._start_key_lookups, key_lookups)

        return results

    async def _start_key_lookups(
        self, verify_requests: List[VerifyJsonRequest]
    ) -> None:
        """Sets off the key fetches for each verify request

        Once each fetch completes, verify_request.key_ready will be resolved.

        Args:
            verify_requests: the requests for which keys should be fetched
        """

        try:
            # map from server name to a set of outstanding request ids
            server_to_request_ids = {}  # type: Dict[str, Set[int]]

            for verify_request in verify_requests:
                server_name = verify_request.server_name
                request_id = id(verify_request)
                server_to_request_ids.setdefault(server_name, set()).add(request_id)

            # Wait for any previous lookups to complete before proceeding.
            await self.wait_for_previous_lookups(server_to_request_ids.keys())

            # take out a lock on each of the servers by sticking a Deferred in
            # key_downloads
            for server_name in server_to_request_ids.keys():
                self.key_downloads[server_name] = defer.Deferred()
                logger.debug("Got key lookup lock on %s", server_name)

            # When we've finished fetching all the keys for a given server_name,
            # drop the lock by resolving the deferred in key_downloads.
            def drop_server_lock(server_name):
                d = self.key_downloads.pop(server_name)
                d.callback(None)

            def lookup_done(res, verify_request):
                server_name = verify_request.server_name
                server_requests = server_to_request_ids[server_name]
                server_requests.remove(id(verify_request))

                # if there are no more requests for this server, we can drop the lock.
                if not server_requests:
                    logger.debug("Releasing key lookup lock on %s", server_name)
                    drop_server_lock(server_name)

                return res

            # addBoth: the lock must be released whether the lookup succeeded
            # or failed.
            for verify_request in verify_requests:
                verify_request.key_ready.addBoth(lookup_done, verify_request)

            # Actually start fetching keys.
            self._get_server_verify_keys(verify_requests)
        except Exception:
            logger.exception("Error starting key lookups")

    async def wait_for_previous_lookups(self, server_names: Iterable[str]) -> None:
        """Waits for any previous key lookups for the given servers to finish.

        Args:
            server_names: list of servers which we want to look up

        Returns:
            Resolves once all key lookups for the given servers have
                completed. Follows the synapse rules of logcontext preservation.
        """
        loop_count = 1
        # Loop because a new lookup for one of our servers may have been
        # started while we were waiting for the previous batch to finish.
        while True:
            wait_on = [
                (server_name, self.key_downloads[server_name])
                for server_name in server_names
                if server_name in self.key_downloads
            ]
            if not wait_on:
                break
            logger.info(
                "Waiting for existing lookups for %s to complete [loop %i]",
                [w[0] for w in wait_on],
                loop_count,
            )
            with PreserveLoggingContext():
                await defer.DeferredList((w[1] for w in wait_on))

            loop_count += 1

    def _get_server_verify_keys(self, verify_requests: List[VerifyJsonRequest]) -> None:
        """Tries to find at least one key for each verify request

        For each verify_request, verify_request.key_ready is called back with
        params (server_name, key_id, VerifyKey) if a key is found, or errbacked
        with a SynapseError if none of the keys are found.

        Args:
            verify_requests: list of verify requests
        """

        remaining_requests = {rq for rq in verify_requests if not rq.key_ready.called}

        async def do_iterations():
            try:
                with Measure(self.clock, "get_server_verify_keys"):
                    # Try each fetcher in turn, stopping as soon as every
                    # request has been satisfied.
                    for f in self._key_fetchers:
                        if not remaining_requests:
                            return
                        await self._attempt_key_fetches_with_fetcher(
                            f, remaining_requests
                        )

                    # look for any requests which weren't satisfied
                    while remaining_requests:
                        verify_request = remaining_requests.pop()
                        rq_str = (
                            "VerifyJsonRequest(server=%s, key_ids=%s, min_valid=%i)"
                            % (
                                verify_request.server_name,
                                verify_request.key_ids,
                                verify_request.minimum_valid_until_ts,
                            )
                        )

                        # If we run the errback immediately, it may cancel our
                        # loggingcontext while we are still in it, so instead we
                        # schedule it for the next time round the reactor.
                        #
                        # (this also ensures that we don't get a stack overflow if we
                        # have a massive queue of lookups waiting for this server).
                        self.clock.call_later(
                            0,
                            verify_request.key_ready.errback,
                            SynapseError(
                                401,
                                "Failed to find any key to satisfy %s" % (rq_str,),
                                Codes.UNAUTHORIZED,
                            ),
                        )
            except Exception as err:
                # we don't really expect to get here, because any errors should already
                # have been caught and logged. But if we do, let's log the error and make
                # sure that all of the deferreds are resolved.
                logger.error("Unexpected error in _get_server_verify_keys: %s", err)
                with PreserveLoggingContext():
                    for verify_request in remaining_requests:
                        if not verify_request.key_ready.called:
                            verify_request.key_ready.errback(err)

        run_in_background(do_iterations)

    async def _attempt_key_fetches_with_fetcher(
        self, fetcher: "KeyFetcher", remaining_requests: Set[VerifyJsonRequest]
    ):
        """Use a key fetcher to attempt to satisfy some key requests

        Args:
            fetcher: fetcher to use to fetch the keys
            remaining_requests: outstanding key requests.
                Any successfully-completed requests will be removed from the list.
        """
        # The keys to fetch.
        # server_name -> key_id -> min_valid_ts
        missing_keys = defaultdict(dict)  # type: Dict[str, Dict[str, int]]

        for verify_request in remaining_requests:
            # any completed requests should already have been removed
            assert not verify_request.key_ready.called
            keys_for_server = missing_keys[verify_request.server_name]

            for key_id in verify_request.key_ids:
                # If we have several requests for the same key, then we only need to
                # request that key once, but we should do so with the greatest
                # min_valid_until_ts of the requests, so that we can satisfy all of
                # the requests.
                keys_for_server[key_id] = max(
                    keys_for_server.get(key_id, -1),
                    verify_request.minimum_valid_until_ts,
                )

        results = await fetcher.get_keys(missing_keys)

        completed = []
        for verify_request in remaining_requests:
            server_name = verify_request.server_name

            # see if any of the keys we got this time are sufficient to
            # complete this VerifyJsonRequest.
            result_keys = results.get(server_name, {})
            for key_id in verify_request.key_ids:
                fetch_key_result = result_keys.get(key_id)
                if not fetch_key_result:
                    # we didn't get a result for this key
                    continue

                if (
                    fetch_key_result.valid_until_ts
                    < verify_request.minimum_valid_until_ts
                ):
                    # key was not valid at this point
                    continue

                # we have a valid key for this request. If we run the callback
                # immediately, it may cancel our loggingcontext while we are still in
                # it, so instead we schedule it for the next time round the reactor.
                #
                # (this also ensures that we don't get a stack overflow if we had
                # a massive queue of lookups waiting for this server).
                logger.debug(
                    "Found key %s:%s for %s",
                    server_name,
                    key_id,
                    verify_request.request_name,
                )
                self.clock.call_later(
                    0,
                    verify_request.key_ready.callback,
                    (server_name, key_id, fetch_key_result.verify_key),
                )
                completed.append(verify_request)
                break

        # Mutate the caller's set in place so it only retains unsatisfied requests.
        remaining_requests.difference_update(completed)
|
|
|
|
|
2019-04-09 13:28:17 -04:00
|
|
|
|
2021-01-04 10:04:50 -05:00
|
|
|
class KeyFetcher(metaclass=abc.ABCMeta):
    """Abstract base class for a source of server signing keys."""

    @abc.abstractmethod
    async def get_keys(
        self, keys_to_fetch: Dict[str, Dict[str, int]]
    ) -> Dict[str, Dict[str, FetchKeyResult]]:
        """Attempt to fetch the given keys from this source.

        Args:
            keys_to_fetch:
                the keys to be fetched. server_name -> key_id -> min_valid_ts

        Returns:
            Map from server_name -> key_id -> FetchKeyResult
        """
        raise NotImplementedError
|
|
|
|
|
|
|
|
|
|
|
|
class StoreKeyFetcher(KeyFetcher):
    """KeyFetcher impl which fetches keys from our data store"""

    def __init__(self, hs: "HomeServer"):
        self.store = hs.get_datastore()

    async def get_keys(
        self, keys_to_fetch: Dict[str, Dict[str, int]]
    ) -> Dict[str, Dict[str, FetchKeyResult]]:
        """see KeyFetcher.get_keys"""

        # Flatten the nested mapping into the (server_name, key_id) pairs the
        # store's query expects. A generator is enough; it is consumed once.
        key_ids_to_fetch = (
            (server_name, key_id)
            for server_name, keys_for_server in keys_to_fetch.items()
            for key_id in keys_for_server.keys()
        )

        res = await self.store.get_server_verify_keys(key_ids_to_fetch)
        # Re-nest the flat (server_name, key_id) -> key results into the
        # server_name -> key_id -> key shape the KeyFetcher contract requires.
        keys = {}  # type: Dict[str, Dict[str, FetchKeyResult]]
        for (server_name, key_id), key in res.items():
            keys.setdefault(server_name, {})[key_id] = key
        return keys
|
2015-04-29 08:31:14 -04:00
|
|
|
|
2019-04-09 13:28:17 -04:00
|
|
|
|
2021-01-04 10:04:50 -05:00
|
|
|
class BaseV2KeyFetcher(KeyFetcher):
    """Shared implementation for fetchers which parse v2 key responses."""

    def __init__(self, hs: "HomeServer"):
        self.store = hs.get_datastore()
        self.config = hs.get_config()

    async def process_v2_response(
        self, from_server: str, response_json: JsonDict, time_added_ms: int
    ) -> Dict[str, FetchKeyResult]:
        """Parse a 'Server Keys' structure from the result of a /key request

        This is used to parse either the entirety of the response from
        GET /_matrix/key/v2/server, or a single entry from the list returned by
        POST /_matrix/key/v2/query.

        Checks that each signature in the response that claims to come from the origin
        server is valid, and that there is at least one such signature.

        Stores the json in server_keys_json so that it can be used for future responses
        to /_matrix/key/v2/query.

        Args:
            from_server: the name of the server producing this result: either
                the origin server for a /_matrix/key/v2/server request, or the notary
                for a /_matrix/key/v2/query.

            response_json: the json-decoded Server Keys response object

            time_added_ms: the timestamp to record in server_keys_json

        Returns:
            Map from key_id to result object

        Raises:
            KeyLookupError: if the response is not signed by the origin server
                with any of its own verify keys.
        """
        ts_valid_until_ms = response_json["valid_until_ts"]

        # start by extracting the keys from the response, since they may be required
        # to validate the signature on the response.
        verify_keys = {}
        for key_id, key_data in response_json["verify_keys"].items():
            if is_signing_algorithm_supported(key_id):
                key_base64 = key_data["key"]
                key_bytes = decode_base64(key_base64)
                verify_key = decode_verify_key_bytes(key_id, key_bytes)
                verify_keys[key_id] = FetchKeyResult(
                    verify_key=verify_key, valid_until_ts=ts_valid_until_ms
                )

        server_name = response_json["server_name"]
        verified = False
        for key_id in response_json["signatures"].get(server_name, {}):
            key = verify_keys.get(key_id)
            if not key:
                # the key may not be present in verify_keys if:
                #  * we got the key from the notary server, and:
                #  * the key belongs to the notary server, and:
                #  * the notary server is using a different key to sign notary
                #  responses.
                continue

            # raises SignatureVerifyException if the signature is bad; one good
            # signature from the origin is sufficient.
            verify_signed_json(response_json, server_name, key.verify_key)
            verified = True
            break

        if not verified:
            raise KeyLookupError(
                "Key response for %s is not signed by the origin server"
                % (server_name,)
            )

        # old_verify_keys carry their own expiry timestamp rather than the
        # response-level valid_until_ts.
        for key_id, key_data in response_json["old_verify_keys"].items():
            if is_signing_algorithm_supported(key_id):
                key_base64 = key_data["key"]
                key_bytes = decode_base64(key_base64)
                verify_key = decode_verify_key_bytes(key_id, key_bytes)
                verify_keys[key_id] = FetchKeyResult(
                    verify_key=verify_key, valid_until_ts=key_data["expired_ts"]
                )

        key_json_bytes = encode_canonical_json(response_json)

        # Persist the raw response for each key id, in parallel.
        await make_deferred_yieldable(
            defer.gatherResults(
                [
                    run_in_background(
                        self.store.store_server_keys_json,
                        server_name=server_name,
                        key_id=key_id,
                        from_server=from_server,
                        ts_now_ms=time_added_ms,
                        ts_expires_ms=ts_valid_until_ms,
                        key_json_bytes=key_json_bytes,
                    )
                    for key_id in verify_keys
                ],
                consumeErrors=True,
            ).addErrback(unwrapFirstError)
        )

        return verify_keys
|
2019-04-09 13:28:17 -04:00
|
|
|
|
|
|
|
|
|
|
|
class PerspectivesKeyFetcher(BaseV2KeyFetcher):
|
|
|
|
"""KeyFetcher impl which fetches keys from the "perspectives" servers"""
|
|
|
|
|
2021-01-04 10:04:50 -05:00
|
|
|
    def __init__(self, hs: "HomeServer"):
        super().__init__(hs)
        self.clock = hs.get_clock()
        # HTTP client used to talk to the notary ("perspectives") servers.
        self.client = hs.get_federation_http_client()
        # The trusted key servers from the homeserver config.
        self.key_servers = self.config.key_servers
|
2019-04-09 13:28:17 -04:00
|
|
|
|
2021-01-04 10:04:50 -05:00
|
|
|
    async def get_keys(
        self, keys_to_fetch: Dict[str, Dict[str, int]]
    ) -> Dict[str, Dict[str, FetchKeyResult]]:
        """see KeyFetcher.get_keys"""

        async def get_key(key_server: TrustedKeyServer) -> Dict:
            # Query a single notary server; a failure against one notary is
            # logged and swallowed so that the other notaries are still tried.
            try:
                return await self.get_server_verify_key_v2_indirect(
                    keys_to_fetch, key_server
                )
            except KeyLookupError as e:
                logger.warning(
                    "Key lookup failed from %r: %s", key_server.server_name, e
                )
            except Exception as e:
                logger.exception(
                    "Unable to get key from %r: %s %s",
                    key_server.server_name,
                    type(e).__name__,
                    str(e),
                )

            return {}

        # Query all configured notary servers in parallel.
        results = await make_deferred_yieldable(
            defer.gatherResults(
                [run_in_background(get_key, server) for server in self.key_servers],
                consumeErrors=True,
            ).addErrback(unwrapFirstError)
        )

        # Merge the per-notary results; later results overwrite earlier ones
        # for the same (server_name, key_id).
        union_of_keys = {}  # type: Dict[str, Dict[str, FetchKeyResult]]
        for result in results:
            for server_name, keys in result.items():
                union_of_keys.setdefault(server_name, {}).update(keys)

        return union_of_keys
|
2014-09-30 10:15:10 -04:00
|
|
|
|
2021-01-04 10:04:50 -05:00
|
|
|
async def get_server_verify_key_v2_indirect(
|
|
|
|
self, keys_to_fetch: Dict[str, Dict[str, int]], key_server: TrustedKeyServer
|
|
|
|
) -> Dict[str, Dict[str, FetchKeyResult]]:
|
2019-04-03 13:10:24 -04:00
|
|
|
"""
|
|
|
|
Args:
|
2021-01-04 10:04:50 -05:00
|
|
|
keys_to_fetch:
|
2019-06-03 17:59:51 -04:00
|
|
|
the keys to be fetched. server_name -> key_id -> min_valid_ts
|
|
|
|
|
2021-01-04 10:04:50 -05:00
|
|
|
key_server: notary server to query for the keys
|
2019-04-03 13:10:24 -04:00
|
|
|
|
|
|
|
Returns:
|
2021-01-04 10:04:50 -05:00
|
|
|
Map from server_name -> key_id -> FetchKeyResult
|
2019-05-23 12:31:26 -04:00
|
|
|
|
|
|
|
Raises:
|
|
|
|
KeyLookupError if there was an error processing the entire response from
|
|
|
|
the server
|
2019-04-03 13:10:24 -04:00
|
|
|
"""
|
2019-06-06 12:33:11 -04:00
|
|
|
perspective_name = key_server.server_name
|
2019-05-23 12:31:26 -04:00
|
|
|
logger.info(
|
|
|
|
"Requesting keys %s from notary server %s",
|
2019-06-03 17:59:51 -04:00
|
|
|
keys_to_fetch.items(),
|
2019-05-23 12:31:26 -04:00
|
|
|
perspective_name,
|
|
|
|
)
|
2019-06-03 17:59:51 -04:00
|
|
|
|
2019-02-23 10:06:02 -05:00
|
|
|
try:
|
2020-08-03 08:29:01 -04:00
|
|
|
query_response = await self.client.post_json(
|
|
|
|
destination=perspective_name,
|
|
|
|
path="/_matrix/key/v2/query",
|
|
|
|
data={
|
|
|
|
"server_keys": {
|
|
|
|
server_name: {
|
|
|
|
key_id: {"minimum_valid_until_ts": min_valid_ts}
|
|
|
|
for key_id, min_valid_ts in server_keys.items()
|
2019-02-23 10:06:02 -05:00
|
|
|
}
|
2020-08-03 08:29:01 -04:00
|
|
|
for server_name, server_keys in keys_to_fetch.items()
|
|
|
|
}
|
|
|
|
},
|
2019-02-23 10:06:02 -05:00
|
|
|
)
|
|
|
|
except (NotRetryingDestination, RequestSendFailed) as e:
|
2019-08-22 05:42:06 -04:00
|
|
|
# these both have str() representations which we can't really improve upon
|
|
|
|
raise KeyLookupError(str(e))
|
2019-02-23 10:06:02 -05:00
|
|
|
except HttpResponseException as e:
|
2019-08-22 05:42:06 -04:00
|
|
|
raise KeyLookupError("Remote server returned an error: %s" % (e,))
|
2015-04-20 11:23:47 -04:00
|
|
|
|
2021-01-04 10:04:50 -05:00
|
|
|
keys = {} # type: Dict[str, Dict[str, FetchKeyResult]]
|
|
|
|
added_keys = [] # type: List[Tuple[str, str, FetchKeyResult]]
|
2015-04-20 11:23:47 -04:00
|
|
|
|
2019-05-23 06:45:39 -04:00
|
|
|
time_now_ms = self.clock.time_msec()
|
2015-05-19 05:23:02 -04:00
|
|
|
|
2021-01-04 10:04:50 -05:00
|
|
|
assert isinstance(query_response, dict)
|
2019-05-23 06:45:39 -04:00
|
|
|
for response in query_response["server_keys"]:
|
2019-05-23 12:31:26 -04:00
|
|
|
# do this first, so that we can give useful errors thereafter
|
|
|
|
server_name = response.get("server_name")
|
2020-06-16 08:51:47 -04:00
|
|
|
if not isinstance(server_name, str):
|
2016-08-10 05:44:37 -04:00
|
|
|
raise KeyLookupError(
|
2019-05-23 12:31:26 -04:00
|
|
|
"Malformed response from key notary server %s: invalid server_name"
|
|
|
|
% (perspective_name,)
|
2015-04-20 11:23:47 -04:00
|
|
|
)
|
|
|
|
|
2019-05-23 12:31:26 -04:00
|
|
|
try:
|
2019-06-06 12:33:11 -04:00
|
|
|
self._validate_perspectives_response(key_server, response)
|
|
|
|
|
2020-08-03 08:29:01 -04:00
|
|
|
processed_response = await self.process_v2_response(
|
2019-06-06 12:33:11 -04:00
|
|
|
perspective_name, response, time_added_ms=time_now_ms
|
2015-04-20 11:23:47 -04:00
|
|
|
)
|
2019-05-23 12:31:26 -04:00
|
|
|
except KeyLookupError as e:
|
|
|
|
logger.warning(
|
|
|
|
"Error processing response from key notary server %s for origin "
|
|
|
|
"server %s: %s",
|
|
|
|
perspective_name,
|
|
|
|
server_name,
|
|
|
|
e,
|
2015-04-20 11:23:47 -04:00
|
|
|
)
|
2019-05-23 12:31:26 -04:00
|
|
|
# we continue to process the rest of the response
|
|
|
|
continue
|
2015-04-20 11:23:47 -04:00
|
|
|
|
2019-05-23 06:45:39 -04:00
|
|
|
added_keys.extend(
|
|
|
|
(server_name, key_id, key) for key_id, key in processed_response.items()
|
|
|
|
)
|
2019-04-04 14:12:54 -04:00
|
|
|
keys.setdefault(server_name, {}).update(processed_response)
|
2015-04-20 11:23:47 -04:00
|
|
|
|
2020-08-03 08:29:01 -04:00
|
|
|
await self.store.store_server_verify_keys(
|
2019-05-23 06:45:39 -04:00
|
|
|
perspective_name, time_now_ms, added_keys
|
2019-05-22 13:39:33 -04:00
|
|
|
)
|
2015-04-20 11:23:47 -04:00
|
|
|
|
2019-07-23 09:00:55 -04:00
|
|
|
return keys
|
2015-04-20 11:23:47 -04:00
|
|
|
|
2021-01-04 10:04:50 -05:00
|
|
|
def _validate_perspectives_response(
|
|
|
|
self, key_server: TrustedKeyServer, response: JsonDict
|
|
|
|
) -> None:
|
2019-06-06 12:33:11 -04:00
|
|
|
"""Optionally check the signature on the result of a /key/query request
|
2019-05-23 12:31:26 -04:00
|
|
|
|
|
|
|
Args:
|
2021-01-04 10:04:50 -05:00
|
|
|
key_server: the notary server that produced this result
|
2019-05-23 12:31:26 -04:00
|
|
|
|
2021-01-04 10:04:50 -05:00
|
|
|
response: the json-decoded Server Keys response object
|
2019-06-06 12:33:11 -04:00
|
|
|
"""
|
|
|
|
perspective_name = key_server.server_name
|
|
|
|
perspective_keys = key_server.verify_keys
|
2019-05-23 12:31:26 -04:00
|
|
|
|
2019-06-06 12:33:11 -04:00
|
|
|
if perspective_keys is None:
|
|
|
|
# signature checking is disabled on this server
|
|
|
|
return
|
2019-05-23 12:31:26 -04:00
|
|
|
|
|
|
|
if (
|
|
|
|
"signatures" not in response
|
|
|
|
or perspective_name not in response["signatures"]
|
|
|
|
):
|
|
|
|
raise KeyLookupError("Response not signed by the notary server")
|
|
|
|
|
|
|
|
verified = False
|
|
|
|
for key_id in response["signatures"][perspective_name]:
|
|
|
|
if key_id in perspective_keys:
|
|
|
|
verify_signed_json(response, perspective_name, perspective_keys[key_id])
|
|
|
|
verified = True
|
|
|
|
|
|
|
|
if not verified:
|
|
|
|
raise KeyLookupError(
|
|
|
|
"Response not signed with a known key: signed with: %r, known keys: %r"
|
|
|
|
% (
|
|
|
|
list(response["signatures"][perspective_name].keys()),
|
|
|
|
list(perspective_keys.keys()),
|
|
|
|
)
|
|
|
|
)
|
|
|
|
|
2019-04-09 13:28:17 -04:00
|
|
|
|
|
|
|
class ServerKeyFetcher(BaseV2KeyFetcher):
    """KeyFetcher impl which fetches keys from the origin servers"""

    def __init__(self, hs: "HomeServer"):
        super().__init__(hs)
        self.clock = hs.get_clock()
        # federation client used to hit /_matrix/key/v2/server on the origins
        self.client = hs.get_federation_http_client()

    async def get_keys(
        self, keys_to_fetch: Dict[str, Dict[str, int]]
    ) -> Dict[str, Dict[str, FetchKeyResult]]:
        """Fetch the requested keys directly from each origin server.

        Args:
            keys_to_fetch:
                the keys to be fetched. server_name -> key_ids
                (only the key ids are used here; the inner per-key values are
                ignored by this fetcher)

        Returns:
            Map from server_name -> key_id -> FetchKeyResult. Servers which
            could not be reached, or whose lookups failed, are omitted.
        """

        results = {}

        async def get_key(key_to_fetch_item: Tuple[str, Dict[str, int]]) -> None:
            # fetch the keys for a single origin server, recording them in
            # `results`. Failures are logged and otherwise swallowed so that
            # one bad server does not fail the whole batch.
            server_name, key_ids = key_to_fetch_item
            try:
                keys = await self.get_server_verify_key_v2_direct(server_name, key_ids)
                results[server_name] = keys
            except KeyLookupError as e:
                logger.warning(
                    "Error looking up keys %s from %s: %s", key_ids, server_name, e
                )
            except Exception:
                logger.exception("Error getting keys %s from %s", key_ids, server_name)

        # run the per-server fetches concurrently
        await yieldable_gather_results(get_key, keys_to_fetch.items())
        return results

    async def get_server_verify_key_v2_direct(
        self, server_name: str, key_ids: Iterable[str]
    ) -> Dict[str, FetchKeyResult]:
        """Fetch `key_ids` from `server_name` via its /_matrix/key/v2/server API.

        Args:
            server_name: the origin server whose keys are being fetched
            key_ids: the IDs of the keys to fetch

        Returns:
            Map from key ID to lookup result

        Raises:
            KeyLookupError if there was a problem making the lookup
        """
        keys = {}  # type: Dict[str, FetchKeyResult]

        for requested_key_id in key_ids:
            # we may have found this key as a side-effect of asking for another.
            if requested_key_id in keys:
                continue

            time_now_ms = self.clock.time_msec()
            try:
                response = await self.client.get_json(
                    destination=server_name,
                    path="/_matrix/key/v2/server/"
                    + urllib.parse.quote(requested_key_id),
                    ignore_backoff=True,
                    # we only give the remote server 10s to respond. It should be an
                    # easy request to handle, so if it doesn't reply within 10s, it's
                    # probably not going to.
                    #
                    # Furthermore, when we are acting as a notary server, we cannot
                    # wait all day for all of the origin servers, as the requesting
                    # server will otherwise time out before we can respond.
                    #
                    # (Note that get_json may make 4 attempts, so this can still take
                    # almost 45 seconds to fetch the headers, plus up to another 60s to
                    # read the response).
                    timeout=10000,
                )
            except (NotRetryingDestination, RequestSendFailed) as e:
                # these both have str() representations which we can't really improve
                # upon
                raise KeyLookupError(str(e))
            except HttpResponseException as e:
                raise KeyLookupError("Remote server returned an error: %s" % (e,))

            assert isinstance(response, dict)
            # sanity-check that the response is for the server we asked about
            if response["server_name"] != server_name:
                raise KeyLookupError(
                    "Expected a response for server %r not %r"
                    % (server_name, response["server_name"])
                )

            # parse/verify the response; this may yield more keys than the one
            # we asked for (hence the `requested_key_id in keys` check above)
            response_keys = await self.process_v2_response(
                from_server=server_name,
                response_json=response,
                time_added_ms=time_now_ms,
            )
            # cache the fetched keys in the database before returning them
            await self.store.store_server_verify_keys(
                server_name,
                time_now_ms,
                ((server_name, key_id, key) for key_id, key in response_keys.items()),
            )
            keys.update(response_keys)

        return keys
|
2015-04-20 11:23:47 -04:00
|
|
|
|
2017-09-19 20:32:42 -04:00
|
|
|
|
2021-01-04 10:04:50 -05:00
|
|
|
async def _handle_key_deferred(verify_request: VerifyJsonRequest) -> None:
    """Waits for the key to become available, and then performs a verification

    Args:
        verify_request: the request whose signature should be checked once the
            relevant verify key has been fetched

    Raises:
        SynapseError if there was a problem performing the verification
    """
    origin = verify_request.server_name

    # block until the key lookup completes
    with PreserveLoggingContext():
        _, _key_id, signing_key = await verify_request.key_ready

    try:
        verify_signed_json(verify_request.json_object, origin, signing_key)
    except SignatureVerifyException as e:
        logger.debug(
            "Error verifying signature for %s:%s:%s with key %s: %s",
            origin,
            signing_key.alg,
            signing_key.version,
            encode_verify_key_base64(signing_key),
            str(e),
        )
        raise SynapseError(
            401,
            "Invalid signature for server %s with key %s:%s: %s"
            % (origin, signing_key.alg, signing_key.version, str(e)),
            Codes.UNAUTHORIZED,
        )
|