2016-01-06 23:26:29 -05:00
|
|
|
# Copyright 2015, 2016 OpenMarket Ltd
|
2015-04-24 05:36:51 -04:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
2018-07-09 02:09:20 -04:00
|
|
|
import logging
|
2019-08-21 05:39:45 -04:00
|
|
|
|
2019-08-23 09:52:11 -04:00
|
|
|
from canonicaljson import encode_canonical_json, json
|
2019-08-21 05:39:45 -04:00
|
|
|
from signedjson.sign import sign_json
|
2015-04-22 09:21:08 -04:00
|
|
|
|
2018-07-09 02:09:20 -04:00
|
|
|
from twisted.internet import defer
|
2015-04-22 09:21:08 -04:00
|
|
|
|
2018-07-09 02:09:20 -04:00
|
|
|
from synapse.api.errors import Codes, SynapseError
|
2019-06-03 13:07:19 -04:00
|
|
|
from synapse.crypto.keyring import ServerKeyFetcher
|
2019-06-29 03:06:55 -04:00
|
|
|
from synapse.http.server import (
|
|
|
|
DirectServeResource,
|
|
|
|
respond_with_json_bytes,
|
|
|
|
wrap_json_request_handler,
|
|
|
|
)
|
2018-07-09 02:09:20 -04:00
|
|
|
from synapse.http.servlet import parse_integer, parse_json_object_from_request
|
2015-04-22 09:21:08 -04:00
|
|
|
|
|
|
|
# Module-level logger, named after this module per the standard convention.
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
2019-06-29 03:06:55 -04:00
|
|
|
class RemoteKey(DirectServeResource):
    """HTTP resource for retrieving the TLS certificate and NACL signature
    verification keys for a collection of servers. Checks that the reported
    X.509 TLS certificate matches the one used in the HTTPS connection. Checks
    that the NACL signature for the remote server is valid. Returns a dict of
    JSON signed by both the remote server and by this server.

    Supports individual GET APIs and a bulk query POST API.

    Requests:

    GET /_matrix/key/v2/query/remote.server.example.com HTTP/1.1

    GET /_matrix/key/v2/query/remote.server.example.com/a.key.id HTTP/1.1

    POST /_matrix/key/v2/query HTTP/1.1
    Content-Type: application/json
    {
        "server_keys": {
            "remote.server.example.com": {
                "a.key.id": {
                    "minimum_valid_until_ts": 1234567890123
                }
            }
        }
    }

    Response:

    HTTP/1.1 200 OK
    Content-Type: application/json
    {
        "server_keys": [
            {
                "server_name": "remote.server.example.com"
                "valid_until_ts": # posix timestamp
                "verify_keys": {
                    "a.key.id": { # The identifier for a key.
                        key: "" # base64 encoded verification key.
                    }
                }
                "old_verify_keys": {
                    "an.old.key.id": { # The identifier for an old key.
                        key: "", # base64 encoded key
                        "expired_ts": 0, # when the key stopped being used.
                    }
                }
                "tls_fingerprints": [
                    { "sha256": # fingerprint }
                ]
                "signatures": {
                    "remote.server.example.com": {...}
                    "this.server.example.com": {...}
                }
            }
        ]
    }
    """

    isLeaf = True

    def __init__(self, hs):
        self.fetcher = ServerKeyFetcher(hs)
        self.store = hs.get_datastore()
        self.clock = hs.get_clock()
        # Optional allow-list of servers we will answer key queries about.
        self.federation_domain_whitelist = hs.config.federation_domain_whitelist
        self.config = hs.config

    @wrap_json_request_handler
    async def _async_render_GET(self, request):
        """Handle GET /<server> and GET /<server>/<key_id> lookups."""
        if len(request.postpath) == 1:
            # All keys for a single server.
            (server,) = request.postpath
            query = {server.decode("ascii"): {}}
        elif len(request.postpath) == 2:
            # A single key for a single server, with an optional freshness
            # requirement from the query string.
            server, key_id = request.postpath
            minimum_valid_until_ts = parse_integer(request, "minimum_valid_until_ts")
            arguments = {}
            if minimum_valid_until_ts is not None:
                arguments["minimum_valid_until_ts"] = minimum_valid_until_ts
            query = {server.decode("ascii"): {key_id.decode("ascii"): arguments}}
        else:
            raise SynapseError(404, "Not found %r" % request.postpath, Codes.NOT_FOUND)

        await self.query_keys(request, query, query_remote_on_cache_miss=True)

    @wrap_json_request_handler
    async def _async_render_POST(self, request):
        """Handle a bulk query: the JSON body maps server names to key
        requests (see the class docstring for the shape)."""
        content = parse_json_object_from_request(request)

        query = content["server_keys"]

        await self.query_keys(request, query, query_remote_on_cache_miss=True)

    @defer.inlineCallbacks
    def query_keys(self, request, query, query_remote_on_cache_miss=False):
        """Answer a key query from the local cache, optionally fetching
        missing or stale keys from the origin servers first.

        Args:
            request: the twisted request to respond to.
            query (dict): map from server name to a map from key_id to query
                arguments (e.g. "minimum_valid_until_ts"). An empty key_id
                map means "all known keys for this server".
            query_remote_on_cache_miss (bool): if True, fetch any missing or
                stale keys from the origin servers and re-run the query once
                (with this flag cleared) before responding.
        """
        logger.info("Handling query for keys %r", query)

        # Flatten the query into (server_name, key_id, from_server) triples
        # for the store; key_id=None means "all known keys for this server".
        store_queries = []
        for server_name, key_ids in query.items():
            if (
                self.federation_domain_whitelist is not None
                and server_name not in self.federation_domain_whitelist
            ):
                # Silently skip servers outside the whitelist rather than
                # erroring the whole (possibly bulk) request.
                logger.debug("Federation denied with %s", server_name)
                continue

            if not key_ids:
                key_ids = (None,)
            for key_id in key_ids:
                store_queries.append((server_name, key_id, None))

        cached = yield self.store.get_server_keys_json(store_queries)

        # Set of raw key_json byte blobs; a set to de-duplicate responses
        # that appear under several (server, key_id) store entries.
        json_results = set()

        time_now_ms = self.clock.time_msec()

        # Map from server_name to the set of key_ids that need re-fetching.
        cache_misses = {}
        for (server_name, key_id, from_server), results in cached.items():
            # Pair each cached response with the time it was added so we can
            # pick the most recently fetched copy below.
            results = [(result["ts_added_ms"], result) for result in results]

            if not results and key_id is not None:
                cache_misses.setdefault(server_name, set()).add(key_id)
                continue

            if key_id is not None:
                # Use the most recently fetched copy, and decide whether it
                # is fresh enough to serve.
                ts_added_ms, most_recent_result = max(results)
                ts_valid_until_ms = most_recent_result["ts_valid_until_ms"]
                req_key = query.get(server_name, {}).get(key_id, {})
                req_valid_until = req_key.get("minimum_valid_until_ts")
                miss = False
                if req_valid_until is not None:
                    # The requester demanded a minimum validity horizon.
                    if ts_valid_until_ms < req_valid_until:
                        logger.debug(
                            "Cached response for %r/%r is older than requested"
                            ": valid_until (%r) < minimum_valid_until (%r)",
                            server_name,
                            key_id,
                            ts_valid_until_ms,
                            req_valid_until,
                        )
                        miss = True
                    else:
                        logger.debug(
                            "Cached response for %r/%r is newer than requested"
                            ": valid_until (%r) >= minimum_valid_until (%r)",
                            server_name,
                            key_id,
                            ts_valid_until_ms,
                            req_valid_until,
                        )
                elif (ts_added_ms + ts_valid_until_ms) / 2 < time_now_ms:
                    logger.debug(
                        "Cached response for %r/%r is too old"
                        ": (added (%r) + valid_until (%r)) / 2 < now (%r)",
                        server_name,
                        key_id,
                        ts_added_ms,
                        ts_valid_until_ms,
                        time_now_ms,
                    )
                    # We are more than half way through the lifetime of the
                    # response. We should fetch a fresh copy.
                    miss = True
                else:
                    logger.debug(
                        "Cached response for %r/%r is still valid"
                        ": (added (%r) + valid_until (%r)) / 2 < now (%r)",
                        server_name,
                        key_id,
                        ts_added_ms,
                        ts_valid_until_ms,
                        time_now_ms,
                    )

                if miss:
                    cache_misses.setdefault(server_name, set()).add(key_id)
                # The cached copy is collected unconditionally; on a miss the
                # recursive call below rebuilds the response from the
                # refreshed cache instead of using this result set.
                json_results.add(bytes(most_recent_result["key_json"]))
            else:
                # No specific key requested: return every cached response.
                for _, result in results:
                    json_results.add(bytes(result["key_json"]))

        if cache_misses and query_remote_on_cache_miss:
            # Fetch the missing/stale keys, then re-run the query against the
            # updated cache with remote fetching disabled so we respond even
            # if the fetch failed to produce fresher keys.
            yield self.fetcher.get_keys(cache_misses)
            yield self.query_keys(request, query, query_remote_on_cache_miss=False)
        else:
            # Re-sign each response with this server's notary key(s) so the
            # requester can verify it via us as well as the origin server.
            signed_keys = []
            for key_json in json_results:
                key_json = json.loads(key_json)
                for signing_key in self.config.key_server_signing_keys:
                    key_json = sign_json(key_json, self.config.server_name, signing_key)

                signed_keys.append(key_json)

            results = {"server_keys": signed_keys}

            respond_with_json_bytes(request, 200, encode_canonical_json(results))
|