# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2019 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from six import iteritems

from canonicaljson import encode_canonical_json, json

from twisted.internet import defer

from synapse.logging.opentracing import log_kv, set_tag, trace
from synapse.util.caches.descriptors import cached

from ._base import SQLBaseStore, db_to_json


class EndToEndKeyWorkerStore(SQLBaseStore):
    @trace
    @defer.inlineCallbacks
    def get_e2e_device_keys(
        self, query_list, include_all_devices=False, include_deleted_devices=False
    ):
        """Fetch a list of device keys.

        Args:
            query_list(list): List of (user_id, device_id) pairs. A device_id
                of None means "all devices for that user".
            include_all_devices (bool): whether to include entries for devices
                that don't have device keys
            include_deleted_devices (bool): whether to include null entries for
                devices which no longer exist (but were in the query_list).
                This option only takes effect if include_all_devices is true.

        Returns:
            Dict mapping from user_id to dict mapping from device_id to
            dict containing "keys" (the parsed key JSON) and
            "device_display_name".
        """
        set_tag("query_list", query_list)
        if not query_list:
            return {}

        results = yield self.runInteraction(
            "get_e2e_device_keys",
            self._get_e2e_device_keys_txn,
            query_list,
            include_all_devices,
            include_deleted_devices,
        )

        for user_id, device_keys in iteritems(results):
            for device_id, device_info in iteritems(device_keys):
                device_info["keys"] = db_to_json(device_info.pop("key_json"))

        return results
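
    # A minimal usage sketch (the user and device IDs are illustrative, and
    # the caller is assumed to be inside an @defer.inlineCallbacks function
    # with access to this store):
    #
    #     keys = yield store.get_e2e_device_keys(
    #         [("@alice:example.com", "JLAFKJWSCS"), ("@bob:example.com", None)],
    #         include_all_devices=True,
    #     )
    #     # keys["@alice:example.com"]["JLAFKJWSCS"]["keys"] is the parsed
    #     # device-key JSON; the None device_id queries all of Bob's devices.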

    @trace
    def _get_e2e_device_keys_txn(
        self, txn, query_list, include_all_devices=False, include_deleted_devices=False
    ):
        set_tag("include_all_devices", include_all_devices)
        set_tag("include_deleted_devices", include_deleted_devices)

        query_clauses = []
        query_params = []

        if include_all_devices is False:
            include_deleted_devices = False

        if include_deleted_devices:
            deleted_devices = set(query_list)

        for (user_id, device_id) in query_list:
            query_clause = "user_id = ?"
            query_params.append(user_id)

            if device_id is not None:
                query_clause += " AND device_id = ?"
                query_params.append(device_id)

            query_clauses.append(query_clause)

        sql = (
            "SELECT user_id, device_id, "
            " d.display_name AS device_display_name, "
            " k.key_json"
            " FROM devices d"
            " %s JOIN e2e_device_keys_json k USING (user_id, device_id)"
            " WHERE %s AND NOT d.hidden"
        ) % (
            "LEFT" if include_all_devices else "INNER",
            " OR ".join("(" + q + ")" for q in query_clauses),
        )

        txn.execute(sql, query_params)
        rows = self.cursor_to_dict(txn)

        result = {}
        for row in rows:
            if include_deleted_devices:
                deleted_devices.remove((row["user_id"], row["device_id"]))
            result.setdefault(row["user_id"], {})[row["device_id"]] = row

        if include_deleted_devices:
            for user_id, device_id in deleted_devices:
                result.setdefault(user_id, {})[device_id] = None

        log_kv(result)
        return result
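
    # For illustration, with query_list = [("@alice:example.com", None)] and
    # include_all_devices=True, the generated SQL is roughly:
    #
    #     SELECT user_id, device_id, d.display_name AS device_display_name,
    #            k.key_json
    #     FROM devices d
    #     LEFT JOIN e2e_device_keys_json k USING (user_id, device_id)
    #     WHERE (user_id = ?) AND NOT d.hidden
    #
    # The LEFT join keeps devices that never uploaded keys (key_json is NULL);
    # the INNER join used otherwise drops them.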

    @defer.inlineCallbacks
    def get_e2e_one_time_keys(self, user_id, device_id, key_ids):
        """Retrieve a number of one-time keys for a user

        Args:
            user_id(str): id of user to get keys for
            device_id(str): id of device to get keys for
            key_ids(list[str]): list of key ids (excluding algorithm) to
                retrieve

        Returns:
            deferred resolving to Dict[(str, str), str]: map from (algorithm,
            key_id) to json string for key
        """
        rows = yield self._simple_select_many_batch(
            table="e2e_one_time_keys_json",
            column="key_id",
            iterable=key_ids,
            retcols=("algorithm", "key_id", "key_json"),
            keyvalues={"user_id": user_id, "device_id": device_id},
            desc="add_e2e_one_time_keys_check",
        )
        result = {(row["algorithm"], row["key_id"]): row["key_json"] for row in rows}
        log_kv({"message": "Fetched one time keys for user", "one_time_keys": result})
        return result
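
    # An illustrative call and result shape (IDs and key material are made
    # up; "signed_curve25519" is a typical one-time-key algorithm):
    #
    #     keys = yield store.get_e2e_one_time_keys(
    #         "@alice:example.com", "JLAFKJWSCS", ["AAAAAQ", "AAAAAg"]
    #     )
    #     # keys == {("signed_curve25519", "AAAAAQ"): '{"key": "..."}', ...}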

    @defer.inlineCallbacks
    def add_e2e_one_time_keys(self, user_id, device_id, time_now, new_keys):
        """Insert some new one time keys for a device. Errors if any of the
        keys already exist.

        Args:
            user_id(str): id of user to add keys for
            device_id(str): id of device to add keys for
            time_now(long): insertion time to record (ms since epoch)
            new_keys(iterable[(str, str, str)]): keys to add - each a tuple of
                (algorithm, key_id, key json)
        """

        def _add_e2e_one_time_keys(txn):
            set_tag("user_id", user_id)
            set_tag("device_id", device_id)
            set_tag("new_keys", new_keys)
            # We are protected from race between lookup and insertion due to
            # a unique constraint. If there is a race of two calls to
            # `add_e2e_one_time_keys` then they'll conflict and we will only
            # insert one set.
            self._simple_insert_many_txn(
                txn,
                table="e2e_one_time_keys_json",
                values=[
                    {
                        "user_id": user_id,
                        "device_id": device_id,
                        "algorithm": algorithm,
                        "key_id": key_id,
                        "ts_added_ms": time_now,
                        "key_json": json_bytes,
                    }
                    for algorithm, key_id, json_bytes in new_keys
                ],
            )
            self._invalidate_cache_and_stream(
                txn, self.count_e2e_one_time_keys, (user_id, device_id)
            )

        yield self.runInteraction(
            "add_e2e_one_time_keys_insert", _add_e2e_one_time_keys
        )
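
    # The expected shape of new_keys, sketched with made-up key material:
    #
    #     new_keys = [
    #         ("signed_curve25519", "AAAAAQ", '{"key": "base64+public+key"}'),
    #     ]
    #     yield store.add_e2e_one_time_keys(
    #         "@alice:example.com", "JLAFKJWSCS", time_now_ms, new_keys
    #     )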

    @cached(max_entries=10000)
    def count_e2e_one_time_keys(self, user_id, device_id):
        """Count the number of one time keys the server has for a device

        Returns:
            Dict mapping from algorithm to number of keys for that algorithm.
        """

        def _count_e2e_one_time_keys(txn):
            sql = (
                "SELECT algorithm, COUNT(key_id) FROM e2e_one_time_keys_json"
                " WHERE user_id = ? AND device_id = ?"
                " GROUP BY algorithm"
            )
            txn.execute(sql, (user_id, device_id))
            result = {}
            for algorithm, key_count in txn:
                result[algorithm] = key_count
            return result

        return self.runInteraction("count_e2e_one_time_keys", _count_e2e_one_time_keys)
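
    # The result is cached (and invalidated by the writers above via
    # _invalidate_cache_and_stream), so repeated lookups are cheap. A typical
    # result, with an illustrative algorithm name:
    #
    #     {"signed_curve25519": 50}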


class EndToEndKeyStore(EndToEndKeyWorkerStore, SQLBaseStore):
    def set_e2e_device_keys(self, user_id, device_id, time_now, device_keys):
        """Stores device keys for a device. Returns True if there was a
        change, or False if the keys were already in the database.
        """

        def _set_e2e_device_keys_txn(txn):
            set_tag("user_id", user_id)
            set_tag("device_id", device_id)
            set_tag("time_now", time_now)
            set_tag("device_keys", device_keys)

            old_key_json = self._simple_select_one_onecol_txn(
                txn,
                table="e2e_device_keys_json",
                keyvalues={"user_id": user_id, "device_id": device_id},
                retcol="key_json",
                allow_none=True,
            )

            # In py3 we need old_key_json to match new_key_json type. The DB
            # returns unicode while encode_canonical_json returns bytes.
            new_key_json = encode_canonical_json(device_keys).decode("utf-8")

            if old_key_json == new_key_json:
                log_kv({"message": "Device key already stored."})
                return False

            self._simple_upsert_txn(
                txn,
                table="e2e_device_keys_json",
                keyvalues={"user_id": user_id, "device_id": device_id},
                values={"ts_added_ms": time_now, "key_json": new_key_json},
            )
            log_kv({"message": "Device keys stored."})
            return True

        return self.runInteraction("set_e2e_device_keys", _set_e2e_device_keys_txn)
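
    # Canonical JSON gives a deterministic serialisation, so comparing the
    # stored string against a re-encoding of the new dict is a reliable
    # change check. A usage sketch (hypothetical IDs; device_keys is the
    # client-uploaded key dict):
    #
    #     changed = yield store.set_e2e_device_keys(
    #         "@alice:example.com", "JLAFKJWSCS", time_now_ms, device_keys
    #     )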

    def claim_e2e_one_time_keys(self, query_list):
        """Take a list of one time keys out of the database.

        Args:
            query_list(list): List of (user_id, device_id, algorithm) triples.

        Returns:
            Dict mapping from user_id to dict mapping from device_id to dict
            mapping from "algorithm:key_id" to key json string.
        """

        @trace
        def _claim_e2e_one_time_keys(txn):
            sql = (
                "SELECT key_id, key_json FROM e2e_one_time_keys_json"
                " WHERE user_id = ? AND device_id = ? AND algorithm = ?"
                " LIMIT 1"
            )
            result = {}
            delete = []
            for user_id, device_id, algorithm in query_list:
                user_result = result.setdefault(user_id, {})
                device_result = user_result.setdefault(device_id, {})
                txn.execute(sql, (user_id, device_id, algorithm))
                for key_id, key_json in txn:
                    device_result[algorithm + ":" + key_id] = key_json
                    delete.append((user_id, device_id, algorithm, key_id))
            sql = (
                "DELETE FROM e2e_one_time_keys_json"
                " WHERE user_id = ? AND device_id = ? AND algorithm = ?"
                " AND key_id = ?"
            )
            for user_id, device_id, algorithm, key_id in delete:
                log_kv(
                    {
                        "message": "Executing claim e2e_one_time_keys transaction on database."
                    }
                )
                txn.execute(sql, (user_id, device_id, algorithm, key_id))
                log_kv({"message": "Finished executing and invalidating cache"})
                self._invalidate_cache_and_stream(
                    txn, self.count_e2e_one_time_keys, (user_id, device_id)
                )
            return result

        return self.runInteraction("claim_e2e_one_time_keys", _claim_e2e_one_time_keys)
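
    # Claiming is destructive: each matched key is returned once and then
    # deleted, so a one-time key is never handed to two different callers.
    # A sketch with hypothetical IDs:
    #
    #     claimed = yield store.claim_e2e_one_time_keys(
    #         [("@alice:example.com", "JLAFKJWSCS", "signed_curve25519")]
    #     )
    #     # claimed["@alice:example.com"]["JLAFKJWSCS"] maps
    #     # "signed_curve25519:AAAAAQ" to the claimed key's json string.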

    def delete_e2e_keys_by_device(self, user_id, device_id):
        """Delete all device keys and one-time keys stored for a device."""

        def delete_e2e_keys_by_device_txn(txn):
            log_kv(
                {
                    "message": "Deleting keys for device",
                    "device_id": device_id,
                    "user_id": user_id,
                }
            )
            self._simple_delete_txn(
                txn,
                table="e2e_device_keys_json",
                keyvalues={"user_id": user_id, "device_id": device_id},
            )
            self._simple_delete_txn(
                txn,
                table="e2e_one_time_keys_json",
                keyvalues={"user_id": user_id, "device_id": device_id},
            )
            self._invalidate_cache_and_stream(
                txn, self.count_e2e_one_time_keys, (user_id, device_id)
            )

        return self.runInteraction(
            "delete_e2e_keys_by_device", delete_e2e_keys_by_device_txn
        )

    def _set_e2e_cross_signing_key_txn(self, txn, user_id, key_type, key):
        """Set a user's cross-signing key.

        Args:
            txn (twisted.enterprise.adbapi.Connection): db connection
            user_id (str): the user to set the signing key for
            key_type (str): the type of key that is being set: either 'master'
                for a master key, 'self_signing' for a self-signing key, or
                'user_signing' for a user-signing key
            key (dict): the key data
        """
        # the cross-signing keys need to occupy the same namespace as devices,
        # since signatures are identified by device ID. So add an entry to the
        # device table to make sure that we don't have a collision with device
        # IDs

        # the 'key' dict will look something like:
        # {
        #     "user_id": "@alice:example.com",
        #     "usage": ["self_signing"],
        #     "keys": {
        #         "ed25519:base64+self+signing+public+key": "base64+self+signing+public+key",
        #     },
        #     "signatures": {
        #         "@alice:example.com": {
        #             "ed25519:base64+master+public+key": "base64+signature"
        #         }
        #     }
        # }
        # The "keys" property must only have one entry, which will be the public
        # key, so we just grab the first value in there
        pubkey = next(iter(key["keys"].values()))
        self._simple_insert_txn(
            txn,
            "devices",
            values={
                "user_id": user_id,
                "device_id": pubkey,
                "display_name": key_type + " signing key",
                "hidden": True,
            },
        )

        # and finally, store the key itself
        with self._cross_signing_id_gen.get_next() as stream_id:
            self._simple_insert_txn(
                txn,
                "e2e_cross_signing_keys",
                values={
                    "user_id": user_id,
                    "keytype": key_type,
                    "keydata": json.dumps(key),
                    "stream_id": stream_id,
                },
            )

    def set_e2e_cross_signing_key(self, user_id, key_type, key):
        """Set a user's cross-signing key.

        Args:
            user_id (str): the user to set the signing key for
            key_type (str): the type of cross-signing key to set
            key (dict): the key data
        """
        return self.runInteraction(
            "add_e2e_cross_signing_key",
            self._set_e2e_cross_signing_key_txn,
            user_id,
            key_type,
            key,
        )
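
    # A usage sketch, reusing the key shape documented in the txn helper
    # above (the user ID and key material are illustrative):
    #
    #     yield store.set_e2e_cross_signing_key(
    #         "@alice:example.com",
    #         "master",
    #         {
    #             "user_id": "@alice:example.com",
    #             "usage": ["master"],
    #             "keys": {
    #                 "ed25519:base64+master+public+key": "base64+master+public+key"
    #             },
    #         },
    #     )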

    def _get_e2e_cross_signing_key_txn(self, txn, user_id, key_type, from_user_id=None):
        """Returns a user's cross-signing key.

        Args:
            txn (twisted.enterprise.adbapi.Connection): db connection
            user_id (str): the user whose key is being requested
            key_type (str): the type of key that is being requested: either
                'master' for a master key, 'self_signing' for a self-signing
                key, or 'user_signing' for a user-signing key
            from_user_id (str): if specified, signatures made by this user on
                the key will be included in the result

        Returns:
            dict of the key data or None if not found
        """
        sql = (
            "SELECT keydata "
            " FROM e2e_cross_signing_keys "
            " WHERE user_id = ? AND keytype = ? ORDER BY stream_id DESC LIMIT 1"
        )
        txn.execute(sql, (user_id, key_type))
        row = txn.fetchone()
        if not row:
            return None
        key = json.loads(row[0])

        device_id = None
        for k in key["keys"].values():
            device_id = k

        if from_user_id is not None:
            sql = (
                "SELECT key_id, signature "
                " FROM e2e_cross_signing_signatures "
                " WHERE user_id = ? "
                " AND target_user_id = ? "
                " AND target_device_id = ? "
            )
            txn.execute(sql, (from_user_id, user_id, device_id))
            row = txn.fetchone()
            if row:
                key.setdefault("signatures", {}).setdefault(from_user_id, {})[
                    row[0]
                ] = row[1]

        return key
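
    # Descriptive note: a cross-signing key's single public key doubles as
    # its device ID (see _set_e2e_cross_signing_key_txn above), which is why
    # the signature lookup filters on target_device_id = device_id.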

    def get_e2e_cross_signing_key(self, user_id, key_type, from_user_id=None):
        """Returns a user's cross-signing key.

        Args:
            user_id (str): the user whose key is being requested
            key_type (str): the type of cross-signing key to get
            from_user_id (str): if specified, signatures made by this user on
                the key will be included in the result

        Returns:
            dict of the key data or None if not found
        """
        return self.runInteraction(
            "get_e2e_cross_signing_key",
            self._get_e2e_cross_signing_key_txn,
            user_id,
            key_type,
            from_user_id,
        )

    def store_e2e_cross_signing_signatures(self, user_id, signatures):
        """Stores cross-signing signatures.

        Args:
            user_id (str): the user who made the signatures
            signatures (iterable[(str, str, str, str)]): signatures to add - each
                a tuple of (key_id, target_user_id, target_device_id, signature),
                where key_id is the ID of the key (including the signature
                algorithm) that made the signature, target_user_id and
                target_device_id indicate the device being signed, and signature
                is the signature of the device
        """
        return self._simple_insert_many(
            "e2e_cross_signing_signatures",
            [
                {
                    "user_id": user_id,
                    "key_id": key_id,
                    "target_user_id": target_user_id,
                    "target_device_id": target_device_id,
                    "signature": signature,
                }
                for (key_id, target_user_id, target_device_id, signature) in signatures
            ],
            "add_e2e_signing_key",
        )
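
    # The shape of the signatures argument, with illustrative values:
    #
    #     yield store.store_e2e_cross_signing_signatures(
    #         "@alice:example.com",
    #         [
    #             (
    #                 "ed25519:base64+self+signing+public+key",
    #                 "@alice:example.com",
    #                 "JLAFKJWSCS",
    #                 "base64+signature",
    #             )
    #         ],
    #     )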