Run Black. (#5482)
This commit is contained in:
  parent 7dcf984075
  commit 32e7c9e7f2

376 changed files with 9142 additions and 10388 deletions
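For context: this commit mechanically reformats the tree with the Black code formatter (88-character line limit, string prefix normalization, re-wrapped call arguments), which is what produces every hunk below. As a minimal sketch, not part of the commit, one of those rewrites can be reproduced with Black's programmatic API; this assumes the black package is installed and exposes format_str and FileMode as in recent releases (2019-era releases took slightly different arguments).

    import black

    # One of the pre-Black snippets from the event_signing.py diff below;
    # the body is elided with `...` since only the signature matters here.
    before = (
        "def add_hashes_and_signatures(event_dict, signature_name, signing_key,\n"
        "                              hash_algorithm=hashlib.sha256):\n"
        "    ...\n"
    )

    # Black only parses the source, so the unresolved `hashlib` name is fine;
    # FileMode() defaults to the 88-character line limit.
    after = black.format_str(before, mode=black.FileMode())
    print(after)
    # Expected output, matching the @@ -135 hunk below:
    # def add_hashes_and_signatures(
    #     event_dict, signature_name, signing_key, hash_algorithm=hashlib.sha256
    # ):
    #     ...

On the command line the equivalent is running black over the source tree (for example `black .` from the repository root) and committing the result.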
synapse/crypto/event_signing.py

@@ -46,9 +46,7 @@ def check_event_content_hash(event, hash_algorithm=hashlib.sha256):
     if name not in hashes:
         raise SynapseError(
             400,
-            "Algorithm %s not in hashes %s" % (
-                name, list(hashes),
-            ),
+            "Algorithm %s not in hashes %s" % (name, list(hashes)),
             Codes.UNAUTHORIZED,
         )
     message_hash_base64 = hashes[name]
@@ -56,9 +54,7 @@ def check_event_content_hash(event, hash_algorithm=hashlib.sha256):
         message_hash_bytes = decode_base64(message_hash_base64)
     except Exception:
         raise SynapseError(
-            400,
-            "Invalid base64: %s" % (message_hash_base64,),
-            Codes.UNAUTHORIZED,
+            400, "Invalid base64: %s" % (message_hash_base64,), Codes.UNAUTHORIZED
         )
     return message_hash_bytes == expected_hash
@@ -135,8 +131,9 @@ def compute_event_signature(event_dict, signature_name, signing_key):
     return redact_json["signatures"]


-def add_hashes_and_signatures(event_dict, signature_name, signing_key,
-                              hash_algorithm=hashlib.sha256):
+def add_hashes_and_signatures(
+    event_dict, signature_name, signing_key, hash_algorithm=hashlib.sha256
+):
     """Add content hash and sign the event

     Args:
@@ -153,7 +150,5 @@ def add_hashes_and_signatures(event_dict, signature_name, signing_key,
     event_dict.setdefault("hashes", {})[name] = encode_base64(digest)

     event_dict["signatures"] = compute_event_signature(
-        event_dict,
-        signature_name=signature_name,
-        signing_key=signing_key,
+        event_dict, signature_name=signature_name, signing_key=signing_key
     )
synapse/crypto/keyring.py

@@ -505,7 +505,7 @@ class BaseV2KeyFetcher(object):
         Returns:
             Deferred[dict[str, FetchKeyResult]]: map from key_id to result object
         """
-        ts_valid_until_ms = response_json[u"valid_until_ts"]
+        ts_valid_until_ms = response_json["valid_until_ts"]

         # start by extracting the keys from the response, since they may be required
         # to validate the signature on the response.
@@ -614,10 +614,7 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher):

         results = yield logcontext.make_deferred_yieldable(
             defer.gatherResults(
-                [
-                    run_in_background(get_key, server)
-                    for server in self.key_servers
-                ],
+                [run_in_background(get_key, server) for server in self.key_servers],
                 consumeErrors=True,
             ).addErrback(unwrapFirstError)
         )
@@ -630,9 +627,7 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher):
         defer.returnValue(union_of_keys)

     @defer.inlineCallbacks
-    def get_server_verify_key_v2_indirect(
-        self, keys_to_fetch, key_server
-    ):
+    def get_server_verify_key_v2_indirect(self, keys_to_fetch, key_server):
         """
         Args:
             keys_to_fetch (dict[str, dict[str, int]]):
@@ -661,9 +656,9 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher):
                 destination=perspective_name,
                 path="/_matrix/key/v2/query",
                 data={
-                    u"server_keys": {
+                    "server_keys": {
                         server_name: {
-                            key_id: {u"minimum_valid_until_ts": min_valid_ts}
+                            key_id: {"minimum_valid_until_ts": min_valid_ts}
                             for key_id, min_valid_ts in server_keys.items()
                         }
                         for server_name, server_keys in keys_to_fetch.items()
@@ -690,10 +685,7 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher):
                 )

             try:
-                self._validate_perspectives_response(
-                    key_server,
-                    response,
-                )
+                self._validate_perspectives_response(key_server, response)

                 processed_response = yield self.process_v2_response(
                     perspective_name, response, time_added_ms=time_now_ms
@@ -720,9 +712,7 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher):

         defer.returnValue(keys)

-    def _validate_perspectives_response(
-        self, key_server, response,
-    ):
+    def _validate_perspectives_response(self, key_server, response):
         """Optionally check the signature on the result of a /key/query request

         Args:
@@ -739,13 +729,13 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher):
             return

         if (
-            u"signatures" not in response
-            or perspective_name not in response[u"signatures"]
+            "signatures" not in response
+            or perspective_name not in response["signatures"]
         ):
             raise KeyLookupError("Response not signed by the notary server")

         verified = False
-        for key_id in response[u"signatures"][perspective_name]:
+        for key_id in response["signatures"][perspective_name]:
             if key_id in perspective_keys:
                 verify_signed_json(response, perspective_name, perspective_keys[key_id])
                 verified = True
@@ -754,7 +744,7 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher):
             raise KeyLookupError(
                 "Response not signed with a known key: signed with: %r, known keys: %r"
                 % (
-                    list(response[u"signatures"][perspective_name].keys()),
+                    list(response["signatures"][perspective_name].keys()),
                     list(perspective_keys.keys()),
                 )
             )
@@ -826,7 +816,6 @@ class ServerKeyFetcher(BaseV2KeyFetcher):
                 path="/_matrix/key/v2/server/"
                 + urllib.parse.quote(requested_key_id),
                 ignore_backoff=True,
-
                 # we only give the remote server 10s to respond. It should be an
                 # easy request to handle, so if it doesn't reply within 10s, it's
                 # probably not going to.