Mirror of https://git.anonymousland.org/anonymousland/synapse.git
Synced 2025-01-05 01:30:51 -05:00
Fix flake8 (#4519)
This commit is contained in:
parent 457fbfaf22
commit 3f189c902e

changelog.d/4519.misc (new file, 1 line)
@@ -0,0 +1 @@
+Fix code to comply with linting in PyFlakes 3.7.1.

@@ -46,7 +46,7 @@ def request_registration(
     # Get the nonce
     r = requests.get(url, verify=False)

-    if r.status_code is not 200:
+    if r.status_code != 200:
         _print("ERROR! Received %d %s" % (r.status_code, r.reason))
         if 400 <= r.status_code < 500:
             try:

@@ -84,7 +84,7 @@ def request_registration(
     _print("Sending registration request...")
     r = requests.post(url, json=data, verify=False)

-    if r.status_code is not 200:
+    if r.status_code != 200:
         _print("ERROR! Received %d %s" % (r.status_code, r.reason))
         if 400 <= r.status_code < 500:
             try:

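The two hunks above replace identity tests against an int literal with value comparisons. A standalone Python sketch (not part of the commit) of why the original form is fragile:

    # `is` compares object identity, not value. CPython happens to cache small
    # ints (-5..256), so `status is 200` usually looks correct, but that is an
    # implementation detail; the pyflakes bundled with flake8 3.7 reports
    # literal `is` comparisons (F632), and Python 3.8+ warns about them.
    status = int("200")
    print(status == 200)      # True: value equality, the check the commit adopts
    print(status is 200)      # True only because CPython caches small integers

    big = int("1000")
    print(big == 1000)        # True
    print(big is 1000)        # False in CPython: 1000 is outside the small-int cache
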
@@ -57,8 +57,8 @@ class DirectoryHandler(BaseHandler):
         # general association creation for both human users and app services

         for wchar in string.whitespace:
             if wchar in room_alias.localpart:
                 raise SynapseError(400, "Invalid characters in room alias")

         if not self.hs.is_mine(room_alias):
             raise SynapseError(400, "Room alias must be local")

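Both sides of the hunk above contain the same statements, which points to a whitespace-only fix; one candidate is pycodestyle's E117 ("over-indented") check, new in the flake8 3.7 toolchain. An illustrative snippet (assumed, not taken from the commit) that trips it:

    # E117 flags a block indented more than one level past the statement that
    # introduces it. The code still runs; the linter only objects to the style.
    def reject_whitespace(localpart):
        for wchar in " \t\r\n":
                if wchar in localpart:            # E117: over-indented
                    raise ValueError("Invalid characters in room alias")
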
@@ -102,7 +102,7 @@ class FederationHandler(BaseHandler):

         self.hs = hs

-        self.store = hs.get_datastore()  # type: synapse.storage.DataStore
+        self.store = hs.get_datastore()
         self.federation_client = hs.get_federation_client()
         self.state_handler = hs.get_state_handler()
         self.server_name = hs.hostname

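The hunk above removes a trailing `# type:` comment rather than changing any code. A plausible reason, stated here as an assumption: newer pyflakes parses type comments, so a comment that names a module the file never imports is reported as an undefined name. A sketch using only the standard library:

    import collections

    def make_cache():
        # Fine: the name used in the type comment is imported in this module.
        cache = collections.OrderedDict()   # type: collections.OrderedDict
        return cache

    # By contrast, a comment such as
    #     self.store = hs.get_datastore()  # type: synapse.storage.DataStore
    # is reported when `synapse.storage` is not imported in the module; dropping
    # the comment (as the hunk does) or adding the import both silence it.
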
@@ -84,7 +84,7 @@ def _rule_to_template(rule):
         templaterule["pattern"] = thecond["pattern"]

     if unscoped_rule_id:
         templaterule['rule_id'] = unscoped_rule_id
     if 'default' in rule:
         templaterule['default'] = rule['default']
     return templaterule

@@ -317,7 +317,7 @@ class DataStore(RoomMemberStore, RoomStore,
                               thirty_days_ago_in_secs))

             for row in txn:
-                if row[0] is 'unknown':
+                if row[0] == 'unknown':
                     pass
                 results[row[0]] = row[1]

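The hunk above fixes an identity comparison against a string literal. A standalone sketch (not part of the commit) of the difference:

    # A string built at runtime (for example, one read from a database row) is
    # a new object, so `is 'unknown'` is effectively never true even when the
    # value matches; pyflakes reports this pattern as F632 as well.
    value = "".join(["un", "known"])   # stands in for a value fetched by the query
    print(value == 'unknown')          # True: the comparison the commit switches to
    print(value is 'unknown')          # False in CPython: distinct string objects
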
@@ -904,105 +904,105 @@ class EventsStore(StateGroupWorkerStore, EventFederationStore, EventsWorkerStore

     def _update_current_state_txn(self, txn, state_delta_by_room, max_stream_order):
         for room_id, current_state_tuple in iteritems(state_delta_by_room):
             to_delete, to_insert = current_state_tuple

             # First we add entries to the current_state_delta_stream. We
             # do this before updating the current_state_events table so
             # that we can use it to calculate the `prev_event_id`. (This
             # allows us to not have to pull out the existing state
             # unnecessarily).
             sql = """
                 INSERT INTO current_state_delta_stream
                 (stream_id, room_id, type, state_key, event_id, prev_event_id)
                 SELECT ?, ?, ?, ?, ?, (
                     SELECT event_id FROM current_state_events
                     WHERE room_id = ? AND type = ? AND state_key = ?
                 )
             """
             txn.executemany(sql, (
                 (
                     max_stream_order, room_id, etype, state_key, None,
                     room_id, etype, state_key,
                 )
                 for etype, state_key in to_delete
                 # We sanity check that we're deleting rather than updating
                 if (etype, state_key) not in to_insert
             ))
             txn.executemany(sql, (
                 (
                     max_stream_order, room_id, etype, state_key, ev_id,
                     room_id, etype, state_key,
                 )
                 for (etype, state_key), ev_id in iteritems(to_insert)
             ))

             # Now we actually update the current_state_events table

             txn.executemany(
                 "DELETE FROM current_state_events"
                 " WHERE room_id = ? AND type = ? AND state_key = ?",
                 (
                     (room_id, etype, state_key)
                     for etype, state_key in itertools.chain(to_delete, to_insert)
                 ),
             )

             self._simple_insert_many_txn(
                 txn,
                 table="current_state_events",
                 values=[
                     {
                         "event_id": ev_id,
                         "room_id": room_id,
                         "type": key[0],
                         "state_key": key[1],
                     }
                     for key, ev_id in iteritems(to_insert)
                 ],
             )

             txn.call_after(
                 self._curr_state_delta_stream_cache.entity_has_changed,
                 room_id, max_stream_order,
             )

             # Invalidate the various caches

             # Figure out the changes of membership to invalidate the
             # `get_rooms_for_user` cache.
             # We find out which membership events we may have deleted
             # and which we have added, then we invlidate the caches for all
             # those users.
             members_changed = set(
                 state_key
                 for ev_type, state_key in itertools.chain(to_delete, to_insert)
                 if ev_type == EventTypes.Member
             )

             for member in members_changed:
                 self._invalidate_cache_and_stream(
                     txn, self.get_rooms_for_user_with_stream_ordering, (member,)
                 )

             for host in set(get_domain_from_id(u) for u in members_changed):
                 self._invalidate_cache_and_stream(
                     txn, self.is_host_joined, (room_id, host)
                 )
                 self._invalidate_cache_and_stream(
                     txn, self.was_host_joined, (room_id, host)
                 )

             self._invalidate_cache_and_stream(
                 txn, self.get_users_in_room, (room_id,)
             )
             self._invalidate_cache_and_stream(
                 txn, self.get_room_summary, (room_id,)
             )
             self._invalidate_cache_and_stream(
                 txn, self.get_current_state_ids, (room_id,)
             )

     def _update_forward_extremities_txn(self, txn, new_forward_extremities,
                                         max_stream_order):

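The hunk above feeds generator expressions of parameter tuples to `txn.executemany`, and the INSERT records the previous event id via a correlated scalar subquery, which is why the delta rows are written before `current_state_events` is rewritten. A self-contained sqlite3 sketch of the same pattern; the table and column names here are invented for illustration:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute(
        "CREATE TABLE current_state (room_id TEXT, type TEXT, state_key TEXT, event_id TEXT)"
    )
    conn.execute(
        "CREATE TABLE delta_stream (stream_id INTEGER, room_id TEXT, type TEXT,"
        " state_key TEXT, event_id TEXT, prev_event_id TEXT)"
    )
    conn.execute(
        "INSERT INTO current_state VALUES"
        " ('!room:example.org', 'm.room.member', '@alice:example.org', '$old')"
    )

    # Same shape as the hunk: the scalar subquery looks up the event currently
    # held for the same (room, type, state_key) and stores it as prev_event_id,
    # so this statement must run before current_state is updated.
    sql = """
        INSERT INTO delta_stream
        (stream_id, room_id, type, state_key, event_id, prev_event_id)
        SELECT ?, ?, ?, ?, ?, (
            SELECT event_id FROM current_state
            WHERE room_id = ? AND type = ? AND state_key = ?
        )
    """

    stream_id, room_id = 42, "!room:example.org"
    to_insert = {("m.room.member", "@alice:example.org"): "$new"}

    # executemany accepts any iterable of parameter tuples, so a generator
    # expression works and avoids materialising a list.
    conn.executemany(sql, (
        (stream_id, room_id, etype, state_key, ev_id,
         room_id, etype, state_key)
        for (etype, state_key), ev_id in to_insert.items()
    ))

    print(conn.execute("SELECT event_id, prev_event_id FROM delta_stream").fetchall())
    # [('$new', '$old')]
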
@@ -220,7 +220,7 @@ class EventsWorkerStore(SQLBaseStore):
         defer.returnValue(events)

     def _invalidate_get_event_cache(self, event_id):
         self._get_event_cache.invalidate((event_id,))

     def _get_events_from_cache(self, events, allow_rejected, update_metrics=True):
         """Fetch events from the caches

@@ -11,7 +11,7 @@ class BackgroundUpdateTestCase(unittest.TestCase):
     def setUp(self):
         hs = yield setup_test_homeserver(
             self.addCleanup
-        )  # type: synapse.server.HomeServer
+        )
         self.store = hs.get_datastore()
         self.clock = hs.get_clock()

@@ -20,9 +20,6 @@ import tests.utils


 class EndToEndKeyStoreTestCase(tests.unittest.TestCase):
-    def __init__(self, *args, **kwargs):
-        super(EndToEndKeyStoreTestCase, self).__init__(*args, **kwargs)
-        self.store = None  # type: synapse.storage.DataStore

     @defer.inlineCallbacks
     def setUp(self):

@@ -22,9 +22,6 @@ import tests.utils


 class KeyStoreTestCase(tests.unittest.TestCase):
-    def __init__(self, *args, **kwargs):
-        super(KeyStoreTestCase, self).__init__(*args, **kwargs)
-        self.store = None  # type: synapse.storage.keys.KeyStore

     @defer.inlineCallbacks
     def setUp(self):

@@ -28,9 +28,6 @@ logger = logging.getLogger(__name__)


 class StateStoreTestCase(tests.unittest.TestCase):
-    def __init__(self, *args, **kwargs):
-        super(StateStoreTestCase, self).__init__(*args, **kwargs)
-        self.store = None  # type: synapse.storage.DataStore

     @defer.inlineCallbacks
     def setUp(self):