Fix (final) Bugbear violations (#9838)

Jonathan de Jong 2021-04-20 12:50:49 +02:00 committed by GitHub
parent 71f0623de9
commit 495b214f4f
23 changed files with 46 additions and 49 deletions

changelog.d/9838.misc (new file, +1 line)
View File

@@ -0,0 +1 @@
+Introduce flake8-bugbear to the test suite and fix some of its lint violations.
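Most of the changes below apply a single flake8-bugbear pattern: B007 flags a loop control variable that is never used inside the loop body, and the fix is either to rename it to `_` or to iterate over `.values()`/`.keys()` instead of `.items()`. A minimal standalone sketch of the pattern (illustration only, not code from this commit):

    import os

    # Before: `dirs` is never used in the loop body, so flake8-bugbear reports
    # B007 ("Loop control variable 'dirs' not used within the loop body").
    for root, dirs, files in os.walk("."):
        print(root, files)

    # After: renaming the unused variable to `_` documents the intent and
    # silences the warning.
    for root, _, files in os.walk("."):
        print(root, files)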

View File

@@ -140,7 +140,7 @@ if __name__ == "__main__":
     definitions = {}
     for directory in args.directories:
-        for root, dirs, files in os.walk(directory):
+        for root, _, files in os.walk(directory):
             for filename in files:
                 if filename.endswith(".py"):
                     filepath = os.path.join(root, filename)

View File

@@ -48,7 +48,7 @@ args = parser.parse_args()
 for directory in args.directories:
-    for root, dirs, files in os.walk(directory):
+    for root, _, files in os.walk(directory):
         for filename in files:
             if filename.endswith(".py"):
                 filepath = os.path.join(root, filename)

View File

@@ -18,8 +18,7 @@ ignore =
 # E203: whitespace before ':' (which is contrary to pep8?)
 # E731: do not assign a lambda expression, use a def
 # E501: Line too long (black enforces this for us)
-# B007: Subsection of the bugbear suite (TODO: add in remaining fixes)
-ignore=W503,W504,E203,E731,E501,B007
+ignore=W503,W504,E203,E731,E501

 [isort]
 line_length = 88

View File

@@ -670,7 +670,7 @@ def _verify_third_party_invite(event: EventBase, auth_events: StateMap[EventBase
         public_key = public_key_object["public_key"]
         try:
             for server, signature_block in signed["signatures"].items():
-                for key_name, encoded_signature in signature_block.items():
+                for key_name in signature_block.keys():
                     if not key_name.startswith("ed25519:"):
                         continue
                     verify_key = decode_verify_key_bytes(

View File

@@ -501,10 +501,10 @@ def process_rows_for_federation(
             states=[state], destinations=destinations
         )

-    for destination, edu_map in buff.keyed_edus.items():
+    for edu_map in buff.keyed_edus.values():
         for key, edu in edu_map.items():
             transaction_queue.send_edu(edu, key)

-    for destination, edu_list in buff.edus.items():
+    for edu_list in buff.edus.values():
         for edu in edu_list:
             transaction_queue.send_edu(edu, None)

View File

@@ -1248,7 +1248,7 @@ class AuthHandler(BaseHandler):
         # see if any of our auth providers want to know about this
         for provider in self.password_providers:
-            for token, token_id, device_id in tokens_and_devices:
+            for token, _, device_id in tokens_and_devices:
                 await provider.on_logged_out(
                     user_id=user_id, device_id=device_id, access_token=token
                 )

View File

@@ -156,8 +156,7 @@ class DeviceWorkerHandler(BaseHandler):
                 # The user may have left the room
                 # TODO: Check if they actually did or if we were just invited.
                 if room_id not in room_ids:
-                    for key, event_id in current_state_ids.items():
-                        etype, state_key = key
+                    for etype, state_key in current_state_ids.keys():
                         if etype != EventTypes.Member:
                             continue
                         possibly_left.add(state_key)

@@ -179,8 +178,7 @@ class DeviceWorkerHandler(BaseHandler):
                     log_kv(
                         {"event": "encountered empty previous state", "room_id": room_id}
                     )
-                    for key, event_id in current_state_ids.items():
-                        etype, state_key = key
+                    for etype, state_key in current_state_ids.keys():
                         if etype != EventTypes.Member:
                             continue
                         possibly_changed.add(state_key)

@@ -198,8 +196,7 @@ class DeviceWorkerHandler(BaseHandler):
                 for state_dict in prev_state_ids.values():
                     member_event = state_dict.get((EventTypes.Member, user_id), None)
                     if not member_event or member_event != current_member_id:
-                        for key, event_id in current_state_ids.items():
-                            etype, state_key = key
+                        for etype, state_key in current_state_ids.keys():
                             if etype != EventTypes.Member:
                                 continue
                             possibly_changed.add(state_key)

@@ -714,7 +711,7 @@ class DeviceListUpdater:
             # This can happen since we batch updates
             return

-        for device_id, stream_id, prev_ids, content in pending_updates:
+        for device_id, stream_id, prev_ids, _ in pending_updates:
             logger.debug(
                 "Handling update %r/%r, ID: %r, prev: %r ",
                 user_id,

@@ -740,7 +737,7 @@
         else:
             # Simply update the single device, since we know that is the only
             # change (because of the single prev_id matching the current cache)
-            for device_id, stream_id, prev_ids, content in pending_updates:
+            for device_id, stream_id, _, content in pending_updates:
                 await self.store.update_remote_device_list_cache_entry(
                     user_id, device_id, content, stream_id
                 )
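For context on the device-handler hunks above: `current_state_ids` is keyed by `(event type, state key)` tuples, so once the unused event ID value is dropped the key can be unpacked directly in the `for` statement. A small standalone sketch of the idiom, with made-up values rather than real Synapse state:

    # Hypothetical state map: (event type, state key) -> event ID
    current_state_ids = {
        ("m.room.member", "@alice:example.org"): "$membership_event",
        ("m.room.name", ""): "$name_event",
    }

    possibly_changed = set()
    # Iterating .keys() and unpacking the tuple avoids the unused event ID
    # variable that bugbear flagged in the .items() version.
    for etype, state_key in current_state_ids.keys():
        if etype != "m.room.member":
            continue
        possibly_changed.add(state_key)

    print(possibly_changed)  # {'@alice:example.org'}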

View File

@@ -2956,7 +2956,7 @@ class FederationHandler(BaseHandler):
         try:
             # for each sig on the third_party_invite block of the actual invite
             for server, signature_block in signed["signatures"].items():
-                for key_name, encoded_signature in signature_block.items():
+                for key_name in signature_block.keys():
                     if not key_name.startswith("ed25519:"):
                         continue

View File

@@ -226,11 +226,11 @@ class RemoteHandler(logging.Handler):
             old_buffer = self._buffer
             self._buffer = deque()

-            for i in range(buffer_split):
+            for _ in range(buffer_split):
                 self._buffer.append(old_buffer.popleft())

             end_buffer = []
-            for i in range(buffer_split):
+            for _ in range(buffer_split):
                 end_buffer.append(old_buffer.pop())

             self._buffer.extend(reversed(end_buffer))
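As an aside, the loop pair above trims the log buffer by keeping only the oldest and newest `buffer_split` entries and dropping everything in between; only the unused loop index changed in this commit. A standalone sketch of the same truncation with toy values (not the handler's real data):

    from collections import deque

    old_buffer = deque(range(10))  # stand-in for buffered log records
    buffer_split = 3

    new_buffer = deque()
    # Keep the oldest `buffer_split` entries...
    for _ in range(buffer_split):
        new_buffer.append(old_buffer.popleft())

    # ...and the newest `buffer_split` entries, restoring their original order.
    end_buffer = []
    for _ in range(buffer_split):
        end_buffer.append(old_buffer.pop())
    new_buffer.extend(reversed(end_buffer))

    print(list(new_buffer))  # [0, 1, 2, 7, 8, 9]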

View File

@@ -144,7 +144,7 @@ class RemoteKey(DirectServeJsonResource):
         # Note that the value is unused.
         cache_misses = {}  # type: Dict[str, Dict[str, int]]
-        for (server_name, key_id, from_server), results in cached.items():
+        for (server_name, key_id, _), results in cached.items():
             results = [(result["ts_added_ms"], result) for result in results]

             if not results and key_id is not None:

@@ -206,7 +206,7 @@ class RemoteKey(DirectServeJsonResource):
                     # Cast to bytes since postgresql returns a memoryview.
                     json_results.add(bytes(most_recent_result["key_json"]))
                 else:
-                    for ts_added, result in results:
+                    for _, result in results:
                         # Cast to bytes since postgresql returns a memoryview.
                         json_results.add(bytes(result["key_json"]))

View File

@@ -170,7 +170,7 @@ class PersistEventsStore:
             )

             async with stream_ordering_manager as stream_orderings:
-                for (event, context), stream in zip(events_and_contexts, stream_orderings):
+                for (event, _), stream in zip(events_and_contexts, stream_orderings):
                     event.internal_metadata.stream_ordering = stream

                 await self.db_pool.runInteraction(

@@ -297,7 +297,7 @@ class PersistEventsStore:
                 txn.execute(sql + clause, args)

                 to_recursively_check = []
-                for event_id, prev_event_id, metadata, rejected in txn:
+                for _, prev_event_id, metadata, rejected in txn:
                     if prev_event_id in existing_prevs:
                         continue

@@ -1127,7 +1127,7 @@ class PersistEventsStore:
     def _update_forward_extremities_txn(
         self, txn, new_forward_extremities, max_stream_order
     ):
-        for room_id, new_extrem in new_forward_extremities.items():
+        for room_id in new_forward_extremities.keys():
             self.db_pool.simple_delete_txn(
                 txn, table="event_forward_extremities", keyvalues={"room_id": room_id}
             )

@@ -1399,7 +1399,7 @@ class PersistEventsStore:
         ]

         state_values = []
-        for event, context in state_events_and_contexts:
+        for event, _ in state_events_and_contexts:
             vals = {
                 "event_id": event.event_id,
                 "room_id": event.room_id,

@@ -1468,7 +1468,7 @@ class PersistEventsStore:
             # nothing to do here
             return

-        for event, context in events_and_contexts:
+        for event, _ in events_and_contexts:
             if event.type == EventTypes.Redaction and event.redacts is not None:
                 # Remove the entries in the event_push_actions table for the
                 # redacted event.

View File

@@ -222,7 +222,7 @@ class FederationTestCase(unittest.HomeserverTestCase):
             room_version,
         )

-        for i in range(3):
+        for _ in range(3):
             event = create_invite()
             self.get_success(
                 self.handler.on_invite_request(

View File

@@ -239,7 +239,7 @@ class EventsStreamTestCase(BaseStreamTestCase):
         # the state rows are unsorted
         state_rows = []  # type: List[EventsStreamCurrentStateRow]
-        for stream_name, token, row in received_rows:
+        for stream_name, _, row in received_rows:
             self.assertEqual("events", stream_name)
             self.assertIsInstance(row, EventsStreamRow)
             self.assertEqual(row.type, "state")

@@ -356,7 +356,7 @@ class EventsStreamTestCase(BaseStreamTestCase):
         # the state rows are unsorted
         state_rows = []  # type: List[EventsStreamCurrentStateRow]
-        for j in range(STATES_PER_USER + 1):
+        for _ in range(STATES_PER_USER + 1):
             stream_name, token, row = received_rows.pop(0)
             self.assertEqual("events", stream_name)
             self.assertIsInstance(row, EventsStreamRow)

View File

@@ -430,7 +430,7 @@ class DevicesRestTestCase(unittest.HomeserverTestCase):
         """
         # Create devices
         number_devices = 5
-        for n in range(number_devices):
+        for _ in range(number_devices):
             self.login("user", "pass")

         # Get devices

@@ -547,7 +547,7 @@ class DeleteDevicesRestTestCase(unittest.HomeserverTestCase):
         # Create devices
         number_devices = 5
-        for n in range(number_devices):
+        for _ in range(number_devices):
             self.login("user", "pass")

         # Get devices

View File

@@ -48,22 +48,22 @@ class EventReportsTestCase(unittest.HomeserverTestCase):
         self.helper.join(self.room_id2, user=self.admin_user, tok=self.admin_user_tok)

         # Two rooms and two users. Every user sends and reports every room event
-        for i in range(5):
+        for _ in range(5):
             self._create_event_and_report(
                 room_id=self.room_id1,
                 user_tok=self.other_user_tok,
             )
-        for i in range(5):
+        for _ in range(5):
             self._create_event_and_report(
                 room_id=self.room_id2,
                 user_tok=self.other_user_tok,
             )
-        for i in range(5):
+        for _ in range(5):
             self._create_event_and_report(
                 room_id=self.room_id1,
                 user_tok=self.admin_user_tok,
             )
-        for i in range(5):
+        for _ in range(5):
             self._create_event_and_report(
                 room_id=self.room_id2,
                 user_tok=self.admin_user_tok,

View File

@@ -615,7 +615,7 @@ class RoomTestCase(unittest.HomeserverTestCase):
         # Create 3 test rooms
         total_rooms = 3
         room_ids = []
-        for x in range(total_rooms):
+        for _ in range(total_rooms):
             room_id = self.helper.create_room_as(
                 self.admin_user, tok=self.admin_user_tok
             )

@@ -679,7 +679,7 @@ class RoomTestCase(unittest.HomeserverTestCase):
         # Create 5 test rooms
         total_rooms = 5
         room_ids = []
-        for x in range(total_rooms):
+        for _ in range(total_rooms):
             room_id = self.helper.create_room_as(
                 self.admin_user, tok=self.admin_user_tok
             )

@@ -1577,7 +1577,7 @@ class JoinAliasRoomTestCase(unittest.HomeserverTestCase):
             channel.json_body["event"]["event_id"], events[midway]["event_id"]
         )

-        for i, found_event in enumerate(channel.json_body["events_before"]):
+        for found_event in channel.json_body["events_before"]:
             for j, posted_event in enumerate(events):
                 if found_event["event_id"] == posted_event["event_id"]:
                     self.assertTrue(j < midway)

@@ -1585,7 +1585,7 @@ class JoinAliasRoomTestCase(unittest.HomeserverTestCase):
             else:
                 self.fail("Event %s from events_before not found" % j)

-        for i, found_event in enumerate(channel.json_body["events_after"]):
+        for found_event in channel.json_body["events_after"]:
             for j, posted_event in enumerate(events):
                 if found_event["event_id"] == posted_event["event_id"]:
                     self.assertTrue(j > midway)

View File

@@ -467,7 +467,7 @@ class UserMediaStatisticsTestCase(unittest.HomeserverTestCase):
             number_media: Number of media to be created for the user
         """
         upload_resource = self.media_repo.children[b"upload"]
-        for i in range(number_media):
+        for _ in range(number_media):
             # file size is 67 Byte
             image_data = unhexlify(
                 b"89504e470d0a1a0a0000000d4948445200000001000000010806"

View File

@@ -1937,7 +1937,7 @@ class UserMembershipRestTestCase(unittest.HomeserverTestCase):
         # Create rooms and join
         other_user_tok = self.login("user", "pass")
         number_rooms = 5
-        for n in range(number_rooms):
+        for _ in range(number_rooms):
             self.helper.create_room_as(self.other_user, tok=other_user_tok)

         # Get rooms

@@ -2517,7 +2517,7 @@ class UserMediaRestTestCase(unittest.HomeserverTestCase):
             user_token: Access token of the user
             number_media: Number of media to be created for the user
         """
-        for i in range(number_media):
+        for _ in range(number_media):
             # file size is 67 Byte
             image_data = unhexlify(
                 b"89504e470d0a1a0a0000000d4948445200000001000000010806"

View File

@@ -646,7 +646,7 @@ class RoomInviteRatelimitTestCase(RoomBase):
     def test_invites_by_users_ratelimit(self):
         """Tests that invites to a specific user are actually rate-limited."""

-        for i in range(3):
+        for _ in range(3):
             room_id = self.helper.create_room_as(self.user_id)
             self.helper.invite(room_id, self.user_id, "@other-users:red")

@@ -668,7 +668,7 @@ class RoomJoinRatelimitTestCase(RoomBase):
     )
     def test_join_local_ratelimit(self):
         """Tests that local joins are actually rate-limited."""

-        for i in range(3):
+        for _ in range(3):
             self.helper.create_room_as(self.user_id)

         self.helper.create_room_as(self.user_id, expect_code=429)

@@ -733,7 +733,7 @@ class RoomJoinRatelimitTestCase(RoomBase):
         for path in paths_to_test:
             # Make sure we send more requests than the rate-limiting config would allow
             # if all of these requests ended up joining the user to a room.
-            for i in range(4):
+            for _ in range(4):
                 channel = self.make_request("POST", path % room_id, {})
                 self.assertEquals(channel.code, 200)

View File

@@ -38,12 +38,12 @@ class ExtremStatisticsTestCase(HomeserverTestCase):
         last_event = None

         # Make a real event chain
-        for i in range(event_count):
+        for _ in range(event_count):
             ev = self.create_and_send_event(room_id, user, False, last_event)
             last_event = [ev]

         # Sprinkle in some extremities
-        for i in range(extrems):
+        for _ in range(extrems):
             ev = self.create_and_send_event(room_id, user, False, last_event)

         # Let it run for a while, then pull out the statistics from the

View File

@@ -133,7 +133,7 @@ class TestCase(unittest.TestCase):
     def assertObjectHasAttributes(self, attrs, obj):
         """Asserts that the given object has each of the attributes given, and
         that the value of each matches according to assertEquals."""
-        for (key, value) in attrs.items():
+        for key in attrs.keys():
             if not hasattr(obj, key):
                 raise AssertionError("Expected obj to have a '.%s'" % key)
             try:

View File

@@ -303,7 +303,7 @@ def setup_test_homeserver(
             # database for a few more seconds due to flakiness, preventing
             # us from dropping it when the test is over. If we can't drop
             # it, warn and move on.
-            for x in range(5):
+            for _ in range(5):
                 try:
                     cur.execute("DROP DATABASE IF EXISTS %s;" % (test_db,))
                     db_conn.commit()