Mirror of https://mau.dev/maunium/synapse.git (synced 2024-10-01 01:36:05 -04:00)
Bump flake8-bugbear from 21.3.2 to 22.9.23 (#14042)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Erik Johnston <erik@matrix.org>
Co-authored-by: David Robertson <davidr@element.io>
parent 695a85d1bc
commit 0b7830e457
.flake8 (9 lines changed)
@@ -8,4 +8,11 @@
 # E203: whitespace before ':' (which is contrary to pep8?)
 # E731: do not assign a lambda expression, use a def
 # E501: Line too long (black enforces this for us)
-ignore=W503,W504,E203,E731,E501
+#
+# flake8-bugbear runs extra checks. Its error codes are described at
+# https://github.com/PyCQA/flake8-bugbear#list-of-warnings
+# B019: Use of functools.lru_cache or functools.cache on methods can lead to memory leaks
+# B023: Functions defined inside a loop must not use variables redefined in the loop
+# B024: Abstract base class with no abstract method.
+
+ignore=W503,W504,E203,E731,E501,B019,B023,B024
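The B019, B023, and B024 codes added to the ignore list above are checks introduced by newer flake8-bugbear releases. B023 in particular targets the classic late-binding closure bug; a minimal sketch of what it flags, with illustrative names rather than Synapse code:

# Every callback closes over the loop variable, so by the time the callbacks
# run they all see the value from the last iteration.
callbacks = []
for user in ("@a:example.org", "@b:example.org"):
    callbacks.append(lambda: print(user))  # B023: `user` is redefined by the loop

for cb in callbacks:
    cb()  # prints "@b:example.org" twice

# Binding the current value explicitly avoids the surprise.
callbacks = [lambda u=user: print(u) for user in ("@a:example.org", "@b:example.org")]
for cb in callbacks:
    cb()  # prints "@a:example.org" then "@b:example.org"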
changelog.d/14042.misc (new file, 1 line)
@@ -0,0 +1 @@
+Bump flake8-bugbear from 21.3.2 to 22.9.23.
poetry.lock (generated, 8 lines changed)
@@ -260,7 +260,7 @@ pyflakes = ">=2.4.0,<2.5.0"

 [[package]]
 name = "flake8-bugbear"
-version = "21.3.2"
+version = "22.9.23"
 description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle."
 category = "dev"
 optional = false
@@ -271,7 +271,7 @@ attrs = ">=19.2.0"
 flake8 = ">=3.0.0"

 [package.extras]
-dev = ["black", "coverage", "hypothesis", "hypothesmith"]
+dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit"]

 [[package]]
 name = "flake8-comprehensions"
@@ -1826,8 +1826,8 @@ flake8 = [
     {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"},
 ]
 flake8-bugbear = [
-    {file = "flake8-bugbear-21.3.2.tar.gz", hash = "sha256:cadce434ceef96463b45a7c3000f23527c04ea4b531d16c7ac8886051f516ca0"},
-    {file = "flake8_bugbear-21.3.2-py36.py37.py38-none-any.whl", hash = "sha256:5d6ccb0c0676c738a6e066b4d50589c408dcc1c5bf1d73b464b18b73cd6c05c2"},
+    {file = "flake8-bugbear-22.9.23.tar.gz", hash = "sha256:17b9623325e6e0dcdcc80ed9e4aa811287fcc81d7e03313b8736ea5733759937"},
+    {file = "flake8_bugbear-22.9.23-py3-none-any.whl", hash = "sha256:cd2779b2b7ada212d7a322814a1e5651f1868ab0d3f24cc9da66169ab8fda474"},
 ]
 flake8-comprehensions = [
     {file = "flake8-comprehensions-3.8.0.tar.gz", hash = "sha256:8e108707637b1d13734f38e03435984f6b7854fa6b5a4e34f93e69534be8e521"},
@@ -707,8 +707,8 @@ class RoomMemberWorkerStore(EventsWorkerStore):

         # 250 users is pretty arbitrary but the data can be quite large if users
         # are in many rooms.
-        for user_ids in batch_iter(user_ids, 250):
-            all_user_rooms.update(await self._get_rooms_for_users(user_ids))
+        for batch_user_ids in batch_iter(user_ids, 250):
+            all_user_rooms.update(await self._get_rooms_for_users(batch_user_ids))

         return all_user_rooms

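For context, `batch_iter` chunks an iterable into fixed-size batches so that each lookup stays bounded, which is what the comment about 250 users is getting at. A rough sketch of such a helper, assuming the semantics implied here rather than quoting Synapse's actual implementation:

from itertools import islice
from typing import Iterable, Iterator, Tuple, TypeVar

T = TypeVar("T")


def batch_iter(iterable: Iterable[T], size: int) -> Iterator[Tuple[T, ...]]:
    """Yield tuples of at most `size` items until the source is exhausted."""
    it = iter(iterable)
    while True:
        batch = tuple(islice(it, size))
        if not batch:
            return
        yield batch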
@@ -395,8 +395,8 @@ class DeferredCache(Generic[KT, VT]):
         # _pending_deferred_cache.pop should either return a CacheEntry, or, in the
         # case of a TreeCache, a dict of keys to cache entries. Either way calling
         # iterate_tree_cache_entry on it will do the right thing.
-        for entry in iterate_tree_cache_entry(entry):
-            for cb in entry.get_invalidation_callbacks(key):
+        for iter_entry in iterate_tree_cache_entry(entry):
+            for cb in iter_entry.get_invalidation_callbacks(key):
                 cb()

     def invalidate_all(self) -> None:
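The comment in this hunk spells out the contract of `iterate_tree_cache_entry`: given either a single cache entry or, for a TreeCache, a nested dict of entries, it yields the entries one by one. A simplified sketch of that behaviour, an illustration rather than the real implementation:

def iterate_tree_cache_entry(d):
    # TreeCache pops return nested dicts of key -> entry; plain caches return a
    # single entry. Flatten both shapes into one stream of entries.
    if isinstance(d, dict):
        for value in d.values():
            yield from iterate_tree_cache_entry(value)
    else:
        yield d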
@@ -432,7 +432,7 @@ class DeferredCacheListDescriptor(_CacheDescriptorBase):
         num_args = cached_method.num_args

         if num_args != self.num_args:
-            raise Exception(
+            raise TypeError(
                 "Number of args (%s) does not match underlying cache_method_name=%s (%s)."
                 % (self.num_args, self.cached_method_name, num_args)
             )
@@ -17,6 +17,7 @@ from unittest.mock import Mock

 from synapse.api.room_versions import RoomVersions
 from synapse.federation.transport.client import SendJoinParser
+from synapse.util import ExceptionBundle

 from tests.unittest import TestCase

@@ -121,10 +122,8 @@ class SendJoinParserTestCase(TestCase):
         # Send half of the data to the parser
         parser.write(serialisation[: len(serialisation) // 2])

-        # Close the parser. There should be _some_ kind of exception, but it need not
-        # be that RuntimeError directly. E.g. we might want to raise a wrapper
-        # encompassing multiple errors from multiple coroutines.
-        with self.assertRaises(Exception):
+        # Close the parser. There should be _some_ kind of exception.
+        with self.assertRaises(ExceptionBundle):
             parser.finish()

         # In any case, we should have tried to close both coros.
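`ExceptionBundle` (imported in the earlier hunk) is the kind of wrapper the deleted comment anticipated: a single exception carrying the errors from several coroutines. A rough sketch of the idea, not Synapse's actual definition:

from typing import List


class ExceptionBundle(Exception):
    def __init__(self, message: str, exceptions: List[Exception]):
        # Fold every underlying error into one message so a single raise (and a
        # single assertRaises) can surface failures from multiple coroutines.
        parts = [message]
        parts.extend(str(e) for e in exceptions)
        super().__init__("\n  ".join(parts))
        self.exceptions = exceptions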
@@ -1037,5 +1037,5 @@ class CachedListDescriptorTestCase(unittest.TestCase):
         obj = Cls()

         # Make sure this raises an error about the arg mismatch
-        with self.assertRaises(Exception):
+        with self.assertRaises(TypeError):
             obj.list_fn([("foo", "bar")])
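Asserting the specific TypeError (matching the DeferredCacheListDescriptor change above) is deliberately stricter than assertRaises(Exception), which would also pass if the block failed for an unrelated reason. A small hypothetical illustration of the difference:

import unittest


class ArgMismatchTestCase(unittest.TestCase):
    def test_pins_the_exception_type(self) -> None:
        def check_num_args(num_args: int, expected: int) -> None:
            if num_args != expected:
                raise TypeError(
                    "Number of args (%s) does not match (%s)" % (num_args, expected)
                )

        # A broad assertRaises(Exception) would also swallow, say, a NameError
        # from a typo inside the block; pinning TypeError does not.
        with self.assertRaises(TypeError):
            check_num_args(1, 2)


if __name__ == "__main__":
    unittest.main()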