Update black, and run auto formatting over the codebase (#9381)

- Update black version to the latest
 - Run black auto formatting over the codebase
    - Run autoformatting according to [`docs/code_style.md`](80d6dc9783/docs/code_style.md)
 - Update `code_style.md` docs around installing black to use the correct version
This commit is contained in:
Eric Eastwood 2021-02-16 16:32:34 -06:00 committed by GitHub
parent 5636e597c3
commit 0a00b7ff14
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
271 changed files with 2802 additions and 1713 deletions

View file

@ -252,8 +252,7 @@ class Linearizer:
self.key_to_defer = {} # type: Dict[Hashable, _LinearizerEntry]
def is_queued(self, key: Hashable) -> bool:
"""Checks whether there is a process queued up waiting
"""
"""Checks whether there is a process queued up waiting"""
entry = self.key_to_defer.get(key)
if not entry:
# No entry so nothing is waiting.
@ -452,7 +451,9 @@ R = TypeVar("R")
def timeout_deferred(
deferred: defer.Deferred, timeout: float, reactor: IReactorTime,
deferred: defer.Deferred,
timeout: float,
reactor: IReactorTime,
) -> defer.Deferred:
"""The in built twisted `Deferred.addTimeout` fails to time out deferreds
that have a canceller that throws exceptions. This method creates a new
@ -529,8 +530,7 @@ def timeout_deferred(
@attr.s(slots=True, frozen=True)
class DoneAwaitable:
"""Simple awaitable that returns the provided value.
"""
"""Simple awaitable that returns the provided value."""
value = attr.ib()
@ -545,8 +545,7 @@ class DoneAwaitable:
def maybe_awaitable(value: Union[Awaitable[R], R]) -> Awaitable[R]:
"""Convert a value to an awaitable if not already an awaitable.
"""
"""Convert a value to an awaitable if not already an awaitable."""
if inspect.isawaitable(value):
assert isinstance(value, Awaitable)
return value

View file

@ -149,8 +149,7 @@ KNOWN_KEYS = {
def intern_string(string):
"""Takes a (potentially) unicode string and interns it if it's ascii
"""
"""Takes a (potentially) unicode string and interns it if it's ascii"""
if string is None:
return None
@ -161,8 +160,7 @@ def intern_string(string):
def intern_dict(dictionary):
"""Takes a dictionary and interns well known keys and their values
"""
"""Takes a dictionary and interns well known keys and their values"""
return {
KNOWN_KEYS.get(key, key): _intern_known_values(key, value)
for key, value in dictionary.items()

View file

@ -122,7 +122,8 @@ class _LruCachedFunction(Generic[F]):
def lru_cache(
max_entries: int = 1000, cache_context: bool = False,
max_entries: int = 1000,
cache_context: bool = False,
) -> Callable[[F], _LruCachedFunction[F]]:
"""A method decorator that applies a memoizing cache around the function.
@ -156,7 +157,9 @@ def lru_cache(
def func(orig: F) -> _LruCachedFunction[F]:
desc = LruCacheDescriptor(
orig, max_entries=max_entries, cache_context=cache_context,
orig,
max_entries=max_entries,
cache_context=cache_context,
)
return cast(_LruCachedFunction[F], desc)
@ -170,14 +173,18 @@ class LruCacheDescriptor(_CacheDescriptorBase):
sentinel = object()
def __init__(
self, orig, max_entries: int = 1000, cache_context: bool = False,
self,
orig,
max_entries: int = 1000,
cache_context: bool = False,
):
super().__init__(orig, num_args=None, cache_context=cache_context)
self.max_entries = max_entries
def __get__(self, obj, owner):
cache = LruCache(
cache_name=self.orig.__name__, max_size=self.max_entries,
cache_name=self.orig.__name__,
max_size=self.max_entries,
) # type: LruCache[CacheKey, Any]
get_cache_key = self.cache_key_builder
@ -212,7 +219,7 @@ class LruCacheDescriptor(_CacheDescriptorBase):
class DeferredCacheDescriptor(_CacheDescriptorBase):
""" A method decorator that applies a memoizing cache around the function.
"""A method decorator that applies a memoizing cache around the function.
This caches deferreds, rather than the results themselves. Deferreds that
fail are removed from the cache.

View file

@ -84,8 +84,7 @@ class StreamChangeCache:
return False
def has_entity_changed(self, entity: EntityType, stream_pos: int) -> bool:
"""Returns True if the entity may have been updated since stream_pos
"""
"""Returns True if the entity may have been updated since stream_pos"""
assert isinstance(stream_pos, int)
if stream_pos < self._earliest_known_stream_pos:
@ -133,8 +132,7 @@ class StreamChangeCache:
return result
def has_any_entity_changed(self, stream_pos: int) -> bool:
"""Returns if any entity has changed
"""
"""Returns if any entity has changed"""
assert type(stream_pos) is int
if not self._cache:

View file

@ -108,7 +108,10 @@ class Signal:
return await maybe_awaitable(observer(*args, **kwargs))
except Exception as e:
logger.warning(
"%s signal observer %s failed: %r", self.name, observer, e,
"%s signal observer %s failed: %r",
self.name,
observer,
e,
)
deferreds = [run_in_background(do, o) for o in self.observers]

View file

@ -83,15 +83,13 @@ class BackgroundFileConsumer:
self._producer.resumeProducing()
def unregisterProducer(self):
"""Part of IProducer interface
"""
"""Part of IProducer interface"""
self._producer = None
if not self._finished_deferred.called:
self._bytes_queue.put_nowait(None)
def write(self, bytes):
"""Part of IProducer interface
"""
"""Part of IProducer interface"""
if self._write_exception:
raise self._write_exception
@ -107,8 +105,7 @@ class BackgroundFileConsumer:
self._producer.pauseProducing()
def _writer(self):
"""This is run in a background thread to write to the file.
"""
"""This is run in a background thread to write to the file."""
try:
while self._producer or not self._bytes_queue.empty():
# If we've paused the producer check if we should resume the
@ -135,13 +132,11 @@ class BackgroundFileConsumer:
self._file_obj.close()
def wait(self):
"""Returns a deferred that resolves when finished writing to file
"""
"""Returns a deferred that resolves when finished writing to file"""
return make_deferred_yieldable(self._finished_deferred)
def _resume_paused_producer(self):
"""Gets called if we should resume producing after being paused
"""
"""Gets called if we should resume producing after being paused"""
if self._paused_producer and self._producer:
self._paused_producer = False
self._producer.resumeProducing()

View file

@ -62,7 +62,8 @@ def chunk_seq(iseq: ISeq, maxlen: int) -> Iterable[ISeq]:
def sorted_topologically(
nodes: Iterable[T], graph: Mapping[T, Collection[T]],
nodes: Iterable[T],
graph: Mapping[T, Collection[T]],
) -> Generator[T, None, None]:
"""Given a set of nodes and a graph, yield the nodes in toplogical order.

View file

@ -15,7 +15,7 @@
class JsonEncodedObject:
""" A common base class for defining protocol units that are represented
"""A common base class for defining protocol units that are represented
as JSON.
Attributes:
@ -39,7 +39,7 @@ class JsonEncodedObject:
"""
def __init__(self, **kwargs):
""" Takes the dict of `kwargs` and loads all keys that are *valid*
"""Takes the dict of `kwargs` and loads all keys that are *valid*
(i.e., are included in the `valid_keys` list) into the dictionary`
instance variable.
@ -61,7 +61,7 @@ class JsonEncodedObject:
self.unrecognized_keys[k] = v
def get_dict(self):
""" Converts this protocol unit into a :py:class:`dict`, ready to be
"""Converts this protocol unit into a :py:class:`dict`, ready to be
encoded as JSON.
The keys it encodes are: `valid_keys` - `internal_keys`

View file

@ -161,8 +161,7 @@ class Measure:
return self._logging_context.get_resource_usage()
def _update_in_flight(self, metrics):
"""Gets called when processing in flight metrics
"""
"""Gets called when processing in flight metrics"""
duration = self.clock.time() - self.start
metrics.real_time_max = max(metrics.real_time_max, duration)

View file

@ -25,7 +25,7 @@ from synapse.config._util import json_error_to_config_error
def load_module(provider: dict, config_path: Iterable[str]) -> Tuple[Type, Any]:
""" Loads a synapse module with its config
"""Loads a synapse module with its config
Args:
provider: a dict with keys 'module' (the module name) and 'config'

View file

@ -204,16 +204,13 @@ def _check_yield_points(f: Callable, changes: List[str]):
# We don't raise here as its perfectly valid for contexts to
# change in a function, as long as it sets the correct context
# on resolving (which is checked separately).
err = (
"%s changed context from %s to %s, happened between lines %d and %d in %s"
% (
frame.f_code.co_name,
expected_context,
current_context(),
last_yield_line_no,
frame.f_lineno,
frame.f_code.co_filename,
)
err = "%s changed context from %s to %s, happened between lines %d and %d in %s" % (
frame.f_code.co_name,
expected_context,
current_context(),
last_yield_line_no,
frame.f_lineno,
frame.f_code.co_filename,
)
changes.append(err)