Fix pep8 codestyle warnings

Mark Haines 2014-11-20 17:26:36 +00:00
parent dfdda2c871
commit db9ce032a4
21 changed files with 71 additions and 47 deletions
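Most of the changes fall into a handful of recurring PEP 8 fixes: missing whitespace around operators (E225), lines over 79 characters split by wrapping call arguments or implicitly concatenated string literals (E501), and missing blank lines before top-level and nested definitions (E302, E306). A minimal before/after sketch of those rules on a toy module (illustrative only, not code from this commit):

    # E225: put spaces around '=' and '==' outside of keyword arguments.
    CONFIGFILE = "homeserver.yaml"      # was: CONFIGFILE="homeserver.yaml"

    # E501: keep lines under 79 characters by wrapping the call arguments.
    def room_alias_error(room_alias):
        raise ValueError(
            "Room alias \"%s\" not found" % (room_alias,)
        )

    # E302/E306: two blank lines before a top-level def, one blank line
    # before a nested def.
    def outer():
        marker = object()

        def inner():
            return marker

        return inner()

The warning codes come from the `pep8` checker (later renamed `pycodestyle`), typically run over the tree as `pep8 synapse/`.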

View File

@@ -272,7 +272,7 @@ class Auth(object):
        key = (RoomCreateEvent.TYPE, "", )
        create_event = event.old_state_events.get(key)
        if (create_event is not None and
                create_event.content["creator"] == user_id):
            return 100
        return level

View File

@@ -84,4 +84,4 @@ class EventValidator(object):
                    template[key][0]
                )
            if msg:
                return msg

View File

@@ -257,13 +257,16 @@ def setup():
    else:
        reactor.run()
+
def run():
    with LoggingContext("run"):
        reactor.run()
+
def main():
    with LoggingContext("main"):
        setup()
+
if __name__ == '__main__':
    main()

View File

@@ -21,11 +21,12 @@ import signal
SYNAPSE = ["python", "-m", "synapse.app.homeserver"]
-CONFIGFILE="homeserver.yaml"
-PIDFILE="homeserver.pid"
+CONFIGFILE = "homeserver.yaml"
+PIDFILE = "homeserver.pid"
-GREEN="\x1b[1;32m"
-NORMAL="\x1b[m"
+GREEN = "\x1b[1;32m"
+NORMAL = "\x1b[m"
+
def start():
    if not os.path.exists(CONFIGFILE):
@@ -43,12 +44,14 @@ def start():
    subprocess.check_call(args)
    print GREEN + "started" + NORMAL
+
def stop():
    if os.path.exists(PIDFILE):
        pid = int(open(PIDFILE).read())
        os.kill(pid, signal.SIGTERM)
        print GREEN + "stopped" + NORMAL
+
def main():
    action = sys.argv[1] if sys.argv[1:] else "usage"
    if action == "start":
@@ -62,5 +65,6 @@ def main():
        sys.stderr.write("Usage: %s [start|stop|restart]\n" % (sys.argv[0],))
        sys.exit(1)
+
-if __name__=='__main__':
+if __name__ == "__main__":
    main()

View File

@@ -427,7 +427,9 @@ class ReplicationLayer(object):
        time_now = self._clock.time_msec()
        defer.returnValue((200, {
            "state": [p.get_pdu_json(time_now) for p in res_pdus["state"]],
-            "auth_chain": [p.get_pdu_json(time_now) for p in res_pdus["auth_chain"]],
+            "auth_chain": [
+                p.get_pdu_json(time_now) for p in res_pdus["auth_chain"]
+            ],
        }))

    @defer.inlineCallbacks
@@ -438,7 +440,9 @@ class ReplicationLayer(object):
            (
                200,
                {
-                    "auth_chain": [a.get_pdu_json(time_now) for a in auth_pdus],
+                    "auth_chain": [
+                        a.get_pdu_json(time_now) for a in auth_pdus
+                    ],
                }
            )
        )
@@ -459,7 +463,7 @@ class ReplicationLayer(object):
    @defer.inlineCallbacks
    def send_join(self, destination, pdu):
        time_now = self._clock.time_msec()
        _, content = yield self.transport_layer.send_join(
            destination,
            pdu.room_id,

View File

@@ -25,7 +25,6 @@ import logging
logger = logging.getLogger(__name__)
-
class Edu(JsonEncodedObject):
    """ An Edu represents a piece of data sent from one homeserver to another.

View File

@@ -128,8 +128,9 @@ class DirectoryHandler(BaseHandler):
                "servers": result.servers,
            })
        else:
-            raise SynapseError(404, "Room alias \"%s\" not found" % (room_alias,))
+            raise SynapseError(
+                404, "Room alias \"%s\" not found" % (room_alias,)
+            )

    @defer.inlineCallbacks
    def send_room_alias_update_event(self, user_id, room_id):

View File

@@ -122,7 +122,8 @@ class FederationHandler(BaseHandler):
                    event.origin, redacted_pdu_json
                )
            except SynapseError as e:
-                logger.warn("Signature check failed for %s redacted to %s",
+                logger.warn(
+                    "Signature check failed for %s redacted to %s",
                    encode_canonical_json(pdu.get_pdu_json()),
                    encode_canonical_json(redacted_pdu_json),
                )
@@ -390,7 +391,8 @@ class FederationHandler(BaseHandler):
        event.outlier = False
-        is_new_state = yield self.state_handler.annotate_event_with_state(event)
+        state_handler = self.state_handler
+        is_new_state = yield state_handler.annotate_event_with_state(event)
        self.auth.check(event, raises=True)
        # FIXME (erikj): All this is duplicated above :(

View File

@@ -298,7 +298,7 @@ class MessageHandler(BaseHandler):
    @defer.inlineCallbacks
    def room_initial_sync(self, user_id, room_id, pagin_config=None,
                          feedback=False):
        yield self.auth.check_joined_room(room_id, user_id)
        # TODO(paul): I wish I was called with user objects not user_id
@@ -342,8 +342,8 @@ class MessageHandler(BaseHandler):
                )
                presence.append(member_presence)
            except Exception:
-                logger.exception("Failed to get member presence of %r",
-                    m.user_id
+                logger.exception(
+                    "Failed to get member presence of %r", m.user_id
                )
        defer.returnValue({

View File

@@ -178,7 +178,9 @@ class RoomCreationHandler(BaseHandler):
        if room_alias:
            result["room_alias"] = room_alias.to_string()
-            yield directory_handler.send_room_alias_update_event(user_id, room_id)
+            yield directory_handler.send_room_alias_update_event(
+                user_id, room_id
+            )

        defer.returnValue(result)
@@ -211,7 +213,6 @@ class RoomCreationHandler(BaseHandler):
            **event_keys
        )
-
        power_levels_event = self.event_factory.create_event(
            etype=RoomPowerLevelsEvent.TYPE,
            content={

View File

@@ -131,12 +131,14 @@ class ContentRepoResource(resource.Resource):
        request.setHeader('Content-Type', content_type)
        # cache for at least a day.
-        # XXX: we might want to turn this off for data we don't want to recommend
-        # caching as it's sensitive or private - or at least select private.
-        # don't bother setting Expires as all our matrix clients are smart enough to
-        # be happy with Cache-Control (right?)
-        request.setHeader('Cache-Control', 'public,max-age=86400,s-maxage=86400')
+        # XXX: we might want to turn this off for data we don't want to
+        # recommend caching as it's sensitive or private - or at least
+        # select private. don't bother setting Expires as all our matrix
+        # clients are smart enough to be happy with Cache-Control (right?)
+        request.setHeader(
+            "Cache-Control", "public,max-age=86400,s-maxage=86400"
+        )
        d = FileSender().beginFileTransfer(f, request)
        # after the file has been sent, clean up and finish the request

View File

@@ -138,8 +138,7 @@ class JsonResource(HttpServer, resource.Resource):
                )
            except CodeMessageException as e:
                if isinstance(e, SynapseError):
-                    logger.info("%s SynapseError: %s - %s", request, e.code,
-                                e.msg)
+                    logger.info("%s SynapseError: %s - %s", request, e.code, e.msg)
                else:
                    logger.exception(e)
                self._send_response(

View File

@@ -214,6 +214,7 @@ class Notifier(object):
            timeout,
            deferred,
        )
+
        def _timeout_listener():
            # TODO (erikj): We should probably set to_token to the current
            # max rather than reusing from_token.

View File

@@ -26,7 +26,6 @@ import logging
logger = logging.getLogger(__name__)
-
class EventStreamRestServlet(RestServlet):
    PATTERN = client_path_pattern("/events$")

View File

@@ -91,6 +91,7 @@ class SQLBaseStore(object):
    def runInteraction(self, desc, func, *args, **kwargs):
        """Wraps the .runInteraction() method on the underlying db_pool."""
        current_context = LoggingContext.current_context()
+
        def inner_func(txn, *args, **kwargs):
            with LoggingContext("runInteraction") as context:
                current_context.copy_to(context)

View File

@@ -75,7 +75,9 @@ class RegistrationStore(SQLBaseStore):
                "VALUES (?,?,?)",
                [user_id, password_hash, now])
        except IntegrityError:
-            raise StoreError(400, "User ID already taken.", errcode=Codes.USER_IN_USE)
+            raise StoreError(
+                400, "User ID already taken.", errcode=Codes.USER_IN_USE
+            )

        # it's possible for this to get a conflict, but only for a single user
        # since tokens are namespaced based on their user ID
@@ -83,8 +85,8 @@ class RegistrationStore(SQLBaseStore):
                "VALUES (?,?)", [txn.lastrowid, token])

    def get_user_by_id(self, user_id):
-        query = ("SELECT users.name, users.password_hash FROM users "
-                 "WHERE users.name = ?")
+        query = ("SELECT users.name, users.password_hash FROM users"
+                 " WHERE users.name = ?")
        return self._execute(
            self.cursor_to_dict,
            query, user_id
@@ -120,10 +122,10 @@ class RegistrationStore(SQLBaseStore):
    def _query_for_auth(self, txn, token):
        sql = (
-            "SELECT users.name, users.admin, access_tokens.device_id "
-            "FROM users "
-            "INNER JOIN access_tokens on users.id = access_tokens.user_id "
-            "WHERE token = ?"
+            "SELECT users.name, users.admin, access_tokens.device_id"
+            " FROM users"
+            " INNER JOIN access_tokens on users.id = access_tokens.user_id"
+            " WHERE token = ?"
        )
        cursor = txn.execute(sql, (token,))
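A side note on the SQL literals above: the separating space moves from the end of each fragment to the start of the next, presumably to keep the join between adjacent string literals visible where it matters. With implicit concatenation it is easy to lose the separator entirely and silently build invalid SQL. A quick standalone illustration (plain Python, not Synapse code):

    # Adjacent string literals concatenate with no separator, so the space
    # must live inside one of the fragments.
    broken = (
        "SELECT users.name FROM users"
        "WHERE users.name = ?"
    )
    fixed = (
        "SELECT users.name FROM users"
        " WHERE users.name = ?"
    )
    assert broken == "SELECT users.name FROM usersWHERE users.name = ?"
    assert fixed == "SELECT users.name FROM users WHERE users.name = ?"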

View File

@@ -27,7 +27,9 @@ import logging
logger = logging.getLogger(__name__)

-OpsLevel = collections.namedtuple("OpsLevel", ("ban_level", "kick_level", "redact_level"))
+OpsLevel = collections.namedtuple("OpsLevel", (
+    "ban_level", "kick_level", "redact_level")
+)

class RoomStore(SQLBaseStore):

View File

@@ -36,7 +36,7 @@ class SignatureStore(SQLBaseStore):
        return dict(txn.fetchall())

    def _store_event_content_hash_txn(self, txn, event_id, algorithm,
                                      hash_bytes):
        """Store a hash for a Event
        Args:
            txn (cursor):
@@ -84,7 +84,7 @@ class SignatureStore(SQLBaseStore):
        return dict(txn.fetchall())

    def _store_event_reference_hash_txn(self, txn, event_id, algorithm,
                                        hash_bytes):
        """Store a hash for a PDU
        Args:
            txn (cursor):
@@ -127,7 +127,7 @@ class SignatureStore(SQLBaseStore):
        return res

    def _store_event_signature_txn(self, txn, event_id, signature_name, key_id,
                                   signature_bytes):
        """Store a signature from the origin server for a PDU.
        Args:
            txn (cursor):
@@ -169,7 +169,7 @@ class SignatureStore(SQLBaseStore):
        return results

    def _store_prev_event_hash_txn(self, txn, event_id, prev_event_id,
                                   algorithm, hash_bytes):
        self._simple_insert_txn(
            txn,
            "event_edge_hashes",
@@ -180,4 +180,4 @@ class SignatureStore(SQLBaseStore):
                "hash": buffer(hash_bytes),
            },
            or_ignore=True,
        )

View File

@@ -213,8 +213,8 @@ class StreamStore(SQLBaseStore):
        # Tokens really represent positions between elements, but we use
        # the convention of pointing to the event before the gap. Hence
        # we have a bit of asymmetry when it comes to equalities.
-        from_comp = '<=' if direction =='b' else '>'
-        to_comp = '>' if direction =='b' else '<='
+        from_comp = '<=' if direction == 'b' else '>'
+        to_comp = '>' if direction == 'b' else '<='
        order = "DESC" if direction == 'b' else "ASC"

        args = [room_id]
@@ -235,9 +235,10 @@ class StreamStore(SQLBaseStore):
        )

        sql = (
-            "SELECT *, (%(redacted)s) AS redacted FROM events "
-            "WHERE outlier = 0 AND room_id = ? AND %(bounds)s "
-            "ORDER BY topological_ordering %(order)s, stream_ordering %(order)s %(limit)s "
+            "SELECT *, (%(redacted)s) AS redacted FROM events"
+            " WHERE outlier = 0 AND room_id = ? AND %(bounds)s"
+            " ORDER BY topological_ordering %(order)s,"
+            " stream_ordering %(order)s %(limit)s"
        ) % {
            "redacted": del_sql,
            "bounds": bounds,

View File

@@ -37,6 +37,7 @@ class Clock(object):
    def call_later(self, delay, callback):
        current_context = LoggingContext.current_context()
+
        def wrapped_callback():
            LoggingContext.thread_local.current_context = current_context
            callback()
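The blank line added before `wrapped_callback` is the only PEP 8 change in this hunk, but the surrounding code shows the pattern the commit keeps touching: capture the current logging context when work is scheduled and restore it when the callback eventually runs. A generic sketch of that pattern using a plain `threading.local`, not Synapse's actual `LoggingContext` API:

    import threading

    _state = threading.local()


    def wrap_with_current_context(callback):
        # Capture whatever context is current at scheduling time.
        captured = getattr(_state, "current_context", None)

        def wrapped_callback():
            # Restore the captured context just before the callback runs on a
            # later reactor tick, so work done then is attributed correctly.
            _state.current_context = captured
            callback()

        return wrapped_callback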

View File

@@ -18,6 +18,7 @@ from twisted.internet import defer, reactor
from .logcontext import PreserveLoggingContext
+
@defer.inlineCallbacks
def sleep(seconds):
    d = defer.Deferred()
@@ -25,6 +26,7 @@ def sleep(seconds):
    with PreserveLoggingContext():
        yield d
+
def run_on_reactor():
    """ This will cause the rest of the function to be invoked upon the next
    iteration of the main loop