Erik Johnston 2014-11-10 13:46:44 +00:00
parent c46088405a
commit 5d439b127b
8 changed files with 22 additions and 20 deletions


@@ -369,7 +369,6 @@ class Auth(object):
         ]
         event.auth_events = zip(auth_events, hashes)
 
-
     @log_function
     def _can_send_event(self, event):
         key = (RoomPowerLevelsEvent.TYPE, "", )


@@ -153,6 +153,7 @@ class RoomPowerLevelsEvent(SynapseStateEvent):
     def get_content_template(self):
         return {}
 
+
 class RoomAliasesEvent(SynapseStateEvent):
     TYPE = "m.room.aliases"


@@ -549,7 +549,6 @@ class ReplicationLayer(object):
             origin, pdu.room_id, pdu.event_id,
         )
 
-
         if not backfilled:
             ret = yield self.handler.on_receive_pdu(
                 pdu,


@@ -284,7 +284,7 @@ class TransportLayer(object):
         origin = None
 
         if request.method == "PUT":
-            #TODO: Handle other method types? other content types?
+            # TODO: Handle other method types? other content types?
             try:
                 content_bytes = request.content.read()
                 content = json.loads(content_bytes)
@@ -296,11 +296,13 @@ class TransportLayer(object):
            try:
                params = auth.split(" ")[1].split(",")
                param_dict = dict(kv.split("=") for kv in params)
+
                def strip_quotes(value):
                    if value.startswith("\""):
                        return value[1:-1]
                    else:
                        return value
+
                origin = strip_quotes(param_dict["origin"])
                key = strip_quotes(param_dict["key"])
                sig = strip_quotes(param_dict["sig"])
@@ -321,7 +323,7 @@ class TransportLayer(object):
             if auth.startswith("X-Matrix"):
                 (origin, key, sig) = parse_auth_header(auth)
                 json_request["origin"] = origin
-                json_request["signatures"].setdefault(origin,{})[key] = sig
+                json_request["signatures"].setdefault(origin, {})[key] = sig
 
         if not json_request["signatures"]:
             raise SynapseError(
@@ -515,7 +517,8 @@ class TransportLayer(object):
             return
 
         try:
-            code, response = yield self.received_handler.on_incoming_transaction(
+            handler = self.received_handler
+            code, response = yield handler.on_incoming_transaction(
                 transaction_data
             )
         except:

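The TransportLayer hunks above only reflow the request-authentication code, but they show how an incoming federation request's X-Matrix Authorization header is pulled apart into origin, key and sig before being folded into the request's signatures dict. A standalone sketch of that parsing, assuming a header shaped like the one the code expects (the example header value and the top-level function here are illustrative, not Synapse's API):

    def parse_auth_header(header):
        # "X-Matrix origin=...,key=...,sig=..." -> drop the scheme, then
        # turn the comma-separated key=value pairs into a dict.
        params = header.split(" ")[1].split(",")
        param_dict = dict(kv.split("=") for kv in params)

        def strip_quotes(value):
            # Values may arrive with or without surrounding double quotes.
            if value.startswith("\""):
                return value[1:-1]
            else:
                return value

        origin = strip_quotes(param_dict["origin"])
        key = strip_quotes(param_dict["key"])
        sig = strip_quotes(param_dict["sig"])
        return origin, key, sig

    origin, key, sig = parse_auth_header(
        'X-Matrix origin=example.org,key="ed25519:1",sig="dGVzdA"'
    )
    signatures = {}
    signatures.setdefault(origin, {})[key] = sig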

@@ -192,7 +192,9 @@ class Transaction(JsonEncodedObject):
         transaction_id and origin_server_ts keys.
         """
         if "origin_server_ts" not in kwargs:
-            raise KeyError("Require 'origin_server_ts' to construct a Transaction")
+            raise KeyError(
+                "Require 'origin_server_ts' to construct a Transaction"
+            )
         if "transaction_id" not in kwargs:
             raise KeyError(
                 "Require 'transaction_id' to construct a Transaction"
@@ -204,6 +206,3 @@ class Transaction(JsonEncodedObject):
         kwargs["pdus"] = [p.get_dict() for p in pdus]
 
         return Transaction(**kwargs)
-
-
-

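The rewrapped KeyError above belongs to the guard that refuses to build a Transaction unless the caller supplied both origin_server_ts and transaction_id. A minimal sketch of that validate-then-construct pattern (the plain function and dict stand in for the real Transaction/JsonEncodedObject classes):

    def create_new(**kwargs):
        # Fail fast if a required field is missing, as in the hunk above.
        if "origin_server_ts" not in kwargs:
            raise KeyError(
                "Require 'origin_server_ts' to construct a Transaction"
            )
        if "transaction_id" not in kwargs:
            raise KeyError(
                "Require 'transaction_id' to construct a Transaction"
            )
        return dict(kwargs)  # stand-in for Transaction(**kwargs)

    txn = create_new(
        transaction_id="1416000000000",
        origin_server_ts=1415629604000,
        origin="example.org",
        pdus=[],
    )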

@@ -521,6 +521,9 @@ class FederationHandler(BaseHandler):
 
     @log_function
     def _on_user_joined(self, user, room_id):
-        waiters = self.waiting_for_join_list.get((user.to_string(), room_id), [])
+        waiters = self.waiting_for_join_list.get(
+            (user.to_string(), room_id),
+            []
+        )
         while waiters:
             waiters.pop().callback(None)

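_on_user_joined, reformatted above, drains a list of deferreds that were parked under a (user, room_id) key while a remote join was in flight. A rough sketch of that waiter pattern with Twisted deferreds (only the waiting_for_join_list name comes from the diff; the helper functions are illustrative):

    from twisted.internet import defer

    waiting_for_join_list = {}

    def wait_for_join(user_id, room_id):
        # Park a deferred until the join for this user/room completes.
        d = defer.Deferred()
        waiting_for_join_list.setdefault((user_id, room_id), []).append(d)
        return d

    def on_user_joined(user_id, room_id):
        # Fire (and discard) every waiter registered for this user/room.
        waiters = waiting_for_join_list.get((user_id, room_id), [])
        while waiters:
            waiters.pop().callback(None)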

@@ -494,11 +494,13 @@ def prepare_database(db_conn):
         user_version = row[0]
 
         if user_version > SCHEMA_VERSION:
-            raise ValueError("Cannot use this database as it is too " +
+            raise ValueError(
+                "Cannot use this database as it is too " +
                 "new for the server to understand"
             )
         elif user_version < SCHEMA_VERSION:
-            logging.info("Upgrading database from version %d",
+            logging.info(
+                "Upgrading database from version %d",
                 user_version
             )
 
@@ -520,4 +522,3 @@ def prepare_database(db_conn):
         c.execute("PRAGMA user_version = %d" % SCHEMA_VERSION)
 
     c.close()
-

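The prepare_database hunks only rewrap the ValueError and logging.info calls; the check itself compares SQLite's PRAGMA user_version against the server's SCHEMA_VERSION, refusing to open a database written by a newer schema and upgrading an older one. A bare-bones sketch of that check against a plain sqlite3 connection (the SCHEMA_VERSION value and the upgrade step are placeholders):

    import sqlite3

    SCHEMA_VERSION = 9  # placeholder, not the real value

    def prepare_database(db_conn):
        c = db_conn.cursor()
        c.execute("PRAGMA user_version")
        user_version = c.fetchone()[0]

        if user_version > SCHEMA_VERSION:
            raise ValueError(
                "Cannot use this database as it is too "
                "new for the server to understand"
            )
        elif user_version < SCHEMA_VERSION:
            # Apply schema/delta scripts here, then record the new version.
            c.execute("PRAGMA user_version = %d" % SCHEMA_VERSION)

        db_conn.commit()
        c.close()

    prepare_database(sqlite3.connect(":memory:"))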

@@ -215,7 +215,7 @@ class EventFederationStore(SQLBaseStore):
         min_depth = self._simple_select_one_onecol_txn(
             txn,
             table="room_depth",
-            keyvalues={"room_id": room_id,},
+            keyvalues={"room_id": room_id},
             retcol="min_depth",
             allow_none=True,
         )
@@ -267,10 +267,8 @@ class EventFederationStore(SQLBaseStore):
             }
         )
 
-        # We only insert as a forward extremity the new pdu if there are
-        # no other pdus that reference it as a prev pdu
+        # We only insert as a forward extremity the new pdu if there are no
+        # other pdus that reference it as a prev pdu
         query = (
             "INSERT OR IGNORE INTO %(table)s (event_id, room_id) "
             "SELECT ?, ? WHERE NOT EXISTS ("
@@ -312,7 +310,6 @@ class EventFederationStore(SQLBaseStore):
         )
         txn.execute(query)
 
-
     def get_backfill_events(self, room_id, event_list, limit):
         """Get a list of Events for a given topic that occured before (and
         including) the pdus in pdu_list. Return a list of max size `limit`.
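The comment rewrapped in the -267 hunk describes the forward-extremity bookkeeping: a newly persisted event only becomes a forward extremity if no event already stored lists it as a prev event. The query is cut off in the view above; the following is only a rough reconstruction of the INSERT ... SELECT ... WHERE NOT EXISTS pattern against made-up table and column names, not the real Synapse schema:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.executescript("""
        CREATE TABLE event_forward_extremities (event_id TEXT, room_id TEXT);
        CREATE TABLE event_edges (event_id TEXT, prev_event_id TEXT);
    """)

    def insert_forward_extremity(txn, event_id, room_id):
        # Only record the event as a forward extremity if nothing stored
        # already references it as a prev event.
        txn.execute(
            "INSERT OR IGNORE INTO event_forward_extremities (event_id, room_id) "
            "SELECT ?, ? WHERE NOT EXISTS ("
            "  SELECT 1 FROM event_edges WHERE prev_event_id = ?"
            ")",
            (event_id, room_id, event_id),
        )

    insert_forward_extremity(conn.cursor(), "$abc:example.org", "!room:example.org")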