Merge branch 'master' into pushers

David Baker 2014-12-18 15:06:11 +00:00
commit 0a6664493a
13 changed files with 63 additions and 12 deletions

View File

@@ -1,3 +1,24 @@
+Changes in synapse 0.5.4a (2014-12-13)
+======================================
+
+* Fix bug while generating the error message when a file path specified in
+  the config doesn't exist.
+
+Changes in synapse 0.5.4 (2014-12-03)
+=====================================
+
+* Fix presence bug where some rooms did not display presence updates for
+  remote users.
+* Do not log SQL timing log lines when started with "-v"
+* Fix potential memory leak.
+
+Changes in synapse 0.5.3c (2014-12-02)
+======================================
+
+* Change the default value for the `content_addr` option to use the HTTP
+  listener, as by default the HTTPS listener will be using a self-signed
+  certificate.
+
 Changes in synapse 0.5.3 (2014-11-27)
 =====================================

View File

@@ -1 +1 @@
-0.5.3a
+0.5.4a

View File

@@ -16,4 +16,4 @@
 """ This is a reference implementation of a synapse home server.
 """
-__version__ = "0.5.3a"
+__version__ = "0.5.4a"

View File

@@ -44,9 +44,9 @@ class Config(object):
             )
         if not os.path.exists(file_path):
             raise ConfigError(
-                "File % config for %s doesn't exist."
+                "File %s config for %s doesn't exist."
                 " Try running again with --generate-config"
-                % (config_name,)
+                % (file_path, config_name,)
             )
         return cls.abspath(file_path)
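
The fix here is to the error path itself: the old format string had a stray "%" and only one substitution argument, so rendering the message raised its own exception (the 0.5.4a changelog entry above). A self-contained sketch of the corrected check, written as a standalone function rather than Synapse's actual Config classmethod:

    import os

    class ConfigError(Exception):
        pass

    def check_config_file(file_path, config_name):
        # Hypothetical free-standing version of the check; both the missing
        # path and the config key now appear in the message.
        if not os.path.exists(file_path):
            raise ConfigError(
                "File %s config for %s doesn't exist."
                " Try running again with --generate-config"
                % (file_path, config_name,)
            )
        return os.path.abspath(file_path)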

View File

@@ -52,12 +52,18 @@ class LoggingConfig(Config):
         if self.log_config is None:
             level = logging.INFO
+            level_for_storage = logging.INFO
             if self.verbosity:
                 level = logging.DEBUG
+                if self.verbosity > 1:
+                    level_for_storage = logging.DEBUG
             # FIXME: we need a logging.WARN for a -q quiet option
             logger = logging.getLogger('')
             logger.setLevel(level)
+            logging.getLogger('synapse.storage').setLevel(level_for_storage)
             formatter = logging.Formatter(log_format)
             if self.log_file:
                 handler = logging.FileHandler(self.log_file)
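
The extra level only affects the `synapse.storage` logger, which is where the SQL timing lines come from, so those stay at INFO under a single `-v` and only appear with `-vv` (see the 0.5.4 changelog entry above). A minimal standalone sketch of the same verbosity mapping, with made-up log messages:

    import logging

    def setup_logging(verbosity):
        level = logging.INFO
        level_for_storage = logging.INFO
        if verbosity:
            level = logging.DEBUG
            if verbosity > 1:
                level_for_storage = logging.DEBUG

        logging.basicConfig(format="%(name)s %(levelname)s %(message)s")
        logging.getLogger('').setLevel(level)
        # Child loggers under "synapse.storage" inherit this stricter level.
        logging.getLogger('synapse.storage').setLevel(level_for_storage)

    setup_logging(verbosity=1)
    logging.getLogger('synapse.storage').debug("SQL timing ...")   # suppressed at -v
    logging.getLogger('synapse.handlers').debug("handler detail")  # shown at -v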

View File

@@ -35,8 +35,11 @@ class ServerConfig(Config):
         if not args.content_addr:
             host = args.server_name
             if ':' not in host:
-                host = "%s:%d" % (host, args.bind_port)
-            args.content_addr = "https://%s" % (host,)
+                host = "%s:%d" % (host, args.unsecure_port)
+            else:
+                host = host.split(':')[0]
+                host = "%s:%d" % (host, args.unsecure_port)
+            args.content_addr = "http://%s" % (host,)
         self.content_addr = args.content_addr
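
In effect the default `content_addr` now always points at the plain-HTTP listener: any port already present in `server_name` is stripped and replaced with the unsecure port, and the scheme changes from https to http (the 0.5.3c changelog entry above explains why: the HTTPS listener uses a self-signed certificate by default). A small sketch of that derivation with made-up example values:

    def default_content_addr(server_name, unsecure_port):
        # Mirrors the logic above as a plain function (names are illustrative).
        host = server_name
        if ':' not in host:
            host = "%s:%d" % (host, unsecure_port)
        else:
            host = host.split(':')[0]
            host = "%s:%d" % (host, unsecure_port)
        return "http://%s" % (host,)

    print(default_content_addr("example.com", 8008))       # http://example.com:8008
    print(default_content_addr("example.com:8448", 8008))  # http://example.com:8008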

View File

@@ -853,7 +853,10 @@ class _TransactionQueue(object):
             # Ensures we don't continue until all callbacks on that
             # deferred have fired
-            yield deferred
+            try:
+                yield deferred
+            except:
+                pass
             logger.debug("TX [%s] Yielded to callbacks", destination)
@@ -865,7 +868,8 @@ class _TransactionQueue(object):
             logger.exception(e)
             for deferred in deferreds:
-                deferred.errback(e)
+                if not deferred.called:
+                    deferred.errback(e)
         finally:
             # We want to be *very* sure we delete this after we stop processing
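
Both guards deal with Twisted Deferred semantics: waiting on the previous transaction's deferred must not abort the current one if that transaction failed, and calling `errback()` on a deferred that has already fired raises `AlreadyCalledError`. A short illustrative sketch assuming only Twisted itself (the helper names are made up):

    from twisted.internet import defer

    @defer.inlineCallbacks
    def wait_for_previous(deferred):
        # A failure in the previous transaction is not this one's problem;
        # swallow it so processing can continue.
        try:
            yield deferred
        except Exception:
            pass

    def fail_pending(deferreds, failure):
        # Only errback deferreds that have not fired yet, otherwise Twisted
        # raises AlreadyCalledError.
        for d in deferreds:
            if not d.called:
                d.errback(failure)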

View File

@@ -651,11 +651,16 @@ class PresenceHandler(BaseHandler):
             logger.debug("Incoming presence update from %s", user)
             observers = set(self._remote_recvmap.get(user, set()))
+            if observers:
+                logger.debug(" | %d interested local observers %r", len(observers), observers)
             rm_handler = self.homeserver.get_handlers().room_member_handler
             room_ids = yield rm_handler.get_rooms_for_user(user)
+            if room_ids:
+                logger.debug(" | %d interested room IDs %r", len(room_ids), room_ids)
             if not observers and not room_ids:
+                logger.debug(" | no interested observers or room IDs")
                 continue
             state = dict(push)

View File

@@ -519,7 +519,11 @@ class RoomMemberHandler(BaseHandler):
             user_id=user.to_string(), membership_list=membership_list
         )
-        defer.returnValue([r.room_id for r in rooms])
+        # For some reason the list of events contains duplicates
+        # TODO(paul): work out why because I really don't think it should
+        room_ids = set(r.room_id for r in rooms)
+        defer.returnValue(room_ids)
     @defer.inlineCallbacks
     def _do_local_membership_update(self, event, membership, snapshot,
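
The membership query can return the same room more than once (hence the TODO above, and the DISTINCT workaround in the storage hunk further down), so the handler now collapses the result into a set of room IDs before returning it. A trivial illustration with a made-up result set:

    class FakeRow(object):
        def __init__(self, room_id):
            self.room_id = room_id

    rooms = [FakeRow("!abc:example.com"), FakeRow("!abc:example.com"),
             FakeRow("!def:example.com")]

    room_ids = set(r.room_id for r in rooms)
    print(room_ids)  # {'!abc:example.com', '!def:example.com'} - duplicates collapsed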

View File

@@ -327,7 +327,9 @@ class RoomMessageListRestServlet(RestServlet):
     @defer.inlineCallbacks
     def on_GET(self, request, room_id):
         user = yield self.auth.get_user_by_req(request)
-        pagination_config = PaginationConfig.from_request(request)
+        pagination_config = PaginationConfig.from_request(request,
+                                                          default_limit=10,
+                                                          )
         with_feedback = "feedback" in request.args
         handler = self.handlers.message_handler
         msgs = yield handler.get_messages(

View File

@@ -218,7 +218,9 @@ class RoomMemberStore(SQLBaseStore):
             "ON m.event_id = c.event_id "
             "WHERE m.membership = 'join' "
             "AND (%(clause)s) "
-            "GROUP BY m.room_id HAVING COUNT(m.room_id) = ?"
+            # TODO(paul): We've got duplicate rows in the database somewhere
+            # so we have to DISTINCT m.user_id here
+            "GROUP BY m.room_id HAVING COUNT(DISTINCT m.user_id) = ?"
         ) % {"clause": user_list_clause}
         args = list(user_id_list)
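
With duplicate membership rows in the table, `COUNT(m.room_id)` over-counts and the `HAVING ... = ?` comparison against the number of queried users fails, so rooms silently drop out of the result; counting distinct user IDs is immune to the duplicates. A self-contained sqlite3 illustration with a hypothetical, simplified schema:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE room_memberships (room_id TEXT, user_id TEXT)")
    conn.executemany(
        "INSERT INTO room_memberships VALUES (?, ?)",
        [("!r1", "@a"), ("!r1", "@a"),   # duplicate row for @a
         ("!r1", "@b")],
    )

    # Looking for rooms containing exactly the 2 users @a and @b:
    broken = conn.execute(
        "SELECT room_id FROM room_memberships WHERE user_id IN ('@a', '@b') "
        "GROUP BY room_id HAVING COUNT(room_id) = 2"
    ).fetchall()
    fixed = conn.execute(
        "SELECT room_id FROM room_memberships WHERE user_id IN ('@a', '@b') "
        "GROUP BY room_id HAVING COUNT(DISTINCT user_id) = 2"
    ).fetchall()
    print(broken)  # [] - the duplicate @a row makes COUNT(room_id) = 3
    print(fixed)   # [('!r1',)]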

View File

@@ -47,7 +47,8 @@ class PaginationConfig(object):
         self.limit = int(limit) if limit is not None else None
     @classmethod
-    def from_request(cls, request, raise_invalid_params=True):
+    def from_request(cls, request, raise_invalid_params=True,
+                     default_limit=None):
         def get_param(name, default=None):
             lst = request.args.get(name, [])
             if len(lst) > 1:
@@ -84,6 +85,9 @@ class PaginationConfig(object):
         if limit is not None and not limit.isdigit():
             raise SynapseError(400, "'limit' parameter must be an integer.")
+        if limit is None:
+            limit = default_limit
         try:
             return PaginationConfig(from_tok, to_tok, direction, limit)
         except:
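
Together with the `RoomMessageListRestServlet` hunk above, this means an explicit `?limit=` from the client still wins, and the per-endpoint default (10 for the messages list) is only used when the parameter is absent. A minimal sketch of that fallback order, outside the real servlet plumbing:

    def resolve_limit(request_args, default_limit=None):
        # request_args maps parameter names to lists of values, as in Twisted.
        lst = request_args.get("limit", [])
        limit = lst[0] if lst else None
        if limit is not None and not limit.isdigit():
            raise ValueError("'limit' parameter must be an integer.")
        if limit is None:
            limit = default_limit
        return int(limit) if limit is not None else None

    print(resolve_limit({"limit": ["25"]}, default_limit=10))  # 25
    print(resolve_limit({}, default_limit=10))                 # 10
    print(resolve_limit({}))                                   # None (unlimited)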