Merge branch 'develop' of github.com:matrix-org/synapse into erikj/dictionary_cache
commit 7dec0b2bee
@@ -657,6 +657,7 @@ def run(hs):
 
     if hs.config.daemonize:
 
-        print hs.config.pid_file
+        if hs.config.print_pidfile:
+            print hs.config.pid_file
 
         daemon = Daemonize(
@@ -138,6 +138,11 @@ class Config(object):
             action="store_true",
             help="Generate a config file for the server name"
         )
+        config_parser.add_argument(
+            "--generate-keys",
+            action="store_true",
+            help="Generate any missing key files then exit"
+        )
         config_parser.add_argument(
             "-H", "--server-name",
             help="The server name to generate a config file for"
@@ -230,4 +235,8 @@ class Config(object):
 
         obj.invoke_all("read_arguments", args)
 
+        if config_args.generate_keys:
+            obj.invoke_all("generate_files", config)
+            sys.exit(0)
+
         return obj
@@ -24,6 +24,7 @@ class ServerConfig(Config):
         self.web_client = config["web_client"]
         self.soft_file_limit = config["soft_file_limit"]
         self.daemonize = config.get("daemonize")
+        self.print_pidfile = config.get("print_pidfile")
         self.use_frozen_dicts = config.get("use_frozen_dicts", True)
 
         self.listeners = config.get("listeners", [])
@@ -208,12 +209,18 @@ class ServerConfig(Config):
         self.manhole = args.manhole
         if args.daemonize is not None:
             self.daemonize = args.daemonize
+        if args.print_pidfile is not None:
+            self.print_pidfile = args.print_pidfile
 
     def add_arguments(self, parser):
         server_group = parser.add_argument_group("server")
         server_group.add_argument("-D", "--daemonize", action='store_true',
                                   default=None,
                                   help="Daemonize the home server")
+        server_group.add_argument("--print-pidfile", action='store_true',
+                                  default=None,
+                                  help="Print the path to the pidfile just"
+                                  " before daemonizing")
         server_group.add_argument("--manhole", metavar="PORT", dest="manhole",
                                   type=int,
                                   help="Turn on the twisted telnet manhole"
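
Worth noting in the argument definitions above: pairing action='store_true' with default=None lets read_arguments tell "flag not given" (None) apart from "flag given" (True), so a command-line flag only overrides the value loaded from the config file when it was actually passed. A minimal standalone sketch of that pattern (the config dict below is a stand-in for the parsed config file, not Synapse's real Config object):

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("-D", "--daemonize", action="store_true", default=None)
    parser.add_argument("--print-pidfile", action="store_true", default=None)

    # Values as read from the config file.
    config = {"daemonize": False, "print_pidfile": False}

    args = parser.parse_args(["--print-pidfile"])
    if args.daemonize is not None:       # flag absent -> None -> keep config value
        config["daemonize"] = args.daemonize
    if args.print_pidfile is not None:   # flag present -> True -> override config value
        config["print_pidfile"] = args.print_pidfile

    print(config)  # daemonize stays False, print_pidfile becomes True
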
@@ -354,6 +354,11 @@ def _upgrade_existing_database(cur, current_version, applied_delta_files,
                 )
                 logger.debug("Running script %s", relative_path)
                 module.run_upgrade(cur, database_engine)
+            elif ext == ".pyc":
+                # Sometimes .pyc files turn up anyway even though we've
+                # disabled their generation; e.g. from distribution package
+                # installers. Silently skip it
+                pass
             elif ext == ".sql":
                 # A plain old .sql file, just read and execute it
                 logger.debug("Applying schema %s", relative_path)
@@ -186,8 +186,8 @@ class CacheDescriptor(object):
             try:
                 cached_result_d = self.cache.get(keyargs)
 
+                observer = cached_result_d.observe()
                 if DEBUG_CACHES:
-
                     @defer.inlineCallbacks
                     def check_result(cached_result):
                         actual_result = yield self.function_to_call(obj, *args, **kwargs)
@@ -198,9 +198,10 @@ class CacheDescriptor(object):
                                 cached_result, actual_result,
                             )
                             raise ValueError("Stale cache entry")
-                    cached_result_d.observe().addCallback(check_result)
+                        defer.returnValue(cached_result)
+                    observer.addCallback(check_result)
 
-                return cached_result_d.observe()
+                return observer
             except KeyError:
                 # Get the sequence number of the cache before reading from the
                 # database so that we can tell if the cache is invalidated
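
For context on the two cache hunks above: the change takes a single observation of the cached deferred, hangs the optional DEBUG_CACHES staleness check off that observation, re-emits the cached value from the check so later callbacks still receive it, and returns the same observation to the caller instead of calling .observe() twice. A minimal sketch of that shape using a plain Twisted Deferred in place of an observation of Synapse's cached entry (names and structure here are illustrative, not the actual CacheDescriptor code):

    from twisted.internet import defer

    DEBUG_CACHES = True

    def wrap_cached(cached_result_d, recompute):
        # 'cached_result_d' stands in for one observation of the cache entry,
        # i.e. what cached_result_d.observe() returns in the real code.
        observer = cached_result_d

        if DEBUG_CACHES:
            @defer.inlineCallbacks
            def check_result(cached_result):
                actual_result = yield recompute()
                if actual_result != cached_result:
                    raise ValueError("Stale cache entry")
                # Pass the cached value through so the caller still sees it.
                defer.returnValue(cached_result)

            observer.addCallback(check_result)

        return observer

    # e.g. wrap_cached(defer.succeed(3), lambda: defer.succeed(3)) fires with 3;
    # a mismatching recompute() makes the returned deferred errback instead.
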