# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import logging.config
import os
import sys
from string import Template

import yaml

from twisted.logger import STDLibLogObserver, globalLogBeginner

import synapse
from synapse.app import _base as appbase
from synapse.logging._structured import (
    reload_structured_logging,
    setup_structured_logging,
)
from synapse.logging.context import LoggingContextFilter
from synapse.util.versionstring import get_version_string

from ._base import Config


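# Default YAML logging config, used by LoggingConfig.generate_files below to
# write out an initial log config file; ${log_file} is filled in via
# Template.substitute at generation time.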
DEFAULT_LOG_CONFIG = Template(
    """
version: 1

formatters:
    precise:
        format: '%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - \
%(request)s - %(message)s'

filters:
    context:
        (): synapse.logging.context.LoggingContextFilter
        request: ""

handlers:
    file:
        class: logging.handlers.RotatingFileHandler
        formatter: precise
        filename: ${log_file}
        maxBytes: 104857600
        backupCount: 10
        filters: [context]
        encoding: utf8
    console:
        class: logging.StreamHandler
        formatter: precise
        filters: [context]

loggers:
    synapse:
        level: INFO

    synapse.storage.SQL:
        # beware: increasing this to DEBUG will make synapse log sensitive
        # information such as access tokens.
        level: INFO

root:
    level: INFO
    handlers: [file, console]
"""
)


class LoggingConfig(Config):
    def read_config(self, config, **kwargs):
        self.log_config = self.abspath(config.get("log_config"))
        self.no_redirect_stdio = config.get("no_redirect_stdio", False)

    def generate_config_section(self, config_dir_path, server_name, **kwargs):
        log_config = os.path.join(config_dir_path, server_name + ".log.config")
        return (
            """\
        ## Logging ##

        # A yaml python logging config file
        #
        log_config: "%(log_config)s"
        """
            % locals()
        )

    def read_arguments(self, args):
        if args.no_redirect_stdio is not None:
            self.no_redirect_stdio = args.no_redirect_stdio

    @staticmethod
    def add_arguments(parser):
        logging_group = parser.add_argument_group("logging")
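        # default=None (rather than False) lets read_arguments tell whether the
        # flag was actually passed on the command line.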
        logging_group.add_argument(
            "-n",
            "--no-redirect-stdio",
            action="store_true",
            default=None,
            help="Do not redirect stdout/stderr to the log",
        )

    def generate_files(self, config, config_dir_path):
        log_config = config.get("log_config")
        if log_config and not os.path.exists(log_config):
            log_file = self.abspath("homeserver.log")
            print(
                "Generating log config file %s which will log to %s"
                % (log_config, log_file)
            )
            with open(log_config, "w") as log_config_file:
                log_config_file.write(DEFAULT_LOG_CONFIG.substitute(log_file=log_file))


def _setup_stdlib_logging(config, log_config):
    """
    Set up Python stdlib logging.
    """
    if log_config is None:
        log_format = (
            "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s"
            " - %(message)s"
        )

        logger = logging.getLogger("")
        logger.setLevel(logging.INFO)
        logging.getLogger("synapse.storage.SQL").setLevel(logging.INFO)

        formatter = logging.Formatter(log_format)

        handler = logging.StreamHandler()
        handler.setFormatter(formatter)
        handler.addFilter(LoggingContextFilter(request=""))
        logger.addHandler(handler)
    else:
        logging.config.dictConfig(log_config)

    # Route Twisted's native logging through to the standard library logging
    # system.
    observer = STDLibLogObserver()

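    # _log drops a handful of Twisted log lines (DNS datagram/UDP port startup
    # notices and client timeout messages) before forwarding everything else to
    # the stdlib observer.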
    def _log(event):
        if "log_text" in event:
            if event["log_text"].startswith("DNSDatagramProtocol starting on "):
                return

            if event["log_text"].startswith("(UDP Port "):
                return

            if event["log_text"].startswith("Timing out client"):
                return

        return observer(event)

    globalLogBeginner.beginLoggingTo(
        [_log], redirectStandardIO=not config.no_redirect_stdio
    )
    if not config.no_redirect_stdio:
        print("Redirected stdout/stderr to logs")


def _reload_stdlib_logging(*args, log_config=None):
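    """
    Reload the stdlib logging configuration; registered in setup_logging as the
    callback invoked when the log config is re-read on SIGHUP.
    """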
    logger = logging.getLogger("")

    if not log_config:
        logger.warning("Reloaded a blank config?")

    logging.config.dictConfig(log_config)


def setup_logging(hs, config, use_worker_options=False):
    """
    Set up the logging subsystem.

    Args:
        hs (synapse.server.HomeServer): the homeserver instance

        config (LoggingConfig | synapse.config.workers.WorkerConfig):
            configuration data

        use_worker_options (bool): True to use the 'worker_log_config' option
            instead of 'log_config'.
    """
    log_config = config.worker_log_config if use_worker_options else config.log_config

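    # read_config (below) re-reads the YAML log config from disk and hands it
    # to the supplied callback; it is also registered as the SIGHUP handler so
    # the config can be reloaded at runtime.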
    def read_config(*args, callback=None):
        if log_config is None:
            return None

        with open(log_config, "rb") as f:
            log_config_body = yaml.safe_load(f.read())

        if callback:
            callback(log_config=log_config_body)
            logging.info("Reloaded log config from %s due to SIGHUP", log_config)

        return log_config_body

    log_config_body = read_config()

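    # A top-level `structured: true` key in the log config selects the
    # structured logging pipeline; anything else falls back to stdlib logging.
    # Either way the config is re-read on SIGHUP.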
    if log_config_body and log_config_body.get("structured") is True:
        setup_structured_logging(hs, config, log_config_body)
        appbase.register_sighup(read_config, callback=reload_structured_logging)
    else:
        _setup_stdlib_logging(config, log_config_body)
        appbase.register_sighup(read_config, callback=_reload_stdlib_logging)

    # make sure that the first thing we log is something we can grep backwards
    # for
    logging.warning("***** STARTING SERVER *****")
    logging.warning("Server %s version %s", sys.argv[0], get_version_string(synapse))
    logging.info("Server hostname: %s", config.server_name)