Fix for structured logging tests stomping on logs (#6023)

Amber Brown 2019-09-13 02:29:55 +10:00 committed by GitHub
parent 3d882a7ba5
commit b617864cd9
13 changed files with 154 additions and 41 deletions

MANIFEST.in

@@ -38,14 +38,16 @@ exclude sytest-blacklist
 include pyproject.toml
 recursive-include changelog.d *
 
+prune .buildkite
+prune .circleci
+prune .codecov.yml
+prune .coveragerc
 prune .github
+prune debian
 prune demo/etc
 prune docker
-prune .circleci
-prune .coveragerc
-prune debian
-prune .codecov.yml
-prune .buildkite
+prune mypy.ini
+prune stubs
 
 exclude jenkins*
 recursive-exclude jenkins *.sh

changelog.d/6023.misc Normal file

@@ -0,0 +1 @@
Fix the structured logging tests stomping on the global log configuration for subsequent tests.
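In short: setup_logging and setup_structured_logging used to hand their observers straight to Twisted's globalLogBeginner and the stdlib handlers, so any test that configured logging left that configuration behind for every test that ran afterwards. The fix threads an injectable LogBeginner through the setup functions and has them return the root observer, so tests can substitute a throwaway object. A minimal sketch of the pattern, assuming a test homeserver hs; make_test_logging is a hypothetical helper and the FakeBeginner body is inferred from the test changes further down:

# Sketch only: FakeBeginner's body is an assumption based on
# tests/logging/test_structured.py later in this diff.
from synapse.config.logger import setup_logging


class FakeBeginner(object):
    """Stands in for twisted.logger.globalLogBeginner so tests never touch it."""

    def beginLoggingTo(self, observers, **kwargs):
        # Remember the observers instead of installing them globally.
        self.observers = observers


def make_test_logging(hs):
    # Hypothetical helper: `hs` is a homeserver built by HomeserverTestCase.
    beginner = FakeBeginner()
    publisher = setup_logging(hs, hs.config, logBeginner=beginner)
    return beginner, publisher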

mypy.ini Normal file

@@ -0,0 +1,54 @@
[mypy]
namespace_packages=True
plugins=mypy_zope:plugin
follow_imports=skip
mypy_path=stubs

[mypy-synapse.config.homeserver]
# this is a mess because of the metaclass shenanigans
ignore_errors = True

[mypy-zope]
ignore_missing_imports = True

[mypy-constantly]
ignore_missing_imports = True

[mypy-twisted.*]
ignore_missing_imports = True

[mypy-treq.*]
ignore_missing_imports = True

[mypy-hyperlink]
ignore_missing_imports = True

[mypy-h11]
ignore_missing_imports = True

[mypy-opentracing]
ignore_missing_imports = True

[mypy-OpenSSL]
ignore_missing_imports = True

[mypy-netaddr]
ignore_missing_imports = True

[mypy-saml2.*]
ignore_missing_imports = True

[mypy-unpaddedbase64]
ignore_missing_imports = True

[mypy-canonicaljson]
ignore_missing_imports = True

[mypy-jaeger_client]
ignore_missing_imports = True

[mypy-jsonschema]
ignore_missing_imports = True

[mypy-signedjson.*]
ignore_missing_imports = True

synapse/config/logger.py

@@ -21,7 +21,12 @@ from string import Template
 import yaml
 
-from twisted.logger import STDLibLogObserver, globalLogBeginner
+from twisted.logger import (
+    ILogObserver,
+    LogBeginner,
+    STDLibLogObserver,
+    globalLogBeginner,
+)
 
 import synapse
 from synapse.app import _base as appbase
@@ -124,7 +129,7 @@ class LoggingConfig(Config):
         log_config_file.write(DEFAULT_LOG_CONFIG.substitute(log_file=log_file))
 
 
-def _setup_stdlib_logging(config, log_config):
+def _setup_stdlib_logging(config, log_config, logBeginner: LogBeginner):
     """
     Set up Python stdlib logging.
     """
@@ -165,12 +170,12 @@ def _setup_stdlib_logging(config, log_config):
             return observer(event)
 
-    globalLogBeginner.beginLoggingTo(
-        [_log], redirectStandardIO=not config.no_redirect_stdio
-    )
+    logBeginner.beginLoggingTo([_log], redirectStandardIO=not config.no_redirect_stdio)
     if not config.no_redirect_stdio:
         print("Redirected stdout/stderr to logs")
 
+    return observer
+
 
 def _reload_stdlib_logging(*args, log_config=None):
     logger = logging.getLogger("")
@@ -181,7 +186,9 @@ def _reload_stdlib_logging(*args, log_config=None):
     logging.config.dictConfig(log_config)
 
 
-def setup_logging(hs, config, use_worker_options=False):
+def setup_logging(
+    hs, config, use_worker_options=False, logBeginner: LogBeginner = globalLogBeginner
+) -> ILogObserver:
     """
     Set up the logging subsystem.
@@ -191,6 +198,12 @@ def setup_logging(hs, config, use_worker_options=False):
         use_worker_options (bool): True to use the 'worker_log_config' option
             instead of 'log_config'.
+
+        logBeginner: The Twisted logBeginner to use.
+
+    Returns:
+        The "root" Twisted Logger observer, suitable for sending logs to from a
+        Logger instance.
     """
 
     log_config = config.worker_log_config if use_worker_options else config.log_config
@@ -210,10 +223,12 @@ def setup_logging(hs, config, use_worker_options=False):
         log_config_body = read_config()
 
     if log_config_body and log_config_body.get("structured") is True:
-        setup_structured_logging(hs, config, log_config_body)
+        logger = setup_structured_logging(
+            hs, config, log_config_body, logBeginner=logBeginner
+        )
         appbase.register_sighup(read_config, callback=reload_structured_logging)
     else:
-        _setup_stdlib_logging(config, log_config_body)
+        logger = _setup_stdlib_logging(config, log_config_body, logBeginner=logBeginner)
         appbase.register_sighup(read_config, callback=_reload_stdlib_logging)
 
     # make sure that the first thing we log is a thing we can grep backwards
@@ -221,3 +236,5 @@ def setup_logging(hs, config, use_worker_options=False):
     logging.warn("***** STARTING SERVER *****")
     logging.warn("Server %s version %s", sys.argv[0], get_version_string(synapse))
     logging.info("Server hostname: %s", config.server_name)
+
+    return logger

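For normal server startup nothing changes here, because logBeginner defaults to globalLogBeginner; the new return value simply exposes the root observer so a caller can bind a Twisted Logger to it directly, as the docstring above describes. A rough usage sketch under those assumptions (start_logging is an illustrative wrapper, not a function in this diff; hs is a configured homeserver):

from twisted.logger import Logger

from synapse.config.logger import setup_logging


def start_logging(hs):
    # Illustrative only: logBeginner is left at its default, globalLogBeginner.
    observer = setup_logging(hs, hs.config)
    # The returned observer can be handed straight to a Logger instance.
    log = Logger(namespace="synapse.app.homeserver", observer=observer)
    log.info("Logging initialised")
    return observer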
synapse/logging/_structured.py

@@ -18,6 +18,7 @@ import os.path
 import sys
 import typing
 import warnings
+from typing import List
 
 import attr
 from constantly import NamedConstant, Names, ValueConstant, Values
@@ -33,7 +34,6 @@ from twisted.logger import (
     LogLevelFilterPredicate,
     LogPublisher,
     eventAsText,
-    globalLogBeginner,
     jsonFileLogObserver,
 )
@@ -134,7 +134,7 @@ class PythonStdlibToTwistedLogger(logging.Handler):
         )
 
 
-def SynapseFileLogObserver(outFile: typing.io.TextIO) -> FileLogObserver:
+def SynapseFileLogObserver(outFile: typing.IO[str]) -> FileLogObserver:
     """
     A log observer that formats events like the traditional log formatter and
     sends them to `outFile`.
@@ -265,7 +265,7 @@
     hs,
     config,
     log_config: dict,
-    logBeginner: LogBeginner = globalLogBeginner,
+    logBeginner: LogBeginner,
     redirect_stdlib_logging: bool = True,
 ) -> LogPublisher:
     """
@@ -286,7 +286,7 @@ def setup_structured_logging(
     if "drains" not in log_config:
         raise ConfigError("The logging configuration requires a list of drains.")
 
-    observers = []
+    observers = []  # type: List[ILogObserver]
 
     for observer in parse_drain_configs(log_config["drains"]):
         # Pipe drains

synapse/logging/_terse_json.py

@@ -21,10 +21,11 @@ import sys
 from collections import deque
 from ipaddress import IPv4Address, IPv6Address, ip_address
 from math import floor
-from typing.io import TextIO
+from typing import IO
 
 import attr
 from simplejson import dumps
+from zope.interface import implementer
 
 from twisted.application.internet import ClientService
 from twisted.internet.endpoints import (
@@ -33,7 +34,7 @@ from twisted.internet.endpoints import (
     TCP6ClientEndpoint,
 )
 from twisted.internet.protocol import Factory, Protocol
-from twisted.logger import FileLogObserver, Logger
+from twisted.logger import FileLogObserver, ILogObserver, Logger
 from twisted.python.failure import Failure
@@ -129,7 +130,7 @@ def flatten_event(event: dict, metadata: dict, include_time: bool = False):
     return new_event
 
 
-def TerseJSONToConsoleLogObserver(outFile: TextIO, metadata: dict) -> FileLogObserver:
+def TerseJSONToConsoleLogObserver(outFile: IO[str], metadata: dict) -> FileLogObserver:
     """
     A log observer that formats events to a flattened JSON representation.
@@ -146,6 +147,7 @@ def TerseJSONToConsoleLogObserver(outFile: TextIO, metadata: dict) -> FileLogObs
 
 
 @attr.s
+@implementer(ILogObserver)
 class TerseJSONToTCPLogObserver(object):
     """
     An IObserver that writes JSON logs to a TCP target.

synapse/logging/opentracing.py

@@ -223,8 +223,8 @@ try:
     from jaeger_client import Config as JaegerConfig
     from synapse.logging.scopecontextmanager import LogContextScopeManager
 except ImportError:
-    JaegerConfig = None
-    LogContextScopeManager = None
+    JaegerConfig = None  # type: ignore
+    LogContextScopeManager = None  # type: ignore
 
 
 logger = logging.getLogger(__name__)

synapse/metrics/__init__.py

@@ -20,6 +20,7 @@ import os
 import platform
 import threading
 import time
+from typing import Dict, Union
 
 import six
@@ -42,9 +43,7 @@ logger = logging.getLogger(__name__)
 METRICS_PREFIX = "/_synapse/metrics"
 
 running_on_pypy = platform.python_implementation() == "PyPy"
-all_metrics = []
-all_collectors = []
-all_gauges = {}
+all_gauges = {}  # type: Dict[str, Union[LaterGauge, InFlightGauge, BucketCollector]]
 
 HAVE_PROC_SELF_STAT = os.path.exists("/proc/self/stat")

synapse/metrics/_exposition.py

@@ -36,7 +36,9 @@ from twisted.web.resource import Resource
 try:
     from prometheus_client.samples import Sample
 except ImportError:
-    Sample = namedtuple("Sample", ["name", "labels", "value", "timestamp", "exemplar"])
+    Sample = namedtuple(
+        "Sample", ["name", "labels", "value", "timestamp", "exemplar"]
+    )  # type: ignore
 
 CONTENT_TYPE_LATEST = str("text/plain; version=0.0.4; charset=utf-8")

synapse/python_dependencies.py

@@ -15,6 +15,7 @@
 # limitations under the License.
 
 import logging
+from typing import Set
 
 from pkg_resources import (
     DistributionNotFound,
@@ -97,7 +98,7 @@ CONDITIONAL_REQUIREMENTS = {
     "jwt": ["pyjwt>=1.6.4"],
 }
 
-ALL_OPTIONAL_REQUIREMENTS = set()
+ALL_OPTIONAL_REQUIREMENTS = set()  # type: Set[str]
 
 for name, optional_deps in CONDITIONAL_REQUIREMENTS.items():
     # Exclude systemd as it's a system-based requirement.
@@ -174,8 +175,8 @@ def check_requirements(for_feature=None):
             pass
 
     if deps_needed:
-        for e in errors:
-            logging.error(e)
+        for err in errors:
+            logging.error(err)
 
         raise DependencyException(deps_needed)

tests/logging/test_structured.py

@@ -13,6 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import logging
 import os
 import os.path
 import shutil
@@ -33,7 +34,20 @@ class FakeBeginner(object):
         self.observers = observers
 
 
-class StructuredLoggingTestCase(HomeserverTestCase):
+class StructuredLoggingTestBase(object):
+    """
+    Test base that registers a cleanup handler to reset the stdlib log handler
+    to 'unset'.
+    """
+
+    def prepare(self, reactor, clock, hs):
+        def _cleanup():
+            logging.getLogger("synapse").setLevel(logging.NOTSET)
+
+        self.addCleanup(_cleanup)
+
+
+class StructuredLoggingTestCase(StructuredLoggingTestBase, HomeserverTestCase):
     """
     Tests for Synapse's structured logging support.
     """
@@ -139,7 +153,9 @@ class StructuredLoggingTestCase(HomeserverTestCase):
         self.assertEqual(logs[0]["request"], "somereq")
 
 
-class StructuredLoggingConfigurationFileTestCase(HomeserverTestCase):
+class StructuredLoggingConfigurationFileTestCase(
+    StructuredLoggingTestBase, HomeserverTestCase
+):
     def make_homeserver(self, reactor, clock):
         tempdir = self.mktemp()
@@ -179,10 +195,11 @@ class StructuredLoggingConfigurationFileTestCase(HomeserverTestCase):
         """
         When a structured logging config is given, Synapse will use it.
         """
-        setup_logging(self.hs, self.hs.config)
+        beginner = FakeBeginner()
+        publisher = setup_logging(self.hs, self.hs.config, logBeginner=beginner)
 
         # Make a logger and send an event
-        logger = Logger(namespace="tests.logging.test_structured")
+        logger = Logger(namespace="tests.logging.test_structured", observer=publisher)
 
         with LoggingContext("testcontext", request="somereq"):
             logger.info("Hello there, {name}!", name="steve")

tests/logging/test_terse_json.py

@@ -23,10 +23,10 @@ from synapse.logging._structured import setup_structured_logging
 from tests.server import connect_client
 from tests.unittest import HomeserverTestCase
 
-from .test_structured import FakeBeginner
+from .test_structured import FakeBeginner, StructuredLoggingTestBase
 
 
-class TerseJSONTCPTestCase(HomeserverTestCase):
+class TerseJSONTCPTestCase(StructuredLoggingTestBase, HomeserverTestCase):
     def test_log_output(self):
         """
         The Terse JSON outputter delivers simplified structured logs over TCP.

tox.ini

@@ -2,6 +2,7 @@
 envlist = packaging, py35, py36, py37, check_codestyle, check_isort
 
 [base]
+basepython = python3.7
 deps =
     mock
     python-subunit
@@ -137,18 +138,35 @@ commands = {toxinidir}/scripts-dev/generate_sample_config --check
 skip_install = True
 deps =
     coverage
+whitelist_externals =
+    bash
 commands=
     coverage combine
     coverage report
 
+[testenv:cov-erase]
+skip_install = True
+deps =
+    coverage
+commands=
+    coverage erase
+
+[testenv:cov-html]
+skip_install = True
+deps =
+    coverage
+commands=
+    coverage html
+
 [testenv:mypy]
-basepython = python3.5
+basepython = python3.7
+skip_install = True
 deps =
     {[base]deps}
     mypy
+    mypy-zope
+    typeshed
+env =
+    MYPYPATH = stubs/
 extras = all
-commands = mypy --ignore-missing-imports \
-            synapse/logging/_structured.py \
-            synapse/logging/_terse_json.py
+commands = mypy --show-traceback \
+            synapse/logging/ \
+            synapse/config/