Fix for structured logging tests stomping on logs (#6023)

Amber Brown 2019-09-13 02:29:55 +10:00 committed by GitHub
parent 3d882a7ba5
commit b617864cd9
13 changed files with 154 additions and 41 deletions

MANIFEST.in

@@ -38,14 +38,16 @@ exclude sytest-blacklist
include pyproject.toml
recursive-include changelog.d *
prune .buildkite
prune .circleci
prune .codecov.yml
prune .coveragerc
prune .github
prune debian
prune demo/etc
prune docker
prune .circleci
prune .coveragerc
prune debian
prune .codecov.yml
prune .buildkite
prune mypy.ini
prune stubs
exclude jenkins*
recursive-exclude jenkins *.sh

changelog.d/6023.misc (new file)

@@ -0,0 +1 @@
Fix the structured logging tests stomping on the global log configuration for subsequent tests.
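
The substance of the fix: `setup_logging` now accepts a `logBeginner` argument (defaulting to Twisted's `globalLogBeginner`) and returns the observer it installs, so tests can hand in a throwaway beginner instead of touching the global log configuration. A minimal sketch of the pattern, assuming a homeserver object `hs` from the test harness and a stand-in beginner mirroring the `FakeBeginner` helper in the tests below:

    from twisted.logger import Logger

    from synapse.config.logger import setup_logging


    class FakeBeginner(object):
        """Stand-in for twisted.logger.globalLogBeginner: it records the
        observers it is given instead of installing them globally."""

        def beginLoggingTo(self, observers, **kwargs):
            self.observers = observers


    def start_isolated_logging(hs):
        """Hypothetical helper: hs is a homeserver from the test harness."""
        beginner = FakeBeginner()
        publisher = setup_logging(hs, hs.config, logBeginner=beginner)

        # Nothing was attached to the global Twisted log observer and
        # stdout/stderr were not redirected; events can still be sent
        # straight to the returned observer.
        logger = Logger(namespace="tests.logging.sketch", observer=publisher)
        logger.info("Hello there, {name}!", name="steve")
        return beginner, publisher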

mypy.ini (new file)

@@ -0,0 +1,54 @@
[mypy]
namespace_packages=True
plugins=mypy_zope:plugin
follow_imports=skip
mypy_path=stubs
[mypy-synapse.config.homeserver]
# this is a mess because of the metaclass shenanigans
ignore_errors = True
[mypy-zope]
ignore_missing_imports = True
[mypy-constantly]
ignore_missing_imports = True
[mypy-twisted.*]
ignore_missing_imports = True
[mypy-treq.*]
ignore_missing_imports = True
[mypy-hyperlink]
ignore_missing_imports = True
[mypy-h11]
ignore_missing_imports = True
[mypy-opentracing]
ignore_missing_imports = True
[mypy-OpenSSL]
ignore_missing_imports = True
[mypy-netaddr]
ignore_missing_imports = True
[mypy-saml2.*]
ignore_missing_imports = True
[mypy-unpaddedbase64]
ignore_missing_imports = True
[mypy-canonicaljson]
ignore_missing_imports = True
[mypy-jaeger_client]
ignore_missing_imports = True
[mypy-jsonschema]
ignore_missing_imports = True
[mypy-signedjson.*]
ignore_missing_imports = True
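
Besides the per-package `ignore_missing_imports` sections, the config points mypy at a local stubs directory via `mypy_path=stubs` (the directory is also pruned from the sdist in MANIFEST.in above). A dependency that ships no type hints can be given a minimal `.pyi` stub there instead of a blanket ignore; a hypothetical example:

    # stubs/examplelib.pyi -- hypothetical stub, just enough surface for the checker.
    from typing import Any

    def do_something(value: Any) -> str: ...

    class Client:
        def __init__(self, host: str, port: int) -> None: ...
        def send(self, payload: bytes) -> None: ...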

synapse/config/logger.py

@@ -21,7 +21,12 @@ from string import Template
import yaml
from twisted.logger import STDLibLogObserver, globalLogBeginner
from twisted.logger import (
ILogObserver,
LogBeginner,
STDLibLogObserver,
globalLogBeginner,
)
import synapse
from synapse.app import _base as appbase
@@ -124,7 +129,7 @@ class LoggingConfig(Config):
log_config_file.write(DEFAULT_LOG_CONFIG.substitute(log_file=log_file))
def _setup_stdlib_logging(config, log_config):
def _setup_stdlib_logging(config, log_config, logBeginner: LogBeginner):
"""
Set up Python stdlib logging.
"""
@@ -165,12 +170,12 @@ def _setup_stdlib_logging(config, log_config):
return observer(event)
globalLogBeginner.beginLoggingTo(
[_log], redirectStandardIO=not config.no_redirect_stdio
)
logBeginner.beginLoggingTo([_log], redirectStandardIO=not config.no_redirect_stdio)
if not config.no_redirect_stdio:
print("Redirected stdout/stderr to logs")
return observer
def _reload_stdlib_logging(*args, log_config=None):
logger = logging.getLogger("")
@@ -181,7 +186,9 @@ def _reload_stdlib_logging(*args, log_config=None):
logging.config.dictConfig(log_config)
def setup_logging(hs, config, use_worker_options=False):
def setup_logging(
hs, config, use_worker_options=False, logBeginner: LogBeginner = globalLogBeginner
) -> ILogObserver:
"""
Set up the logging subsystem.
@@ -191,6 +198,12 @@ def setup_logging(hs, config, use_worker_options=False):
use_worker_options (bool): True to use the 'worker_log_config' option
instead of 'log_config'.
logBeginner: The Twisted logBeginner to use.
Returns:
The "root" Twisted Logger observer, suitable for sending logs to from a
Logger instance.
"""
log_config = config.worker_log_config if use_worker_options else config.log_config
@@ -210,10 +223,12 @@ def setup_logging(hs, config, use_worker_options=False):
log_config_body = read_config()
if log_config_body and log_config_body.get("structured") is True:
setup_structured_logging(hs, config, log_config_body)
logger = setup_structured_logging(
hs, config, log_config_body, logBeginner=logBeginner
)
appbase.register_sighup(read_config, callback=reload_structured_logging)
else:
_setup_stdlib_logging(config, log_config_body)
logger = _setup_stdlib_logging(config, log_config_body, logBeginner=logBeginner)
appbase.register_sighup(read_config, callback=_reload_stdlib_logging)
# make sure that the first thing we log is a thing we can grep backwards
@@ -221,3 +236,5 @@ def setup_logging(hs, config, use_worker_options=False):
logging.warn("***** STARTING SERVER *****")
logging.warn("Server %s version %s", sys.argv[0], get_version_string(synapse))
logging.info("Server hostname: %s", config.server_name)
return logger

synapse/logging/_structured.py

@@ -18,6 +18,7 @@ import os.path
import sys
import typing
import warnings
from typing import List
import attr
from constantly import NamedConstant, Names, ValueConstant, Values
@@ -33,7 +34,6 @@ from twisted.logger import (
LogLevelFilterPredicate,
LogPublisher,
eventAsText,
globalLogBeginner,
jsonFileLogObserver,
)
@@ -134,7 +134,7 @@ class PythonStdlibToTwistedLogger(logging.Handler):
)
def SynapseFileLogObserver(outFile: typing.io.TextIO) -> FileLogObserver:
def SynapseFileLogObserver(outFile: typing.IO[str]) -> FileLogObserver:
"""
A log observer that formats events like the traditional log formatter and
sends them to `outFile`.
@@ -265,7 +265,7 @@ def setup_structured_logging(
hs,
config,
log_config: dict,
logBeginner: LogBeginner = globalLogBeginner,
logBeginner: LogBeginner,
redirect_stdlib_logging: bool = True,
) -> LogPublisher:
"""
@@ -286,7 +286,7 @@ def setup_structured_logging(
if "drains" not in log_config:
raise ConfigError("The logging configuration requires a list of drains.")
observers = []
observers = [] # type: List[ILogObserver]
for observer in parse_drain_configs(log_config["drains"]):
# Pipe drains

synapse/logging/_terse_json.py

@@ -21,10 +21,11 @@ import sys
from collections import deque
from ipaddress import IPv4Address, IPv6Address, ip_address
from math import floor
from typing.io import TextIO
from typing import IO
import attr
from simplejson import dumps
from zope.interface import implementer
from twisted.application.internet import ClientService
from twisted.internet.endpoints import (
@@ -33,7 +34,7 @@ from twisted.internet.endpoints import (
TCP6ClientEndpoint,
)
from twisted.internet.protocol import Factory, Protocol
from twisted.logger import FileLogObserver, Logger
from twisted.logger import FileLogObserver, ILogObserver, Logger
from twisted.python.failure import Failure
@@ -129,7 +130,7 @@ def flatten_event(event: dict, metadata: dict, include_time: bool = False):
return new_event
def TerseJSONToConsoleLogObserver(outFile: TextIO, metadata: dict) -> FileLogObserver:
def TerseJSONToConsoleLogObserver(outFile: IO[str], metadata: dict) -> FileLogObserver:
"""
A log observer that formats events to a flattened JSON representation.
@@ -146,6 +147,7 @@ def TerseJSONToConsoleLogObserver(outFile: TextIO, metadata: dict) -> FileLogObs
@attr.s
@implementer(ILogObserver)
class TerseJSONToTCPLogObserver(object):
"""
An IObserver that writes JSON logs to a TCP target.
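
The `@implementer(ILogObserver)` declaration added above is what lets the mypy-zope plugin from the new mypy.ini check this class against Twisted's observer interface; an ILogObserver is essentially a callable that takes a single log event dictionary. A minimal sketch of the same pattern with a hypothetical observer:

    from typing import Any, Dict

    from twisted.logger import ILogObserver
    from zope.interface import implementer


    @implementer(ILogObserver)
    class PrintingLogObserver(object):
        """Hypothetical observer: satisfies ILogObserver by being callable
        with one log event dict."""

        def __call__(self, event: Dict[str, Any]) -> None:
            print(event.get("log_namespace"), event.get("log_format"))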

synapse/logging/opentracing.py

@@ -223,8 +223,8 @@ try:
from jaeger_client import Config as JaegerConfig
from synapse.logging.scopecontextmanager import LogContextScopeManager
except ImportError:
JaegerConfig = None
LogContextScopeManager = None
JaegerConfig = None # type: ignore
LogContextScopeManager = None # type: ignore
logger = logging.getLogger(__name__)
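
The `# type: ignore` comments on the fallback branch silence mypy, which otherwise rejects rebinding an imported name to `None`; the same idiom appears for `Sample` in the metrics exposition change below. The shape of the idiom:

    try:
        # Optional dependency: only importable when the relevant extra is installed.
        from jaeger_client import Config as JaegerConfig
    except ImportError:
        # The rebinding to None is deliberate; the ignore stops mypy
        # flagging the incompatible assignment.
        JaegerConfig = None  # type: ignore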

synapse/metrics/__init__.py

@@ -20,6 +20,7 @@ import os
import platform
import threading
import time
from typing import Dict, Union
import six
@@ -42,9 +43,7 @@ logger = logging.getLogger(__name__)
METRICS_PREFIX = "/_synapse/metrics"
running_on_pypy = platform.python_implementation() == "PyPy"
all_metrics = []
all_collectors = []
all_gauges = {}
all_gauges = {} # type: Dict[str, Union[LaterGauge, InFlightGauge, BucketCollector]]
HAVE_PROC_SELF_STAT = os.path.exists("/proc/self/stat")
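
Annotating the empty `all_gauges` dict tells mypy what it is allowed to hold; a bare `= {}` gives the checker nothing to infer from, and mypy typically asks for a type annotation in that case. The comment form is used because the tox envlist still includes Python 3.5, which predates PEP 526 variable annotations. A small illustration with hypothetical names:

    from typing import Dict, List

    # Comment-style annotations type empty containers without PEP 526 syntax.
    registered_gauges = {}  # type: Dict[str, object]
    pending_metric_names = []  # type: List[str]

    registered_gauges["up"] = object()
    pending_metric_names.append("synapse_build_info")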

synapse/metrics/_exposition.py

@@ -36,7 +36,9 @@ from twisted.web.resource import Resource
try:
from prometheus_client.samples import Sample
except ImportError:
Sample = namedtuple("Sample", ["name", "labels", "value", "timestamp", "exemplar"])
Sample = namedtuple(
"Sample", ["name", "labels", "value", "timestamp", "exemplar"]
) # type: ignore
CONTENT_TYPE_LATEST = str("text/plain; version=0.0.4; charset=utf-8")

synapse/python_dependencies.py

@@ -15,6 +15,7 @@
# limitations under the License.
import logging
from typing import Set
from pkg_resources import (
DistributionNotFound,
@@ -97,7 +98,7 @@ CONDITIONAL_REQUIREMENTS = {
"jwt": ["pyjwt>=1.6.4"],
}
ALL_OPTIONAL_REQUIREMENTS = set()
ALL_OPTIONAL_REQUIREMENTS = set() # type: Set[str]
for name, optional_deps in CONDITIONAL_REQUIREMENTS.items():
# Exclude systemd as it's a system-based requirement.
@@ -174,8 +175,8 @@ def check_requirements(for_feature=None):
pass
if deps_needed:
for e in errors:
logging.error(e)
for err in errors:
logging.error(err)
raise DependencyException(deps_needed)

tests/logging/test_structured.py

@@ -13,6 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import os.path
import shutil
@@ -33,7 +34,20 @@ class FakeBeginner(object):
self.observers = observers
class StructuredLoggingTestCase(HomeserverTestCase):
class StructuredLoggingTestBase(object):
"""
Test base that registers a cleanup handler to reset the stdlib log handler
to 'unset'.
"""
def prepare(self, reactor, clock, hs):
def _cleanup():
logging.getLogger("synapse").setLevel(logging.NOTSET)
self.addCleanup(_cleanup)
class StructuredLoggingTestCase(StructuredLoggingTestBase, HomeserverTestCase):
"""
Tests for Synapse's structured logging support.
"""
@@ -139,7 +153,9 @@ class StructuredLoggingTestCase(HomeserverTestCase):
self.assertEqual(logs[0]["request"], "somereq")
class StructuredLoggingConfigurationFileTestCase(HomeserverTestCase):
class StructuredLoggingConfigurationFileTestCase(
StructuredLoggingTestBase, HomeserverTestCase
):
def make_homeserver(self, reactor, clock):
tempdir = self.mktemp()
@@ -179,10 +195,11 @@ class StructuredLoggingConfigurationFileTestCase(HomeserverTestCase):
"""
When a structured logging config is given, Synapse will use it.
"""
setup_logging(self.hs, self.hs.config)
beginner = FakeBeginner()
publisher = setup_logging(self.hs, self.hs.config, logBeginner=beginner)
# Make a logger and send an event
logger = Logger(namespace="tests.logging.test_structured")
logger = Logger(namespace="tests.logging.test_structured", observer=publisher)
with LoggingContext("testcontext", request="somereq"):
logger.info("Hello there, {name}!", name="steve")

tests/logging/test_terse_json.py

@@ -23,10 +23,10 @@ from synapse.logging._structured import setup_structured_logging
from tests.server import connect_client
from tests.unittest import HomeserverTestCase
from .test_structured import FakeBeginner
from .test_structured import FakeBeginner, StructuredLoggingTestBase
class TerseJSONTCPTestCase(HomeserverTestCase):
class TerseJSONTCPTestCase(StructuredLoggingTestBase, HomeserverTestCase):
def test_log_output(self):
"""
The Terse JSON outputter delivers simplified structured logs over TCP.
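
Mixing in StructuredLoggingTestBase gives the TCP test the same log-level cleanup; the test itself already stays off the global log by handing a FakeBeginner to setup_structured_logging. A rough sketch of that wiring (the drain options shown are an assumption, not taken from this diff):

    from twisted.logger import Logger

    from synapse.logging._structured import setup_structured_logging

    from tests.unittest import HomeserverTestCase

    from .test_structured import FakeBeginner, StructuredLoggingTestBase


    class TerseJSONWiringSketch(StructuredLoggingTestBase, HomeserverTestCase):
        def test_wiring(self):
            # Hypothetical drain config: a terse-JSON-over-TCP drain.
            log_config = {
                "drains": {
                    "tersejson": {
                        "type": "network_json_terse",
                        "host": "127.0.0.1",
                        "port": 8000,
                    }
                }
            }
            beginner = FakeBeginner()
            publisher = setup_structured_logging(
                self.hs, self.hs.config, log_config, logBeginner=beginner
            )
            # Events reach the fake beginner's observers, not the global log.
            logger = Logger(namespace="tests.logging.sketch", observer=publisher)
            logger.info("Hello!")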

tox.ini

@@ -2,6 +2,7 @@
envlist = packaging, py35, py36, py37, check_codestyle, check_isort
[base]
basepython = python3.7
deps =
mock
python-subunit
@@ -137,18 +138,35 @@ commands = {toxinidir}/scripts-dev/generate_sample_config --check
skip_install = True
deps =
coverage
whitelist_externals =
bash
commands=
coverage combine
coverage report
[testenv:cov-erase]
skip_install = True
deps =
coverage
commands=
coverage erase
[testenv:cov-html]
skip_install = True
deps =
coverage
commands=
coverage html
[testenv:mypy]
basepython = python3.5
basepython = python3.7
skip_install = True
deps =
{[base]deps}
mypy
mypy-zope
typeshed
env =
MYPYPATH = stubs/
extras = all
commands = mypy --ignore-missing-imports \
synapse/logging/_structured.py \
synapse/logging/_terse_json.py
commands = mypy --show-traceback \
synapse/logging/ \
synapse/config/