""" Access and control log capturing. """
from __future__ import absolute_import, division, print_function

import logging
from contextlib import closing, contextmanager
import re

import six

from _pytest.config import create_terminal_writer
import pytest
import py


DEFAULT_LOG_FORMAT = '%(filename)-25s %(lineno)4d %(levelname)-8s %(message)s'
DEFAULT_LOG_DATE_FORMAT = '%H:%M:%S'
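
# For reference, DEFAULT_LOG_FORMAT renders a record roughly like this
# (filename padded to 25 columns, line number to 4, level name to 8):
#
#     test_foo.py                 42 WARNING  a warning message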


class ColoredLevelFormatter(logging.Formatter):
    """
    Colorize the %(levelname)..s part of the log format passed to __init__.
    """

    LOGLEVEL_COLOROPTS = {
        logging.CRITICAL: {'red'},
        logging.ERROR: {'red', 'bold'},
        logging.WARNING: {'yellow'},
        logging.WARN: {'yellow'},
        logging.INFO: {'green'},
        logging.DEBUG: {'purple'},
        logging.NOTSET: set(),
    }
    LEVELNAME_FMT_REGEX = re.compile(r'%\(levelname\)([+-]?\d*s)')

    def __init__(self, terminalwriter, *args, **kwargs):
        super(ColoredLevelFormatter, self).__init__(*args, **kwargs)
        if six.PY2:
            self._original_fmt = self._fmt
        else:
            self._original_fmt = self._style._fmt
        self._level_to_fmt_mapping = {}

        levelname_fmt_match = self.LEVELNAME_FMT_REGEX.search(self._fmt)
        if not levelname_fmt_match:
            return
        levelname_fmt = levelname_fmt_match.group()

        for level, color_opts in self.LOGLEVEL_COLOROPTS.items():
            formatted_levelname = levelname_fmt % {
                'levelname': logging.getLevelName(level)}

            # add ANSI escape sequences around the formatted levelname
            color_kwargs = {name: True for name in color_opts}
            colorized_formatted_levelname = terminalwriter.markup(
                formatted_levelname, **color_kwargs)
            self._level_to_fmt_mapping[level] = self.LEVELNAME_FMT_REGEX.sub(
                colorized_formatted_levelname,
                self._fmt)

    def format(self, record):
        fmt = self._level_to_fmt_mapping.get(record.levelno, self._original_fmt)
        if six.PY2:
            self._fmt = fmt
        else:
            self._style._fmt = fmt
        return super(ColoredLevelFormatter, self).format(record)
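
# A minimal usage sketch for ColoredLevelFormatter outside of pytest itself
# (illustrative only; within the plugin the terminal writer is obtained via
# create_terminal_writer(config) in _setup_cli_logging below):
#
#     import logging
#     import py
#
#     tw = py.io.TerminalWriter()
#     handler = logging.StreamHandler()
#     handler.setFormatter(ColoredLevelFormatter(tw, '%(levelname)-8s %(message)s'))
#     logging.getLogger().addHandler(handler)
#     logging.getLogger().error('shown with a colorized level name')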


def get_option_ini(config, *names):
    for name in names:
        ret = config.getoption(name)  # 'default' arg won't work as expected
        if ret is None:
            ret = config.getini(name)
        if ret:
            return ret
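
# For example, get_option_ini(config, 'log_cli_format', 'log_format') returns
# the --log-cli-format/log_cli_format value when one is set and otherwise
# falls back to --log-format/log_format.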


def pytest_addoption(parser):
    """Add options to control log capturing."""
    group = parser.getgroup('logging')

    def add_option_ini(option, dest, default=None, type=None, **kwargs):
        parser.addini(dest, default=default, type=type,
                      help='default value for ' + option)
        group.addoption(option, dest=dest, **kwargs)

    add_option_ini(
        '--no-print-logs',
        dest='log_print', action='store_const', const=False, default=True,
        type='bool',
        help='disable printing caught logs on failed tests.')
    add_option_ini(
        '--log-level',
        dest='log_level', default=None,
        help='logging level used by the logging module')
    add_option_ini(
        '--log-format',
        dest='log_format', default=DEFAULT_LOG_FORMAT,
        help='log format as used by the logging module.')
    add_option_ini(
        '--log-date-format',
        dest='log_date_format', default=DEFAULT_LOG_DATE_FORMAT,
        help='log date format as used by the logging module.')
    parser.addini(
        'log_cli', default=False, type='bool',
        help='enable log display during test run (also known as "live logging").')
    add_option_ini(
        '--log-cli-level',
        dest='log_cli_level', default=None,
        help='cli logging level.')
    add_option_ini(
        '--log-cli-format',
        dest='log_cli_format', default=None,
        help='log format as used by the logging module.')
    add_option_ini(
        '--log-cli-date-format',
        dest='log_cli_date_format', default=None,
        help='log date format as used by the logging module.')
    add_option_ini(
        '--log-file',
        dest='log_file', default=None,
        help='path to a file to which logging will be written.')
    add_option_ini(
        '--log-file-level',
        dest='log_file_level', default=None,
        help='log file logging level.')
    add_option_ini(
        '--log-file-format',
        dest='log_file_format', default=DEFAULT_LOG_FORMAT,
        help='log format as used by the logging module.')
    add_option_ini(
        '--log-file-date-format',
        dest='log_file_date_format', default=DEFAULT_LOG_DATE_FORMAT,
        help='log date format as used by the logging module.')
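
# The ini options registered above can be given defaults in the project
# configuration file; for instance (values purely illustrative):
#
#     [pytest]
#     log_cli = true
#     log_cli_level = INFO
#     log_level = WARNING
#     log_file = pytest-run.log
#     log_file_level = DEBUG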


@contextmanager
def catching_logs(handler, formatter=None, level=None):
    """Context manager that prepares the whole logging machinery properly."""
    root_logger = logging.getLogger()

    if formatter is not None:
        handler.setFormatter(formatter)
    if level is not None:
        handler.setLevel(level)

    # Adding the same handler twice would confuse the logging system.
    # Just don't do that.
    add_new_handler = handler not in root_logger.handlers

    if add_new_handler:
        root_logger.addHandler(handler)
    if level is not None:
        orig_level = root_logger.level
        root_logger.setLevel(min(orig_level, level))
    try:
        yield handler
    finally:
        if level is not None:
            root_logger.setLevel(orig_level)
        if add_new_handler:
            root_logger.removeHandler(handler)
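
# A standalone usage sketch (the plugin itself always goes through this
# context manager; the handler and level here are only examples):
#
#     with catching_logs(LogCaptureHandler(), level=logging.INFO) as handler:
#         logging.getLogger(__name__).info('captured')
#     print(handler.stream.getvalue())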


class LogCaptureHandler(logging.StreamHandler):
    """A logging handler that stores log records and the log text."""

    def __init__(self):
        """Creates a new log handler."""
        logging.StreamHandler.__init__(self, py.io.TextIO())
        self.records = []

    def emit(self, record):
        """Keep the log records in a list in addition to the log text."""
        self.records.append(record)
        logging.StreamHandler.emit(self, record)

    def reset(self):
        self.records = []
        self.stream = py.io.TextIO()


class LogCaptureFixture(object):
    """Provides access and control of log capturing."""

    def __init__(self, item):
        """Creates a new funcarg."""
        self._item = item
        self._initial_log_levels = {}  # type: Dict[str, int]  # dict of log name -> log level

    def _finalize(self):
        """Finalizes the fixture.

        This restores the log levels changed by :meth:`set_level`.
        """
        # restore log levels
        for logger_name, level in self._initial_log_levels.items():
            logger = logging.getLogger(logger_name)
            logger.setLevel(level)

    @property
    def handler(self):
        """
        :rtype: LogCaptureHandler
        """
        return self._item.catch_log_handler

    def get_records(self, when):
        """
        Get the logging records for one of the possible test phases.

        :param str when:
            Which test phase to obtain the records from. Valid values are: "setup", "call" and "teardown".

        :rtype: List[logging.LogRecord]
        :return: the list of captured records at the given stage

        .. versionadded:: 3.4
        """
        handler = self._item.catch_log_handlers.get(when)
        if handler:
            return handler.records
        else:
            return []

    @property
    def text(self):
        """Returns the log text."""
        return self.handler.stream.getvalue()

    @property
    def records(self):
        """Returns the list of log records."""
        return self.handler.records

    @property
    def record_tuples(self):
        """Returns a list of stripped down versions of log records intended
        for use in assertion comparison.

        The format of the tuple is:

            (logger_name, log_level, message)
        """
        return [(r.name, r.levelno, r.getMessage()) for r in self.records]

    def clear(self):
        """Reset the list of log records and the captured log text."""
        self.handler.reset()

    def set_level(self, level, logger=None):
        """Sets the level for capturing of logs. The level will be restored to its previous value at the end of
        the test.

        :param int level: the level.
        :param str logger: the name of the logger to update. If not given, the root logger level is updated.

        .. versionchanged:: 3.4
            The levels of the loggers changed by this function will be restored to their initial values at the
            end of the test.
        """
        logger_name = logger
        logger = logging.getLogger(logger_name)
        # save the original log-level to restore it during teardown
        self._initial_log_levels.setdefault(logger_name, logger.level)
        logger.setLevel(level)

    @contextmanager
    def at_level(self, level, logger=None):
        """Context manager that sets the level for capturing of logs. After the end of the 'with' statement the
        level is restored to its original value.

        :param int level: the level.
        :param str logger: the name of the logger to update. If not given, the root logger level is updated.
        """
        logger = logging.getLogger(logger)
        orig_level = logger.level
        logger.setLevel(level)
        try:
            yield
        finally:
            logger.setLevel(orig_level)


@pytest.fixture
def caplog(request):
    """Access and control log capturing.

    Captured logs are available through the following properties and methods::

    * caplog.text            -> string containing formatted log output
    * caplog.records         -> list of logging.LogRecord instances
    * caplog.record_tuples   -> list of (logger_name, level, message) tuples
    * caplog.clear()         -> clear captured records and formatted log output string
    """
    result = LogCaptureFixture(request.node)
    yield result
    result._finalize()
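
# A sketch of typical usage in a test function (the logger name and messages
# are arbitrary examples, not part of this module):
#
#     import logging
#
#     def test_emits_warning(caplog):
#         caplog.set_level(logging.INFO)
#         logging.getLogger('my.module').warning('boom')
#         assert caplog.record_tuples == [
#             ('my.module', logging.WARNING, 'boom'),
#         ]
#         with caplog.at_level(logging.DEBUG, logger='my.module'):
#             logging.getLogger('my.module').debug('details')
#         assert 'details' in caplog.text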


def get_actual_log_level(config, *setting_names):
    """Return the actual logging level."""

    for setting_name in setting_names:
        log_level = config.getoption(setting_name)
        if log_level is None:
            log_level = config.getini(setting_name)
        if log_level:
            break
    else:
        return

    if isinstance(log_level, six.string_types):
        log_level = log_level.upper()
    try:
        return int(getattr(logging, log_level, log_level))
    except ValueError:
        # Python logging does not recognise this as a logging level
        raise pytest.UsageError(
            "'{0}' is not recognized as a logging level name for "
            "'{1}'. Please consider passing the "
            "logging level num instead.".format(
                log_level,
                setting_name))
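
# For example, a setting of 'INFO' (or 'info') resolves to logging.INFO (20),
# while a plain number such as '15' is returned unchanged as an int.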


def pytest_configure(config):
    config.pluginmanager.register(LoggingPlugin(config), 'logging-plugin')


@contextmanager
def _dummy_context_manager():
    yield


class LoggingPlugin(object):
    """Attaches to the logging module and captures log messages for each test.
    """

    def __init__(self, config):
        """Creates a new plugin to capture log messages.

        The formatter can be safely shared across all handlers so
        create a single one for the entire test session here.
        """
        self._config = config

        # enable verbose output automatically if live logging is enabled
        if self._log_cli_enabled() and not config.getoption('verbose'):
            # sanity check: terminal reporter should not have been loaded at this point
            assert self._config.pluginmanager.get_plugin('terminalreporter') is None
            config.option.verbose = 1

        self.print_logs = get_option_ini(config, 'log_print')
        self.formatter = logging.Formatter(get_option_ini(config, 'log_format'),
                                           get_option_ini(config, 'log_date_format'))
        self.log_level = get_actual_log_level(config, 'log_level')

        log_file = get_option_ini(config, 'log_file')
        if log_file:
            self.log_file_level = get_actual_log_level(config, 'log_file_level')

            log_file_format = get_option_ini(config, 'log_file_format', 'log_format')
            log_file_date_format = get_option_ini(config, 'log_file_date_format', 'log_date_format')
            # Each pytest runtests session will write to a clean logfile
            self.log_file_handler = logging.FileHandler(log_file, mode='w')
            log_file_formatter = logging.Formatter(log_file_format, datefmt=log_file_date_format)
            self.log_file_handler.setFormatter(log_file_formatter)
        else:
            self.log_file_handler = None

        # initialized during pytest_runtestloop
        self.log_cli_handler = None

    def _log_cli_enabled(self):
        """Return True if log_cli should be considered enabled, either explicitly
        or because --log-cli-level was given on the command line.
        """
        return self._config.getoption('--log-cli-level') is not None or \
            self._config.getini('log_cli')
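
    # For instance, running ``pytest --log-cli-level=INFO`` enables live
    # logging even when ``log_cli`` is not set to true in the ini file.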

    @contextmanager
    def _runtest_for(self, item, when):
        """Implements the internals of pytest_runtest_xxx() hook."""
        with catching_logs(LogCaptureHandler(),
                           formatter=self.formatter, level=self.log_level) as log_handler:
            if self.log_cli_handler:
                self.log_cli_handler.set_when(when)

            if item is None:
                yield  # run the test
                return

            if not hasattr(item, 'catch_log_handlers'):
                item.catch_log_handlers = {}
            item.catch_log_handlers[when] = log_handler
            item.catch_log_handler = log_handler
            try:
                yield  # run test
            finally:
                del item.catch_log_handler
                if when == 'teardown':
                    del item.catch_log_handlers

            if self.print_logs:
                # Add a captured log section to the report.
                log = log_handler.stream.getvalue().strip()
                item.add_report_section(when, 'log', log)

    @pytest.hookimpl(hookwrapper=True)
    def pytest_runtest_setup(self, item):
        with self._runtest_for(item, 'setup'):
            yield

    @pytest.hookimpl(hookwrapper=True)
    def pytest_runtest_call(self, item):
        with self._runtest_for(item, 'call'):
            yield

    @pytest.hookimpl(hookwrapper=True)
    def pytest_runtest_teardown(self, item):
        with self._runtest_for(item, 'teardown'):
            yield

    @pytest.hookimpl(hookwrapper=True)
    def pytest_runtest_logstart(self):
        if self.log_cli_handler:
            self.log_cli_handler.reset()
        with self._runtest_for(None, 'start'):
            yield

    @pytest.hookimpl(hookwrapper=True)
    def pytest_runtest_logfinish(self):
        with self._runtest_for(None, 'finish'):
            yield

    @pytest.hookimpl(hookwrapper=True)
    def pytest_runtestloop(self, session):
        """Runs all collected test items."""
        self._setup_cli_logging()
        with self.live_logs_context:
            if self.log_file_handler is not None:
                with closing(self.log_file_handler):
                    with catching_logs(self.log_file_handler,
                                       level=self.log_file_level):
                        yield  # run all the tests
            else:
                yield  # run all the tests

    def _setup_cli_logging(self):
        """Sets up the handler and logger for the Live Logs feature, if enabled.

        This must be done right before starting the loop so we can access the terminal reporter plugin.
        """
        terminal_reporter = self._config.pluginmanager.get_plugin('terminalreporter')
        if self._log_cli_enabled() and terminal_reporter is not None:
            capture_manager = self._config.pluginmanager.get_plugin('capturemanager')
            log_cli_handler = _LiveLoggingStreamHandler(terminal_reporter, capture_manager)
            log_cli_format = get_option_ini(self._config, 'log_cli_format', 'log_format')
            log_cli_date_format = get_option_ini(self._config, 'log_cli_date_format', 'log_date_format')
            if self._config.option.color != 'no' and ColoredLevelFormatter.LEVELNAME_FMT_REGEX.search(log_cli_format):
                log_cli_formatter = ColoredLevelFormatter(create_terminal_writer(self._config),
                                                          log_cli_format, datefmt=log_cli_date_format)
            else:
                log_cli_formatter = logging.Formatter(log_cli_format, datefmt=log_cli_date_format)
            log_cli_level = get_actual_log_level(self._config, 'log_cli_level', 'log_level')
            self.log_cli_handler = log_cli_handler
            self.live_logs_context = catching_logs(log_cli_handler, formatter=log_cli_formatter, level=log_cli_level)
        else:
            self.live_logs_context = _dummy_context_manager()


class _LiveLoggingStreamHandler(logging.StreamHandler):
    """
    Custom StreamHandler used by the live logging feature: it will write a newline before the first log message
    in each test.

    During live logging we must also explicitly disable stdout/stderr capturing, otherwise it will get captured
    and won't appear in the terminal.
    """

    def __init__(self, terminal_reporter, capture_manager):
        """
        :param _pytest.terminal.TerminalReporter terminal_reporter:
        :param _pytest.capture.CaptureManager capture_manager:
        """
        logging.StreamHandler.__init__(self, stream=terminal_reporter)
        self.capture_manager = capture_manager
        self.reset()
        self.set_when(None)
        self._test_outcome_written = False

    def reset(self):
        """Reset the handler; should be called before the start of each test."""
        self._first_record_emitted = False

    def set_when(self, when):
        """Prepares for the given test phase (setup/call/teardown)."""
        self._when = when
        self._section_name_shown = False
        if when == 'start':
            self._test_outcome_written = False

    def emit(self, record):
        if self.capture_manager is not None:
            self.capture_manager.suspend_global_capture()
        try:
            if not self._first_record_emitted:
                self.stream.write('\n')
                self._first_record_emitted = True
            elif self._when in ('teardown', 'finish'):
                if not self._test_outcome_written:
                    self._test_outcome_written = True
                    self.stream.write('\n')
            if not self._section_name_shown and self._when:
                self.stream.section('live log ' + self._when, sep='-', bold=True)
                self._section_name_shown = True
            logging.StreamHandler.emit(self, record)
        finally:
            if self.capture_manager is not None:
                self.capture_manager.resume_global_capture()