from __future__ import absolute_import, division, print_function

import logging
from contextlib import closing, contextmanager
import sys

import six

import pytest
import py


DEFAULT_LOG_FORMAT = '%(filename)-25s %(lineno)4d %(levelname)-8s %(message)s'
DEFAULT_LOG_DATE_FORMAT = '%H:%M:%S'


def get_option_ini(config, *names):
    for name in names:
        ret = config.getoption(name)  # 'default' arg won't work as expected
        if ret is None:
            ret = config.getini(name)
        if ret:
            return ret
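
# Fallback sketch for get_option_ini (hypothetical option values, kept as a
# comment so the module stays importable): a command-line value wins, otherwise
# the ini value registered under the same name is used.
#
#     # pytest --log-level=DEBUG  with  log_level = INFO  in the ini file:
#     get_option_ini(config, 'log_level')  # -> 'DEBUG'
#     # no command-line flag, only the ini value:
#     get_option_ini(config, 'log_level')  # -> 'INFO'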


def pytest_addoption(parser):
    """Add options to control log capturing."""

    group = parser.getgroup('logging')

    def add_option_ini(option, dest, default=None, type=None, **kwargs):
        parser.addini(dest, default=default, type=type,
                      help='default value for ' + option)
        group.addoption(option, dest=dest, **kwargs)

    add_option_ini(
        '--no-print-logs',
        dest='log_print', action='store_const', const=False, default=True,
        type='bool',
        help='disable printing caught logs on failed tests.')
    add_option_ini(
        '--log-level',
        dest='log_level', default=None,
        help='logging level used by the logging module')
    add_option_ini(
        '--log-format',
        dest='log_format', default=DEFAULT_LOG_FORMAT,
        help='log format as used by the logging module.')
    add_option_ini(
        '--log-date-format',
        dest='log_date_format', default=DEFAULT_LOG_DATE_FORMAT,
        help='log date format as used by the logging module.')
    parser.addini(
        'log_cli', default=False, type='bool',
        help='enable log display during test run (also known as "live logging").')
    add_option_ini(
        '--log-cli-level',
        dest='log_cli_level', default=None,
        help='cli logging level.')
    add_option_ini(
        '--log-cli-format',
        dest='log_cli_format', default=None,
        help='log format as used by the logging module.')
    add_option_ini(
        '--log-cli-date-format',
        dest='log_cli_date_format', default=None,
        help='log date format as used by the logging module.')
    add_option_ini(
        '--log-file',
        dest='log_file', default=None,
        help='path to a file where logging will be written to.')
    add_option_ini(
        '--log-file-level',
        dest='log_file_level', default=None,
        help='log file logging level.')
    add_option_ini(
        '--log-file-format',
        dest='log_file_format', default=DEFAULT_LOG_FORMAT,
        help='log format as used by the logging module.')
    add_option_ini(
        '--log-file-date-format',
        dest='log_file_date_format', default=DEFAULT_LOG_DATE_FORMAT,
        help='log date format as used by the logging module.')
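
# Configuration sketch (hypothetical project ini file, shown as a comment):
# every option registered above via add_option_ini can also be set as an ini
# value of the same name.
#
#     # pytest.ini
#     [pytest]
#     log_cli = true
#     log_cli_level = INFO
#     log_file = pytest.log
#     log_file_level = DEBUG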


@contextmanager
def catching_logs(handler, formatter=None, level=None):
    """Context manager that attaches *handler* to the root logger, applying
    *formatter* and *level* if given, and restores the previous state on exit."""
    root_logger = logging.getLogger()

    if formatter is not None:
        handler.setFormatter(formatter)
    if level is not None:
        handler.setLevel(level)

    # Adding the same handler twice would confuse the logging system.
    # Just don't do that.
    add_new_handler = handler not in root_logger.handlers

    if add_new_handler:
        root_logger.addHandler(handler)
    if level is not None:
        orig_level = root_logger.level
        root_logger.setLevel(level)
    try:
        yield handler
    finally:
        if level is not None:
            root_logger.setLevel(orig_level)
        if add_new_handler:
            root_logger.removeHandler(handler)
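
# Usage sketch for catching_logs (hypothetical handler and level, kept as a
# comment): the handler is attached to the root logger and removed again on exit.
#
#     with catching_logs(logging.StreamHandler(sys.stderr), level=logging.INFO):
#         logging.getLogger(__name__).info('routed through the temporary handler')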


class LogCaptureHandler(logging.StreamHandler):
    """A logging handler that stores log records and the log text."""

    def __init__(self):
        """Creates a new log handler."""
        logging.StreamHandler.__init__(self, py.io.TextIO())
        self.records = []

    def emit(self, record):
        """Keep the log records in a list in addition to the log text."""
        self.records.append(record)
        logging.StreamHandler.emit(self, record)


class LogCaptureFixture(object):
    """Provides access and control of log capturing."""

    def __init__(self, item):
        """Creates a new funcarg."""
        self._item = item

    @property
    def handler(self):
        return self._item.catch_log_handler

    def get_handler(self, when):
        """
        Get the handler for a specified state of the tests.

        Valid values for the when parameter are: 'setup', 'call' and 'teardown'.
        """
        return self._item.catch_log_handlers.get(when)
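
    # Usage sketch (hypothetical test phase, kept as a comment): inspect the
    # records captured while the fixture setup ran.
    #
    #     setup_records = caplog.get_handler('setup').records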

    @property
    def text(self):
        """Returns the log text."""
        return self.handler.stream.getvalue()

    @property
    def records(self):
        """Returns the list of log records."""
        return self.handler.records

    @property
    def record_tuples(self):
        """Returns a list of a stripped-down version of log records intended
        for use in assertion comparison.

        The format of the tuple is:

            (logger_name, log_level, message)
        """
        return [(r.name, r.levelno, r.getMessage()) for r in self.records]
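
    # Assertion sketch for record_tuples (hypothetical logger name and message,
    # kept as a comment):
    #
    #     logging.getLogger('foo').info('boom')
    #     assert caplog.record_tuples == [('foo', logging.INFO, 'boom')]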

    def clear(self):
        """Reset the list of log records."""
        self.handler.records = []

    def set_level(self, level, logger=None):
        """Sets the level for capturing of logs.

        :param int level: the level.
        :param str logger: the logger whose level should be updated. If not given, the root logger level is updated.
        """
        logger = logging.getLogger(logger)
        logger.setLevel(level)
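
    # Usage sketch inside a test (hypothetical logger name, kept as a comment):
    #
    #     caplog.set_level(logging.DEBUG, logger='my.package')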

    @contextmanager
    def at_level(self, level, logger=None):
        """Context manager that sets the level for capturing of logs.

        By default, the level is set on the handler used to capture
        logs. Specify a logger name to instead set the level of any
        logger.
        """
        if logger is None:
            logger = self.handler
        else:
            logger = logging.getLogger(logger)

        orig_level = logger.level
        logger.setLevel(level)
        try:
            yield
        finally:
            logger.setLevel(orig_level)
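
    # Usage sketch inside a test (hypothetical logger name, kept as a comment):
    #
    #     with caplog.at_level(logging.WARNING, logger='my.package'):
    #         logging.getLogger('my.package').warning('captured at WARNING and above')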


@pytest.fixture
def caplog(request):
    """Access and control log capturing.

    Captured logs are available through the following properties::

    * caplog.text          -> string containing formatted log output
    * caplog.records       -> list of logging.LogRecord instances
    * caplog.record_tuples -> list of (logger_name, level, message) tuples
    """
    return LogCaptureFixture(request.node)
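
# A minimal test using the fixture (hypothetical test function, kept as a comment):
#
#     def test_caplog_captures_warning(caplog):
#         logging.getLogger(__name__).warning('something went wrong')
#         assert 'something went wrong' in caplog.text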


def get_actual_log_level(config, *setting_names):
    """Return the actual logging level."""

    for setting_name in setting_names:
        log_level = config.getoption(setting_name)
        if log_level is None:
            log_level = config.getini(setting_name)
        if log_level:
            break
    else:
        return

    if isinstance(log_level, six.string_types):
        log_level = log_level.upper()
    try:
        return int(getattr(logging, log_level, log_level))
    except ValueError:
        # Python logging does not recognise this as a logging level
        raise pytest.UsageError(
            "'{0}' is not recognized as a logging level name for "
            "'{1}'. Please consider passing the "
            "logging level number instead.".format(
                log_level,
                setting_name))
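
# Resolution sketch (hypothetical option values, kept as a comment):
#
#     'info' -> 'INFO' -> getattr(logging, 'INFO') -> 20
#     '15'   -> int('15') -> 15
#     'foo'  -> neither a level name nor a number -> pytest.UsageError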


def pytest_configure(config):
    config.pluginmanager.register(LoggingPlugin(config), 'logging-plugin')


@contextmanager
def _dummy_context_manager():
    yield


class LoggingPlugin(object):
    """Attaches to the logging module and captures log messages for each test.
    """

    def __init__(self, config):
        """Creates a new plugin to capture log messages.

        The formatter can be safely shared across all handlers so
        create a single one for the entire test session here.
        """
        self.print_logs = get_option_ini(config, 'log_print')
        self.formatter = logging.Formatter(
            get_option_ini(config, 'log_format'),
            get_option_ini(config, 'log_date_format'))
        self.log_level = get_actual_log_level(config, 'log_level')

        if config.getini('log_cli'):
            log_cli_handler = logging.StreamHandler(sys.stderr)
            log_cli_format = get_option_ini(
                config, 'log_cli_format', 'log_format')
            log_cli_date_format = get_option_ini(
                config, 'log_cli_date_format', 'log_date_format')
            log_cli_formatter = logging.Formatter(
                log_cli_format,
                datefmt=log_cli_date_format)
            log_cli_level = get_actual_log_level(config, 'log_cli_level', 'log_level')
            self.log_cli_handler = log_cli_handler  # needed for a single unittest
            self.live_logs_context = catching_logs(log_cli_handler,
                                                   formatter=log_cli_formatter,
                                                   level=log_cli_level)
        else:
            self.log_cli_handler = None
            self.live_logs_context = _dummy_context_manager()

        log_file = get_option_ini(config, 'log_file')
        if log_file:
            self.log_file_level = get_actual_log_level(config, 'log_file_level')

            log_file_format = get_option_ini(
                config, 'log_file_format', 'log_format')
            log_file_date_format = get_option_ini(
                config, 'log_file_date_format', 'log_date_format')
            self.log_file_handler = logging.FileHandler(
                log_file,
                # Each pytest runtests session will write to a clean logfile
                mode='w')
            log_file_formatter = logging.Formatter(
                log_file_format,
                datefmt=log_file_date_format)
            self.log_file_handler.setFormatter(log_file_formatter)
        else:
            self.log_file_handler = None

    @contextmanager
    def _runtest_for(self, item, when):
        """Implements the internals of pytest_runtest_xxx() hook."""
        with catching_logs(LogCaptureHandler(),
                           formatter=self.formatter, level=self.log_level) as log_handler:
            if not hasattr(item, 'catch_log_handlers'):
                item.catch_log_handlers = {}
            item.catch_log_handlers[when] = log_handler
            item.catch_log_handler = log_handler
            try:
                yield  # run test
            finally:
                del item.catch_log_handler
                if when == 'teardown':
                    del item.catch_log_handlers

            if self.print_logs:
                # Add a captured log section to the report.
                log = log_handler.stream.getvalue().strip()
                item.add_report_section(when, 'log', log)
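
    # Resulting attribute layout on the item while a phase runs (summary comment):
    #
    #     item.catch_log_handlers  # {'setup': <LogCaptureHandler>, 'call': ..., 'teardown': ...}
    #     item.catch_log_handler   # handler of the phase currently running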

    @pytest.hookimpl(hookwrapper=True)
    def pytest_runtest_setup(self, item):
        with self._runtest_for(item, 'setup'):
            yield

    @pytest.hookimpl(hookwrapper=True)
    def pytest_runtest_call(self, item):
        with self._runtest_for(item, 'call'):
            yield

    @pytest.hookimpl(hookwrapper=True)
    def pytest_runtest_teardown(self, item):
        with self._runtest_for(item, 'teardown'):
            yield

    @pytest.hookimpl(hookwrapper=True)
    def pytest_runtestloop(self, session):
        """Runs all collected test items."""
        with self.live_logs_context:
            if self.log_file_handler is not None:
                with closing(self.log_file_handler):
                    with catching_logs(self.log_file_handler,
                                       level=self.log_file_level):
                        yield  # run all the tests
            else:
                yield  # run all the tests
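
    # Handler nesting for the whole run (summary comment):
    #
    #     live_logs_context              # CLI handler for the session, if log_cli is set
    #       closing(log_file_handler)    # file handler is closed once the loop finishes
    #         catching_logs(...)         # file handler attached to the root logger
    #           yield                    # all tests run here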