"""
merged implementation of the cache provider

the name cache was not chosen to ensure pluggy automatically
ignores the external pytest-cache
"""
from __future__ import absolute_import, division, print_function

import py
import pytest
import json
import os
from os.path import sep as _sep, altsep as _altsep


class Cache(object):
    def __init__(self, config):
        self.config = config
        self._cachedir = Cache.cache_dir_from_config(config)
        self.trace = config.trace.root.get("cache")
        if config.getvalue("cacheclear"):
            self.trace("clearing cachedir")
            if self._cachedir.check():
                self._cachedir.remove()
            self._cachedir.mkdir()

    @staticmethod
    def cache_dir_from_config(config):
        cache_dir = config.getini("cache_dir")
        cache_dir = os.path.expanduser(cache_dir)
        cache_dir = os.path.expandvars(cache_dir)
        if os.path.isabs(cache_dir):
            return py.path.local(cache_dir)
        else:
            return config.rootdir.join(cache_dir)

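    # Illustration of cache_dir_from_config above (paths are examples only):
    # an absolute ini value such as "$HOME/.cache/pytest" is used as-is after
    # expanduser/expandvars, while the relative default ".pytest_cache" is
    # joined onto config.rootdir.
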
    def makedir(self, name):
        """ return a directory path object with the given name.  If the
        directory does not yet exist, it will be created.  You can use it
        to manage files, e.g. to store/retrieve database dumps across
        test sessions.

        :param name: must be a string not containing a ``/`` separator.
             Make sure the name contains your plugin or application
             identifiers to prevent clashes with other cache users.
        """
        if _sep in name or _altsep is not None and _altsep in name:
            raise ValueError("name is not allowed to contain path separators")
        return self._cachedir.ensure_dir("d", name)

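    # A minimal usage sketch for makedir (plugin name and file name are
    # illustrative, not part of this module):
    #
    #   directory = config.cache.makedir("myplugin")
    #   dump_file = directory.join("dump.json")
    #   dump_file.write("{}")
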
    def _getvaluepath(self, key):
        return self._cachedir.join('v', *key.split('/'))

    def get(self, key, default):
        """ return the cached value for the given key.  If no value has been
        cached yet or the value cannot be read, the specified default is
        returned.

        :param key: must be a ``/`` separated value. Usually the first
             name is the name of your plugin or your application.
        :param default: must be provided in case of a cache-miss or
             invalid cache values.
        """
        path = self._getvaluepath(key)
        if path.check():
            try:
                with path.open("r") as f:
                    return json.load(f)
            except ValueError:
                self.trace("cache-invalid at %s" % (path,))
        return default

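    # A minimal round-trip sketch for get/set (plugin name, key and values are
    # illustrative): values survive across test sessions as long as they are
    # JSON serializable.
    #
    #   timings = config.cache.get("myplugin/timings", {})
    #   timings["test_example"] = 1.23
    #   config.cache.set("myplugin/timings", timings)
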
    def set(self, key, value):
        """ save value for the given key.

        :param key: must be a ``/`` separated value. Usually the first
             name is the name of your plugin or your application.
        :param value: must be of any combination of basic
             python types, including nested types
             like e.g. lists of dictionaries.
        """
        path = self._getvaluepath(key)
        try:
            path.dirpath().ensure_dir()
        except (py.error.EEXIST, py.error.EACCES):
            self.config.warn(
                code='I9', message='could not create cache path %s' % (path,)
            )
            return
        try:
            f = path.open('w')
        except py.error.ENOTDIR:
            self.config.warn(
                code='I9', message='cache could not write path %s' % (path,))
        else:
            with f:
                self.trace("cache-write %s: %r" % (key, value,))
                json.dump(value, f, indent=2, sort_keys=True)


class LFPlugin(object):
    """ Plugin which implements the --lf (run last-failing) option """

    def __init__(self, config):
        self.config = config
        active_keys = 'lf', 'failedfirst'
        self.active = any(config.getvalue(key) for key in active_keys)
        self.lastfailed = config.cache.get("cache/lastfailed", {})
        self._previously_failed_count = None

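    # The "cache/lastfailed" value loaded above is persisted as a JSON object
    # mapping node ids of failed tests to True, e.g. (node id purely
    # illustrative):
    #
    #   {"test_example.py::test_answer": true}
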
    def pytest_report_collectionfinish(self):
        if self.active:
            if not self._previously_failed_count:
                mode = "run all (no recorded failures)"
            else:
                noun = 'failure' if self._previously_failed_count == 1 else 'failures'
                suffix = " first" if self.config.getvalue(
                    "failedfirst") else ""
                mode = "rerun previous {count} {noun}{suffix}".format(
                    count=self._previously_failed_count, suffix=suffix, noun=noun
                )
            return "run-last-failure: %s" % mode

    def pytest_runtest_logreport(self, report):
        # a test that passed its call phase or was skipped no longer counts as
        # a last failure; any failed phase (setup/call/teardown) records it
        if (report.when == 'call' and report.passed) or report.skipped:
            self.lastfailed.pop(report.nodeid, None)
        elif report.failed:
            self.lastfailed[report.nodeid] = True

    def pytest_collectreport(self, report):
        passed = report.outcome in ('passed', 'skipped')
        if passed:
            if report.nodeid in self.lastfailed:
                self.lastfailed.pop(report.nodeid)
                self.lastfailed.update(
                    (item.nodeid, True)
                    for item in report.result)
        else:
            self.lastfailed[report.nodeid] = True

    def pytest_collection_modifyitems(self, session, config, items):
        if self.active and self.lastfailed:
            previously_failed = []
            previously_passed = []
            for item in items:
                if item.nodeid in self.lastfailed:
                    previously_failed.append(item)
                else:
                    previously_passed.append(item)
            self._previously_failed_count = len(previously_failed)
            if not previously_failed:
                # running a subset of all tests with recorded failures outside
                # of the set of tests currently executing
                return
            if self.config.getvalue("lf"):
                items[:] = previously_failed
                config.hook.pytest_deselected(items=previously_passed)
            else:
                items[:] = previously_failed + previously_passed

    def pytest_sessionfinish(self, session):
        config = self.config
        if config.getvalue("cacheshow") or hasattr(config, "slaveinput"):
            return

        saved_lastfailed = config.cache.get("cache/lastfailed", {})
        if saved_lastfailed != self.lastfailed:
            config.cache.set("cache/lastfailed", self.lastfailed)


def pytest_addoption(parser):
    group = parser.getgroup("general")
    group.addoption(
        '--lf', '--last-failed', action='store_true', dest="lf",
        help="rerun only the tests that failed "
             "at the last run (or all if none failed)")
    group.addoption(
        '--ff', '--failed-first', action='store_true', dest="failedfirst",
        help="run all tests but run the last failures first. "
             "This may re-order tests and thus lead to "
             "repeated fixture setup/teardown")
    group.addoption(
        '--cache-show', action='store_true', dest="cacheshow",
        help="show cache contents, don't perform collection or tests")
    group.addoption(
        '--cache-clear', action='store_true', dest="cacheclear",
        help="remove all cache contents at start of test run.")
    parser.addini(
        "cache_dir", default='.pytest_cache',
        help="cache directory path.")


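# Usage sketch for the options defined in pytest_addoption above (commands are
# illustrative):
#
#   pytest --lf            # rerun only the tests that failed last time
#   pytest --ff            # run all tests, previous failures first
#   pytest --cache-show    # show stored cache contents, run no tests
#   pytest --cache-clear   # remove cache contents at the start of the run
#
# The cache location can be changed through the "cache_dir" ini option, e.g.
# in a pytest.ini (path is an example):
#
#   [pytest]
#   cache_dir = relative/or/absolute/path

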
def pytest_cmdline_main(config):
    if config.option.cacheshow:
        from _pytest.main import wrap_session
        return wrap_session(config, cacheshow)


@pytest.hookimpl(tryfirst=True)
def pytest_configure(config):
    config.cache = Cache(config)
    config.pluginmanager.register(LFPlugin(config), "lfplugin")


@pytest.fixture
def cache(request):
    """
    Return a cache object that can persist state between testing sessions.

    cache.get(key, default)
    cache.set(key, value)

    Each key must be a ``/`` separated value, where the first part is usually
    the name of your plugin or application to avoid clashes with other cache
    users.

    Values can be any object handled by the json stdlib module.
    """
    return request.config.cache

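# A minimal usage sketch for the cache fixture above (test body, key and helper
# are illustrative, not part of this module):
#
#   def test_function(cache):
#       value = cache.get("myapp/expensive", None)
#       if value is None:
#           value = expensive_computation()   # hypothetical helper
#           cache.set("myapp/expensive", value)
#       assert value is not None

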
def pytest_report_header(config):
    if config.option.verbose:
        relpath = py.path.local().bestrelpath(config.cache._cachedir)
        return "cachedir: %s" % relpath


def cacheshow(config, session):
    from pprint import pprint
    tw = py.io.TerminalWriter()
    tw.line("cachedir: " + str(config.cache._cachedir))
    if not config.cache._cachedir.check():
        tw.line("cache is empty")
        return 0
    dummy = object()
    basedir = config.cache._cachedir
    vdir = basedir.join("v")
    tw.sep("-", "cache values")
    for valpath in sorted(vdir.visit(lambda x: x.isfile())):
        key = valpath.relto(vdir).replace(valpath.sep, "/")
        val = config.cache.get(key, dummy)
        if val is dummy:
            tw.line("%s contains unreadable content, "
                    "will be ignored" % key)
        else:
            tw.line("%s contains:" % key)
            stream = py.io.TextIO()
            pprint(val, stream=stream)
            for line in stream.getvalue().splitlines():
                tw.line("  " + line)

    ddir = basedir.join("d")
    if ddir.isdir() and ddir.listdir():
        tw.sep("-", "cache directories")
        for p in sorted(basedir.join("d").visit()):
            # if p.check(dir=1):
            #     print("%s/" % p.relto(basedir))
            if p.isfile():
                key = p.relto(basedir)
                tw.line("%s is a file of length %d" % (
                    key, p.size()))
    return 0