2015-07-11 16:13:27 +08:00
|
|
|
"""
|
|
|
|
merged implementation of the cache provider
|
|
|
|
|
2017-02-15 23:00:18 +08:00
|
|
|
the name cache was not chosen to ensure pluggy automatically
|
2015-07-11 16:13:27 +08:00
|
|
|
ignores the external pytest-cache
|
|
|
|
"""
|
2017-03-17 09:21:30 +08:00
|
|
|
from __future__ import absolute_import, division, print_function
|
2018-02-24 03:49:17 +08:00
|
|
|
|
|
|
|
from collections import OrderedDict
|
|
|
|
|
2015-07-11 16:13:27 +08:00
|
|
|
import py
|
2018-02-24 03:49:17 +08:00
|
|
|
import six
|
2018-02-14 03:49:28 +08:00
|
|
|
|
2015-07-11 16:13:27 +08:00
|
|
|
import pytest
|
|
|
|
import json
|
2017-07-07 18:07:06 +08:00
|
|
|
import os
|
2015-07-11 16:13:27 +08:00
|
|
|
from os.path import sep as _sep, altsep as _altsep
|
|
|
|
|
|
|
|
|
2015-07-24 15:59:59 +08:00
|
|
|
class Cache(object):
    """Persistent key/value cache shared across test sessions.

    Values are stored as JSON files under ``<cachedir>/v`` (one file per
    ``/``-separated key); plugin scratch directories live under
    ``<cachedir>/d``.
    """

    def __init__(self, config):
        """Initialize the cache for *config*, honoring ``--cache-clear``."""
        self.config = config
        self._cachedir = Cache.cache_dir_from_config(config)
        self.trace = config.trace.root.get("cache")
        if config.getoption("cacheclear"):
            self.trace("clearing cachedir")
            if self._cachedir.check():
                self._cachedir.remove()
            self._cachedir.mkdir()

    @staticmethod
    def cache_dir_from_config(config):
        """Return the configured cache directory as a ``py.path.local``.

        The ``cache_dir`` ini value is ``~``- and environment-variable
        expanded; a relative path is anchored at the config rootdir.
        """
        cache_dir = config.getini("cache_dir")
        cache_dir = os.path.expanduser(cache_dir)
        cache_dir = os.path.expandvars(cache_dir)
        if os.path.isabs(cache_dir):
            return py.path.local(cache_dir)
        else:
            return config.rootdir.join(cache_dir)

    def makedir(self, name):
        """ return a directory path object with the given name. If the
        directory does not yet exist, it will be created. You can use it
        to manage files likes e. g. store/retrieve database
        dumps across test sessions.

        :param name: must be a string not containing a ``/`` separator.
             Make sure the name contains your plugin or application
             identifiers to prevent clashes with other cache users.
        :raises ValueError: if *name* contains a path separator.
        """
        if _sep in name or _altsep is not None and _altsep in name:
            raise ValueError("name is not allowed to contain path separators")
        return self._cachedir.ensure_dir("d", name)

    def _getvaluepath(self, key):
        # each "/"-separated key segment becomes a path component below "v"
        return self._cachedir.join("v", *key.split("/"))

    def get(self, key, default):
        """ return cached value for the given key. If no value
        was yet cached or the value cannot be read, the specified
        default is returned.

        :param key: must be a ``/`` separated value. Usually the first
             name is the name of your plugin or your application.
        :param default: must be provided in case of a cache-miss or
             invalid cache values.

        """
        path = self._getvaluepath(key)
        # EAFP: open directly instead of path.check()-then-open.  This
        # avoids a check/open race and, per the documented contract above,
        # also returns the default when the file exists but cannot be read
        # (e.g. EACCES) -- previously that IOError escaped to the caller.
        try:
            with path.open("r") as f:
                return json.load(f)
        except ValueError:
            # file exists but does not contain valid JSON
            self.trace("cache-invalid at %s" % (path,))
            return default
        except (IOError, OSError):
            # missing or unreadable file -> plain cache miss
            return default

    def set(self, key, value):
        """ save value for the given key.

        :param key: must be a ``/`` separated value. Usually the first
             name is the name of your plugin or your application.
        :param value: must be of any combination of basic
               python types, including nested types
               like e. g. lists of dictionaries.
        """
        path = self._getvaluepath(key)
        try:
            path.dirpath().ensure_dir()
        except (py.error.EEXIST, py.error.EACCES):
            # best effort: warn instead of failing the whole run
            self.config.warn(
                code="I9", message="could not create cache path %s" % (path,)
            )
            return
        try:
            f = path.open("w")
        except py.error.ENOTDIR:
            self.config.warn(
                code="I9", message="cache could not write path %s" % (path,)
            )
        else:
            with f:
                self.trace("cache-write %s: %r" % (key, value))
                json.dump(value, f, indent=2, sort_keys=True)
|
2015-07-11 16:13:27 +08:00
|
|
|
|
|
|
|
|
2018-01-25 04:23:42 +08:00
|
|
|
class LFPlugin(object):
    """Plugin which implements the --lf (run last-failing) option.

    Failure state is persisted under the ``cache/lastfailed`` key as a
    mapping of nodeid -> True.
    """

    def __init__(self, config):
        self.config = config
        # active when either --lf or --ff was given
        self.active = any(config.getoption(opt) for opt in ("lf", "failedfirst"))
        self.lastfailed = config.cache.get("cache/lastfailed", {})
        self._previously_failed_count = None
        self._no_failures_behavior = config.getoption("last_failed_no_failures")

    def pytest_report_collectionfinish(self):
        # one-line status shown after collection, only when --lf/--ff is on
        if not self.active:
            return None
        count = self._previously_failed_count
        if not count:
            mode = "run {} (no recorded failures)".format(self._no_failures_behavior)
        else:
            noun = "failure" if count == 1 else "failures"
            suffix = " first" if self.config.getoption("failedfirst") else ""
            mode = "rerun previous {count} {noun}{suffix}".format(
                count=count, noun=noun, suffix=suffix
            )
        return "run-last-failure: %s" % mode

    def pytest_runtest_logreport(self, report):
        # a passing call phase (or any skip) clears the failure record;
        # any failed phase records the nodeid
        passed_call = report.when == "call" and report.passed
        if passed_call or report.skipped:
            self.lastfailed.pop(report.nodeid, None)
        elif report.failed:
            self.lastfailed[report.nodeid] = True

    def pytest_collectreport(self, report):
        # a collection error marks the collector itself as failed
        if report.outcome not in ("passed", "skipped"):
            self.lastfailed[report.nodeid] = True
            return
        self.lastfailed.pop(report.nodeid, None)
        # re-mark collected children that are still known failures
        self.lastfailed.update((item.nodeid, True) for item in report.result)

    def pytest_collection_modifyitems(self, session, config, items):
        if not self.active:
            return
        if not self.lastfailed:
            # no recorded failures: either run everything ("all") or
            # deselect everything ("none"), per --lfnf
            if self._no_failures_behavior == "none":
                config.hook.pytest_deselected(items=items)
                items[:] = []
            return
        previously_failed = []
        previously_passed = []
        for item in items:
            bucket = (
                previously_failed
                if item.nodeid in self.lastfailed
                else previously_passed
            )
            bucket.append(item)
        self._previously_failed_count = len(previously_failed)
        if not previously_failed:
            # running a subset of all tests with recorded failures outside
            # of the set of tests currently executing
            return
        if self.config.getoption("lf"):
            # --lf: run only the failures, deselect the rest
            items[:] = previously_failed
            config.hook.pytest_deselected(items=previously_passed)
        else:
            # --ff: run everything, failures first
            items[:] = previously_failed + previously_passed

    def pytest_sessionfinish(self, session):
        config = self.config
        # don't persist during --cache-show or in xdist worker processes
        if config.getoption("cacheshow") or hasattr(config, "slaveinput"):
            return
        saved = config.cache.get("cache/lastfailed", {})
        if saved != self.lastfailed:
            config.cache.set("cache/lastfailed", self.lastfailed)
|
2015-07-11 16:13:27 +08:00
|
|
|
|
|
|
|
|
2018-02-14 03:49:28 +08:00
|
|
|
class NFPlugin(object):
    """Plugin which implements the --nf (run new-first) option.

    Previously seen nodeids are persisted under ``cache/nodeids``; items
    not in that list are considered "new" and scheduled first.
    """

    def __init__(self, config):
        self.config = config
        self.active = config.option.newfirst
        self.cached_nodeids = config.cache.get("cache/nodeids", [])

    def pytest_collection_modifyitems(self, session, config, items):
        if self.active:
            # split into never-seen items and already-known items,
            # preserving collection order within each group
            new_items = OrderedDict()
            other_items = OrderedDict()
            for item in items:
                bucket = (
                    new_items
                    if item.nodeid not in self.cached_nodeids
                    else other_items
                )
                bucket[item.nodeid] = item
            items[:] = self._get_increasing_order(
                six.itervalues(new_items)
            ) + self._get_increasing_order(six.itervalues(other_items))
        # record what was collected this run (items only, not collectors)
        self.cached_nodeids = [x.nodeid for x in items if isinstance(x, pytest.Item)]

    def _get_increasing_order(self, items):
        # most recently modified files first (largest mtime wins)
        def file_mtime(item):
            return item.fspath.mtime()

        return sorted(items, key=file_mtime, reverse=True)

    def pytest_sessionfinish(self, session):
        config = self.config
        # don't persist during --cache-show or in xdist worker processes
        if config.getoption("cacheshow") or hasattr(config, "slaveinput"):
            return
        config.cache.set("cache/nodeids", self.cached_nodeids)
|
2018-02-14 03:49:28 +08:00
|
|
|
|
|
|
|
|
2015-07-11 16:13:27 +08:00
|
|
|
def pytest_addoption(parser):
    """Register the cacheprovider's command line options and ini values."""
    group = parser.getgroup("general")
    # last-failed / failed-first selection
    group.addoption(
        "--lf",
        "--last-failed",
        action="store_true",
        dest="lf",
        help=(
            "rerun only the tests that failed "
            "at the last run (or all if none failed)"
        ),
    )
    group.addoption(
        "--ff",
        "--failed-first",
        action="store_true",
        dest="failedfirst",
        help=(
            "run all tests but run the last failures first. "
            "This may re-order tests and thus lead to "
            "repeated fixture setup/teardown"
        ),
    )
    # new-first ordering
    group.addoption(
        "--nf",
        "--new-first",
        action="store_true",
        dest="newfirst",
        help=(
            "run tests from new files first, then the rest of the tests "
            "sorted by file mtime"
        ),
    )
    # cache inspection and maintenance
    group.addoption(
        "--cache-show",
        action="store_true",
        dest="cacheshow",
        help="show cache contents, don't perform collection or tests",
    )
    group.addoption(
        "--cache-clear",
        action="store_true",
        dest="cacheclear",
        help="remove all cache contents at start of test run.",
    )
    parser.addini("cache_dir", default=".pytest_cache", help="cache directory path.")
    # behavior when --lf finds no recorded failures
    group.addoption(
        "--lfnf",
        "--last-failed-no-failures",
        action="store",
        dest="last_failed_no_failures",
        choices=("all", "none"),
        default="all",
        help=(
            "change the behavior when no test failed in the last run or no "
            "information about the last failures was found in the cache"
        ),
    )
|
2015-07-11 16:13:27 +08:00
|
|
|
|
|
|
|
|
|
|
|
def pytest_cmdline_main(config):
    """With --cache-show, replace the test session with a cache dump.

    Returns the exit code from :func:`cacheshow`, or ``None`` to let the
    normal session proceed.
    """
    if not config.option.cacheshow:
        return None
    # imported lazily to avoid a circular import at module load time
    from _pytest.main import wrap_session

    return wrap_session(config, cacheshow)
|
2015-07-11 16:13:27 +08:00
|
|
|
|
|
|
|
|
|
|
|
@pytest.hookimpl(tryfirst=True)
def pytest_configure(config):
    """Attach ``config.cache`` and register the --lf and --nf plugins."""
    config.cache = Cache(config)
    for plugin, name in (
        (LFPlugin(config), "lfplugin"),
        (NFPlugin(config), "nfplugin"),
    ):
        config.pluginmanager.register(plugin, name)
|
2015-07-11 16:13:27 +08:00
|
|
|
|
|
|
|
|
2015-07-25 02:32:50 +08:00
|
|
|
@pytest.fixture
def cache(request):
    """Return the config-wide cache object, persisting state across sessions.

    Usage::

        cache.get(key, default)
        cache.set(key, value)

    ``key`` is a ``/``-separated string; its first component should name
    your plugin or application so different cache users do not clash.
    Values may be any object the ``json`` stdlib module can handle.
    """
    return request.config.cache
|
|
|
|
|
2015-09-23 10:39:48 +08:00
|
|
|
|
2015-07-11 16:13:27 +08:00
|
|
|
def pytest_report_header(config):
    """In verbose mode, show the cache directory relative to the cwd."""
    if not config.option.verbose:
        return None
    relpath = py.path.local().bestrelpath(config.cache._cachedir)
    return "cachedir: %s" % relpath
|
|
|
|
|
|
|
|
|
2015-09-17 02:41:22 +08:00
|
|
|
def cacheshow(config, session):
    """Print all cached values and cached directories to the terminal.

    Runs instead of a normal test session when ``--cache-show`` is given
    (see ``pytest_cmdline_main``).  Returns 0 as the session exit code.
    """
    from pprint import pprint

    tw = py.io.TerminalWriter()
    tw.line("cachedir: " + str(config.cache._cachedir))
    if not config.cache._cachedir.check():
        # nothing was ever cached (or the directory was removed)
        tw.line("cache is empty")
        return 0
    # sentinel to distinguish "unreadable" from any legitimately cached value
    dummy = object()
    basedir = config.cache._cachedir
    vdir = basedir.join("v")
    tw.sep("-", "cache values")
    for valpath in sorted(vdir.visit(lambda x: x.isfile())):
        # reconstruct the "/"-separated cache key from the file path
        key = valpath.relto(vdir).replace(valpath.sep, "/")
        val = config.cache.get(key, dummy)
        if val is dummy:
            tw.line("%s contains unreadable content, " "will be ignored" % key)
        else:
            tw.line("%s contains:" % key)
            # pretty-print the JSON value, indented under its key
            stream = py.io.TextIO()
            pprint(val, stream=stream)
            for line in stream.getvalue().splitlines():
                tw.line(" " + line)

    ddir = basedir.join("d")
    if ddir.isdir() and ddir.listdir():
        tw.sep("-", "cache directories")
        for p in sorted(basedir.join("d").visit()):
            # if p.check(dir=1):
            #    print("%s/" % p.relto(basedir))
            if p.isfile():
                key = p.relto(basedir)
                tw.line("%s is a file of length %d" % (key, p.size()))
    return 0
|