move all the things into _pytest.pathlib
commit 85cc9b8f12
parent fed4f73a61
@@ -17,7 +17,8 @@ import atomicwrites
 import py
 
 from _pytest.assertion import util
-from _pytest.compat import PurePath, spec_from_file_location
+from _pytest.pathlib import PurePath
+from _pytest.compat import spec_from_file_location
 from _pytest.paths import fnmatch_ex
 
 # pytest caches rewritten pycs in __pycache__.
@@ -16,7 +16,8 @@ import json
 import shutil
 
 from . import paths
-from .compat import _PY2 as PY2, Path
+from .compat import _PY2 as PY2
+from .pathlib import Path
 
 README_CONTENT = u"""\
 # pytest cache directory #
@@ -23,8 +23,6 @@ except ImportError: # pragma: no cover
     # Only available in Python 3.4+ or as a backport
     enum = None
 
-__all__ = ["Path", "PurePath"]
-
 _PY3 = sys.version_info > (3, 0)
 _PY2 = not _PY3
 
@@ -41,11 +39,6 @@ PY35 = sys.version_info[:2] >= (3, 5)
 PY36 = sys.version_info[:2] >= (3, 6)
 MODULE_NOT_FOUND_ERROR = "ModuleNotFoundError" if PY36 else "ImportError"
 
-if PY36:
-    from pathlib import Path, PurePath
-else:
-    from pathlib2 import Path, PurePath
-
 
 if _PY3:
     from collections.abc import MutableMapping as MappingMixin
@@ -0,0 +1,183 @@
+import os
+import errno
+import atexit
+import operator
+import six
+from functools import reduce
+import uuid
+from six.moves import map
+import itertools
+import shutil
+
+from .compat import PY36
+
+if PY36:
+    from pathlib import Path, PurePath
+else:
+    from pathlib2 import Path, PurePath
+
+__all__ = ["Path", "PurePath"]
+
+
+LOCK_TIMEOUT = 60 * 60 * 3
+
+get_lock_path = operator.methodcaller("joinpath", ".lock")
+
+
+def find_prefixed(root, prefix):
+    l_prefix = prefix.lower()
+    for x in root.iterdir():
+        if x.name.lower().startswith(l_prefix):
+            yield x
+
+
+def extract_suffixes(iter, prefix):
+    p_len = len(prefix)
+    for p in iter:
+        yield p.name[p_len:]
+
+
+def find_suffixes(root, prefix):
+    return extract_suffixes(find_prefixed(root, prefix), prefix)
+
+
+def parse_num(maybe_num):
+    try:
+        return int(maybe_num)
+    except ValueError:
+        return -1
+
+
+def _max(iterable, default):
+    # needed due to python2.7 lacking the default argument for max
+    return reduce(max, iterable, default)
+
+
+def make_numbered_dir(root, prefix):
+    for i in range(10):
+        # try up to 10 times to create the folder
+        max_existing = _max(map(parse_num, find_suffixes(root, prefix)), -1)
+        new_number = max_existing + 1
+        new_path = root.joinpath("{}{}".format(prefix, new_number))
+        try:
+            new_path.mkdir()
+        except Exception:
+            pass
+        else:
+            return new_path
+    else:
+        raise EnvironmentError(
+            "could not create numbered dir with prefix {prefix} in {root})".format(
+                prefix=prefix, root=root
+            )
+        )
+
+
+def create_cleanup_lock(p):
+    lock_path = get_lock_path(p)
+    try:
+        fd = os.open(str(lock_path), os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0o644)
+    except OSError as e:
+        if e.errno == errno.EEXIST:
+            six.raise_from(
+                EnvironmentError("cannot create lockfile in {path}".format(path=p)), e
+            )
+        else:
+            raise
+    else:
+        pid = os.getpid()
+        spid = str(pid)
+        if not isinstance(spid, six.binary_type):
+            spid = spid.encode("ascii")
+        os.write(fd, spid)
+        os.close(fd)
+        if not lock_path.is_file():
+            raise EnvironmentError("lock path got renamed after sucessfull creation")
+        return lock_path
+
+
+def register_cleanup_lock_removal(lock_path, register=atexit.register):
+    pid = os.getpid()
+
+    def cleanup_on_exit(lock_path=lock_path, original_pid=pid):
+        current_pid = os.getpid()
+        if current_pid != original_pid:
+            # fork
+            return
+        try:
+            lock_path.unlink()
+        except (OSError, IOError):
+            pass
+
+    return register(cleanup_on_exit)
+
+
+def delete_a_numbered_dir(path):
+    create_cleanup_lock(path)
+    parent = path.parent
+
+    garbage = parent.joinpath("garbage-{}".format(uuid.uuid4()))
+    path.rename(garbage)
+    shutil.rmtree(str(garbage), ignore_errors=True)
+
+
+def ensure_deletable(path, consider_lock_dead_if_created_before):
+    lock = get_lock_path(path)
+    if not lock.exists():
+        return True
+    try:
+        lock_time = lock.stat().st_mtime
+    except Exception:
+        return False
+    else:
+        if lock_time < consider_lock_dead_if_created_before:
+            lock.unlink()
+            return True
+        else:
+            return False
+
+
+def try_cleanup(path, consider_lock_dead_if_created_before):
+    if ensure_deletable(path, consider_lock_dead_if_created_before):
+        delete_a_numbered_dir(path)
+
+
+def cleanup_candidates(root, prefix, keep):
+    max_existing = _max(map(parse_num, find_suffixes(root, prefix)), -1)
+    max_delete = max_existing - keep
+    paths = find_prefixed(root, prefix)
+    paths, paths2 = itertools.tee(paths)
+    numbers = map(parse_num, extract_suffixes(paths2, prefix))
+    for path, number in zip(paths, numbers):
+        if number <= max_delete:
+            yield path
+
+
+def cleanup_numbered_dir(root, prefix, keep, consider_lock_dead_if_created_before):
+    for path in cleanup_candidates(root, prefix, keep):
+        try_cleanup(path, consider_lock_dead_if_created_before)
+    for path in root.glob("garbage-*"):
+        try_cleanup(path, consider_lock_dead_if_created_before)
+
+
+def make_numbered_dir_with_cleanup(root, prefix, keep, lock_timeout):
+    e = None
+    for i in range(10):
+        try:
+            p = make_numbered_dir(root, prefix)
+            lock_path = create_cleanup_lock(p)
+            register_cleanup_lock_removal(lock_path)
+        except Exception as e:
+            pass
+        else:
+            consider_lock_dead_if_created_before = p.stat().st_mtime - lock_timeout
+            cleanup_numbered_dir(
+                root=root,
+                prefix=prefix,
+                keep=keep,
+                consider_lock_dead_if_created_before=consider_lock_dead_if_created_before,
+            )
+            return p
+    assert e is not None
+    raise e
@@ -5,7 +5,7 @@ import sys
 
 import six
 
-from .compat import Path, PurePath
+from .pathlib import Path, PurePath
 
 
 def resolve_from_str(input, root):
@@ -17,13 +17,14 @@ from weakref import WeakKeyDictionary
 
 from _pytest.capture import MultiCapture, SysCapture
 from _pytest._code import Source
-import py
-import pytest
 from _pytest.main import Session, EXIT_OK
 from _pytest.assertion.rewrite import AssertionRewritingHook
-from _pytest.compat import Path
+from _pytest.pathlib import Path
 from _pytest.compat import safe_str
+
+import py
+import pytest
 
 IGNORE_PAM = [  # filenames added when obtaining details about the current user
     u"/var/lib/sss/mc/passwd"
 ]
@@ -2,185 +2,13 @@
 from __future__ import absolute_import, division, print_function
 
 import re
-import os
-import errno
-import atexit
-import operator
-import six
-from functools import reduce
-import uuid
-from six.moves import map
 import pytest
 import py
 from _pytest.monkeypatch import MonkeyPatch
-from .compat import Path
 import attr
-import shutil
 import tempfile
-import itertools
-
-
-LOCK_TIMEOUT = 60 * 60 * 3
-
-get_lock_path = operator.methodcaller("joinpath", ".lock")
-
-
-def find_prefixed(root, prefix):
-    l_prefix = prefix.lower()
-    for x in root.iterdir():
-        if x.name.lower().startswith(l_prefix):
-            yield x
-
-
-def extract_suffixes(iter, prefix):
-    p_len = len(prefix)
-    for p in iter:
-        yield p.name[p_len:]
-
-
-def find_suffixes(root, prefix):
-    return extract_suffixes(find_prefixed(root, prefix), prefix)
-
-
-def parse_num(maybe_num):
-    try:
-        return int(maybe_num)
-    except ValueError:
-        return -1
-
-
-def _max(iterable, default):
-    # needed due to python2.7 lacking the default argument for max
-    return reduce(max, iterable, default)
-
-
-def make_numbered_dir(root, prefix):
-    for i in range(10):
-        # try up to 10 times to create the folder
-        max_existing = _max(map(parse_num, find_suffixes(root, prefix)), -1)
-        new_number = max_existing + 1
-        new_path = root.joinpath("{}{}".format(prefix, new_number))
-        try:
-            new_path.mkdir()
-        except Exception:
-            pass
-        else:
-            return new_path
-    else:
-        raise EnvironmentError(
-            "could not create numbered dir with prefix {prefix} in {root})".format(
-                prefix=prefix, root=root
-            )
-        )
-
-
-def create_cleanup_lock(p):
-    lock_path = get_lock_path(p)
-    try:
-        fd = os.open(str(lock_path), os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0o644)
-    except OSError as e:
-        if e.errno == errno.EEXIST:
-            six.raise_from(
-                EnvironmentError("cannot create lockfile in {path}".format(path=p)), e
-            )
-        else:
-            raise
-    else:
-        pid = os.getpid()
-        spid = str(pid)
-        if not isinstance(spid, six.binary_type):
-            spid = spid.encode("ascii")
-        os.write(fd, spid)
-        os.close(fd)
-        if not lock_path.is_file():
-            raise EnvironmentError("lock path got renamed after sucessfull creation")
-        return lock_path
-
-
-def register_cleanup_lock_removal(lock_path, register=atexit.register):
-    pid = os.getpid()
-
-    def cleanup_on_exit(lock_path=lock_path, original_pid=pid):
-        current_pid = os.getpid()
-        if current_pid != original_pid:
-            # fork
-            return
-        try:
-            lock_path.unlink()
-        except (OSError, IOError):
-            pass
-
-    return register(cleanup_on_exit)
-
-
-def delete_a_numbered_dir(path):
-    create_cleanup_lock(path)
-    parent = path.parent
-
-    garbage = parent.joinpath("garbage-{}".format(uuid.uuid4()))
-    path.rename(garbage)
-    shutil.rmtree(str(garbage), ignore_errors=True)
-
-
-def ensure_deletable(path, consider_lock_dead_if_created_before):
-    lock = get_lock_path(path)
-    if not lock.exists():
-        return True
-    try:
-        lock_time = lock.stat().st_mtime
-    except Exception:
-        return False
-    else:
-        if lock_time < consider_lock_dead_if_created_before:
-            lock.unlink()
-            return True
-        else:
-            return False
-
-
-def try_cleanup(path, consider_lock_dead_if_created_before):
-    if ensure_deletable(path, consider_lock_dead_if_created_before):
-        delete_a_numbered_dir(path)
-
-
-def cleanup_candidates(root, prefix, keep):
-    max_existing = _max(map(parse_num, find_suffixes(root, prefix)), -1)
-    max_delete = max_existing - keep
-    paths = find_prefixed(root, prefix)
-    paths, paths2 = itertools.tee(paths)
-    numbers = map(parse_num, extract_suffixes(paths2, prefix))
-    for path, number in zip(paths, numbers):
-        if number <= max_delete:
-            yield path
-
-
-def cleanup_numbered_dir(root, prefix, keep, consider_lock_dead_if_created_before):
-    for path in cleanup_candidates(root, prefix, keep):
-        try_cleanup(path, consider_lock_dead_if_created_before)
-    for path in root.glob("garbage-*"):
-        try_cleanup(path, consider_lock_dead_if_created_before)
-
-
-def make_numbered_dir_with_cleanup(root, prefix, keep, lock_timeout):
-    e = None
-    for i in range(10):
-        try:
-            p = make_numbered_dir(root, prefix)
-            lock_path = create_cleanup_lock(p)
-            register_cleanup_lock_removal(lock_path)
-        except Exception as e:
-            pass
-        else:
-            consider_lock_dead_if_created_before = p.stat().st_mtime - lock_timeout
-            cleanup_numbered_dir(
-                root=root,
-                prefix=prefix,
-                keep=keep,
-                consider_lock_dead_if_created_before=consider_lock_dead_if_created_before,
-            )
-            return p
-    assert e is not None
-    raise e
+
+from .pathlib import Path, make_numbered_dir, make_numbered_dir_with_cleanup
 
 
 @attr.s
@@ -192,7 +192,7 @@ class TestNumberedDir(object):
     PREFIX = "fun-"
 
     def test_make(self, tmp_path):
-        from _pytest.tmpdir import make_numbered_dir
+        from _pytest.pathlib import make_numbered_dir
 
         for i in range(10):
            d = make_numbered_dir(root=tmp_path, prefix=self.PREFIX)
@@ -202,7 +202,7 @@
     def test_cleanup_lock_create(self, tmp_path):
         d = tmp_path.joinpath("test")
         d.mkdir()
-        from _pytest.tmpdir import create_cleanup_lock
+        from _pytest.pathlib import create_cleanup_lock
 
         lockfile = create_cleanup_lock(d)
         with pytest.raises(EnvironmentError, match="cannot create lockfile in .*"):
@@ -211,7 +211,7 @@
         lockfile.unlink()
 
     def test_lock_register_cleanup_removal(self, tmp_path):
-        from _pytest.tmpdir import create_cleanup_lock, register_cleanup_lock_removal
+        from _pytest.pathlib import create_cleanup_lock, register_cleanup_lock_removal
 
         lock = create_cleanup_lock(tmp_path)
 
@@ -236,7 +236,7 @@
 
     def test_cleanup_keep(self, tmp_path):
         self.test_make(tmp_path)
-        from _pytest.tmpdir import cleanup_numbered_dir
+        from _pytest.pathlib import cleanup_numbered_dir
 
         cleanup_numbered_dir(
             root=tmp_path,
@@ -249,15 +249,15 @@
 
     def test_cleanup_locked(self, tmp_path):
 
-        from _pytest import tmpdir
+        from _pytest import pathlib
 
-        p = tmpdir.make_numbered_dir(root=tmp_path, prefix=self.PREFIX)
+        p = pathlib.make_numbered_dir(root=tmp_path, prefix=self.PREFIX)
 
-        tmpdir.create_cleanup_lock(p)
+        pathlib.create_cleanup_lock(p)
 
-        assert not tmpdir.ensure_deletable(
+        assert not pathlib.ensure_deletable(
             p, consider_lock_dead_if_created_before=p.stat().st_mtime - 1
         )
-        assert tmpdir.ensure_deletable(
+        assert pathlib.ensure_deletable(
             p, consider_lock_dead_if_created_before=p.stat().st_mtime + 1
         )
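
For orientation only, a minimal usage sketch of the helpers that the new _pytest/pathlib.py module above introduces. This is not part of the commit; the base directory, prefix, and keep values are made-up examples, and only functions that appear in the diff are used.

from _pytest.pathlib import Path, make_numbered_dir_with_cleanup

base = Path("/tmp/pathlib-demo")  # hypothetical scratch location
base.mkdir(parents=True, exist_ok=True)

# Creates the next free "run-N" directory under `base`, takes a .lock file in
# it, registers an atexit hook that removes the lock, and prunes older "run-*"
# directories beyond `keep` whose locks look dead (older than lock_timeout).
run_dir = make_numbered_dir_with_cleanup(
    root=base,
    prefix="run-",
    keep=3,
    lock_timeout=60 * 60 * 3,  # same value as LOCK_TIMEOUT in the new module
)
print(run_dir)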