Merge master into features

commit 6765aca0d1
@@ -20,9 +20,6 @@ jobs:
   include:
     # OSX tests - first (in test stage), since they are the slower ones.
     - &test-macos
-      # NOTE: (tests with) pexpect appear to be buggy on Travis,
-      # at least with coverage.
-      # Log: https://travis-ci.org/pytest-dev/pytest/jobs/500358864
       os: osx
       osx_image: xcode10.1
       language: generic
@@ -33,7 +30,7 @@ jobs:
         - python -V
         - test $(python -c 'import sys; print("%d%d" % sys.version_info[0:2])') = 27
     - <<: *test-macos
-      env: TOXENV=py37-xdist
+      env: TOXENV=py37-pexpect,py37-xdist PYTEST_COVERAGE=1
       before_install:
         - which python3
        - python3 -V
@@ -0,0 +1 @@
+Fix regression with ``--lf`` not re-running all tests with known failures from non-selected tests.
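The behaviour covered by this changelog entry can be reproduced outside the test suite. Below is a minimal reproduction sketch (illustrative only, not part of the commit); it assumes pytest is installed, is run from an empty scratch directory, and mirrors the pkg1/test_1.py / pkg1/test_2.py layout used by the new test further down.

# Reproduction sketch (illustrative only, not part of this commit).
# Assumes pytest is installed and the script runs in an empty directory.
import os
import subprocess

os.makedirs("pkg1", exist_ok=True)
with open("pkg1/test_1.py", "w") as f:
    f.write("def test_1(): assert 0\n")  # the known failure
with open("pkg1/test_2.py", "w") as f:
    f.write("def test_2(): pass\n")

# The first run records the failure in .pytest_cache/v/cache/lastfailed.
subprocess.run(["pytest"])

# Remove the failing file and rerun only last failures: before the fix the
# remaining file could be skipped during collection; with the fix test_2.py
# is still collected and run.
os.remove("pkg1/test_1.py")
subprocess.run(["pytest", "--lf"])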
@@ -164,10 +164,12 @@ class LFPlugin(object):
     def last_failed_paths(self):
         """Returns a set with all Paths()s of the previously failed nodeids (cached).
         """
-        result = getattr(self, "_last_failed_paths", None)
-        if result is None:
-            rootpath = Path(self.config.rootdir)
-            result = {rootpath / nodeid.split("::")[0] for nodeid in self.lastfailed}
-            self._last_failed_paths = result
-        return result
+        try:
+            return self._last_failed_paths
+        except AttributeError:
+            rootpath = Path(self.config.rootdir)
+            result = {rootpath / nodeid.split("::")[0] for nodeid in self.lastfailed}
+            result = {x for x in result if x.exists()}
+            self._last_failed_paths = result
+            return result
 
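The rewritten last_failed_paths() above does two things: it drops cached failure paths whose files no longer exist, and it caches the computed set on the instance via the try/except AttributeError idiom. A standalone sketch of that idiom follows; LastFailedCache and its attributes are illustrative names, not pytest API.

# Sketch of the compute-once, cache-on-the-instance idiom (illustrative only).
from pathlib import Path


class LastFailedCache(object):
    def __init__(self, rootdir, lastfailed_nodeids):
        self.rootdir = rootdir
        self.lastfailed = lastfailed_nodeids

    def last_failed_paths(self):
        try:
            return self._paths  # populated by the first call
        except AttributeError:
            rootpath = Path(self.rootdir)
            # Node ids look like "pkg1/test_1.py::test_1"; keep the file part.
            result = {rootpath / nodeid.split("::")[0] for nodeid in self.lastfailed}
            # Drop files removed or renamed since the failures were recorded.
            result = {p for p in result if p.exists()}
            self._paths = result
            return result


cache = LastFailedCache(".", {"pkg1/test_1.py::test_1"})
print(cache.last_failed_paths())  # computed once, served from the cache afterwards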
@@ -176,12 +178,9 @@ class LFPlugin(object):
         Ignore this file path if we are in --lf mode and it is not in the list of
         previously failed files.
         """
-        if (
-            self.active
-            and self.config.getoption("lf")
-            and path.isfile()
-            and self.lastfailed
-        ):
-            skip_it = Path(path) not in self.last_failed_paths()
-            if skip_it:
-                self._skipped_files += 1
+        if self.active and self.config.getoption("lf") and path.isfile():
+            last_failed_paths = self.last_failed_paths()
+            if last_failed_paths:
+                skip_it = Path(path) not in self.last_failed_paths()
+                if skip_it:
+                    self._skipped_files += 1
@@ -234,20 +233,16 @@ class LFPlugin(object):
                 items[:] = previously_failed + previously_passed
 
                 noun = "failure" if self._previously_failed_count == 1 else "failures"
+                suffix = " first" if self.config.getoption("failedfirst") else ""
+                self._report_status = "rerun previous {count} {noun}{suffix}".format(
+                    count=self._previously_failed_count, suffix=suffix, noun=noun
+                )
+
                 if self._skipped_files > 0:
                     files_noun = "file" if self._skipped_files == 1 else "files"
-                    skipped_files_msg = " (skipped {files} {files_noun})".format(
+                    self._report_status += " (skipped {files} {files_noun})".format(
                         files=self._skipped_files, files_noun=files_noun
                     )
-                else:
-                    skipped_files_msg = ""
-                suffix = " first" if self.config.getoption("failedfirst") else ""
-                self._report_status = "rerun previous {count} {noun}{suffix}{skipped_files}".format(
-                    count=self._previously_failed_count,
-                    suffix=suffix,
-                    noun=noun,
-                    skipped_files=skipped_files_msg,
-                )
         else:
             self._report_status = "no previously failed tests, "
             if self.config.getoption("last_failed_no_failures") == "none":
@@ -832,6 +832,48 @@ class TestLastFailed(object):
             ]
         )
 
+    def test_lastfailed_with_known_failures_not_being_selected(self, testdir):
+        testdir.makepyfile(
+            **{
+                "pkg1/test_1.py": """def test_1(): assert 0""",
+                "pkg1/test_2.py": """def test_2(): pass""",
+            }
+        )
+        result = testdir.runpytest()
+        result.stdout.fnmatch_lines(["collected 2 items", "* 1 failed, 1 passed in *"])
+
+        py.path.local("pkg1/test_1.py").remove()
+        result = testdir.runpytest("--lf")
+        result.stdout.fnmatch_lines(
+            [
+                "collected 1 item",
+                "run-last-failure: 1 known failures not in selected tests",
+                "* 1 passed in *",
+            ]
+        )
+
+        # Recreate file with known failure.
+        testdir.makepyfile(**{"pkg1/test_1.py": """def test_1(): assert 0"""})
+        result = testdir.runpytest("--lf")
+        result.stdout.fnmatch_lines(
+            [
+                "collected 1 item",
+                "run-last-failure: rerun previous 1 failure (skipped 1 file)",
+                "* 1 failed in *",
+            ]
+        )
+
+        # Remove/rename test.
+        testdir.makepyfile(**{"pkg1/test_1.py": """def test_renamed(): assert 0"""})
+        result = testdir.runpytest("--lf")
+        result.stdout.fnmatch_lines(
+            [
+                "collected 1 item",
+                "run-last-failure: 1 known failures not in selected tests (skipped 1 file)",
+                "* 1 failed in *",
+            ]
+        )
+
 
 class TestNewFirst(object):
     def test_newfirst_usecase(self, testdir):
@@ -4,7 +4,6 @@ from __future__ import division
 from __future__ import print_function
 
 import os
-import platform
 import sys
 
 import six
@@ -153,10 +152,11 @@ class TestPDB(object):
 
     @staticmethod
     def flush(child):
-        if platform.system() == "Darwin":
-            return
         if child.isalive():
+            # Read if the test has not (e.g. test_pdb_unittest_skip).
+            child.read()
             child.wait()
+        assert not child.isalive()
 
     def test_pdb_unittest_postmortem(self, testdir):
         p1 = testdir.makepyfile(
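The updated flush() helper above drains any unread output from the pexpect child before waiting on it and then asserts that the child has exited, so wait() cannot block on unread output. A standalone sketch of the same pattern against a trivial child process (illustrative only; assumes pexpect is installed on a POSIX platform and that "python" is on PATH):

# Sketch of the drain-then-wait pattern used by the flush() helper above.
import pexpect

child = pexpect.spawn("python", ["-c", "print('hello from child')"])

if child.isalive():
    child.read()  # drain remaining output so wait() cannot block on unread data
    child.wait()  # then reap the process
assert not child.isalive()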
@@ -797,7 +797,6 @@ class TestPDB(object):
         rest = child.read().decode("utf8")
         assert "leave_pdb_hook" in rest
         assert "1 failed" in rest
-        child.sendeof()
         self.flush(child)
 
     def test_pdb_custom_cls(self, testdir, custom_pdb_calls):
tox.ini
@@ -40,7 +40,7 @@ setenv =
     lsof: _PYTEST_TOX_POSARGS_LSOF=--lsof
 
     pexpect: _PYTEST_TOX_PLATFORM=linux|darwin
-    pexpect: _PYTEST_TOX_POSARGS_PEXPECT=testing/test_pdb.py testing/test_terminal.py testing/test_unittest.py
+    pexpect: _PYTEST_TOX_POSARGS_PEXPECT=-m uses_pexpect
 
     twisted: _PYTEST_TOX_POSARGS_TWISTED=testing/test_unittest.py
 
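tox now selects the pexpect-dependent tests with a marker expression (-m uses_pexpect) instead of a hard-coded list of test files. As a generic illustration of marker-based selection (this example module and the registration hint are illustrative, not part of the commit):

# test_example.py -- generic illustration of selecting tests by marker.
import pytest


@pytest.mark.uses_pexpect
def test_needs_pexpect():
    pass  # would drive an interactive child via pexpect in the real suite


def test_regular():
    pass


# "pytest -m uses_pexpect test_example.py" runs only the marked test, while
# "pytest -m 'not uses_pexpect' test_example.py" runs the rest. Registering
# the marker (e.g. "markers = uses_pexpect" under [pytest]) avoids
# unknown-marker warnings.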