Fix flaky TestDurations test
TestDurations tests the `--durations=N` functionality which reports N slowest tests, with durations <= 0.005s not shown by default. The test relies on real time.sleep() (in addition to the code which uses time.perf_counter()) which makes it flaky and inconsistent between platforms. Instead of trying to tweak it more, make it use fake time instead. The way it is done is a little hacky but seems to work.
This commit is contained in:
parent
cb06bc7d6e
commit
c3e6e2e8c8
|
@ -2,8 +2,8 @@
|
||||||
import bdb
|
import bdb
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
from time import perf_counter
|
from time import perf_counter # Intentionally not `import time` to avoid being
|
||||||
from time import time
|
from time import time # affected by tests which monkeypatch `time` (issue #185).
|
||||||
from typing import Callable
|
from typing import Callable
|
||||||
from typing import Dict
|
from typing import Dict
|
||||||
from typing import List
|
from typing import List
|
||||||
|
|
|
@ -6,9 +6,11 @@ import types
|
||||||
import attr
|
import attr
|
||||||
import py
|
import py
|
||||||
|
|
||||||
|
import _pytest.runner
|
||||||
import pytest
|
import pytest
|
||||||
from _pytest.compat import importlib_metadata
|
from _pytest.compat import importlib_metadata
|
||||||
from _pytest.config import ExitCode
|
from _pytest.config import ExitCode
|
||||||
|
from _pytest.monkeypatch import MonkeyPatch
|
||||||
|
|
||||||
|
|
||||||
def prepend_pythonpath(*dirs):
|
def prepend_pythonpath(*dirs):
|
||||||
|
@ -893,61 +895,64 @@ class TestInvocationVariants:
|
||||||
assert request.config.pluginmanager.hasplugin("python")
|
assert request.config.pluginmanager.hasplugin("python")
|
||||||
|
|
||||||
|
|
||||||
|
def fake_time(monkeypatch: MonkeyPatch) -> None:
    """Monkeypatch time functions to make TestDurations not rely on actual time.

    ``time.sleep`` is replaced by a stub that only advances a simulated
    clock, and the clock readers used by ``_pytest.runner`` are pointed at
    that same simulated clock, so reported durations are deterministic.
    """
    import time

    # Arbitrary fixed starting point; only the deltas added by sleep() matter.
    simulated_now = 1586202699.9859412

    def advance(seconds: float) -> None:
        # Stand-in for time.sleep(): advances the fake clock instantly.
        nonlocal simulated_now
        simulated_now += seconds

    def read_clock() -> float:
        # Shared reader for both runner clock functions.
        return simulated_now

    monkeypatch.setattr(time, "sleep", advance)
    monkeypatch.setattr(_pytest.runner, "time", read_clock)
    monkeypatch.setattr(_pytest.runner, "perf_counter", read_clock)
|
||||||
|
|
||||||
|
|
||||||
class TestDurations:
|
class TestDurations:
|
||||||
source = """
|
source = """
|
||||||
import time
|
import time
|
||||||
frag = 0.002 # 2 ms
|
|
||||||
def test_something():
|
def test_something():
|
||||||
pass
|
pass
|
||||||
def test_2():
|
def test_2():
|
||||||
time.sleep(frag*5) # 10 ms: on windows might sleep < 0.005s
|
time.sleep(0.010)
|
||||||
def test_1():
|
def test_1():
|
||||||
time.sleep(frag) # 2 ms: on macOS/windows might sleep > 0.005s
|
time.sleep(0.002)
|
||||||
def test_3():
|
def test_3():
|
||||||
time.sleep(frag*10) # 20 ms
|
time.sleep(0.020)
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def test_calls(self, testdir):
|
def test_calls(self, testdir):
|
||||||
testdir.makepyfile(self.source)
|
testdir.makepyfile(self.source)
|
||||||
result = testdir.runpytest("--durations=10")
|
fake_time(testdir.monkeypatch)
|
||||||
|
result = testdir.runpytest_inprocess("--durations=10")
|
||||||
assert result.ret == 0
|
assert result.ret == 0
|
||||||
|
|
||||||
# on Windows, test 2 (10ms) can actually sleep less than 5ms and become hidden
|
result.stdout.fnmatch_lines_random(
|
||||||
if sys.platform == "win32":
|
["*durations*", "*call*test_3*", "*call*test_2*"]
|
||||||
to_match = ["*durations*", "*call*test_3*"]
|
)
|
||||||
else:
|
|
||||||
to_match = ["*durations*", "*call*test_3*", "*call*test_2*"]
|
|
||||||
result.stdout.fnmatch_lines_random(to_match)
|
|
||||||
|
|
||||||
# The number of hidden should be 8, but on macOS and windows it sometimes is 7
|
|
||||||
# - on MacOS and Windows test 1 can last longer and appear in the list
|
|
||||||
# - on Windows test 2 can last less and disappear from the list
|
|
||||||
if sys.platform in ("win32", "darwin"):
|
|
||||||
nb_hidden = "*"
|
|
||||||
else:
|
|
||||||
nb_hidden = "8"
|
|
||||||
|
|
||||||
result.stdout.fnmatch_lines(
|
result.stdout.fnmatch_lines(
|
||||||
[
|
["(8 durations < 0.005s hidden. Use -vv to show these durations.)"]
|
||||||
"(%s durations < 0.005s hidden. Use -vv to show these durations.)"
|
|
||||||
% nb_hidden
|
|
||||||
]
|
|
||||||
)
|
)
|
||||||
|
|
||||||
def test_calls_show_2(self, testdir):
|
def test_calls_show_2(self, testdir):
|
||||||
testdir.makepyfile(self.source)
|
testdir.makepyfile(self.source)
|
||||||
result = testdir.runpytest("--durations=2")
|
fake_time(testdir.monkeypatch)
|
||||||
|
result = testdir.runpytest_inprocess("--durations=2")
|
||||||
assert result.ret == 0
|
assert result.ret == 0
|
||||||
|
|
||||||
lines = result.stdout.get_lines_after("*slowest*durations*")
|
lines = result.stdout.get_lines_after("*slowest*durations*")
|
||||||
assert "4 passed" in lines[2]
|
assert "4 passed" in lines[2]
|
||||||
|
|
||||||
def test_calls_showall(self, testdir):
|
def test_calls_showall(self, testdir):
|
||||||
testdir.makepyfile(self.source)
|
testdir.makepyfile(self.source)
|
||||||
result = testdir.runpytest("--durations=0")
|
fake_time(testdir.monkeypatch)
|
||||||
|
result = testdir.runpytest_inprocess("--durations=0")
|
||||||
assert result.ret == 0
|
assert result.ret == 0
|
||||||
|
|
||||||
# on windows, test 2 (10ms) can actually sleep less than 5ms and become hidden
|
tested = "3"
|
||||||
tested = "3" if sys.platform == "win32" else "23"
|
|
||||||
for x in tested:
|
for x in tested:
|
||||||
for y in ("call",): # 'setup', 'call', 'teardown':
|
for y in ("call",): # 'setup', 'call', 'teardown':
|
||||||
for line in result.stdout.lines:
|
for line in result.stdout.lines:
|
||||||
|
@ -958,8 +963,10 @@ class TestDurations:
|
||||||
|
|
||||||
def test_calls_showall_verbose(self, testdir):
|
def test_calls_showall_verbose(self, testdir):
|
||||||
testdir.makepyfile(self.source)
|
testdir.makepyfile(self.source)
|
||||||
result = testdir.runpytest("--durations=0", "-vv")
|
fake_time(testdir.monkeypatch)
|
||||||
|
result = testdir.runpytest_inprocess("--durations=0", "-vv")
|
||||||
assert result.ret == 0
|
assert result.ret == 0
|
||||||
|
|
||||||
for x in "123":
|
for x in "123":
|
||||||
for y in ("call",): # 'setup', 'call', 'teardown':
|
for y in ("call",): # 'setup', 'call', 'teardown':
|
||||||
for line in result.stdout.lines:
|
for line in result.stdout.lines:
|
||||||
|
@ -970,16 +977,19 @@ class TestDurations:
|
||||||
|
|
||||||
def test_with_deselected(self, testdir):
|
def test_with_deselected(self, testdir):
|
||||||
testdir.makepyfile(self.source)
|
testdir.makepyfile(self.source)
|
||||||
# on windows test 2 might sleep less than 0.005s and be hidden. Prefer test 3.
|
fake_time(testdir.monkeypatch)
|
||||||
result = testdir.runpytest("--durations=2", "-k test_3")
|
result = testdir.runpytest_inprocess("--durations=2", "-k test_3")
|
||||||
assert result.ret == 0
|
assert result.ret == 0
|
||||||
|
|
||||||
result.stdout.fnmatch_lines(["*durations*", "*call*test_3*"])
|
result.stdout.fnmatch_lines(["*durations*", "*call*test_3*"])
|
||||||
|
|
||||||
def test_with_failing_collection(self, testdir):
|
def test_with_failing_collection(self, testdir):
|
||||||
testdir.makepyfile(self.source)
|
testdir.makepyfile(self.source)
|
||||||
|
fake_time(testdir.monkeypatch)
|
||||||
testdir.makepyfile(test_collecterror="""xyz""")
|
testdir.makepyfile(test_collecterror="""xyz""")
|
||||||
result = testdir.runpytest("--durations=2", "-k test_1")
|
result = testdir.runpytest_inprocess("--durations=2", "-k test_1")
|
||||||
assert result.ret == 2
|
assert result.ret == 2
|
||||||
|
|
||||||
result.stdout.fnmatch_lines(["*Interrupted: 1 error during collection*"])
|
result.stdout.fnmatch_lines(["*Interrupted: 1 error during collection*"])
|
||||||
# Collection errors abort test execution, therefore no duration is
|
# Collection errors abort test execution, therefore no duration is
|
||||||
# output
|
# output
|
||||||
|
@ -987,27 +997,28 @@ class TestDurations:
|
||||||
|
|
||||||
def test_with_not(self, testdir):
|
def test_with_not(self, testdir):
|
||||||
testdir.makepyfile(self.source)
|
testdir.makepyfile(self.source)
|
||||||
result = testdir.runpytest("-k not 1")
|
fake_time(testdir.monkeypatch)
|
||||||
|
result = testdir.runpytest_inprocess("-k not 1")
|
||||||
assert result.ret == 0
|
assert result.ret == 0
|
||||||
|
|
||||||
|
|
||||||
class TestDurationWithFixture:
|
class TestDurationsWithFixture:
|
||||||
source = """
|
source = """
|
||||||
import pytest
|
import pytest
|
||||||
import time
|
import time
|
||||||
frag = 0.02 # as on windows sleep(0.01) might take < 0.005s
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def setup_fixt():
|
def setup_fixt():
|
||||||
time.sleep(frag)
|
time.sleep(0.02)
|
||||||
|
|
||||||
def test_1(setup_fixt):
|
def test_1(setup_fixt):
|
||||||
time.sleep(frag)
|
time.sleep(0.02)
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def test_setup_function(self, testdir):
|
def test_setup_function(self, testdir):
|
||||||
testdir.makepyfile(self.source)
|
testdir.makepyfile(self.source)
|
||||||
result = testdir.runpytest("--durations=10")
|
fake_time(testdir.monkeypatch)
|
||||||
|
result = testdir.runpytest_inprocess("--durations=10")
|
||||||
assert result.ret == 0
|
assert result.ret == 0
|
||||||
|
|
||||||
result.stdout.fnmatch_lines_random(
|
result.stdout.fnmatch_lines_random(
|
||||||
|
|
Loading…
Reference in New Issue