Introduce _pytest.timing as a way to control timing during tests
_pytest.timing is an indirection to the 'time' functions: pytest production code should use it instead of 'time' directly. 'mock_timing' is a new fixture which mocks those functions, allowing us to write time-reliable tests that run instantly and are not flaky. This was triggered by recent flaky junitxml tests on Windows related to timing issues.
parent c98bc4cd3d
commit 1780924b27
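For illustration only, a minimal sketch of the pattern this commit establishes; `how_long` is a hypothetical helper, while `timing` and the `mock_timing` fixture are the ones introduced in the diff below:

    # Production code imports the indirection module, never `time` directly:
    from _pytest import timing

    def how_long(func):
        # Hypothetical helper: measure how long `func` takes to run.
        start = timing.perf_counter()
        func()
        return timing.perf_counter() - start

    # In pytest's own test suite, the `mock_timing` fixture (added to
    # testing/conftest.py in this commit) patches timing.sleep/time/perf_counter
    # so the clock is static and only advances through timing.sleep() calls:
    def test_how_long_is_instant(mock_timing):
        assert how_long(lambda: timing.sleep(100)) == 100.0  # no real waiting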
src/_pytest/junitxml.py
@@ -13,7 +13,6 @@ import os
 import platform
 import re
 import sys
-import time
 from datetime import datetime

 import py
@@ -21,6 +20,7 @@ import py
 import pytest
 from _pytest import deprecated
 from _pytest import nodes
+from _pytest import timing
 from _pytest.config import filename_arg
 from _pytest.store import StoreKey
 from _pytest.warnings import _issue_warning_captured
@@ -627,14 +627,14 @@ class LogXML:
         reporter._add_simple(Junit.error, "internal error", excrepr)

     def pytest_sessionstart(self):
-        self.suite_start_time = time.time()
+        self.suite_start_time = timing.time()

     def pytest_sessionfinish(self):
         dirname = os.path.dirname(os.path.abspath(self.logfile))
         if not os.path.isdir(dirname):
             os.makedirs(dirname)
         logfile = open(self.logfile, "w", encoding="utf-8")
-        suite_stop_time = time.time()
+        suite_stop_time = timing.time()
         suite_time_delta = suite_stop_time - self.suite_start_time

         numtests = (
@@ -662,7 +662,7 @@ class LogXML:
         logfile.close()

     def pytest_terminal_summary(self, terminalreporter):
-        terminalreporter.write_sep("-", "generated xml file: %s" % (self.logfile))
+        terminalreporter.write_sep("-", "generated xml file: {}".format(self.logfile))

     def add_global_property(self, name, value):
         __tracebackhide__ = True
src/_pytest/pytester.py
@@ -7,7 +7,6 @@ import platform
 import re
 import subprocess
 import sys
-import time
 import traceback
 from fnmatch import fnmatch
 from io import StringIO
@@ -24,6 +23,7 @@ from weakref import WeakKeyDictionary
 import py

 import pytest
+from _pytest import timing
 from _pytest._code import Source
 from _pytest.capture import MultiCapture
 from _pytest.capture import SysCapture
@@ -941,7 +941,7 @@ class Testdir:

         if syspathinsert:
             self.syspathinsert()
-        now = time.time()
+        now = timing.time()
         capture = MultiCapture(Capture=SysCapture)
         capture.start_capturing()
         try:
@@ -970,7 +970,7 @@ class Testdir:
             sys.stderr.write(err)

         res = RunResult(
-            reprec.ret, out.splitlines(), err.splitlines(), time.time() - now
+            reprec.ret, out.splitlines(), err.splitlines(), timing.time() - now
         )
         res.reprec = reprec  # type: ignore
         return res
@@ -1171,7 +1171,7 @@ class Testdir:
         f1 = open(str(p1), "w", encoding="utf8")
         f2 = open(str(p2), "w", encoding="utf8")
         try:
-            now = time.time()
+            now = timing.time()
             popen = self.popen(
                 cmdargs,
                 stdin=stdin,
@@ -1218,7 +1218,7 @@ class Testdir:
                 ret = ExitCode(ret)
             except ValueError:
                 pass
-        return RunResult(ret, out, err, time.time() - now)
+        return RunResult(ret, out, err, timing.time() - now)

     def _dump_lines(self, lines, fp):
         try:
src/_pytest/runner.py
@@ -2,8 +2,6 @@
 import bdb
 import os
 import sys
-from time import perf_counter  # Intentionally not `import time` to avoid being
-from time import time  # affected by tests which monkeypatch `time` (issue #185).
 from typing import Callable
 from typing import Dict
 from typing import List
@@ -15,6 +13,7 @@ import attr
 from .reports import CollectErrorRepr
 from .reports import CollectReport
 from .reports import TestReport
+from _pytest import timing
 from _pytest._code.code import ExceptionChainRepr
 from _pytest._code.code import ExceptionInfo
 from _pytest.compat import TYPE_CHECKING
@@ -254,8 +253,8 @@ class CallInfo:
         #: context of invocation: one of "setup", "call",
         #: "teardown", "memocollect"
         excinfo = None
-        start = time()
-        precise_start = perf_counter()
+        start = timing.time()
+        precise_start = timing.perf_counter()
         try:
             result = func()
         except BaseException:
@@ -264,9 +263,9 @@
                 raise
             result = None
         # use the perf counter
-        precise_stop = perf_counter()
+        precise_stop = timing.perf_counter()
         duration = precise_stop - precise_start
-        stop = time()
+        stop = timing.time()
         return cls(
             start=start,
             stop=stop,
src/_pytest/terminal.py
@@ -7,7 +7,6 @@ import datetime
 import inspect
 import platform
 import sys
-import time
 import warnings
 from functools import partial
 from typing import Any
@@ -26,6 +25,7 @@ from more_itertools import collapse

 import pytest
 from _pytest import nodes
+from _pytest import timing
 from _pytest._io import TerminalWriter
 from _pytest.compat import order_preserving_dict
 from _pytest.config import Config
@@ -557,7 +557,7 @@ class TerminalReporter:
         if self.isatty:
             if self.config.option.verbose >= 0:
                 self.write("collecting ... ", flush=True, bold=True)
-                self._collect_report_last_write = time.time()
+                self._collect_report_last_write = timing.time()
         elif self.config.option.verbose >= 1:
             self.write("collecting ... ", flush=True, bold=True)

@@ -577,7 +577,7 @@ class TerminalReporter:

         if not final:
             # Only write "collecting" report every 0.5s.
-            t = time.time()
+            t = timing.time()
             if (
                 self._collect_report_last_write is not None
                 and self._collect_report_last_write > t - REPORT_COLLECTING_RESOLUTION
@@ -614,7 +614,7 @@ class TerminalReporter:
     @pytest.hookimpl(trylast=True)
     def pytest_sessionstart(self, session: Session) -> None:
         self._session = session
-        self._sessionstarttime = time.time()
+        self._sessionstarttime = timing.time()
         if not self.showheader:
             return
         self.write_sep("=", "test session starts", bold=True)
@@ -969,7 +969,7 @@ class TerminalReporter:
         if self.verbosity < -1:
             return

-        session_duration = time.time() - self._sessionstarttime
+        session_duration = timing.time() - self._sessionstarttime
         (parts, main_color) = self.build_summary_stats_line()
         line_parts = []

src/_pytest/timing.py
@@ -0,0 +1,13 @@
+"""
+Indirection for time functions.
+
+We intentionally keep references to some "time" functions internally so that tests which
+mock "time" do not affect pytest's own runtime information (issue #185).
+
+Fixture "mock_timing" also interacts with this module for pytest's own tests.
+"""
+from time import perf_counter
+from time import sleep
+from time import time
+
+__all__ = ["perf_counter", "sleep", "time"]
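A design note on the module above: call sites throughout this diff do `from _pytest import timing` and then call `timing.time()`, rather than `from _pytest.timing import time`. Only the former can be mocked after the fact, since the attribute is resolved on every call. A minimal sketch:

    from _pytest import timing          # late binding: timing.time is looked up at call time
    # from _pytest.timing import time   # early binding: a later patch of the module misses this

    def now():
        # monkeypatch.setattr(timing, "time", fake) redirects this call
        return timing.time()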
testing/acceptance_test.py
@@ -6,11 +6,9 @@ import types
 import attr
 import py

-import _pytest.runner
 import pytest
 from _pytest.compat import importlib_metadata
 from _pytest.config import ExitCode
-from _pytest.monkeypatch import MonkeyPatch
 from _pytest.pytester import Testdir


@@ -896,37 +894,21 @@ class TestInvocationVariants:
         assert request.config.pluginmanager.hasplugin("python")


-def fake_time(monkeypatch: MonkeyPatch) -> None:
-    """Monkeypatch time functions to make TestDurations not rely on actual time."""
-    import time
-
-    current_time = 1586202699.9859412
-
-    def sleep(seconds: float) -> None:
-        nonlocal current_time
-        current_time += seconds
-
-    monkeypatch.setattr(time, "sleep", sleep)
-    monkeypatch.setattr(_pytest.runner, "time", lambda: current_time)
-    monkeypatch.setattr(_pytest.runner, "perf_counter", lambda: current_time)
-
-
 class TestDurations:
     source = """
-        import time
+        from _pytest import timing
         def test_something():
             pass
         def test_2():
-            time.sleep(0.010)
+            timing.sleep(0.010)
         def test_1():
-            time.sleep(0.002)
+            timing.sleep(0.002)
         def test_3():
-            time.sleep(0.020)
+            timing.sleep(0.020)
     """

-    def test_calls(self, testdir):
+    def test_calls(self, testdir, mock_timing):
         testdir.makepyfile(self.source)
-        fake_time(testdir.monkeypatch)
         result = testdir.runpytest_inprocess("--durations=10")
         assert result.ret == 0

@@ -938,18 +920,17 @@ class TestDurations:
             ["(8 durations < 0.005s hidden. Use -vv to show these durations.)"]
         )

-    def test_calls_show_2(self, testdir):
+    def test_calls_show_2(self, testdir, mock_timing):
+
         testdir.makepyfile(self.source)
-        fake_time(testdir.monkeypatch)
         result = testdir.runpytest_inprocess("--durations=2")
         assert result.ret == 0

         lines = result.stdout.get_lines_after("*slowest*durations*")
         assert "4 passed" in lines[2]

-    def test_calls_showall(self, testdir):
+    def test_calls_showall(self, testdir, mock_timing):
         testdir.makepyfile(self.source)
-        fake_time(testdir.monkeypatch)
         result = testdir.runpytest_inprocess("--durations=0")
         assert result.ret == 0

@@ -962,9 +943,8 @@ class TestDurations:
         else:
             raise AssertionError("not found {} {}".format(x, y))

-    def test_calls_showall_verbose(self, testdir):
+    def test_calls_showall_verbose(self, testdir, mock_timing):
         testdir.makepyfile(self.source)
-        fake_time(testdir.monkeypatch)
         result = testdir.runpytest_inprocess("--durations=0", "-vv")
         assert result.ret == 0

@@ -976,17 +956,15 @@ class TestDurations:
         else:
             raise AssertionError("not found {} {}".format(x, y))

-    def test_with_deselected(self, testdir):
+    def test_with_deselected(self, testdir, mock_timing):
         testdir.makepyfile(self.source)
-        fake_time(testdir.monkeypatch)
         result = testdir.runpytest_inprocess("--durations=2", "-k test_3")
         assert result.ret == 0

         result.stdout.fnmatch_lines(["*durations*", "*call*test_3*"])

-    def test_with_failing_collection(self, testdir):
+    def test_with_failing_collection(self, testdir, mock_timing):
         testdir.makepyfile(self.source)
-        fake_time(testdir.monkeypatch)
         testdir.makepyfile(test_collecterror="""xyz""")
         result = testdir.runpytest_inprocess("--durations=2", "-k test_1")
         assert result.ret == 2

@@ -996,9 +974,8 @@ class TestDurations:
         # output
         result.stdout.no_fnmatch_line("*duration*")

-    def test_with_not(self, testdir):
+    def test_with_not(self, testdir, mock_timing):
         testdir.makepyfile(self.source)
-        fake_time(testdir.monkeypatch)
         result = testdir.runpytest_inprocess("-k not 1")
         assert result.ret == 0

@@ -1006,27 +983,26 @@ class TestDurations:
 class TestDurationsWithFixture:
     source = """
         import pytest
-        import time
+        from _pytest import timing

         @pytest.fixture
         def setup_fixt():
-            time.sleep(0.02)
+            timing.sleep(2)

         def test_1(setup_fixt):
-            time.sleep(0.02)
+            timing.sleep(5)
     """

-    def test_setup_function(self, testdir):
+    def test_setup_function(self, testdir, mock_timing):
         testdir.makepyfile(self.source)
-        fake_time(testdir.monkeypatch)
         result = testdir.runpytest_inprocess("--durations=10")
         assert result.ret == 0

         result.stdout.fnmatch_lines_random(
             """
             *durations*
-            * setup *test_1*
-            * call *test_1*
+            5.00s call *test_1*
+            2.00s setup *test_1*
             """
         )

testing/conftest.py
@@ -197,3 +197,41 @@ def color_mapping():
         )

     return ColorMapping
+
+
+@pytest.fixture
+def mock_timing(monkeypatch):
+    """Mocks _pytest.timing with a known object that can be used to control timing in tests
+    deterministically.
+
+    pytest itself should always use functions from `_pytest.timing` instead of `time` directly.
+
+    This gives us more control over time during testing, as long as the code under test
+    also uses `_pytest.timing` functions.
+
+    Time is static, and advances only through `sleep` calls: tests can sleep for large amounts
+    of time and still obtain accurate time() values at the end, making them reliable and instant.
+    """
+    import attr
+
+    @attr.s
+    class MockTiming:
+
+        _current_time = attr.ib(default=1590150050.0)
+
+        def sleep(self, seconds):
+            self._current_time += seconds
+
+        def time(self):
+            return self._current_time
+
+        def patch(self):
+            from _pytest import timing
+
+            monkeypatch.setattr(timing, "sleep", self.sleep)
+            monkeypatch.setattr(timing, "time", self.time)
+            monkeypatch.setattr(timing, "perf_counter", self.time)
+
+    result = MockTiming()
+    result.patch()
+    return result
testing/test_junitxml.py
@@ -201,23 +201,23 @@ class TestPython:
         timestamp = datetime.strptime(node["timestamp"], "%Y-%m-%dT%H:%M:%S.%f")
         assert start_time <= timestamp < datetime.now()

-    def test_timing_function(self, testdir, run_and_parse):
+    def test_timing_function(self, testdir, run_and_parse, mock_timing):
         testdir.makepyfile(
             """
-            import time, pytest
+            from _pytest import timing
             def setup_module():
-                time.sleep(0.01)
+                timing.sleep(1)
             def teardown_module():
-                time.sleep(0.01)
+                timing.sleep(2)
             def test_sleep():
-                time.sleep(0.01)
+                timing.sleep(4)
             """
         )
         result, dom = run_and_parse()
         node = dom.find_first_by_tag("testsuite")
         tnode = node.find_first_by_tag("testcase")
         val = tnode["time"]
-        assert round(float(val), 2) >= 0.03
+        assert float(val) == 7.0

     @pytest.mark.parametrize("duration_report", ["call", "total"])
     def test_junit_duration_report(
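A note on the junitxml test above: with the mocked clock, the recorded testcase time is fully deterministic. setup_module sleeps 1s, test_sleep 4s, and teardown_module 2s, so the reported total is exactly 1 + 4 + 2 = 7.0 seconds, which is why the flaky lower-bound assertion could become an exact equality.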