Merge pull request #7242 from nicoddemus/fix-flaky-test
Introduce _pytest.timing as a way to control timing during internal tests Conflicts: src/_pytest/pytester.py src/_pytest/runner.py src/_pytest/terminal.py testing/acceptance_test.py
This commit is contained in:
parent
48f787ecff
commit
03bf27f4df
|
@ -13,7 +13,6 @@ import os
|
|||
import platform
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
from datetime import datetime
|
||||
|
||||
import py
|
||||
|
@ -21,6 +20,7 @@ import py
|
|||
import pytest
|
||||
from _pytest import deprecated
|
||||
from _pytest import nodes
|
||||
from _pytest import timing
|
||||
from _pytest.config import filename_arg
|
||||
from _pytest.store import StoreKey
|
||||
from _pytest.warnings import _issue_warning_captured
|
||||
|
@ -627,14 +627,14 @@ class LogXML:
|
|||
reporter._add_simple(Junit.error, "internal error", excrepr)
|
||||
|
||||
def pytest_sessionstart(self):
|
||||
self.suite_start_time = time.time()
|
||||
self.suite_start_time = timing.time()
|
||||
|
||||
def pytest_sessionfinish(self):
|
||||
dirname = os.path.dirname(os.path.abspath(self.logfile))
|
||||
if not os.path.isdir(dirname):
|
||||
os.makedirs(dirname)
|
||||
logfile = open(self.logfile, "w", encoding="utf-8")
|
||||
suite_stop_time = time.time()
|
||||
suite_stop_time = timing.time()
|
||||
suite_time_delta = suite_stop_time - self.suite_start_time
|
||||
|
||||
numtests = (
|
||||
|
@ -662,7 +662,7 @@ class LogXML:
|
|||
logfile.close()
|
||||
|
||||
def pytest_terminal_summary(self, terminalreporter):
|
||||
terminalreporter.write_sep("-", "generated xml file: %s" % (self.logfile))
|
||||
terminalreporter.write_sep("-", "generated xml file: {}".format(self.logfile))
|
||||
|
||||
def add_global_property(self, name, value):
|
||||
__tracebackhide__ = True
|
||||
|
|
|
@ -7,7 +7,6 @@ import platform
|
|||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
import traceback
|
||||
from fnmatch import fnmatch
|
||||
from io import StringIO
|
||||
|
@ -24,6 +23,7 @@ from weakref import WeakKeyDictionary
|
|||
import py
|
||||
|
||||
import pytest
|
||||
from _pytest import timing
|
||||
from _pytest._code import Source
|
||||
from _pytest.capture import MultiCapture
|
||||
from _pytest.capture import SysCapture
|
||||
|
@ -924,7 +924,7 @@ class Testdir:
|
|||
|
||||
if syspathinsert:
|
||||
self.syspathinsert()
|
||||
now = time.time()
|
||||
now = timing.time()
|
||||
capture = MultiCapture(Capture=SysCapture)
|
||||
capture.start_capturing()
|
||||
try:
|
||||
|
@ -953,7 +953,7 @@ class Testdir:
|
|||
sys.stderr.write(err)
|
||||
|
||||
res = RunResult(
|
||||
reprec.ret, out.splitlines(), err.splitlines(), time.time() - now
|
||||
reprec.ret, out.splitlines(), err.splitlines(), timing.time() - now
|
||||
)
|
||||
res.reprec = reprec # type: ignore
|
||||
return res
|
||||
|
@ -1154,7 +1154,7 @@ class Testdir:
|
|||
f1 = open(str(p1), "w", encoding="utf8")
|
||||
f2 = open(str(p2), "w", encoding="utf8")
|
||||
try:
|
||||
now = time.time()
|
||||
now = timing.time()
|
||||
popen = self.popen(
|
||||
cmdargs,
|
||||
stdin=stdin,
|
||||
|
@ -1201,7 +1201,7 @@ class Testdir:
|
|||
ret = ExitCode(ret)
|
||||
except ValueError:
|
||||
pass
|
||||
return RunResult(ret, out, err, time.time() - now)
|
||||
return RunResult(ret, out, err, timing.time() - now)
|
||||
|
||||
def _dump_lines(self, lines, fp):
|
||||
try:
|
||||
|
|
|
@ -2,7 +2,6 @@
|
|||
import bdb
|
||||
import os
|
||||
import sys
|
||||
from time import time
|
||||
from typing import Callable
|
||||
from typing import Dict
|
||||
from typing import List
|
||||
|
@ -14,6 +13,7 @@ import attr
|
|||
from .reports import CollectErrorRepr
|
||||
from .reports import CollectReport
|
||||
from .reports import TestReport
|
||||
from _pytest import timing
|
||||
from _pytest._code.code import ExceptionChainRepr
|
||||
from _pytest._code.code import ExceptionInfo
|
||||
from _pytest.compat import TYPE_CHECKING
|
||||
|
@ -238,8 +238,8 @@ class CallInfo:
|
|||
def from_call(cls, func, when, reraise=None) -> "CallInfo":
|
||||
#: context of invocation: one of "setup", "call",
|
||||
#: "teardown", "memocollect"
|
||||
start = time()
|
||||
excinfo = None
|
||||
start = timing.time()
|
||||
try:
|
||||
result = func()
|
||||
except: # noqa
|
||||
|
@ -247,7 +247,7 @@ class CallInfo:
|
|||
if reraise is not None and excinfo.errisinstance(reraise):
|
||||
raise
|
||||
result = None
|
||||
stop = time()
|
||||
stop = timing.time()
|
||||
return cls(start=start, stop=stop, when=when, result=result, excinfo=excinfo)
|
||||
|
||||
def __repr__(self):
|
||||
|
|
|
@ -7,7 +7,6 @@ import collections
|
|||
import datetime
|
||||
import platform
|
||||
import sys
|
||||
import time
|
||||
import warnings
|
||||
from functools import partial
|
||||
from typing import Any
|
||||
|
@ -26,6 +25,7 @@ from more_itertools import collapse
|
|||
|
||||
import pytest
|
||||
from _pytest import nodes
|
||||
from _pytest import timing
|
||||
from _pytest._io import TerminalWriter
|
||||
from _pytest.config import Config
|
||||
from _pytest.config import ExitCode
|
||||
|
@ -554,7 +554,7 @@ class TerminalReporter:
|
|||
if self.isatty:
|
||||
if self.config.option.verbose >= 0:
|
||||
self.write("collecting ... ", bold=True)
|
||||
self._collect_report_last_write = time.time()
|
||||
self._collect_report_last_write = timing.time()
|
||||
elif self.config.option.verbose >= 1:
|
||||
self.write("collecting ... ", bold=True)
|
||||
|
||||
|
@ -574,7 +574,7 @@ class TerminalReporter:
|
|||
|
||||
if not final:
|
||||
# Only write "collecting" report every 0.5s.
|
||||
t = time.time()
|
||||
t = timing.time()
|
||||
if (
|
||||
self._collect_report_last_write is not None
|
||||
and self._collect_report_last_write > t - REPORT_COLLECTING_RESOLUTION
|
||||
|
@ -611,7 +611,7 @@ class TerminalReporter:
|
|||
@pytest.hookimpl(trylast=True)
|
||||
def pytest_sessionstart(self, session: Session) -> None:
|
||||
self._session = session
|
||||
self._sessionstarttime = time.time()
|
||||
self._sessionstarttime = timing.time()
|
||||
if not self.showheader:
|
||||
return
|
||||
self.write_sep("=", "test session starts", bold=True)
|
||||
|
@ -960,7 +960,7 @@ class TerminalReporter:
|
|||
if self.verbosity < -1:
|
||||
return
|
||||
|
||||
session_duration = time.time() - self._sessionstarttime
|
||||
session_duration = timing.time() - self._sessionstarttime
|
||||
(parts, main_color) = self.build_summary_stats_line()
|
||||
line_parts = []
|
||||
|
||||
|
|
|
@ -0,0 +1,13 @@
|
|||
"""
|
||||
Indirection for time functions.
|
||||
|
||||
We intentionally grab some "time" functions internally to avoid tests mocking "time" to affect
|
||||
pytest runtime information (issue #185).
|
||||
|
||||
Fixture "mock_timing" also interacts with this module for pytest's own tests.
|
||||
"""
|
||||
from time import perf_counter
|
||||
from time import sleep
|
||||
from time import time
|
||||
|
||||
__all__ = ["perf_counter", "sleep", "time"]
|
|
@ -895,21 +895,20 @@ class TestInvocationVariants:
|
|||
|
||||
class TestDurations:
|
||||
source = """
|
||||
import time
|
||||
frag = 0.002
|
||||
from _pytest import timing
|
||||
def test_something():
|
||||
pass
|
||||
def test_2():
|
||||
time.sleep(frag*5)
|
||||
timing.sleep(0.010)
|
||||
def test_1():
|
||||
time.sleep(frag)
|
||||
timing.sleep(0.002)
|
||||
def test_3():
|
||||
time.sleep(frag*10)
|
||||
timing.sleep(0.020)
|
||||
"""
|
||||
|
||||
def test_calls(self, testdir):
|
||||
def test_calls(self, testdir, mock_timing):
|
||||
testdir.makepyfile(self.source)
|
||||
result = testdir.runpytest("--durations=10")
|
||||
result = testdir.runpytest_inprocess("--durations=10")
|
||||
assert result.ret == 0
|
||||
result.stdout.fnmatch_lines_random(
|
||||
["*durations*", "*call*test_3*", "*call*test_2*"]
|
||||
|
@ -918,16 +917,17 @@ class TestDurations:
|
|||
["(0.00 durations hidden. Use -vv to show these durations.)"]
|
||||
)
|
||||
|
||||
def test_calls_show_2(self, testdir):
|
||||
def test_calls_show_2(self, testdir, mock_timing):
|
||||
|
||||
testdir.makepyfile(self.source)
|
||||
result = testdir.runpytest("--durations=2")
|
||||
result = testdir.runpytest_inprocess("--durations=2")
|
||||
assert result.ret == 0
|
||||
lines = result.stdout.get_lines_after("*slowest*durations*")
|
||||
assert "4 passed" in lines[2]
|
||||
|
||||
def test_calls_showall(self, testdir):
|
||||
def test_calls_showall(self, testdir, mock_timing):
|
||||
testdir.makepyfile(self.source)
|
||||
result = testdir.runpytest("--durations=0")
|
||||
result = testdir.runpytest_inprocess("--durations=0")
|
||||
assert result.ret == 0
|
||||
for x in "23":
|
||||
for y in ("call",): # 'setup', 'call', 'teardown':
|
||||
|
@ -937,9 +937,9 @@ class TestDurations:
|
|||
else:
|
||||
raise AssertionError("not found {} {}".format(x, y))
|
||||
|
||||
def test_calls_showall_verbose(self, testdir):
|
||||
def test_calls_showall_verbose(self, testdir, mock_timing):
|
||||
testdir.makepyfile(self.source)
|
||||
result = testdir.runpytest("--durations=0", "-vv")
|
||||
result = testdir.runpytest_inprocess("--durations=0", "-vv")
|
||||
assert result.ret == 0
|
||||
for x in "123":
|
||||
for y in ("call",): # 'setup', 'call', 'teardown':
|
||||
|
@ -949,13 +949,13 @@ class TestDurations:
|
|||
else:
|
||||
raise AssertionError("not found {} {}".format(x, y))
|
||||
|
||||
def test_with_deselected(self, testdir):
|
||||
def test_with_deselected(self, testdir, mock_timing):
|
||||
testdir.makepyfile(self.source)
|
||||
result = testdir.runpytest("--durations=2", "-k test_2")
|
||||
result = testdir.runpytest_inprocess("--durations=2", "-k test_2")
|
||||
assert result.ret == 0
|
||||
result.stdout.fnmatch_lines(["*durations*", "*call*test_2*"])
|
||||
|
||||
def test_with_failing_collection(self, testdir):
|
||||
def test_with_failing_collection(self, testdir, mock_timing):
|
||||
testdir.makepyfile(self.source)
|
||||
testdir.makepyfile(test_collecterror="""xyz""")
|
||||
result = testdir.runpytest("--durations=2", "-k test_1")
|
||||
|
@ -965,36 +965,35 @@ class TestDurations:
|
|||
# output
|
||||
result.stdout.no_fnmatch_line("*duration*")
|
||||
|
||||
def test_with_not(self, testdir):
|
||||
def test_with_not(self, testdir, mock_timing):
|
||||
testdir.makepyfile(self.source)
|
||||
result = testdir.runpytest("-k not 1")
|
||||
result = testdir.runpytest_inprocess("-k not 1")
|
||||
assert result.ret == 0
|
||||
|
||||
|
||||
class TestDurationWithFixture:
|
||||
source = """
|
||||
import pytest
|
||||
import time
|
||||
frag = 0.01
|
||||
from _pytest import timing
|
||||
|
||||
@pytest.fixture
|
||||
def setup_fixt():
|
||||
time.sleep(frag)
|
||||
timing.sleep(2)
|
||||
|
||||
def test_1(setup_fixt):
|
||||
time.sleep(frag)
|
||||
timing.sleep(5)
|
||||
"""
|
||||
|
||||
def test_setup_function(self, testdir):
|
||||
def test_setup_function(self, testdir, mock_timing):
|
||||
testdir.makepyfile(self.source)
|
||||
result = testdir.runpytest("--durations=10")
|
||||
result = testdir.runpytest_inprocess("--durations=10")
|
||||
assert result.ret == 0
|
||||
|
||||
result.stdout.fnmatch_lines_random(
|
||||
"""
|
||||
*durations*
|
||||
* setup *test_1*
|
||||
* call *test_1*
|
||||
5.00s call *test_1*
|
||||
2.00s setup *test_1*
|
||||
"""
|
||||
)
|
||||
|
||||
|
|
|
@ -195,3 +195,41 @@ def color_mapping():
|
|||
pytest.skip("doing limited testing because lacking ordered markup")
|
||||
|
||||
return ColorMapping
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_timing(monkeypatch):
|
||||
"""Mocks _pytest.timing with a known object that can be used to control timing in tests
|
||||
deterministically.
|
||||
|
||||
pytest itself should always use functions from `_pytest.timing` instead of `time` directly.
|
||||
|
||||
This then allows us more control over time during testing, if testing code also
|
||||
uses `_pytest.timing` functions.
|
||||
|
||||
Time is static, and only advances through `sleep` calls, thus tests might sleep over large
|
||||
numbers and obtain accurate time() calls at the end, making tests reliable and instant.
|
||||
"""
|
||||
import attr
|
||||
|
||||
@attr.s
|
||||
class MockTiming:
|
||||
|
||||
_current_time = attr.ib(default=1590150050.0)
|
||||
|
||||
def sleep(self, seconds):
|
||||
self._current_time += seconds
|
||||
|
||||
def time(self):
|
||||
return self._current_time
|
||||
|
||||
def patch(self):
|
||||
from _pytest import timing
|
||||
|
||||
monkeypatch.setattr(timing, "sleep", self.sleep)
|
||||
monkeypatch.setattr(timing, "time", self.time)
|
||||
monkeypatch.setattr(timing, "perf_counter", self.time)
|
||||
|
||||
result = MockTiming()
|
||||
result.patch()
|
||||
return result
|
||||
|
|
|
@ -200,23 +200,23 @@ class TestPython:
|
|||
timestamp = datetime.strptime(node["timestamp"], "%Y-%m-%dT%H:%M:%S.%f")
|
||||
assert start_time <= timestamp < datetime.now()
|
||||
|
||||
def test_timing_function(self, testdir, run_and_parse):
|
||||
def test_timing_function(self, testdir, run_and_parse, mock_timing):
|
||||
testdir.makepyfile(
|
||||
"""
|
||||
import time, pytest
|
||||
from _pytest import timing
|
||||
def setup_module():
|
||||
time.sleep(0.01)
|
||||
timing.sleep(1)
|
||||
def teardown_module():
|
||||
time.sleep(0.01)
|
||||
timing.sleep(2)
|
||||
def test_sleep():
|
||||
time.sleep(0.01)
|
||||
timing.sleep(4)
|
||||
"""
|
||||
)
|
||||
result, dom = run_and_parse()
|
||||
node = dom.find_first_by_tag("testsuite")
|
||||
tnode = node.find_first_by_tag("testcase")
|
||||
val = tnode["time"]
|
||||
assert round(float(val), 2) >= 0.03
|
||||
assert float(val) == 7.0
|
||||
|
||||
@pytest.mark.parametrize("duration_report", ["call", "total"])
|
||||
def test_junit_duration_report(
|
||||
|
|
Loading…
Reference in New Issue