Show session duration in human-readable format (#5721)
commit 300f78556f
@@ -0,0 +1,4 @@
+Time taken to run the test suite now includes a human-readable representation when it takes over
+60 seconds, for example::
+
+    ===== 2 failed in 102.70s (0:01:42) =====
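For context, the parenthesized part of that example line is simply the str() form of a datetime.timedelta built from the whole seconds, which is what the new format_session_duration helper further down in this diff produces. An illustrative check (not part of the change):

    import datetime

    # 102.70 seconds, truncated to whole seconds, renders as 0:01:42.
    print(str(datetime.timedelta(seconds=int(102.70))))  # 0:01:42
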
@@ -340,7 +340,10 @@ def _config_for_test():
     config._ensure_unconfigure()  # cleanup, e.g. capman closing tmpfiles.


-rex_outcome = re.compile(r"(\d+) ([\w-]+)")
+# regex to match the session duration string in the summary: "74.34s"
+rex_session_duration = re.compile(r"\d+\.\d\ds")
+# regex to match all the counts and phrases in the summary line: "34 passed, 111 skipped"
+rex_outcome = re.compile(r"(\d+) (\w+)")


 class RunResult:
@@ -379,14 +382,11 @@ class RunResult:

         """
         for line in reversed(self.outlines):
-            if "seconds" in line:
+            if rex_session_duration.search(line):
                 outcomes = rex_outcome.findall(line)
-                if outcomes:
-                    d = {}
-                    for num, cat in outcomes:
-                        d[cat] = int(num)
-                    return d
-        raise ValueError("Pytest terminal report not found")
+                return {noun: int(count) for (count, noun) in outcomes}
+
+        raise ValueError("Pytest terminal summary report not found")

     def assert_outcomes(
         self, passed=0, skipped=0, failed=0, error=0, xpassed=0, xfailed=0
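To make the parseoutcomes() change above concrete, here is a small self-contained sketch of how the two regexes and the dict comprehension cooperate; the summary line used below is an invented example, not output from a real run:

    import re

    # Regexes as introduced in the hunk above.
    rex_session_duration = re.compile(r"\d+\.\d\ds")
    rex_outcome = re.compile(r"(\d+) (\w+)")

    line = "==== 34 passed, 111 skipped in 74.34s ===="
    assert rex_session_duration.search(line)  # identifies the summary line by its "74.34s" part
    outcomes = rex_outcome.findall(line)      # [('34', 'passed'), ('111', 'skipped')]
    assert {noun: int(count) for (count, noun) in outcomes} == {"passed": 34, "skipped": 111}
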
@@ -4,6 +4,7 @@ This is a good source for looking at the various reporting hooks.
 """
 import argparse
 import collections
+import datetime
 import platform
 import sys
 import time
@@ -861,7 +862,7 @@ class TerminalReporter:
     def summary_stats(self):
         session_duration = time.time() - self._sessionstarttime
         (line, color) = build_summary_stats_line(self.stats)
-        msg = "{} in {:.2f} seconds".format(line, session_duration)
+        msg = "{} in {}".format(line, format_session_duration(session_duration))
         markup = {color: True, "bold": True}

         if self.verbosity >= 0:
@@ -1055,3 +1056,12 @@ def _plugin_nameversions(plugininfo):
         if name not in values:
             values.append(name)
     return values
+
+
+def format_session_duration(seconds):
+    """Format the given seconds in a human readable manner to show in the final summary"""
+    if seconds < 60:
+        return "{:.2f}s".format(seconds)
+    else:
+        dt = datetime.timedelta(seconds=int(seconds))
+        return "{:.2f}s ({})".format(seconds, dt)
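A brief usage sketch of the new helper, imported the same way the new test at the bottom of this diff does. Note that the parenthesized part always rounds down, because int() truncates the fractional seconds before the timedelta is built:

    from _pytest.terminal import format_session_duration

    print(format_session_duration(42.1234))  # 42.12s
    print(format_session_duration(60.55))    # 60.55s (0:01:00) -- truncated, not rounded
    print(format_session_duration(102.70))   # 102.70s (0:01:42)
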
@@ -946,7 +946,7 @@ def test_collection_collect_only_live_logging(testdir, verbose):
         expected_lines.extend(
             [
                 "*test_collection_collect_only_live_logging.py::test_simple*",
-                "no tests ran in * seconds",
+                "no tests ran in 0.[0-9][0-9]s",
             ]
         )
     elif verbose == "-qq":
@@ -278,7 +278,7 @@ def test_assert_outcomes_after_pytest_error(testdir):
     testdir.makepyfile("def test_foo(): assert True")

     result = testdir.runpytest("--unexpected-argument")
-    with pytest.raises(ValueError, match="Pytest terminal report not found"):
+    with pytest.raises(ValueError, match="Pytest terminal summary report not found"):
         result.assert_outcomes(passed=0)

@@ -617,7 +617,7 @@ class TestTerminalFunctional:
                     pluggy.__version__,
                 ),
                 "*test_header_trailer_info.py .*",
-                "=* 1 passed*in *.[0-9][0-9] seconds *=",
+                "=* 1 passed*in *.[0-9][0-9]s *=",
             ]
         )
         if request.config.pluginmanager.list_plugin_distinfo():
@@ -1678,3 +1678,20 @@ def test_line_with_reprcrash(monkeypatch):
     check("😄😄😄😄😄\n2nd line", 41, "FAILED nodeid::😄::withunicode - 😄😄...")
     check("😄😄😄😄😄\n2nd line", 42, "FAILED nodeid::😄::withunicode - 😄😄😄...")
     check("😄😄😄😄😄\n2nd line", 80, "FAILED nodeid::😄::withunicode - 😄😄😄😄😄")
+
+
+@pytest.mark.parametrize(
+    "seconds, expected",
+    [
+        (10.0, "10.00s"),
+        (10.34, "10.34s"),
+        (59.99, "59.99s"),
+        (60.55, "60.55s (0:01:00)"),
+        (123.55, "123.55s (0:02:03)"),
+        (60 * 60 + 0.5, "3600.50s (1:00:00)"),
+    ],
+)
+def test_format_session_duration(seconds, expected):
+    from _pytest.terminal import format_session_duration
+
+    assert format_session_duration(seconds) == expected