Compare commits

4 Commits: main...report_xfa

| Author | SHA1 | Date |
|---|---|---|
| | 0de126848e | |
| | b247f574a3 | |
| | 0cd183dc0a | |
| | 1ce234dc78 | |

@@ -878,8 +878,10 @@ class TerminalReporter:
     def pytest_terminal_summary(self) -> Generator[None, None, None]:
         self.summary_errors()
         self.summary_failures()
+        self.summary_xfailures()
         self.summary_warnings()
         self.summary_passes()
+        self.summary_xpasses()
         try:
             return (yield)
         finally:
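Note: the two added calls run before the `yield`, so the new XFAILURES and XPASSES sections print ahead of whatever the wrapped hooks emit. For context, a minimal standalone sketch of the generator-wrapper pattern this hook uses (hypothetical names, not pytest's actual plumbing):

```python
from typing import Generator

def terminal_summary_wrapper() -> Generator[None, None, None]:
    # Work before the yield runs first (e.g. the new XFAILURES/XPASSES sections).
    print("failure/xfail/xpass sections")
    try:
        yield  # wrapped hooks run while this generator is suspended here
    finally:
        # The finally block runs last, after the wrapped hooks complete.
        print("final summary stats")

# Driving the generator the way a hook caller would:
gen = terminal_summary_wrapper()
next(gen)      # advance to the yield
try:
    next(gen)  # resume; the generator finishes and raises StopIteration
except StopIteration:
    pass
```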
@@ -1022,6 +1024,20 @@ class TerminalReporter:
                         self._outrep_summary(rep)
                     self._handle_teardown_sections(rep.nodeid)

+    def summary_xpasses(self) -> None:
+        if self.config.option.tbstyle != "no":
+            if self.hasopt("X"):
+                reports: List[TestReport] = self.getreports("xpassed")
+                if not reports:
+                    return
+                self.write_sep("=", "XPASSES")
+                for rep in reports:
+                    if rep.sections:
+                        msg = self._getfailureheadline(rep)
+                        self.write_sep("_", msg, green=True, bold=True)
+                        self._outrep_summary(rep)
+                    self._handle_teardown_sections(rep.nodeid)
+
     def _get_teardown_reports(self, nodeid: str) -> List[TestReport]:
         reports = self.getreports("")
         return [
@@ -1064,6 +1080,24 @@ class TerminalReporter:
                         self._outrep_summary(rep)
                         self._handle_teardown_sections(rep.nodeid)

+    def summary_xfailures(self) -> None:
+        if self.config.option.tbstyle != "no":
+            if self.hasopt("x"):
+                reports: List[BaseReport] = self.getreports("xfailed")
+                if not reports:
+                    return
+                self.write_sep("=", "XFAILURES")
+                if self.config.option.tbstyle == "line":
+                    for rep in reports:
+                        line = self._getcrashline(rep)
+                        self.write_line(line)
+                else:
+                    for rep in reports:
+                        msg = self._getfailureheadline(rep)
+                        self.write_sep("_", msg, red=True, bold=True)
+                        self._outrep_summary(rep)
+                        self._handle_teardown_sections(rep.nodeid)
+
     def summary_errors(self) -> None:
         if self.config.option.tbstyle != "no":
             reports: List[BaseReport] = self.getreports("error")
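Note: `summary_xfailures` mirrors the existing `summary_failures`, but is gated on the `x` report character (`-rx`) via `self.hasopt("x")`, while `summary_xpasses` above is gated on `X` (`-rX`). A standalone sketch of that gating (hypothetical helper, for illustration only):

```python
from typing import List

def should_show_section(tbstyle: str, reportchars: str, wanted: str, reports: List[str]) -> bool:
    # A section is emitted only when tracebacks are enabled (--tb != no),
    # the matching report character was requested, and reports exist.
    return tbstyle != "no" and wanted in reportchars and bool(reports)

assert should_show_section("auto", "x", "x", ["test_a"]) is True   # pytest -rx
assert should_show_section("no", "x", "x", ["test_a"]) is False    # --tb=no suppresses it
assert should_show_section("auto", "X", "x", ["test_a"]) is False  # -rX selects xpasses, not xfails
```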
@@ -1152,12 +1186,13 @@ class TerminalReporter:
                 markup_word = self._tw.markup(
                     verbose_word, **{_color_for_type["warnings"]: True}
                 )
-                nodeid = _get_node_id_with_markup(self._tw, self.config, rep)
-                line = f"{markup_word} {nodeid}"
+                color = _color_for_type.get("xfailed", _color_for_type_default)
+                line = _get_line_with_reprcrash_message(
+                    self.config, rep, self._tw, {color: True}
+                )
                 reason = rep.wasxfail
                 if reason:
                     line += " - " + str(reason)
-
                 lines.append(line)

         def show_xpassed(lines: List[str]) -> None:
@@ -1168,8 +1203,11 @@ class TerminalReporter:
                     verbose_word, **{_color_for_type["warnings"]: True}
                 )
                 nodeid = _get_node_id_with_markup(self._tw, self.config, rep)
+                line = f"{markup_word} {nodeid}"
                 reason = rep.wasxfail
-                lines.append(f"{markup_word} {nodeid} {reason}")
+                if reason:
+                    line += " - " + str(reason)
+                lines.append(line)

         def show_skipped(lines: List[str]) -> None:
             skipped: List[CollectReport] = self.stats.get("skipped", [])
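Note: the `show_xpassed` change builds the short-summary line the same way `show_xfailed` does, appending the xfail reason only when one was actually given instead of always interpolating it. The same logic as a standalone sketch (hypothetical function, grounded in the diff above):

```python
def build_xpass_line(markup_word: str, nodeid: str, reason: str) -> str:
    # Build "XPASS <nodeid>" and append " - <reason>" only when a reason exists.
    line = f"{markup_word} {nodeid}"
    if reason:
        line += " - " + str(reason)
    return line

assert build_xpass_line("XPASS", "test_a.py::test_x", "") == "XPASS test_a.py::test_x"
assert build_xpass_line("XPASS", "test_a.py::test_x", "foo") == "XPASS test_a.py::test_x - foo"
```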
@@ -0,0 +1,21 @@
+import pytest
+
+def test_pass():
+    ...
+
+
+def test_fail():
+    a,b = 1,2
+    assert a == b
+
+@pytest.mark.xfail
+def test_xfail():
+    a,b = 1,2
+    assert a == b
+
+
+@pytest.mark.xfail
+def test_xpass():
+    a,b = 1,1
+    assert a == b
+
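Note: this new file looks like a local scratch file covering one test per summary bucket — test_pass passes, test_fail fails, test_xfail is an expected failure (xfailed), and test_xpass is marked xfail but passes (xpassed) — so a run like `pytest -rxX` on it should exercise both new sections (XFAILURES and XPASSES) alongside the existing FAILURES section.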
@@ -2614,3 +2614,117 @@ def test_format_trimmed() -> None:
     assert _format_trimmed(" ({}) ", msg, len(msg) + 4) == " (unconditional skip) "
     assert _format_trimmed(" ({}) ", msg, len(msg) + 3) == " (unconditional ...) "
+
+
+def test_summary_xfail_reason(pytester: Pytester) -> None:
+    pytester.makepyfile(
+        """
+        import pytest
+
+        @pytest.mark.xfail
+        def test_xfail():
+            assert False
+
+        @pytest.mark.xfail(reason="foo")
+        def test_xfail_reason():
+            assert False
+        """
+    )
+    result = pytester.runpytest("-rx")
+    expect1 = "XFAIL test_summary_xfail_reason.py::test_xfail - assert False"
+    expect2 = "XFAIL test_summary_xfail_reason.py::test_xfail_reason - assert False - foo"
+    result.stdout.fnmatch_lines([expect1, expect2])
+    assert result.stdout.lines.count(expect1) == 1
+    assert result.stdout.lines.count(expect2) == 1
+
+
+def test_summary_xfail_tb(pytester: Pytester) -> None:
+    pytester.makepyfile(
+        """
+        import pytest
+
+        @pytest.mark.xfail
+        def test_xfail():
+            a, b = 1, 2
+            assert a == b
+        """
+    )
+    result = pytester.runpytest("-rx")
+    result.stdout.fnmatch_lines(
+        [
+            "*= XFAILURES =*",
+            "*_ test_xfail _*",
+            "* @pytest.mark.xfail*",
+            "* def test_xfail():*",
+            "* a, b = 1, 2*",
+            "> *assert a == b*",
+            "E *assert 1 == 2*",
+            "test_summary_xfail_tb.py:6: AssertionError*",
+            "*= short test summary info =*",
+            "XFAIL test_summary_xfail_tb.py::test_xfail - assert 1 == 2",
+            "*= 1 xfailed in * =*",
+        ]
+    )
+
+
+def test_xfail_tb_line(pytester: Pytester) -> None:
+    pytester.makepyfile(
+        """
+        import pytest
+
+        @pytest.mark.xfail
+        def test_xfail():
+            a, b = 1, 2
+            assert a == b
+        """
+    )
+    result = pytester.runpytest("-rx", "--tb=line")
+    result.stdout.fnmatch_lines(
+        [
+            "*= XFAILURES =*",
+            "*test_xfail_tb_line.py:6: assert 1 == 2",
+            "*= short test summary info =*",
+            "XFAIL test_xfail_tb_line.py::test_xfail - assert 1 == 2",
+            "*= 1 xfailed in * =*",
+        ]
+    )
+
+
+def test_summary_xpass_reason(pytester: Pytester) -> None:
+    pytester.makepyfile(
+        """
+        import pytest
+
+        @pytest.mark.xfail
+        def test_pass():
+            ...
+
+        @pytest.mark.xfail(reason="foo")
+        def test_reason():
+            ...
+        """
+    )
+    result = pytester.runpytest("-rX")
+    expect1 = "XPASS test_summary_xpass_reason.py::test_pass"
+    expect2 = "XPASS test_summary_xpass_reason.py::test_reason - foo"
+    result.stdout.fnmatch_lines([expect1, expect2])
+    assert result.stdout.lines.count(expect1) == 1
+    assert result.stdout.lines.count(expect2) == 1
+
+
+def test_xpass_output(pytester: Pytester) -> None:
+    pytester.makepyfile(
+        """
+        import pytest
+
+        @pytest.mark.xfail
+        def test_pass():
+            print('hi there')
+        """
+    )
+    result = pytester.runpytest("-rX")
+    result.stdout.fnmatch_lines(
+        [
+            "*= XPASSES =*",
+            "*_ test_pass _*",
+            "*- Captured stdout call -*",
+            "*= short test summary info =*",
+            "XPASS test_xpass_output.py::test_pass",
+            "*= 1 xpassed in * =*",
+        ]
+    )
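Note: these tests use the `pytester` fixture, which is already set up inside pytest's own test suite; in an external project it has to be registered first (real pytest API):

```python
# conftest.py -- enable the pytester fixture for the test suite
pytest_plugins = "pytester"
```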
@@ -0,0 +1,6 @@
+home = /Library/Frameworks/Python.framework/Versions/3.11/bin
+include-system-site-packages = false
+version = 3.11.5
+prompt = 'pytest'
+executable = /Library/Frameworks/Python.framework/Versions/3.11/bin/python3.11
+command = /Library/Frameworks/Python.framework/Versions/3.11/bin/python3 -m venv --prompt="." /Users/okken/projects/pytest/venv
@@ -0,0 +1,3 @@
+[tool.pytest.ini_options]
+addopts = "-ra --strict-markers"
+#xfail_strict = true
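Note: if the commented-out `xfail_strict` above were enabled, a test marked `@pytest.mark.xfail` that unexpectedly passes would be reported as a failure rather than XPASS, which changes what the new XPASSES section has to show. The same behavior can be requested per test (real pytest API):

```python
import pytest

@pytest.mark.xfail(strict=True)
def test_strict():
    assert True  # passes, so with strict=True pytest reports it as FAILED [XPASS(strict)]
```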
@@ -0,0 +1,30 @@
+import pytest
+
+def test_pass():
+    print('in test_pass')
+
+
+def test_fail():
+    print('in test_fail')
+    a,b = 1,2
+    assert a == b
+
+@pytest.mark.xfail
+def test_xfail():
+    print('in test_xfail')
+    a,b = 1,2
+    assert a == b
+
+@pytest.mark.xfail(reason="reason 1")
+def test_xfail_reason():
+    print('in test_xfail')
+    a,b = 1,2
+    assert a == b
+
+
+@pytest.mark.xfail(reason="reason 2")
+def test_xpass():
+    print('in test_xpass')
+    a,b = 1,1
+    assert a == b
+
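Note: assuming this second scratch file is saved as, say, demo.py (name hypothetical), a run like `pytest demo.py -rxX` should produce XFAIL short-summary lines carrying both the crash message and the reason (e.g. ending in `- assert 1 == 2 - reason 1`) and an XPASS line ending in `- reason 2`, matching the expectations encoded in the tests added above.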