Slight tweak to xfail summary, and added tests
parent b247f574a3
commit 0de126848e
@@ -1186,12 +1186,13 @@ class TerminalReporter:
                 markup_word = self._tw.markup(
                     verbose_word, **{_color_for_type["warnings"]: True}
                 )
                 nodeid = _get_node_id_with_markup(self._tw, self.config, rep)
-                line = f"{markup_word} {nodeid}"
+                color = _color_for_type.get("xfailed", _color_for_type_default)
+                line = _get_line_with_reprcrash_message(
+                    self.config, rep, self._tw, {color: True}
+                )
                 reason = rep.wasxfail
                 if reason:
                     line += " - " + str(reason)
-
                 lines.append(line)
-
         def show_xpassed(lines: List[str]) -> None:
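For orientation (an illustration, not part of the diff): the xfailed summary line is now built from the "node id - crash message" line returned by _get_line_with_reprcrash_message, with the xfail reason appended when one was given. A minimal standalone sketch of that composition, using made-up node id, crash message and reason values:

# Illustrative sketch only; mirrors the line composition in show_xfailed above.
verbose_word = "XFAIL"
# Stands in for the _get_line_with_reprcrash_message() result; hypothetical node id and message.
line = f"{verbose_word} test_mod.py::test_xfail - assert False"
reason = "foo"  # rep.wasxfail; empty when the xfail marker had no reason
if reason:
    line += " - " + str(reason)
print(line)  # XFAIL test_mod.py::test_xfail - assert False - foo

This is the format the new test_summary_xfail_reason test below expects from a -rx run.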
@@ -1202,8 +1203,11 @@ class TerminalReporter:
                     verbose_word, **{_color_for_type["warnings"]: True}
                 )
                 nodeid = _get_node_id_with_markup(self._tw, self.config, rep)
+                line = f"{markup_word} {nodeid}"
                 reason = rep.wasxfail
-                lines.append(f"{markup_word} {nodeid} {reason}")
+                if reason:
+                    line += " - " + str(reason)
+                lines.append(line)
 
         def show_skipped(lines: List[str]) -> None:
             skipped: List[CollectReport] = self.stats.get("skipped", [])
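Similarly, an illustrative sketch (not from the diff) of the xpassed summary line built above: the plain "XPASS <node id>" line only gains a " - <reason>" suffix when the xfail marker carried a reason, which is what test_summary_xpass_reason below checks for a -rX run.

# Illustrative sketch only; mirrors the line composition in show_xpassed above.
markup_word = "XPASS"
nodeid = "test_mod.py::test_reason"  # hypothetical node id
reason = "foo"  # rep.wasxfail; "" when @pytest.mark.xfail had no reason argument
line = f"{markup_word} {nodeid}"
if reason:
    line += " - " + str(reason)
print(line)  # XPASS test_mod.py::test_reason - foo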
@@ -2614,3 +2614,117 @@ def test_format_trimmed() -> None:
 
     assert _format_trimmed(" ({}) ", msg, len(msg) + 4) == " (unconditional skip) "
     assert _format_trimmed(" ({}) ", msg, len(msg) + 3) == " (unconditional ...) "
+
+
+def test_summary_xfail_reason(pytester: Pytester) -> None:
+    pytester.makepyfile(
+        """
+        import pytest
+
+        @pytest.mark.xfail
+        def test_xfail():
+            assert False
+
+        @pytest.mark.xfail(reason="foo")
+        def test_xfail_reason():
+            assert False
+        """
+    )
+    result = pytester.runpytest("-rx")
+    expect1 = "XFAIL test_summary_xfail_reason.py::test_xfail - assert False"
+    expect2 = "XFAIL test_summary_xfail_reason.py::test_xfail_reason - assert False - foo"
+    result.stdout.fnmatch_lines([expect1, expect2])
+    assert result.stdout.lines.count(expect1) == 1
+    assert result.stdout.lines.count(expect2) == 1
+
+
+def test_summary_xfail_tb(pytester: Pytester) -> None:
+    pytester.makepyfile(
+        """
+        import pytest
+
+        @pytest.mark.xfail
+        def test_xfail():
+            a, b = 1, 2
+            assert a == b
+        """
+    )
+    result = pytester.runpytest("-rx")
+    result.stdout.fnmatch_lines([
+        "*= XFAILURES =*",
+        "*_ test_xfail _*",
+        "* @pytest.mark.xfail*",
+        "* def test_xfail():*",
+        "* a, b = 1, 2*",
+        "> *assert a == b*",
+        "E *assert 1 == 2*",
+        "test_summary_xfail_tb.py:6: AssertionError*",
+        "*= short test summary info =*",
+        "XFAIL test_summary_xfail_tb.py::test_xfail - assert 1 == 2",
+        "*= 1 xfailed in * =*"
+    ])
+
+
+def test_xfail_tb_line(pytester: Pytester) -> None:
+    pytester.makepyfile(
+        """
+        import pytest
+
+        @pytest.mark.xfail
+        def test_xfail():
+            a, b = 1, 2
+            assert a == b
+        """
+    )
+    result = pytester.runpytest("-rx", "--tb=line")
+    result.stdout.fnmatch_lines([
+        "*= XFAILURES =*",
+        "*test_xfail_tb_line.py:6: assert 1 == 2",
+        "*= short test summary info =*",
+        "XFAIL test_xfail_tb_line.py::test_xfail - assert 1 == 2",
+        "*= 1 xfailed in * =*"
+    ])
+
+
+def test_summary_xpass_reason(pytester: Pytester) -> None:
+    pytester.makepyfile(
+        """
+        import pytest
+
+        @pytest.mark.xfail
+        def test_pass():
+            ...
+
+        @pytest.mark.xfail(reason="foo")
+        def test_reason():
+            ...
+        """
+    )
+    result = pytester.runpytest("-rX")
+    expect1 = "XPASS test_summary_xpass_reason.py::test_pass"
+    expect2 = "XPASS test_summary_xpass_reason.py::test_reason - foo"
+    result.stdout.fnmatch_lines([expect1, expect2])
+    assert result.stdout.lines.count(expect1) == 1
+    assert result.stdout.lines.count(expect2) == 1
+
+
+def test_xpass_output(pytester: Pytester) -> None:
+    pytester.makepyfile(
+        """
+        import pytest
+
+        @pytest.mark.xfail
+        def test_pass():
+            print('hi there')
+        """
+    )
+    result = pytester.runpytest("-rX")
+    result.stdout.fnmatch_lines([
+        "*= XPASSES =*",
+        "*_ test_pass _*",
+        "*- Captured stdout call -*",
+        "*= short test summary info =*",
+        "XPASS test_xpass_output.py::test_pass",
+        "*= 1 xpassed in * =*"
+    ])
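To see the tweaked sections and summary lines outside of the test suite, a small example module can be run with the report flags the new tests use (-rx/-rX, optionally --tb=line); the file name and test bodies below are made up for illustration:

# example_xfail_summary.py -- hypothetical demo file, not part of the diff.
# Run with:  pytest -rxX example_xfail_summary.py
import pytest


@pytest.mark.xfail(reason="demo reason")
def test_expected_failure():
    assert False


@pytest.mark.xfail
def test_unexpected_pass():
    pass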