From 5d726faad07b7878852e36fd196584b03cb2cfda Mon Sep 17 00:00:00 2001
From: Brian Okken <1568356+okken@users.noreply.github.com>
Date: Mon, 25 Dec 2023 16:15:22 -0800
Subject: [PATCH] remove assert outcome from xfail summary, as it breaks
 [NOTRUN] functionality

---
 src/_pytest/terminal.py  | 8 +++++---
 testing/test_skipping.py | 2 +-
 testing/test_terminal.py | 10 +++++-----
 3 files changed, 11 insertions(+), 9 deletions(-)

diff --git a/src/_pytest/terminal.py b/src/_pytest/terminal.py
index df90111da..3ffcdfd91 100644
--- a/src/_pytest/terminal.py
+++ b/src/_pytest/terminal.py
@@ -1182,10 +1182,12 @@ class TerminalReporter:
         def show_xfailed(lines: List[str]) -> None:
             xfailed = self.stats.get("xfailed", [])
             for rep in xfailed:
-                color = _color_for_type.get("xfailed", _color_for_type_default)
-                line = _get_line_with_reprcrash_message(
-                    self.config, rep, self._tw, {color: True}
+                verbose_word = rep._get_verbose_word(self.config)
+                markup_word = self._tw.markup(
+                    verbose_word, **{_color_for_type["warnings"]: True}
                 )
+                nodeid = _get_node_id_with_markup(self._tw, self.config, rep)
+                line = f"{markup_word} {nodeid}"
                 reason = rep.wasxfail
                 if reason:
                     line += " - " + str(reason)
diff --git a/testing/test_skipping.py b/testing/test_skipping.py
index b7e448df3..b2ad4b0cf 100644
--- a/testing/test_skipping.py
+++ b/testing/test_skipping.py
@@ -649,7 +649,7 @@ class TestXFail:
         result.stdout.fnmatch_lines(
             [
                 "*test_strict_xfail*",
-                "XPASS test_strict_xfail.py::test_foo unsupported feature",
+                "XPASS test_strict_xfail.py::test_foo - unsupported feature",
             ]
         )
         assert result.ret == (1 if strict else 0)
diff --git a/testing/test_terminal.py b/testing/test_terminal.py
index 4f8d449e7..43811bc0f 100644
--- a/testing/test_terminal.py
+++ b/testing/test_terminal.py
@@ -2631,8 +2631,8 @@ def test_summary_xfail_reason(pytester: Pytester) -> None:
         """
     )
     result = pytester.runpytest("-rx")
-    expect1 = "XFAIL test_summary_xfail_reason.py::test_xfail - assert False"
-    expect2 = "XFAIL test_summary_xfail_reason.py::test_xfail_reason - assert False - foo"
+    expect1 = "XFAIL test_summary_xfail_reason.py::test_xfail"
+    expect2 = "XFAIL test_summary_xfail_reason.py::test_xfail_reason - foo"
     result.stdout.fnmatch_lines([expect1, expect2])
     assert result.stdout.lines.count(expect1) == 1
     assert result.stdout.lines.count(expect2) == 1
@@ -2660,7 +2660,7 @@ def test_summary_xfail_tb(pytester: Pytester) -> None:
         "E *assert 1 == 2*",
         "test_summary_xfail_tb.py:6: AssertionError*",
         "*= short test summary info =*",
-        "XFAIL test_summary_xfail_tb.py::test_xfail - assert 1 == 2",
+        "XFAIL test_summary_xfail_tb.py::test_xfail",
         "*= 1 xfailed in * =*"
     ])
 
@@ -2681,7 +2681,7 @@ def test_xfail_tb_line(pytester: Pytester) -> None:
         "*= XFAILURES =*",
         "*test_xfail_tb_line.py:6: assert 1 == 2",
         "*= short test summary info =*",
-        "XFAIL test_xfail_tb_line.py::test_xfail - assert 1 == 2",
+        "XFAIL test_xfail_tb_line.py::test_xfail",
         "*= 1 xfailed in * =*"
     ])
 
@@ -2724,7 +2724,7 @@ def test_xpass_output(pytester: Pytester) -> None:
         "*_ test_pass _*",
         "*- Captured stdout call -*",
         "*= short test summary info =*",
-        "XPASS test_xpass_output.py::test_pass",
+        "XPASS test_xpass_output.py::test_pass*",
         "*= 1 xpassed in * =*"
     ])