implement #12231 new flag --xfail-tb
This commit is contained in:
parent
1385ec117d
commit
37e415dbc1
|
@ -0,0 +1,13 @@
|
|||
Add `--xfail-tb` flag, which turns on traceback output for XFAIL results.
|
||||
|
||||
* If the `--xfail-tb` flag is not given, tracebacks for XFAIL results are NOT shown.
|
||||
* The style of traceback for XFAIL is set with `--tb`, and can be `auto|long|short|line|native|no`.
|
||||
* Note: Even if you have `--xfail-tb` set, xfail tracebacks are still not shown if `--tb=no` is also set.
|
||||
|
||||
Some history:
|
||||
|
||||
* This is a behavior break, but brings default behavior back to pre-8.0.0 behavior.
|
||||
* With this change, default `-rx`/ `-ra` behavior is identical to pre-8.0 with respect to xfail tracebacks.
|
||||
* With pytest 8.0, `-rx` or `-ra` would not only turn on summary reports for xfail, but also report the tracebacks for xfail results.
|
||||
* This caused issues with some projects that utilize xfail, but don't want to see all of the xfail tracebacks.
|
||||
* This change detaches xfail tracebacks from `-rx`, and now we turn on xfail tracebacks with `--xfail-tb`.
|
|
@ -216,6 +216,13 @@ def pytest_addoption(parser: Parser) -> None:
|
|||
choices=["auto", "long", "short", "no", "line", "native"],
|
||||
help="Traceback print mode (auto/long/short/line/native/no)",
|
||||
)
|
||||
group._addoption(
|
||||
"--xfail-tb",
|
||||
action="store_true",
|
||||
dest="xfail_tb",
|
||||
default=False,
|
||||
help="Show tracebacks for xfail (as long as --tb != no)",
|
||||
)
|
||||
group._addoption(
|
||||
"--show-capture",
|
||||
action="store",
|
||||
|
@ -1086,21 +1093,28 @@ class TerminalReporter:
|
|||
self._tw.line(content)
|
||||
|
||||
def summary_failures(self) -> None:
|
||||
self.summary_failures_combined("failed", "FAILURES")
|
||||
style = self.config.option.tbstyle
|
||||
self.summary_failures_combined("failed", "FAILURES", style=style)
|
||||
|
||||
def summary_xfailures(self) -> None:
|
||||
self.summary_failures_combined("xfailed", "XFAILURES", "x")
|
||||
show_tb = self.config.option.xfail_tb
|
||||
style = self.config.option.tbstyle if show_tb else "no"
|
||||
self.summary_failures_combined("xfailed", "XFAILURES", style=style)
|
||||
|
||||
def summary_failures_combined(
|
||||
self, which_reports: str, sep_title: str, needed_opt: Optional[str] = None
|
||||
self,
|
||||
which_reports: str,
|
||||
sep_title: str,
|
||||
needed_opt: Optional[str] = None,
|
||||
style: Optional[str] = None,
|
||||
) -> None:
|
||||
if self.config.option.tbstyle != "no":
|
||||
if style != "no":
|
||||
if not needed_opt or self.hasopt(needed_opt):
|
||||
reports: List[BaseReport] = self.getreports(which_reports)
|
||||
if not reports:
|
||||
return
|
||||
self.write_sep("=", sep_title)
|
||||
if self.config.option.tbstyle == "line":
|
||||
if style == "line":
|
||||
for rep in reports:
|
||||
line = self._getcrashline(rep)
|
||||
self.write_line(line)
|
||||
|
|
|
@ -2909,54 +2909,75 @@ def test_summary_xfail_reason(pytester: Pytester) -> None:
|
|||
assert result.stdout.lines.count(expect2) == 1
|
||||
|
||||
|
||||
def test_summary_xfail_tb(pytester: Pytester) -> None:
|
||||
pytester.makepyfile(
|
||||
@pytest.fixture()
|
||||
def xfail_testfile(pytester: Pytester) -> Path:
|
||||
return pytester.makepyfile(
|
||||
"""
|
||||
import pytest
|
||||
|
||||
@pytest.mark.xfail
|
||||
def test_xfail():
|
||||
def test_fail():
|
||||
a, b = 1, 2
|
||||
assert a == b
|
||||
|
||||
@pytest.mark.xfail
|
||||
def test_xfail():
|
||||
c, d = 3, 4
|
||||
assert c == d
|
||||
"""
|
||||
)
|
||||
result = pytester.runpytest("-rx")
|
||||
|
||||
|
||||
def test_xfail_tb_default(xfail_testfile, pytester: Pytester) -> None:
|
||||
result = pytester.runpytest(xfail_testfile)
|
||||
|
||||
# test_fail, show traceback
|
||||
result.stdout.fnmatch_lines(
|
||||
[
|
||||
"*= FAILURES =*",
|
||||
"*_ test_fail _*",
|
||||
"*def test_fail():*",
|
||||
"* a, b = 1, 2*",
|
||||
"*> assert a == b*",
|
||||
"*E assert 1 == 2*",
|
||||
]
|
||||
)
|
||||
|
||||
# test_xfail, don't show traceback
|
||||
result.stdout.no_fnmatch_line("*= XFAILURES =*")
|
||||
|
||||
|
||||
def test_xfail_tb_true(xfail_testfile, pytester: Pytester) -> None:
|
||||
result = pytester.runpytest(xfail_testfile, "--xfail-tb")
|
||||
|
||||
# both test_fail and test_xfail, show traceback
|
||||
result.stdout.fnmatch_lines(
|
||||
[
|
||||
"*= FAILURES =*",
|
||||
"*_ test_fail _*",
|
||||
"*def test_fail():*",
|
||||
"* a, b = 1, 2*",
|
||||
"*> assert a == b*",
|
||||
"*E assert 1 == 2*",
|
||||
"*= XFAILURES =*",
|
||||
"*_ test_xfail _*",
|
||||
"* @pytest.mark.xfail*",
|
||||
"* def test_xfail():*",
|
||||
"* a, b = 1, 2*",
|
||||
"> *assert a == b*",
|
||||
"E *assert 1 == 2*",
|
||||
"test_summary_xfail_tb.py:6: AssertionError*",
|
||||
"*= short test summary info =*",
|
||||
"XFAIL test_summary_xfail_tb.py::test_xfail",
|
||||
"*= 1 xfailed in * =*",
|
||||
"*def test_xfail():*",
|
||||
"* c, d = 3, 4*",
|
||||
"*> assert c == d*",
|
||||
"*E assert 3 == 4*",
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
def test_xfail_tb_line(pytester: Pytester) -> None:
|
||||
pytester.makepyfile(
|
||||
"""
|
||||
import pytest
|
||||
def test_xfail_tb_line(xfail_testfile, pytester: Pytester) -> None:
|
||||
result = pytester.runpytest(xfail_testfile, "--xfail-tb", "--tb=line")
|
||||
|
||||
@pytest.mark.xfail
|
||||
def test_xfail():
|
||||
a, b = 1, 2
|
||||
assert a == b
|
||||
"""
|
||||
)
|
||||
result = pytester.runpytest("-rx", "--tb=line")
|
||||
# both test_fail and test_xfail, show line
|
||||
result.stdout.fnmatch_lines(
|
||||
[
|
||||
"*= FAILURES =*",
|
||||
"*test_xfail_tb_line.py:5: assert 1 == 2",
|
||||
"*= XFAILURES =*",
|
||||
"*test_xfail_tb_line.py:6: assert 1 == 2",
|
||||
"*= short test summary info =*",
|
||||
"XFAIL test_xfail_tb_line.py::test_xfail",
|
||||
"*= 1 xfailed in * =*",
|
||||
"*test_xfail_tb_line.py:10: assert 3 == 4",
|
||||
]
|
||||
)
|
||||
|
||||
|
|
Loading…
Reference in New Issue