Merge branch 'okken_report_xfail' into report_xfails

Brian Okken 2024-01-02 09:19:59 -08:00
commit 35bbbddf44
8 changed files with 169 additions and 30 deletions

@@ -266,6 +266,7 @@ Michael Goerz
 Michael Krebs
 Michael Seifert
 Michal Wajszczuk
+Michał Górny
 Michał Zięba
 Mickey Pashov
 Mihai Capotă

@@ -0,0 +1 @@
+Fixed handling ``NO_COLOR`` and ``FORCE_COLOR`` to ignore an empty value.
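A minimal sketch of the behavior this changelog entry describes (illustrative, not part of the diff): a color override variable that is set to an empty string is now treated the same as if it were not set at all.

import os

# Hypothetical scenario: the user runs `NO_COLOR= FORCE_COLOR= pytest`,
# so both variables exist in the environment but are empty.
os.environ["NO_COLOR"] = ""
os.environ["FORCE_COLOR"] = ""

# After this fix, empty values no longer override anything; pytest falls
# back to its usual tty/TERM-based color detection.
assert not os.environ.get("NO_COLOR")
assert not os.environ.get("FORCE_COLOR")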

@@ -1146,13 +1146,13 @@ When set to ``0``, pytest will not use color.
 .. envvar:: NO_COLOR
 
-When set (regardless of value), pytest will not use color in terminal output.
+When set to a non-empty string (regardless of value), pytest will not use color in terminal output.
 ``PY_COLORS`` takes precedence over ``NO_COLOR``, which takes precedence over ``FORCE_COLOR``.
 See `no-color.org <https://no-color.org/>`__ for other libraries supporting this community standard.
 
 .. envvar:: FORCE_COLOR
 
-When set (regardless of value), pytest will use color in terminal output.
+When set to a non-empty string (regardless of value), pytest will use color in terminal output.
 ``PY_COLORS`` and ``NO_COLOR`` take precedence over ``FORCE_COLOR``.
 
 Exceptions

@@ -29,9 +29,9 @@ def should_do_markup(file: TextIO) -> bool:
         return True
     if os.environ.get("PY_COLORS") == "0":
         return False
-    if "NO_COLOR" in os.environ:
+    if os.environ.get("NO_COLOR"):
         return False
-    if "FORCE_COLOR" in os.environ:
+    if os.environ.get("FORCE_COLOR"):
         return True
     return (
         hasattr(file, "isatty") and file.isatty() and os.environ.get("TERM") != "dumb"
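For context, a quick sketch of why the check changed (the `env` dict below is a stand-in for `os.environ`, not pytest code): membership testing treats an empty variable as "set", while a truthiness check ignores it.

# Illustrative only: a plain dict standing in for os.environ.
env = {"NO_COLOR": ""}
assert "NO_COLOR" in env          # old check: an empty value would still disable color
assert not env.get("NO_COLOR")    # new check: an empty value is ignored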

@@ -1202,8 +1202,11 @@ class TerminalReporter:
                     verbose_word, **{_color_for_type["warnings"]: True}
                 )
                 nodeid = _get_node_id_with_markup(self._tw, self.config, rep)
+                line = f"{markup_word} {nodeid}"
                 reason = rep.wasxfail
-                lines.append(f"{markup_word} {nodeid} {reason}")
+                if reason:
+                    line += " - " + str(reason)
+                lines.append(line)
 
         def show_skipped(lines: List[str]) -> None:
             skipped: List[CollectReport] = self.stats.get("skipped", [])
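A rough sketch of the resulting summary formatting, using placeholder values rather than pytest's real report objects: the xfail reason is now appended with a ` - ` separator only when it is non-empty, so bare xfails no longer carry a trailing space.

def format_summary_line(markup_word: str, nodeid: str, reason: str) -> str:
    # Mirrors the logic above: append the reason only when it is non-empty.
    line = f"{markup_word} {nodeid}"
    if reason:
        line += " - " + str(reason)
    return line

assert format_summary_line("XFAIL", "test_a.py::test_x", "") == "XFAIL test_a.py::test_x"
assert (
    format_summary_line("XFAIL", "test_a.py::test_x", "flaky on CI")
    == "XFAIL test_a.py::test_x - flaky on CI"
)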

@@ -5,6 +5,7 @@ import shutil
 import sys
 from pathlib import Path
 from typing import Generator
+from typing import Optional
 from unittest import mock
 
 import pytest
@@ -164,53 +165,67 @@ def test_attr_hasmarkup() -> None:
     assert "\x1b[0m" in s
 
 
-def assert_color_set():
+def assert_color(expected: bool, default: Optional[bool] = None) -> None:
     file = io.StringIO()
-    tw = terminalwriter.TerminalWriter(file)
-    assert tw.hasmarkup
+    if default is None:
+        default = not expected
+    file.isatty = lambda: default  # type: ignore
+    tw = terminalwriter.TerminalWriter(file=file)
+    assert tw.hasmarkup is expected
     tw.line("hello", bold=True)
     s = file.getvalue()
-    assert len(s) > len("hello\n")
-    assert "\x1b[1m" in s
-    assert "\x1b[0m" in s
-
-
-def assert_color_not_set():
-    f = io.StringIO()
-    f.isatty = lambda: True  # type: ignore
-    tw = terminalwriter.TerminalWriter(file=f)
-    assert not tw.hasmarkup
-    tw.line("hello", bold=True)
-    s = f.getvalue()
-    assert s == "hello\n"
+    if expected:
+        assert len(s) > len("hello\n")
+        assert "\x1b[1m" in s
+        assert "\x1b[0m" in s
+    else:
+        assert s == "hello\n"
 
 
 def test_should_do_markup_PY_COLORS_eq_1(monkeypatch: MonkeyPatch) -> None:
     monkeypatch.setitem(os.environ, "PY_COLORS", "1")
-    assert_color_set()
+    assert_color(True)
 
 
 def test_should_not_do_markup_PY_COLORS_eq_0(monkeypatch: MonkeyPatch) -> None:
     monkeypatch.setitem(os.environ, "PY_COLORS", "0")
-    assert_color_not_set()
+    assert_color(False)
 
 
 def test_should_not_do_markup_NO_COLOR(monkeypatch: MonkeyPatch) -> None:
     monkeypatch.setitem(os.environ, "NO_COLOR", "1")
-    assert_color_not_set()
+    assert_color(False)
 
 
 def test_should_do_markup_FORCE_COLOR(monkeypatch: MonkeyPatch) -> None:
     monkeypatch.setitem(os.environ, "FORCE_COLOR", "1")
-    assert_color_set()
+    assert_color(True)
 
 
-def test_should_not_do_markup_NO_COLOR_and_FORCE_COLOR(
+@pytest.mark.parametrize(
+    ["NO_COLOR", "FORCE_COLOR", "expected"],
+    [
+        ("1", "1", False),
+        ("", "1", True),
+        ("1", "", False),
+    ],
+)
+def test_NO_COLOR_and_FORCE_COLOR(
     monkeypatch: MonkeyPatch,
+    NO_COLOR: str,
+    FORCE_COLOR: str,
+    expected: bool,
 ) -> None:
-    monkeypatch.setitem(os.environ, "NO_COLOR", "1")
-    monkeypatch.setitem(os.environ, "FORCE_COLOR", "1")
-    assert_color_not_set()
+    monkeypatch.setitem(os.environ, "NO_COLOR", NO_COLOR)
+    monkeypatch.setitem(os.environ, "FORCE_COLOR", FORCE_COLOR)
+    assert_color(expected)
+
+
+def test_empty_NO_COLOR_and_FORCE_COLOR_ignored(monkeypatch: MonkeyPatch) -> None:
+    monkeypatch.setitem(os.environ, "NO_COLOR", "")
+    monkeypatch.setitem(os.environ, "FORCE_COLOR", "")
+    assert_color(True, True)
+    assert_color(False, False)
 
 
 class TestTerminalWriterLineWidth:

@@ -649,7 +649,7 @@ class TestXFail:
         result.stdout.fnmatch_lines(
             [
                 "*test_strict_xfail*",
-                "XPASS test_strict_xfail.py::test_foo unsupported feature",
+                "XPASS test_strict_xfail.py::test_foo - unsupported feature",
             ]
         )
         assert result.ret == (1 if strict else 0)

@@ -2614,3 +2614,122 @@ def test_format_trimmed() -> None:
     assert _format_trimmed(" ({}) ", msg, len(msg) + 4) == " (unconditional skip) "
     assert _format_trimmed(" ({}) ", msg, len(msg) + 3) == " (unconditional ...) "
+
+
+def test_summary_xfail_reason(pytester: Pytester) -> None:
+    pytester.makepyfile(
+        """
+        import pytest
+
+        @pytest.mark.xfail
+        def test_xfail():
+            assert False
+
+        @pytest.mark.xfail(reason="foo")
+        def test_xfail_reason():
+            assert False
+        """
+    )
+    result = pytester.runpytest("-rx")
+    expect1 = "XFAIL test_summary_xfail_reason.py::test_xfail"
+    expect2 = "XFAIL test_summary_xfail_reason.py::test_xfail_reason - foo"
+    result.stdout.fnmatch_lines([expect1, expect2])
+    assert result.stdout.lines.count(expect1) == 1
+    assert result.stdout.lines.count(expect2) == 1
+
+
+def test_summary_xfail_tb(pytester: Pytester) -> None:
+    pytester.makepyfile(
+        """
+        import pytest
+
+        @pytest.mark.xfail
+        def test_xfail():
+            a, b = 1, 2
+            assert a == b
+        """
+    )
+    result = pytester.runpytest("-rx")
+    result.stdout.fnmatch_lines(
+        [
+            "*= XFAILURES =*",
+            "*_ test_xfail _*",
+            "* @pytest.mark.xfail*",
+            "* def test_xfail():*",
+            "* a, b = 1, 2*",
+            "> *assert a == b*",
+            "E *assert 1 == 2*",
+            "test_summary_xfail_tb.py:6: AssertionError*",
+            "*= short test summary info =*",
+            "XFAIL test_summary_xfail_tb.py::test_xfail",
+            "*= 1 xfailed in * =*",
+        ]
+    )
+
+
+def test_xfail_tb_line(pytester: Pytester) -> None:
+    pytester.makepyfile(
+        """
+        import pytest
+
+        @pytest.mark.xfail
+        def test_xfail():
+            a, b = 1, 2
+            assert a == b
+        """
+    )
+    result = pytester.runpytest("-rx", "--tb=line")
+    result.stdout.fnmatch_lines(
+        [
+            "*= XFAILURES =*",
+            "*test_xfail_tb_line.py:6: assert 1 == 2",
+            "*= short test summary info =*",
+            "XFAIL test_xfail_tb_line.py::test_xfail",
+            "*= 1 xfailed in * =*",
+        ]
+    )
+
+
+def test_summary_xpass_reason(pytester: Pytester) -> None:
+    pytester.makepyfile(
+        """
+        import pytest
+
+        @pytest.mark.xfail
+        def test_pass():
+            ...
+
+        @pytest.mark.xfail(reason="foo")
+        def test_reason():
+            ...
+        """
+    )
+    result = pytester.runpytest("-rX")
+    expect1 = "XPASS test_summary_xpass_reason.py::test_pass"
+    expect2 = "XPASS test_summary_xpass_reason.py::test_reason - foo"
+    result.stdout.fnmatch_lines([expect1, expect2])
+    assert result.stdout.lines.count(expect1) == 1
+    assert result.stdout.lines.count(expect2) == 1
+
+
+def test_xpass_output(pytester: Pytester) -> None:
+    pytester.makepyfile(
+        """
+        import pytest
+
+        @pytest.mark.xfail
+        def test_pass():
+            print('hi there')
+        """
+    )
+    result = pytester.runpytest("-rX")
+    result.stdout.fnmatch_lines(
+        [
+            "*= XPASSES =*",
+            "*_ test_pass _*",
+            "*- Captured stdout call -*",
+            "*= short test summary info =*",
+            "XPASS test_xpass_output.py::test_pass*",
+            "*= 1 xpassed in * =*",
+        ]
+    )
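To illustrate the user-facing effect these tests cover, here is a hypothetical test module (not part of the diff); with `pytest -rx` the short summary now includes the xfail reason after a ` - ` separator. The output shown in comments is approximate.

# example_xfail.py -- hypothetical file, for illustration only
import pytest

@pytest.mark.xfail(reason="known rounding bug")
def test_rounding():
    # Fails because binary floats round 2.675 down, so the test is reported as xfailed.
    assert round(2.675, 2) == 2.68

# Running `pytest -rx example_xfail.py` would end with roughly:
#   ========== short test summary info ==========
#   XFAIL example_xfail.py::test_rounding - known rounding bug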