Merge branch 'okken_report_xfail' into report_xfails

commit 35bbbddf44
AUTHORS
@@ -266,6 +266,7 @@ Michael Goerz
+Michael Krebs
 Michael Seifert
 Michal Wajszczuk
 Michał Górny
 Michał Zięba
 Mickey Pashov
 Mihai Capotă

changelog entry (new file)
@@ -0,0 +1 @@
+Fixed handling ``NO_COLOR`` and ``FORCE_COLOR`` to ignore an empty value.
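
For context, the fix hinges on the truthiness of the variable's value; a minimal plain-Python sketch (not pytest code):

    import os

    os.environ["NO_COLOR"] = ""
    # Old-style membership check: an empty value still counts as "set".
    assert "NO_COLOR" in os.environ
    # New-style check: os.environ.get() returns "", which is falsy,
    # so an empty value is ignored.
    assert not os.environ.get("NO_COLOR")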

doc/en/reference/reference.rst
@@ -1146,13 +1146,13 @@ When set to ``0``, pytest will not use color.

 .. envvar:: NO_COLOR

-When set (regardless of value), pytest will not use color in terminal output.
+When set to a non-empty string (regardless of value), pytest will not use color in terminal output.
 ``PY_COLORS`` takes precedence over ``NO_COLOR``, which takes precedence over ``FORCE_COLOR``.
 See `no-color.org <https://no-color.org/>`__ for other libraries supporting this community standard.

 .. envvar:: FORCE_COLOR

-When set (regardless of value), pytest will use color in terminal output.
+When set to a non-empty string (regardless of value), pytest will use color in terminal output.
 ``PY_COLORS`` and ``NO_COLOR`` take precedence over ``FORCE_COLOR``.

 Exceptions

src/_pytest/_io/terminalwriter.py
@@ -29,9 +29,9 @@ def should_do_markup(file: TextIO) -> bool:
         return True
     if os.environ.get("PY_COLORS") == "0":
         return False
-    if "NO_COLOR" in os.environ:
+    if os.environ.get("NO_COLOR"):
         return False
-    if "FORCE_COLOR" in os.environ:
+    if os.environ.get("FORCE_COLOR"):
         return True
     return (
         hasattr(file, "isatty") and file.isatty() and os.environ.get("TERM") != "dumb"
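
A minimal sketch of the resulting precedence, as a hypothetical standalone helper mirroring the branch order above (not pytest's API); None means "defer to the isatty()/TERM fallback":

    from typing import Dict, Optional

    def color_choice(env: Dict[str, str]) -> Optional[bool]:
        # PY_COLORS wins, then NO_COLOR, then FORCE_COLOR.
        if env.get("PY_COLORS") == "1":
            return True
        if env.get("PY_COLORS") == "0":
            return False
        if env.get("NO_COLOR"):  # "" is falsy and falls through
            return False
        if env.get("FORCE_COLOR"):  # "" is falsy and falls through
            return True
        return None

    assert color_choice({"NO_COLOR": "1", "FORCE_COLOR": "1"}) is False
    assert color_choice({"NO_COLOR": "", "FORCE_COLOR": "1"}) is True
    assert color_choice({"NO_COLOR": "", "FORCE_COLOR": ""}) is None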

src/_pytest/terminal.py
@@ -1202,8 +1202,11 @@ class TerminalReporter:
                     verbose_word, **{_color_for_type["warnings"]: True}
                 )
                 nodeid = _get_node_id_with_markup(self._tw, self.config, rep)
-                reason = rep.wasxfail
-                lines.append(f"{markup_word} {nodeid} {reason}")
+                line = f"{markup_word} {nodeid}"
+                reason = rep.wasxfail
+                if reason:
+                    line += " - " + str(reason)
+                lines.append(line)

         def show_skipped(lines: List[str]) -> None:
             skipped: List[CollectReport] = self.stats.get("skipped", [])
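
The effect of the new branch, demonstrated standalone with sample values (names as in the hunk above; the reason string is a stand-in for rep.wasxfail):

    markup_word = "XFAIL"
    nodeid = "test_foo.py::test_bar"

    reason = "flaky on CI"
    line = f"{markup_word} {nodeid}"
    if reason:
        line += " - " + str(reason)
    assert line == "XFAIL test_foo.py::test_bar - flaky on CI"

    # An empty reason no longer produces the old dangling separator:
    reason = ""
    line = f"{markup_word} {nodeid}"
    if reason:
        line += " - " + str(reason)
    assert line == "XFAIL test_foo.py::test_bar"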

testing/test_terminalwriter.py
@@ -5,6 +5,7 @@ import shutil
 import sys
 from pathlib import Path
 from typing import Generator
+from typing import Optional
 from unittest import mock

 import pytest
@@ -164,53 +165,67 @@ def test_attr_hasmarkup() -> None:
     assert "\x1b[0m" in s


-def assert_color_set():
+def assert_color(expected: bool, default: Optional[bool] = None) -> None:
     file = io.StringIO()
-    tw = terminalwriter.TerminalWriter(file)
-    assert tw.hasmarkup
+    if default is None:
+        default = not expected
+    file.isatty = lambda: default  # type: ignore
+    tw = terminalwriter.TerminalWriter(file=file)
+    assert tw.hasmarkup is expected
     tw.line("hello", bold=True)
     s = file.getvalue()
-    assert len(s) > len("hello\n")
-    assert "\x1b[1m" in s
-    assert "\x1b[0m" in s
-
-
-def assert_color_not_set():
-    f = io.StringIO()
-    f.isatty = lambda: True  # type: ignore
-    tw = terminalwriter.TerminalWriter(file=f)
-    assert not tw.hasmarkup
-    tw.line("hello", bold=True)
-    s = f.getvalue()
-    assert s == "hello\n"
+    if expected:
+        assert len(s) > len("hello\n")
+        assert "\x1b[1m" in s
+        assert "\x1b[0m" in s
+    else:
+        assert s == "hello\n"


 def test_should_do_markup_PY_COLORS_eq_1(monkeypatch: MonkeyPatch) -> None:
     monkeypatch.setitem(os.environ, "PY_COLORS", "1")
-    assert_color_set()
+    assert_color(True)


 def test_should_not_do_markup_PY_COLORS_eq_0(monkeypatch: MonkeyPatch) -> None:
     monkeypatch.setitem(os.environ, "PY_COLORS", "0")
-    assert_color_not_set()
+    assert_color(False)


 def test_should_not_do_markup_NO_COLOR(monkeypatch: MonkeyPatch) -> None:
     monkeypatch.setitem(os.environ, "NO_COLOR", "1")
-    assert_color_not_set()
+    assert_color(False)


 def test_should_do_markup_FORCE_COLOR(monkeypatch: MonkeyPatch) -> None:
     monkeypatch.setitem(os.environ, "FORCE_COLOR", "1")
-    assert_color_set()
+    assert_color(True)


-def test_should_not_do_markup_NO_COLOR_and_FORCE_COLOR(
+@pytest.mark.parametrize(
+    ["NO_COLOR", "FORCE_COLOR", "expected"],
+    [
+        ("1", "1", False),
+        ("", "1", True),
+        ("1", "", False),
+    ],
+)
+def test_NO_COLOR_and_FORCE_COLOR(
     monkeypatch: MonkeyPatch,
+    NO_COLOR: str,
+    FORCE_COLOR: str,
+    expected: bool,
 ) -> None:
-    monkeypatch.setitem(os.environ, "NO_COLOR", "1")
-    monkeypatch.setitem(os.environ, "FORCE_COLOR", "1")
-    assert_color_not_set()
+    monkeypatch.setitem(os.environ, "NO_COLOR", NO_COLOR)
+    monkeypatch.setitem(os.environ, "FORCE_COLOR", FORCE_COLOR)
+    assert_color(expected)
+
+
+def test_empty_NO_COLOR_and_FORCE_COLOR_ignored(monkeypatch: MonkeyPatch) -> None:
+    monkeypatch.setitem(os.environ, "NO_COLOR", "")
+    monkeypatch.setitem(os.environ, "FORCE_COLOR", "")
+    assert_color(True, True)
+    assert_color(False, False)


 class TestTerminalWriterLineWidth:

testing/test_skipping.py
@@ -649,7 +649,7 @@ class TestXFail:
         result.stdout.fnmatch_lines(
             [
                 "*test_strict_xfail*",
-                "XPASS test_strict_xfail.py::test_foo unsupported feature",
+                "XPASS test_strict_xfail.py::test_foo - unsupported feature",
             ]
         )
         assert result.ret == (1 if strict else 0)
@@ -2614,3 +2614,122 @@ def test_format_trimmed() -> None:

     assert _format_trimmed(" ({}) ", msg, len(msg) + 4) == " (unconditional skip) "
     assert _format_trimmed(" ({}) ", msg, len(msg) + 3) == " (unconditional ...) "
+
+
+def test_summary_xfail_reason(pytester: Pytester) -> None:
+    pytester.makepyfile(
+        """
+        import pytest
+
+        @pytest.mark.xfail
+        def test_xfail():
+            assert False
+
+        @pytest.mark.xfail(reason="foo")
+        def test_xfail_reason():
+            assert False
+        """
+    )
+    result = pytester.runpytest("-rx")
+    expect1 = "XFAIL test_summary_xfail_reason.py::test_xfail"
+    expect2 = "XFAIL test_summary_xfail_reason.py::test_xfail_reason - foo"
+    result.stdout.fnmatch_lines([expect1, expect2])
+    assert result.stdout.lines.count(expect1) == 1
+    assert result.stdout.lines.count(expect2) == 1
+
+
+def test_summary_xfail_tb(pytester: Pytester) -> None:
+    pytester.makepyfile(
+        """
+        import pytest
+
+        @pytest.mark.xfail
+        def test_xfail():
+            a, b = 1, 2
+            assert a == b
+        """
+    )
+    result = pytester.runpytest("-rx")
+    result.stdout.fnmatch_lines(
+        [
+            "*= XFAILURES =*",
+            "*_ test_xfail _*",
+            "* @pytest.mark.xfail*",
+            "* def test_xfail():*",
+            "* a, b = 1, 2*",
+            "> *assert a == b*",
+            "E *assert 1 == 2*",
+            "test_summary_xfail_tb.py:6: AssertionError*",
+            "*= short test summary info =*",
+            "XFAIL test_summary_xfail_tb.py::test_xfail",
+            "*= 1 xfailed in * =*",
+        ]
+    )
+
+
+def test_xfail_tb_line(pytester: Pytester) -> None:
+    pytester.makepyfile(
+        """
+        import pytest
+
+        @pytest.mark.xfail
+        def test_xfail():
+            a, b = 1, 2
+            assert a == b
+        """
+    )
+    result = pytester.runpytest("-rx", "--tb=line")
+    result.stdout.fnmatch_lines(
+        [
+            "*= XFAILURES =*",
+            "*test_xfail_tb_line.py:6: assert 1 == 2",
+            "*= short test summary info =*",
+            "XFAIL test_xfail_tb_line.py::test_xfail",
+            "*= 1 xfailed in * =*",
+        ]
+    )
+
+
+def test_summary_xpass_reason(pytester: Pytester) -> None:
+    pytester.makepyfile(
+        """
+        import pytest
+
+        @pytest.mark.xfail
+        def test_pass():
+            ...
+
+        @pytest.mark.xfail(reason="foo")
+        def test_reason():
+            ...
+        """
+    )
+    result = pytester.runpytest("-rX")
+    expect1 = "XPASS test_summary_xpass_reason.py::test_pass"
+    expect2 = "XPASS test_summary_xpass_reason.py::test_reason - foo"
+    result.stdout.fnmatch_lines([expect1, expect2])
+    assert result.stdout.lines.count(expect1) == 1
+    assert result.stdout.lines.count(expect2) == 1
+
+
+def test_xpass_output(pytester: Pytester) -> None:
+    pytester.makepyfile(
+        """
+        import pytest
+
+        @pytest.mark.xfail
+        def test_pass():
+            print('hi there')
+        """
+    )
+    result = pytester.runpytest("-rX")
+    result.stdout.fnmatch_lines(
+        [
+            "*= XPASSES =*",
+            "*_ test_pass _*",
+            "*- Captured stdout call -*",
+            "*= short test summary info =*",
+            "XPASS test_xpass_output.py::test_pass*",
+            "*= 1 xpassed in * =*",
+        ]
+    )
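
To try the new summary output end to end, a small demo (hypothetical file name; expected line shapes taken from the tests above):

    # test_demo.py -- run with: pytest -rxX test_demo.py
    import pytest

    @pytest.mark.xfail(reason="demonstration")
    def test_will_xfail():
        assert False

    @pytest.mark.xfail
    def test_will_xpass():
        assert True

The short test summary should then include "XFAIL test_demo.py::test_will_xfail - demonstration" and "XPASS test_demo.py::test_will_xpass", preceded by the new XFAILURES and XPASSES sections.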