fix conflicts after #9875 merge
commit 3b7818dfdd

@@ -42,7 +42,7 @@ repos:
    -   id: reorder-python-imports
        args: ['--application-directories=.:src', --py37-plus]
-   repo: https://github.com/asottile/pyupgrade
    rev: v2.32.0
    rev: v2.32.1
    hooks:
    -   id: pyupgrade
        args: [--py37-plus]

AUTHORS

@@ -44,6 +44,7 @@ Aron Coyle
Aron Curzon
Aviral Verma
Aviv Palivoda
Babak Keyvani
Barney Gale
Ben Gartner
Ben Webb

@@ -0,0 +1,2 @@
Improve :py:func:`pytest.raises`. Previously passing an empty tuple would give a confusing
error. We now raise immediately with a more helpful message.
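
A minimal sketch of the behaviour this entry describes, based on the validation and test added further down in this commit:

.. code-block:: python

    import pytest


    def test_empty_tuple_is_rejected():
        # Passing an empty tuple now fails immediately with a ValueError
        # explaining that an exception type (or a non-empty tuple of types)
        # is required.
        with pytest.raises(ValueError, match="Expected an exception type or"):
            pytest.raises(expected_exception=())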

@@ -176,8 +176,8 @@ logging records as they are emitted directly into the console.

You can specify the logging level for which log records with equal or higher
level are printed to the console by passing ``--log-cli-level``. This setting
accepts the logging level names as seen in python's documentation or an integer
as the logging level num.
accepts the logging level names or numeric values as seen in
:ref:`logging's documentation <python:levels>`.
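
As an illustration of the option described above (a sketch, not part of the changed documentation), a test whose log records become visible live once a CLI level is set:

.. code-block:: python

    # Run with:  pytest --log-cli-level=INFO
    # The level may also be given numerically (e.g. 20 for INFO);
    # --log-file-level below accepts the same names and numbers.
    import logging

    logger = logging.getLogger(__name__)


    def test_emits_records():
        logger.info("shown at INFO and lower thresholds")
        logger.debug("only shown with --log-cli-level=DEBUG (10)")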

Additionally, you can also specify ``--log-cli-format`` and
``--log-cli-date-format`` which mirror and default to ``--log-format`` and

@@ -198,9 +198,8 @@ Note that relative paths for the log-file location, whether passed on the CLI or
config file, are always resolved relative to the current working directory.

You can also specify the logging level for the log file by passing
``--log-file-level``. This setting accepts the logging level names as seen in
python's documentation(ie, uppercased level names) or an integer as the logging
level num.
``--log-file-level``. This setting accepts the logging level names or numeric
values as seen in :ref:`logging's documentation <python:levels>`.

Additionally, you can also specify ``--log-file-format`` and
``--log-file-date-format`` which are equal to ``--log-format`` and

@@ -27,12 +27,15 @@ Almost all ``unittest`` features are supported:
* ``setUpClass/tearDownClass``;
* ``setUpModule/tearDownModule``;

.. _`pytest-subtests`: https://github.com/pytest-dev/pytest-subtests
.. _`load_tests protocol`: https://docs.python.org/3/library/unittest.html#load-tests-protocol

Additionally, :ref:`subtests <python:subtests>` are supported by the
`pytest-subtests`_ plugin.
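
As a rough sketch of what that support looks like (assuming ``pytest-subtests`` is installed; this example is not taken from the changed files), a plain ``unittest`` subtest that pytest then reports per iteration:

.. code-block:: python

    import unittest


    class TestEven(unittest.TestCase):
        def test_even(self):
            for i in range(0, 6):
                # With pytest-subtests installed, each failing subtest is
                # reported individually instead of aborting the whole test.
                with self.subTest(i=i):
                    self.assertEqual(i % 2, 0)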

Up to this point pytest does not have support for the following features:

* `load_tests protocol`_;
* :ref:`subtests <python:subtests>`;

Benefits out of the box
-----------------------

@@ -92,7 +92,7 @@ pytest.param
pytest.raises
~~~~~~~~~~~~~

**Tutorial**: :ref:`assertraises`.
**Tutorial**: :ref:`assertraises`

.. autofunction:: pytest.raises(expected_exception: Exception [, *, match])
    :with: excinfo

@@ -100,7 +100,7 @@ pytest.raises
pytest.deprecated_call
~~~~~~~~~~~~~~~~~~~~~~

**Tutorial**: :ref:`ensuring_function_triggers`.
**Tutorial**: :ref:`ensuring_function_triggers`

.. autofunction:: pytest.deprecated_call()
    :with:

@@ -108,7 +108,7 @@ pytest.deprecated_call
pytest.register_assert_rewrite
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

**Tutorial**: :ref:`assertion-rewriting`.
**Tutorial**: :ref:`assertion-rewriting`

.. autofunction:: pytest.register_assert_rewrite

@@ -123,7 +123,7 @@ pytest.warns
pytest.freeze_includes
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

**Tutorial**: :ref:`freezing-pytest`.
**Tutorial**: :ref:`freezing-pytest`

.. autofunction:: pytest.freeze_includes

@@ -143,7 +143,7 @@ fixtures or plugins.
pytest.mark.filterwarnings
~~~~~~~~~~~~~~~~~~~~~~~~~~

**Tutorial**: :ref:`filterwarnings`.
**Tutorial**: :ref:`filterwarnings`

Add warning filters to marked test items.
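
A typical use of this mark, sketched for illustration:

.. code-block:: python

    import warnings

    import pytest


    @pytest.mark.filterwarnings("ignore::DeprecationWarning")
    def test_calls_deprecated_api():
        # The DeprecationWarning emitted here is filtered for this test only.
        warnings.warn("old API", DeprecationWarning)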

@@ -169,7 +169,7 @@ Add warning filters to marked test items.
pytest.mark.parametrize
~~~~~~~~~~~~~~~~~~~~~~~

:ref:`parametrize`.
**Tutorial**: :ref:`parametrize`

This mark has the same signature as :py:meth:`pytest.Metafunc.parametrize`; see there.
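
For example, a small sketch of the mark in use:

.. code-block:: python

    import pytest


    @pytest.mark.parametrize("n,expected", [(1, 2), (2, 3), (5, 6)])
    def test_increment(n, expected):
        # One test is generated and reported per (n, expected) pair.
        assert n + 1 == expected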

@@ -179,7 +179,7 @@ This mark has the same signature as :py:meth:`pytest.Metafunc.parametrize`; see
pytest.mark.skip
~~~~~~~~~~~~~~~~

:ref:`skip`.
**Tutorial**: :ref:`skip`

Unconditionally skip a test function.
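
A minimal illustrative example:

.. code-block:: python

    import pytest


    @pytest.mark.skip(reason="not implemented yet")
    def test_future_feature():
        # Never executed; reported as skipped with the given reason.
        raise AssertionError("should not run")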

@@ -193,7 +193,7 @@ Unconditionally skip a test function.
pytest.mark.skipif
~~~~~~~~~~~~~~~~~~

:ref:`skipif`.
**Tutorial**: :ref:`skipif`

Skip a test function if a condition is ``True``.
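
For instance, a sketch of a version-dependent skip:

.. code-block:: python

    import sys

    import pytest


    @pytest.mark.skipif(sys.version_info < (3, 8), reason="requires Python 3.8+")
    def test_uses_newer_feature():
        # Skipped on older interpreters, runs everywhere else.
        assert True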

@@ -209,7 +209,7 @@ Skip a test function if a condition is ``True``.
pytest.mark.usefixtures
~~~~~~~~~~~~~~~~~~~~~~~

**Tutorial**: :ref:`usefixtures`.
**Tutorial**: :ref:`usefixtures`

Mark a test function as using the given fixture names.
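
A small sketch of the mark in use (the fixture name ``cleandir`` is only an example):

.. code-block:: python

    import os
    import tempfile

    import pytest


    @pytest.fixture
    def cleandir():
        # Run the test from a fresh temporary directory.
        with tempfile.TemporaryDirectory() as tmp:
            old = os.getcwd()
            os.chdir(tmp)
            yield
            os.chdir(old)


    @pytest.mark.usefixtures("cleandir")
    def test_runs_in_empty_dir():
        # The fixture is set up even though it is not an argument here.
        assert os.listdir() == []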

@@ -231,7 +231,7 @@ Mark a test function as using the given fixture names.
pytest.mark.xfail
~~~~~~~~~~~~~~~~~~

**Tutorial**: :ref:`xfail`.
**Tutorial**: :ref:`xfail`

Marks a test function as *expected to fail*.
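
For example, a brief sketch:

.. code-block:: python

    import pytest


    @pytest.mark.xfail(reason="known bug in the parser")
    def test_known_bug():
        # Reported as XFAIL when it fails, XPASS if it unexpectedly passes.
        assert 1 == 2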

@@ -297,7 +297,7 @@ When :meth:`Node.iter_markers <_pytest.nodes.Node.iter_markers>` or :meth:`Node.
Fixtures
--------

**Tutorial**: :ref:`fixture`.
**Tutorial**: :ref:`fixture`

Fixtures are requested by test functions or other fixtures by declaring them as argument names.
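
A minimal sketch of requesting a fixture by name:

.. code-block:: python

    import pytest


    @pytest.fixture
    def numbers():
        return [1, 2, 3]


    def test_sum(numbers):
        # Declaring ``numbers`` as a parameter is all that is needed
        # to request the fixture above.
        assert sum(numbers) == 6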

@@ -338,7 +338,7 @@ For more details, consult the full :ref:`fixtures docs <fixture>`.
config.cache
~~~~~~~~~~~~

**Tutorial**: :ref:`cache`.
**Tutorial**: :ref:`cache`

The ``config.cache`` object allows other plugins and fixtures
to store and retrieve values across test runs. To access it from fixtures

@@ -358,22 +358,11 @@ Under the hood, the cache plugin uses the simple
capsys
~~~~~~

:ref:`captures`.
**Tutorial**: :ref:`captures`

.. autofunction:: _pytest.capture.capsys()
    :no-auto-options:

Returns an instance of :class:`CaptureFixture[str] <pytest.CaptureFixture>`.

Example:

.. code-block:: python

    def test_output(capsys):
        print("hello")
        captured = capsys.readouterr()
        assert captured.out == "hello\n"

.. autoclass:: pytest.CaptureFixture()
    :members:

@@ -383,93 +372,48 @@ capsys
capsysbinary
~~~~~~~~~~~~

:ref:`captures`.
**Tutorial**: :ref:`captures`

.. autofunction:: _pytest.capture.capsysbinary()
    :no-auto-options:

Returns an instance of :class:`CaptureFixture[bytes] <pytest.CaptureFixture>`.

Example:

.. code-block:: python

    def test_output(capsysbinary):
        print("hello")
        captured = capsysbinary.readouterr()
        assert captured.out == b"hello\n"

.. fixture:: capfd

capfd
~~~~~~

:ref:`captures`.
**Tutorial**: :ref:`captures`

.. autofunction:: _pytest.capture.capfd()
    :no-auto-options:

Returns an instance of :class:`CaptureFixture[str] <pytest.CaptureFixture>`.

Example:

.. code-block:: python

    def test_system_echo(capfd):
        os.system('echo "hello"')
        captured = capfd.readouterr()
        assert captured.out == "hello\n"

.. fixture:: capfdbinary

capfdbinary
~~~~~~~~~~~~

:ref:`captures`.
**Tutorial**: :ref:`captures`

.. autofunction:: _pytest.capture.capfdbinary()
    :no-auto-options:

Returns an instance of :class:`CaptureFixture[bytes] <pytest.CaptureFixture>`.

Example:

.. code-block:: python

    def test_system_echo(capfdbinary):
        os.system('echo "hello"')
        captured = capfdbinary.readouterr()
        assert captured.out == b"hello\n"

.. fixture:: doctest_namespace

doctest_namespace
~~~~~~~~~~~~~~~~~

:ref:`doctest`.
**Tutorial**: :ref:`doctest`

.. autofunction:: _pytest.doctest.doctest_namespace()

Usually this fixture is used in conjunction with another ``autouse`` fixture:

.. code-block:: python

    @pytest.fixture(autouse=True)
    def add_np(doctest_namespace):
        doctest_namespace["np"] = numpy

For more details: :ref:`doctest_namespace`.

.. fixture:: request

request
~~~~~~~

:ref:`request example`.
**Example**: :ref:`request example`

The ``request`` fixture is a special fixture providing information of the requesting test function.
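
For illustration, a sketch of a fixture inspecting its ``request``:

.. code-block:: python

    import pytest


    @pytest.fixture(params=["sqlite", "postgres"])
    def backend(request):
        # request.param holds the current parameter; request.node and
        # request.config describe the requesting test and the session.
        return request.param


    def test_backend_name(backend):
        assert backend in ("sqlite", "postgres")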

@@ -490,7 +434,7 @@ pytestconfig
record_property
~~~~~~~~~~~~~~~~~~~

**Tutorial**: :ref:`record_property example`.
**Tutorial**: :ref:`record_property example`

.. autofunction:: _pytest.junitxml.record_property()

@@ -500,7 +444,7 @@ record_property
record_testsuite_property
~~~~~~~~~~~~~~~~~~~~~~~~~

**Tutorial**: :ref:`record_testsuite_property example`.
**Tutorial**: :ref:`record_testsuite_property example`

.. autofunction:: _pytest.junitxml.record_testsuite_property()

@@ -510,7 +454,7 @@ record_testsuite_property
caplog
~~~~~~

:ref:`logging`.
**Tutorial**: :ref:`logging`

.. autofunction:: _pytest.logging.caplog()
    :no-auto-options:

@@ -526,7 +470,7 @@ caplog
monkeypatch
~~~~~~~~~~~

:ref:`monkeypatching`.
**Tutorial**: :ref:`monkeypatching`

.. autofunction:: _pytest.monkeypatch.monkeypatch()
    :no-auto-options:

@@ -600,19 +544,13 @@ recwarn
.. autoclass:: pytest.WarningsRecorder()
    :members:

Each recorded warning is an instance of :class:`warnings.WarningMessage`.

.. note::
    ``DeprecationWarning`` and ``PendingDeprecationWarning`` are treated
    differently; see :ref:`ensuring_function_triggers`.
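
A short sketch of what that looks like in a test (illustrative only):

.. code-block:: python

    import warnings

    import pytest


    def test_warning_is_recorded():
        with pytest.warns(UserWarning) as record:
            warnings.warn("something odd", UserWarning)
        # Each recorded entry is a warnings.WarningMessage.
        assert record[0].category is UserWarning
        assert "something odd" in str(record[0].message)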

.. fixture:: tmp_path

tmp_path
~~~~~~~~

:ref:`tmp_path`
**Tutorial**: :ref:`tmp_path`

.. autofunction:: _pytest.tmpdir.tmp_path()
    :no-auto-options:

@@ -623,7 +561,7 @@ tmp_path
tmp_path_factory
~~~~~~~~~~~~~~~~

:ref:`tmp_path_factory example`
**Tutorial**: :ref:`tmp_path_factory example`

.. _`tmp_path_factory factory api`:

@@ -638,7 +576,7 @@ tmp_path_factory
tmpdir
~~~~~~

:ref:`tmpdir and tmpdir_factory`
**Tutorial**: :ref:`tmpdir and tmpdir_factory`

.. autofunction:: _pytest.legacypath.LegacyTmpdirPlugin.tmpdir()
    :no-auto-options:

@@ -649,7 +587,7 @@ tmpdir
tmpdir_factory
~~~~~~~~~~~~~~

:ref:`tmpdir and tmpdir_factory`
**Tutorial**: :ref:`tmpdir and tmpdir_factory`

``tmpdir_factory`` is an instance of :class:`~pytest.TempdirFactory`:

@@ -662,7 +600,7 @@ tmpdir_factory
Hooks
-----

:ref:`writing-plugins`.
**Tutorial**: :ref:`writing-plugins`

.. currentmodule:: _pytest.hookspec

@@ -876,11 +876,22 @@ class CaptureFixture(Generic[AnyStr]):

@fixture
def capsys(request: SubRequest) -> Generator[CaptureFixture[str], None, None]:
    """Enable text capturing of writes to ``sys.stdout`` and ``sys.stderr``.
    r"""Enable text capturing of writes to ``sys.stdout`` and ``sys.stderr``.

    The captured output is made available via ``capsys.readouterr()`` method
    calls, which return a ``(out, err)`` namedtuple.
    ``out`` and ``err`` will be ``text`` objects.

    Returns an instance of :class:`CaptureFixture[str] <pytest.CaptureFixture>`.

    Example:

    .. code-block:: python

        def test_output(capsys):
            print("hello")
            captured = capsys.readouterr()
            assert captured.out == "hello\n"
    """
    capman = request.config.pluginmanager.getplugin("capturemanager")
    capture_fixture = CaptureFixture[str](SysCapture, request, _ispytest=True)

@@ -893,11 +904,22 @@ def capsys(request: SubRequest) -> Generator[CaptureFixture[str], None, None]:

@fixture
def capsysbinary(request: SubRequest) -> Generator[CaptureFixture[bytes], None, None]:
    """Enable bytes capturing of writes to ``sys.stdout`` and ``sys.stderr``.
    r"""Enable bytes capturing of writes to ``sys.stdout`` and ``sys.stderr``.

    The captured output is made available via ``capsysbinary.readouterr()``
    method calls, which return a ``(out, err)`` namedtuple.
    ``out`` and ``err`` will be ``bytes`` objects.

    Returns an instance of :class:`CaptureFixture[bytes] <pytest.CaptureFixture>`.

    Example:

    .. code-block:: python

        def test_output(capsysbinary):
            print("hello")
            captured = capsysbinary.readouterr()
            assert captured.out == b"hello\n"
    """
    capman = request.config.pluginmanager.getplugin("capturemanager")
    capture_fixture = CaptureFixture[bytes](SysCaptureBinary, request, _ispytest=True)

@@ -910,11 +932,22 @@ def capsysbinary(request: SubRequest) -> Generator[CaptureFixture[bytes], None,

@fixture
def capfd(request: SubRequest) -> Generator[CaptureFixture[str], None, None]:
    """Enable text capturing of writes to file descriptors ``1`` and ``2``.
    r"""Enable text capturing of writes to file descriptors ``1`` and ``2``.

    The captured output is made available via ``capfd.readouterr()`` method
    calls, which return a ``(out, err)`` namedtuple.
    ``out`` and ``err`` will be ``text`` objects.

    Returns an instance of :class:`CaptureFixture[str] <pytest.CaptureFixture>`.

    Example:

    .. code-block:: python

        def test_system_echo(capfd):
            os.system('echo "hello"')
            captured = capfd.readouterr()
            assert captured.out == "hello\n"
    """
    capman = request.config.pluginmanager.getplugin("capturemanager")
    capture_fixture = CaptureFixture[str](FDCapture, request, _ispytest=True)

@@ -927,11 +960,23 @@ def capfd(request: SubRequest) -> Generator[CaptureFixture[str], None, None]:

@fixture
def capfdbinary(request: SubRequest) -> Generator[CaptureFixture[bytes], None, None]:
    """Enable bytes capturing of writes to file descriptors ``1`` and ``2``.
    r"""Enable bytes capturing of writes to file descriptors ``1`` and ``2``.

    The captured output is made available via ``capfd.readouterr()`` method
    calls, which return a ``(out, err)`` namedtuple.
    ``out`` and ``err`` will be ``byte`` objects.

    Returns an instance of :class:`CaptureFixture[bytes] <pytest.CaptureFixture>`.

    Example:

    .. code-block:: python

        def test_system_echo(capfdbinary):
            os.system('echo "hello"')
            captured = capfdbinary.readouterr()
            assert captured.out == b"hello\n"

    """
    capman = request.config.pluginmanager.getplugin("capturemanager")
    capture_fixture = CaptureFixture[bytes](FDCaptureBinary, request, _ispytest=True)

@@ -730,5 +730,16 @@ def _get_report_choice(key: str) -> int:
@pytest.fixture(scope="session")
def doctest_namespace() -> Dict[str, Any]:
    """Fixture that returns a :py:class:`dict` that will be injected into the
    namespace of doctests."""
    namespace of doctests.

    Usually this fixture is used in conjunction with another ``autouse`` fixture:

    .. code-block:: python

        @pytest.fixture(autouse=True)
        def add_np(doctest_namespace):
            doctest_namespace["np"] = numpy

    For more details: :ref:`doctest_namespace`.
    """
    return dict()

@@ -899,6 +899,12 @@ def raises(
"""
|
||||
__tracebackhide__ = True
|
||||
|
||||
if not expected_exception:
|
||||
raise ValueError(
|
||||
f"Expected an exception type or a tuple of exception types, but got `{expected_exception!r}`. "
|
||||
f"Raising exceptions is already understood as failing the test, so you don't need "
|
||||
f"any special code to say 'this should never raise an exception'."
|
||||
)
|
||||
if isinstance(expected_exception, type):
|
||||
excepted_exceptions: Tuple[Type[E], ...] = (expected_exception,)
|
||||
else:
|
||||
|
|
|
@ -160,7 +160,14 @@ def warns(
|
|||
class WarningsRecorder(warnings.catch_warnings):
    """A context manager to record raised warnings.

    Each recorded warning is an instance of :class:`warnings.WarningMessage`.

    Adapted from `warnings.catch_warnings`.

    .. note::
        ``DeprecationWarning`` and ``PendingDeprecationWarning`` are treated
        differently; see :ref:`ensuring_function_triggers`.

    """

    def __init__(self, *, _ispytest: bool = False) -> None:

@@ -35,6 +35,7 @@ from _pytest import nodes
from _pytest import timing
from _pytest._code import ExceptionInfo
from _pytest._code.code import ExceptionRepr
from _pytest._io import TerminalWriter
from _pytest._io.wcwidth import wcswidth
from _pytest.assertion.util import running_on_ci
from _pytest.compat import final

@@ -1075,33 +1076,43 @@ class TerminalReporter:
        if not self.reportchars:
            return

        def show_simple(stat, lines: List[str]) -> None:
        def show_simple(lines: List[str], *, stat: str) -> None:
            failed = self.stats.get(stat, [])
            if not failed:
                return
            termwidth = self._tw.fullwidth
            config = self.config
            for rep in failed:
                line = _get_line_with_reprcrash_message(config, rep, termwidth)
                color = _color_for_type.get(stat, _color_for_type_default)
                line = _get_line_with_reprcrash_message(
                    config, rep, self._tw, {color: True}
                )
                lines.append(line)

        def show_xfailed(lines: List[str]) -> None:
            xfailed = self.stats.get("xfailed", [])
            for rep in xfailed:
                verbose_word = rep._get_verbose_word(self.config)
                pos = _get_pos(self.config, rep)
                lines.append(f"{verbose_word} {pos}")
                markup_word = self._tw.markup(
                    verbose_word, **{_color_for_type["warnings"]: True}
                )
                nodeid = _get_node_id_with_markup(self._tw, self.config, rep)
                line = f"{markup_word} {nodeid}"
                reason = rep.wasxfail
                if reason:
                    lines.append(" " + str(reason))
                    line += " - " + str(reason)

                lines.append(line)

        def show_xpassed(lines: List[str]) -> None:
            xpassed = self.stats.get("xpassed", [])
            for rep in xpassed:
                verbose_word = rep._get_verbose_word(self.config)
                pos = _get_pos(self.config, rep)
                markup_word = self._tw.markup(
                    verbose_word, **{_color_for_type["warnings"]: True}
                )
                nodeid = _get_node_id_with_markup(self._tw, self.config, rep)
                reason = rep.wasxfail
                lines.append(f"{verbose_word} {pos} {reason}")
                lines.append(f"{markup_word} {nodeid} {reason}")

        def show_skipped(lines: List[str]) -> None:
            skipped: List[CollectReport] = self.stats.get("skipped", [])

@@ -1109,24 +1120,27 @@ class TerminalReporter:
            if not fskips:
                return
            verbose_word = skipped[0]._get_verbose_word(self.config)
            markup_word = self._tw.markup(
                verbose_word, **{_color_for_type["warnings"]: True}
            )
            prefix = "Skipped: "
            for num, fspath, lineno, reason in fskips:
                if reason.startswith("Skipped: "):
                    reason = reason[9:]
                if reason.startswith(prefix):
                    reason = reason[len(prefix) :]
                if lineno is not None:
                    lines.append(
                        "%s [%d] %s:%d: %s"
                        % (verbose_word, num, fspath, lineno, reason)
                        "%s [%d] %s:%d: %s" % (markup_word, num, fspath, lineno, reason)
                    )
                else:
                    lines.append("%s [%d] %s: %s" % (verbose_word, num, fspath, reason))
                    lines.append("%s [%d] %s: %s" % (markup_word, num, fspath, reason))

        REPORTCHAR_ACTIONS: Mapping[str, Callable[[List[str]], None]] = {
            "x": show_xfailed,
            "X": show_xpassed,
            "f": partial(show_simple, "failed"),
            "f": partial(show_simple, stat="failed"),
            "s": show_skipped,
            "p": partial(show_simple, "passed"),
            "E": partial(show_simple, "error"),
            "p": partial(show_simple, stat="passed"),
            "E": partial(show_simple, stat="error"),
        }

        lines: List[str] = []

@@ -1136,7 +1150,7 @@ class TerminalReporter:
            action(lines)

        if lines:
            self.write_sep("=", "short test summary info")
            self.write_sep("=", "short test summary info", cyan=True, bold=True)
            for line in lines:
                self.write_line(line)

@@ -1250,9 +1264,14 @@ class TerminalReporter:
        return parts, main_color


def _get_pos(config: Config, rep: BaseReport):
def _get_node_id_with_markup(tw: TerminalWriter, config: Config, rep: BaseReport):
    nodeid = config.cwd_relative_nodeid(rep.nodeid)
    return nodeid
    path, *parts = nodeid.split("::")
    if parts:
        parts_markup = tw.markup("::".join(parts), bold=True)
        return path + "::" + parts_markup
    else:
        return path


def _format_trimmed(format: str, msg: str, available_width: int) -> Optional[str]:

@@ -1281,13 +1300,14 @@ def _format_trimmed(format: str, msg: str, available_width: int) -> Optional[str
def _get_line_with_reprcrash_message(
    config: Config, rep: BaseReport, termwidth: int
    config: Config, rep: BaseReport, tw: TerminalWriter, word_markup: Dict[str, bool]
) -> str:
    """Get summary line for a report, trying to add reprcrash message."""
    verbose_word = rep._get_verbose_word(config)
    pos = _get_pos(config, rep)
    word = tw.markup(verbose_word, **word_markup)
    node = _get_node_id_with_markup(tw, config, rep)

    line = f"{verbose_word} {pos}"
    line = f"{word} {node}"
    line_width = wcswidth(line)

    try:

@@ -1297,7 +1317,7 @@ def _get_line_with_reprcrash_message(
        pass
    else:
        if not running_on_ci():
            available_width = termwidth - line_width
            available_width = tw.fullwidth - line_width
            msg = _format_trimmed(" - {}", msg, available_width)
        else:
            msg = f" - {msg}"

@@ -19,6 +19,16 @@ class TestRaises:
        excinfo = pytest.raises(ValueError, int, "hello")
        assert "invalid literal" in str(excinfo.value)

    def test_raises_does_not_allow_none(self):
        with pytest.raises(ValueError, match="Expected an exception type or"):
            # We're testing that this invalid usage gives a helpful error,
            # so we can ignore Mypy telling us that None is invalid.
            pytest.raises(expected_exception=None)  # type: ignore

    def test_raises_does_not_allow_empty_tuple(self):
        with pytest.raises(ValueError, match="Expected an exception type or"):
            pytest.raises(expected_exception=())

    def test_raises_callable_no_exception(self) -> None:
        class A:
            def __call__(self):

@@ -441,10 +441,8 @@ class TestXFail:
        result = pytester.runpytest(p, "-rx")
        result.stdout.fnmatch_lines(
            [
                "*test_one*test_this*",
                "*NOTRUN*noway",
                "*test_one*test_this_true*",
                "*NOTRUN*condition:*True*",
                "*test_one*test_this - reason: *NOTRUN* noway",
                "*test_one*test_this_true - reason: *NOTRUN* condition: True",
                "*1 passed*",
            ]
        )

@@ -461,9 +459,7 @@ class TestXFail:
            """
        )
        result = pytester.runpytest(p, "-rx")
        result.stdout.fnmatch_lines(
            ["*test_one*test_this*", "*NOTRUN*hello", "*1 xfailed*"]
        )
        result.stdout.fnmatch_lines(["*test_one*test_this*NOTRUN*hello", "*1 xfailed*"])

    def test_xfail_xpass(self, pytester: Pytester) -> None:
        p = pytester.makepyfile(

@@ -489,7 +485,7 @@ class TestXFail:
        result = pytester.runpytest(p)
        result.stdout.fnmatch_lines(["*1 xfailed*"])
        result = pytester.runpytest(p, "-rx")
        result.stdout.fnmatch_lines(["*XFAIL*test_this*", "*reason:*hello*"])
        result.stdout.fnmatch_lines(["*XFAIL*test_this*reason:*hello*"])
        result = pytester.runpytest(p, "--runxfail")
        result.stdout.fnmatch_lines(["*1 pass*"])

@@ -507,7 +503,7 @@ class TestXFail:
        result = pytester.runpytest(p)
        result.stdout.fnmatch_lines(["*1 xfailed*"])
        result = pytester.runpytest(p, "-rx")
        result.stdout.fnmatch_lines(["*XFAIL*test_this*", "*reason:*hello*"])
        result.stdout.fnmatch_lines(["*XFAIL*test_this*reason:*hello*"])
        result = pytester.runpytest(p, "--runxfail")
        result.stdout.fnmatch_lines(
            """

@@ -543,7 +539,7 @@ class TestXFail:
            """
        )
        result = pytester.runpytest(p, "-rxX")
        result.stdout.fnmatch_lines(["*XFAIL*test_this*", "*NOTRUN*"])
        result.stdout.fnmatch_lines(["*XFAIL*test_this*NOTRUN*"])

    def test_dynamic_xfail_set_during_funcarg_setup(self, pytester: Pytester) -> None:
        p = pytester.makepyfile(

@@ -622,7 +618,7 @@ class TestXFail:
            """
        )
        result = pytester.runpytest(p, "-rxX")
        result.stdout.fnmatch_lines(["*XFAIL*", "*unsupported feature*"])
        result.stdout.fnmatch_lines(["*XFAIL*unsupported feature*"])
        assert result.ret == 0

    @pytest.mark.parametrize("strict", [True, False])

@@ -1185,7 +1181,7 @@ def test_xfail_skipif_with_globals(pytester: Pytester) -> None:
        """
    )
    result = pytester.runpytest("-rsx")
    result.stdout.fnmatch_lines(["*SKIP*x == 3*", "*XFAIL*test_boolean*", "*x == 3*"])
    result.stdout.fnmatch_lines(["*SKIP*x == 3*", "*XFAIL*test_boolean*x == 3*"])


def test_default_markers(pytester: Pytester) -> None:

@@ -1297,8 +1293,7 @@ class TestBooleanCondition:
        result = pytester.runpytest("-rxs")
        result.stdout.fnmatch_lines(
            """
            *XFAIL*
            *True123*
            *XFAIL*True123*
            *1 xfail*
        """
        )

@@ -2333,7 +2333,7 @@ def test_line_with_reprcrash(monkeypatch: MonkeyPatch) -> None:
    def mock_get_pos(*args):
        return mocked_pos

    monkeypatch.setattr(_pytest.terminal, "_get_pos", mock_get_pos)
    monkeypatch.setattr(_pytest.terminal, "_get_node_id_with_markup", mock_get_pos)

    class config:
        pass

@@ -2347,10 +2347,16 @@ def test_line_with_reprcrash(monkeypatch: MonkeyPatch) -> None:
        pass

    def check(msg, width, expected):
        class DummyTerminalWriter:
            fullwidth = width

            def markup(self, word: str, **markup: str):
                return word

        __tracebackhide__ = True
        if msg:
            rep.longrepr.reprcrash.message = msg  # type: ignore
        actual = _get_line_with_reprcrash_message(config, rep(), width)  # type: ignore
        actual = _get_line_with_reprcrash_message(config, rep(), DummyTerminalWriter(), {})  # type: ignore

        assert actual == expected
        if actual != f"{mocked_verbose_word} {mocked_pos}":