From 8d369f73bab0e490b6bb3749e5cb7f3d405e52ab Mon Sep 17 00:00:00 2001 From: Christine M Date: Wed, 28 Oct 2020 10:05:54 -0500 Subject: [PATCH] Migrate test_skipping.py from testdir to pytester (#7953) --- AUTHORS | 1 + testing/test_skipping.py | 416 ++++++++++++++++++++------------------- 2 files changed, 214 insertions(+), 203 deletions(-) diff --git a/AUTHORS b/AUTHORS index d2bb3d3ec..35d220e00 100644 --- a/AUTHORS +++ b/AUTHORS @@ -60,6 +60,7 @@ Christian Fetzer Christian Neumüller Christian Theunert Christian Tismer +Christine Mecklenborg Christoph Buelter Christopher Dignam Christopher Gilling diff --git a/testing/test_skipping.py b/testing/test_skipping.py index bf014e343..cfc0cdbca 100644 --- a/testing/test_skipping.py +++ b/testing/test_skipping.py @@ -1,7 +1,7 @@ import sys import pytest -from _pytest.pytester import Testdir +from _pytest.pytester import Pytester from _pytest.runner import runtestprotocol from _pytest.skipping import evaluate_skip_marks from _pytest.skipping import evaluate_xfail_marks @@ -9,13 +9,13 @@ from _pytest.skipping import pytest_runtest_setup class TestEvaluation: - def test_no_marker(self, testdir): - item = testdir.getitem("def test_func(): pass") + def test_no_marker(self, pytester: Pytester) -> None: + item = pytester.getitem("def test_func(): pass") skipped = evaluate_skip_marks(item) assert not skipped - def test_marked_xfail_no_args(self, testdir): - item = testdir.getitem( + def test_marked_xfail_no_args(self, pytester: Pytester) -> None: + item = pytester.getitem( """ import pytest @pytest.mark.xfail @@ -28,8 +28,8 @@ class TestEvaluation: assert xfailed.reason == "" assert xfailed.run - def test_marked_skipif_no_args(self, testdir): - item = testdir.getitem( + def test_marked_skipif_no_args(self, pytester: Pytester) -> None: + item = pytester.getitem( """ import pytest @pytest.mark.skipif @@ -41,8 +41,8 @@ class TestEvaluation: assert skipped assert skipped.reason == "" - def test_marked_one_arg(self, testdir): - item = testdir.getitem( + def test_marked_one_arg(self, pytester: Pytester) -> None: + item = pytester.getitem( """ import pytest @pytest.mark.skipif("hasattr(os, 'sep')") @@ -54,8 +54,8 @@ class TestEvaluation: assert skipped assert skipped.reason == "condition: hasattr(os, 'sep')" - def test_marked_one_arg_with_reason(self, testdir): - item = testdir.getitem( + def test_marked_one_arg_with_reason(self, pytester: Pytester) -> None: + item = pytester.getitem( """ import pytest @pytest.mark.skipif("hasattr(os, 'sep')", attr=2, reason="hello world") @@ -67,13 +67,13 @@ class TestEvaluation: assert skipped assert skipped.reason == "hello world" - def test_marked_one_arg_twice(self, testdir): + def test_marked_one_arg_twice(self, pytester: Pytester) -> None: lines = [ """@pytest.mark.skipif("not hasattr(os, 'murks')")""", """@pytest.mark.skipif(condition="hasattr(os, 'murks')")""", ] for i in range(0, 2): - item = testdir.getitem( + item = pytester.getitem( """ import pytest %s @@ -87,8 +87,8 @@ class TestEvaluation: assert skipped assert skipped.reason == "condition: not hasattr(os, 'murks')" - def test_marked_one_arg_twice2(self, testdir): - item = testdir.getitem( + def test_marked_one_arg_twice2(self, pytester: Pytester) -> None: + item = pytester.getitem( """ import pytest @pytest.mark.skipif("hasattr(os, 'murks')") @@ -101,8 +101,10 @@ class TestEvaluation: assert skipped assert skipped.reason == "condition: not hasattr(os, 'murks')" - def test_marked_skipif_with_boolean_without_reason(self, testdir) -> None: - item = 
testdir.getitem( + def test_marked_skipif_with_boolean_without_reason( + self, pytester: Pytester + ) -> None: + item = pytester.getitem( """ import pytest @pytest.mark.skipif(False) @@ -118,8 +120,8 @@ class TestEvaluation: in excinfo.value.msg ) - def test_marked_skipif_with_invalid_boolean(self, testdir) -> None: - item = testdir.getitem( + def test_marked_skipif_with_invalid_boolean(self, pytester: Pytester) -> None: + item = pytester.getitem( """ import pytest @@ -138,8 +140,8 @@ class TestEvaluation: assert "Error evaluating 'skipif' condition as a boolean" in excinfo.value.msg assert "INVALID" in excinfo.value.msg - def test_skipif_class(self, testdir): - (item,) = testdir.getitems( + def test_skipif_class(self, pytester: Pytester) -> None: + (item,) = pytester.getitems( """ import pytest class TestClass(object): @@ -148,7 +150,7 @@ class TestEvaluation: pass """ ) - item.config._hackxyz = 3 + item.config._hackxyz = 3 # type: ignore[attr-defined] skipped = evaluate_skip_marks(item) assert skipped assert skipped.reason == "condition: config._hackxyz" @@ -156,8 +158,8 @@ class TestEvaluation: class TestXFail: @pytest.mark.parametrize("strict", [True, False]) - def test_xfail_simple(self, testdir, strict): - item = testdir.getitem( + def test_xfail_simple(self, pytester: Pytester, strict: bool) -> None: + item = pytester.getitem( """ import pytest @pytest.mark.xfail(strict=%s) @@ -172,8 +174,8 @@ class TestXFail: assert callreport.skipped assert callreport.wasxfail == "" - def test_xfail_xpassed(self, testdir): - item = testdir.getitem( + def test_xfail_xpassed(self, pytester: Pytester) -> None: + item = pytester.getitem( """ import pytest @pytest.mark.xfail(reason="this is an xfail") @@ -187,9 +189,9 @@ class TestXFail: assert callreport.passed assert callreport.wasxfail == "this is an xfail" - def test_xfail_using_platform(self, testdir): + def test_xfail_using_platform(self, pytester: Pytester) -> None: """Verify that platform can be used with xfail statements.""" - item = testdir.getitem( + item = pytester.getitem( """ import pytest @pytest.mark.xfail("platform.platform() == platform.platform()") @@ -202,8 +204,8 @@ class TestXFail: callreport = reports[1] assert callreport.wasxfail - def test_xfail_xpassed_strict(self, testdir): - item = testdir.getitem( + def test_xfail_xpassed_strict(self, pytester: Pytester) -> None: + item = pytester.getitem( """ import pytest @pytest.mark.xfail(strict=True, reason="nope") @@ -218,8 +220,8 @@ class TestXFail: assert str(callreport.longrepr) == "[XPASS(strict)] nope" assert not hasattr(callreport, "wasxfail") - def test_xfail_run_anyway(self, testdir): - testdir.makepyfile( + def test_xfail_run_anyway(self, pytester: Pytester) -> None: + pytester.makepyfile( """ import pytest @pytest.mark.xfail @@ -229,7 +231,7 @@ class TestXFail: pytest.xfail("hello") """ ) - result = testdir.runpytest("--runxfail") + result = pytester.runpytest("--runxfail") result.stdout.fnmatch_lines( ["*def test_func():*", "*assert 0*", "*1 failed*1 pass*"] ) @@ -247,8 +249,10 @@ class TestXFail: ), ], ) - def test_xfail_run_with_skip_mark(self, testdir, test_input, expected): - testdir.makepyfile( + def test_xfail_run_with_skip_mark( + self, pytester: Pytester, test_input, expected + ) -> None: + pytester.makepyfile( test_sample=""" import pytest @pytest.mark.skip @@ -256,11 +260,11 @@ class TestXFail: assert 0 """ ) - result = testdir.runpytest(*test_input) + result = pytester.runpytest(*test_input) result.stdout.fnmatch_lines(expected) - def 
test_xfail_evalfalse_but_fails(self, testdir): - item = testdir.getitem( + def test_xfail_evalfalse_but_fails(self, pytester: Pytester) -> None: + item = pytester.getitem( """ import pytest @pytest.mark.xfail('False') @@ -274,8 +278,8 @@ class TestXFail: assert not hasattr(callreport, "wasxfail") assert "xfail" in callreport.keywords - def test_xfail_not_report_default(self, testdir): - p = testdir.makepyfile( + def test_xfail_not_report_default(self, pytester: Pytester) -> None: + p = pytester.makepyfile( test_one=""" import pytest @pytest.mark.xfail @@ -283,13 +287,13 @@ class TestXFail: assert 0 """ ) - testdir.runpytest(p, "-v") + pytester.runpytest(p, "-v") # result.stdout.fnmatch_lines([ # "*HINT*use*-r*" # ]) - def test_xfail_not_run_xfail_reporting(self, testdir): - p = testdir.makepyfile( + def test_xfail_not_run_xfail_reporting(self, pytester: Pytester) -> None: + p = pytester.makepyfile( test_one=""" import pytest @pytest.mark.xfail(run=False, reason="noway") @@ -303,7 +307,7 @@ class TestXFail: assert 1 """ ) - result = testdir.runpytest(p, "-rx") + result = pytester.runpytest(p, "-rx") result.stdout.fnmatch_lines( [ "*test_one*test_this*", @@ -314,8 +318,8 @@ class TestXFail: ] ) - def test_xfail_not_run_no_setup_run(self, testdir): - p = testdir.makepyfile( + def test_xfail_not_run_no_setup_run(self, pytester: Pytester) -> None: + p = pytester.makepyfile( test_one=""" import pytest @pytest.mark.xfail(run=False, reason="hello") @@ -325,13 +329,13 @@ class TestXFail: raise ValueError(42) """ ) - result = testdir.runpytest(p, "-rx") + result = pytester.runpytest(p, "-rx") result.stdout.fnmatch_lines( ["*test_one*test_this*", "*NOTRUN*hello", "*1 xfailed*"] ) - def test_xfail_xpass(self, testdir): - p = testdir.makepyfile( + def test_xfail_xpass(self, pytester: Pytester) -> None: + p = pytester.makepyfile( test_one=""" import pytest @pytest.mark.xfail @@ -339,27 +343,27 @@ class TestXFail: assert 1 """ ) - result = testdir.runpytest(p, "-rX") + result = pytester.runpytest(p, "-rX") result.stdout.fnmatch_lines(["*XPASS*test_that*", "*1 xpassed*"]) assert result.ret == 0 - def test_xfail_imperative(self, testdir): - p = testdir.makepyfile( + def test_xfail_imperative(self, pytester: Pytester) -> None: + p = pytester.makepyfile( """ import pytest def test_this(): pytest.xfail("hello") """ ) - result = testdir.runpytest(p) + result = pytester.runpytest(p) result.stdout.fnmatch_lines(["*1 xfailed*"]) - result = testdir.runpytest(p, "-rx") + result = pytester.runpytest(p, "-rx") result.stdout.fnmatch_lines(["*XFAIL*test_this*", "*reason:*hello*"]) - result = testdir.runpytest(p, "--runxfail") + result = pytester.runpytest(p, "--runxfail") result.stdout.fnmatch_lines(["*1 pass*"]) - def test_xfail_imperative_in_setup_function(self, testdir): - p = testdir.makepyfile( + def test_xfail_imperative_in_setup_function(self, pytester: Pytester) -> None: + p = pytester.makepyfile( """ import pytest def setup_function(function): @@ -369,11 +373,11 @@ class TestXFail: assert 0 """ ) - result = testdir.runpytest(p) + result = pytester.runpytest(p) result.stdout.fnmatch_lines(["*1 xfailed*"]) - result = testdir.runpytest(p, "-rx") + result = pytester.runpytest(p, "-rx") result.stdout.fnmatch_lines(["*XFAIL*test_this*", "*reason:*hello*"]) - result = testdir.runpytest(p, "--runxfail") + result = pytester.runpytest(p, "--runxfail") result.stdout.fnmatch_lines( """ *def test_this* @@ -381,8 +385,8 @@ class TestXFail: """ ) - def xtest_dynamic_xfail_set_during_setup(self, testdir): - p = 
testdir.makepyfile( + def xtest_dynamic_xfail_set_during_setup(self, pytester: Pytester) -> None: + p = pytester.makepyfile( """ import pytest def setup_function(function): @@ -393,11 +397,11 @@ class TestXFail: assert 1 """ ) - result = testdir.runpytest(p, "-rxX") + result = pytester.runpytest(p, "-rxX") result.stdout.fnmatch_lines(["*XFAIL*test_this*", "*XPASS*test_that*"]) - def test_dynamic_xfail_no_run(self, testdir): - p = testdir.makepyfile( + def test_dynamic_xfail_no_run(self, pytester: Pytester) -> None: + p = pytester.makepyfile( """ import pytest @pytest.fixture @@ -407,11 +411,11 @@ class TestXFail: assert 0 """ ) - result = testdir.runpytest(p, "-rxX") + result = pytester.runpytest(p, "-rxX") result.stdout.fnmatch_lines(["*XFAIL*test_this*", "*NOTRUN*"]) - def test_dynamic_xfail_set_during_funcarg_setup(self, testdir): - p = testdir.makepyfile( + def test_dynamic_xfail_set_during_funcarg_setup(self, pytester: Pytester) -> None: + p = pytester.makepyfile( """ import pytest @pytest.fixture @@ -421,12 +425,12 @@ class TestXFail: assert 0 """ ) - result = testdir.runpytest(p) + result = pytester.runpytest(p) result.stdout.fnmatch_lines(["*1 xfailed*"]) - def test_dynamic_xfail_set_during_runtest_failed(self, testdir: Testdir) -> None: + def test_dynamic_xfail_set_during_runtest_failed(self, pytester: Pytester) -> None: # Issue #7486. - p = testdir.makepyfile( + p = pytester.makepyfile( """ import pytest def test_this(request): @@ -434,21 +438,21 @@ class TestXFail: assert 0 """ ) - result = testdir.runpytest(p) + result = pytester.runpytest(p) result.assert_outcomes(xfailed=1) def test_dynamic_xfail_set_during_runtest_passed_strict( - self, testdir: Testdir + self, pytester: Pytester ) -> None: # Issue #7486. - p = testdir.makepyfile( + p = pytester.makepyfile( """ import pytest def test_this(request): request.node.add_marker(pytest.mark.xfail(reason="xfail", strict=True)) """ ) - result = testdir.runpytest(p) + result = pytester.runpytest(p) result.assert_outcomes(failed=1) @pytest.mark.parametrize( @@ -460,8 +464,10 @@ class TestXFail: ("(AttributeError, TypeError)", "IndexError", "*1 failed*"), ], ) - def test_xfail_raises(self, expected, actual, matchline, testdir): - p = testdir.makepyfile( + def test_xfail_raises( + self, expected, actual, matchline, pytester: Pytester + ) -> None: + p = pytester.makepyfile( """ import pytest @pytest.mark.xfail(raises=%s) @@ -470,13 +476,13 @@ class TestXFail: """ % (expected, actual) ) - result = testdir.runpytest(p) + result = pytester.runpytest(p) result.stdout.fnmatch_lines([matchline]) - def test_strict_sanity(self, testdir): + def test_strict_sanity(self, pytester: Pytester) -> None: """Sanity check for xfail(strict=True): a failing test should behave exactly like a normal xfail.""" - p = testdir.makepyfile( + p = pytester.makepyfile( """ import pytest @pytest.mark.xfail(reason='unsupported feature', strict=True) @@ -484,13 +490,13 @@ class TestXFail: assert 0 """ ) - result = testdir.runpytest(p, "-rxX") + result = pytester.runpytest(p, "-rxX") result.stdout.fnmatch_lines(["*XFAIL*", "*unsupported feature*"]) assert result.ret == 0 @pytest.mark.parametrize("strict", [True, False]) - def test_strict_xfail(self, testdir, strict): - p = testdir.makepyfile( + def test_strict_xfail(self, pytester: Pytester, strict: bool) -> None: + p = pytester.makepyfile( """ import pytest @@ -500,7 +506,7 @@ class TestXFail: """ % strict ) - result = testdir.runpytest(p, "-rxX") + result = pytester.runpytest(p, "-rxX") if strict: 
result.stdout.fnmatch_lines( ["*test_foo*", "*XPASS(strict)*unsupported feature*"] @@ -513,11 +519,11 @@ class TestXFail: ] ) assert result.ret == (1 if strict else 0) - assert testdir.tmpdir.join("foo_executed").isfile() + assert pytester.path.joinpath("foo_executed").exists() @pytest.mark.parametrize("strict", [True, False]) - def test_strict_xfail_condition(self, testdir, strict): - p = testdir.makepyfile( + def test_strict_xfail_condition(self, pytester: Pytester, strict: bool) -> None: + p = pytester.makepyfile( """ import pytest @@ -527,13 +533,13 @@ class TestXFail: """ % strict ) - result = testdir.runpytest(p, "-rxX") + result = pytester.runpytest(p, "-rxX") result.stdout.fnmatch_lines(["*1 passed*"]) assert result.ret == 0 @pytest.mark.parametrize("strict", [True, False]) - def test_xfail_condition_keyword(self, testdir, strict): - p = testdir.makepyfile( + def test_xfail_condition_keyword(self, pytester: Pytester, strict: bool) -> None: + p = pytester.makepyfile( """ import pytest @@ -543,20 +549,22 @@ class TestXFail: """ % strict ) - result = testdir.runpytest(p, "-rxX") + result = pytester.runpytest(p, "-rxX") result.stdout.fnmatch_lines(["*1 passed*"]) assert result.ret == 0 @pytest.mark.parametrize("strict_val", ["true", "false"]) - def test_strict_xfail_default_from_file(self, testdir, strict_val): - testdir.makeini( + def test_strict_xfail_default_from_file( + self, pytester: Pytester, strict_val + ) -> None: + pytester.makeini( """ [pytest] xfail_strict = %s """ % strict_val ) - p = testdir.makepyfile( + p = pytester.makepyfile( """ import pytest @pytest.mark.xfail(reason='unsupported feature') @@ -564,15 +572,15 @@ class TestXFail: pass """ ) - result = testdir.runpytest(p, "-rxX") + result = pytester.runpytest(p, "-rxX") strict = strict_val == "true" result.stdout.fnmatch_lines(["*1 failed*" if strict else "*1 xpassed*"]) assert result.ret == (1 if strict else 0) class TestXFailwithSetupTeardown: - def test_failing_setup_issue9(self, testdir): - testdir.makepyfile( + def test_failing_setup_issue9(self, pytester: Pytester) -> None: + pytester.makepyfile( """ import pytest def setup_function(func): @@ -583,11 +591,11 @@ class TestXFailwithSetupTeardown: pass """ ) - result = testdir.runpytest() + result = pytester.runpytest() result.stdout.fnmatch_lines(["*1 xfail*"]) - def test_failing_teardown_issue9(self, testdir): - testdir.makepyfile( + def test_failing_teardown_issue9(self, pytester: Pytester) -> None: + pytester.makepyfile( """ import pytest def teardown_function(func): @@ -598,13 +606,13 @@ class TestXFailwithSetupTeardown: pass """ ) - result = testdir.runpytest() + result = pytester.runpytest() result.stdout.fnmatch_lines(["*1 xfail*"]) class TestSkip: - def test_skip_class(self, testdir): - testdir.makepyfile( + def test_skip_class(self, pytester: Pytester) -> None: + pytester.makepyfile( """ import pytest @pytest.mark.skip @@ -618,11 +626,11 @@ class TestSkip: pass """ ) - rec = testdir.inline_run() + rec = pytester.inline_run() rec.assertoutcome(skipped=2, passed=1) - def test_skips_on_false_string(self, testdir): - testdir.makepyfile( + def test_skips_on_false_string(self, pytester: Pytester) -> None: + pytester.makepyfile( """ import pytest @pytest.mark.skip('False') @@ -630,11 +638,11 @@ class TestSkip: pass """ ) - rec = testdir.inline_run() + rec = pytester.inline_run() rec.assertoutcome(skipped=1) - def test_arg_as_reason(self, testdir): - testdir.makepyfile( + def test_arg_as_reason(self, pytester: Pytester) -> None: + pytester.makepyfile( """ import 
pytest @pytest.mark.skip('testing stuff') @@ -642,11 +650,11 @@ class TestSkip: pass """ ) - result = testdir.runpytest("-rs") + result = pytester.runpytest("-rs") result.stdout.fnmatch_lines(["*testing stuff*", "*1 skipped*"]) - def test_skip_no_reason(self, testdir): - testdir.makepyfile( + def test_skip_no_reason(self, pytester: Pytester) -> None: + pytester.makepyfile( """ import pytest @pytest.mark.skip @@ -654,11 +662,11 @@ class TestSkip: pass """ ) - result = testdir.runpytest("-rs") + result = pytester.runpytest("-rs") result.stdout.fnmatch_lines(["*unconditional skip*", "*1 skipped*"]) - def test_skip_with_reason(self, testdir): - testdir.makepyfile( + def test_skip_with_reason(self, pytester: Pytester) -> None: + pytester.makepyfile( """ import pytest @pytest.mark.skip(reason="for lolz") @@ -666,11 +674,11 @@ class TestSkip: pass """ ) - result = testdir.runpytest("-rs") + result = pytester.runpytest("-rs") result.stdout.fnmatch_lines(["*for lolz*", "*1 skipped*"]) - def test_only_skips_marked_test(self, testdir): - testdir.makepyfile( + def test_only_skips_marked_test(self, pytester: Pytester) -> None: + pytester.makepyfile( """ import pytest @pytest.mark.skip @@ -683,11 +691,11 @@ class TestSkip: assert True """ ) - result = testdir.runpytest("-rs") + result = pytester.runpytest("-rs") result.stdout.fnmatch_lines(["*nothing in particular*", "*1 passed*2 skipped*"]) - def test_strict_and_skip(self, testdir): - testdir.makepyfile( + def test_strict_and_skip(self, pytester: Pytester) -> None: + pytester.makepyfile( """ import pytest @pytest.mark.skip @@ -695,13 +703,13 @@ class TestSkip: pass """ ) - result = testdir.runpytest("-rs") + result = pytester.runpytest("-rs") result.stdout.fnmatch_lines(["*unconditional skip*", "*1 skipped*"]) class TestSkipif: - def test_skipif_conditional(self, testdir): - item = testdir.getitem( + def test_skipif_conditional(self, pytester: Pytester) -> None: + item = pytester.getitem( """ import pytest @pytest.mark.skipif("hasattr(os, 'sep')") @@ -715,8 +723,8 @@ class TestSkipif: @pytest.mark.parametrize( "params", ["\"hasattr(sys, 'platform')\"", 'True, reason="invalid platform"'] ) - def test_skipif_reporting(self, testdir, params): - p = testdir.makepyfile( + def test_skipif_reporting(self, pytester: Pytester, params) -> None: + p = pytester.makepyfile( test_foo=""" import pytest @pytest.mark.skipif(%(params)s) @@ -725,12 +733,12 @@ class TestSkipif: """ % dict(params=params) ) - result = testdir.runpytest(p, "-s", "-rs") + result = pytester.runpytest(p, "-s", "-rs") result.stdout.fnmatch_lines(["*SKIP*1*test_foo.py*platform*", "*1 skipped*"]) assert result.ret == 0 - def test_skipif_using_platform(self, testdir): - item = testdir.getitem( + def test_skipif_using_platform(self, pytester: Pytester) -> None: + item = pytester.getitem( """ import pytest @pytest.mark.skipif("platform.platform() == platform.platform()") @@ -744,8 +752,10 @@ class TestSkipif: "marker, msg1, msg2", [("skipif", "SKIP", "skipped"), ("xfail", "XPASS", "xpassed")], ) - def test_skipif_reporting_multiple(self, testdir, marker, msg1, msg2): - testdir.makepyfile( + def test_skipif_reporting_multiple( + self, pytester: Pytester, marker, msg1, msg2 + ) -> None: + pytester.makepyfile( test_foo=""" import pytest @pytest.mark.{marker}(False, reason='first_condition') @@ -756,22 +766,22 @@ class TestSkipif: marker=marker ) ) - result = testdir.runpytest("-s", "-rsxX") + result = pytester.runpytest("-s", "-rsxX") result.stdout.fnmatch_lines( 
[f"*{msg1}*test_foo.py*second_condition*", f"*1 {msg2}*"] ) assert result.ret == 0 -def test_skip_not_report_default(testdir): - p = testdir.makepyfile( +def test_skip_not_report_default(pytester: Pytester) -> None: + p = pytester.makepyfile( test_one=""" import pytest def test_this(): pytest.skip("hello") """ ) - result = testdir.runpytest(p, "-v") + result = pytester.runpytest(p, "-v") result.stdout.fnmatch_lines( [ # "*HINT*use*-r*", @@ -780,8 +790,8 @@ def test_skip_not_report_default(testdir): ) -def test_skipif_class(testdir): - p = testdir.makepyfile( +def test_skipif_class(pytester: Pytester) -> None: + p = pytester.makepyfile( """ import pytest @@ -793,12 +803,12 @@ def test_skipif_class(testdir): assert 0 """ ) - result = testdir.runpytest(p) + result = pytester.runpytest(p) result.stdout.fnmatch_lines(["*2 skipped*"]) -def test_skipped_reasons_functional(testdir): - testdir.makepyfile( +def test_skipped_reasons_functional(pytester: Pytester) -> None: + pytester.makepyfile( test_one=""" import pytest from conftest import doskip @@ -824,7 +834,7 @@ def test_skipped_reasons_functional(testdir): pytest.skip('test') """, ) - result = testdir.runpytest("-rs") + result = pytester.runpytest("-rs") result.stdout.fnmatch_lines_random( [ "SKIPPED [[]2[]] conftest.py:4: test", @@ -834,8 +844,8 @@ def test_skipped_reasons_functional(testdir): assert result.ret == 0 -def test_skipped_folding(testdir): - testdir.makepyfile( +def test_skipped_folding(pytester: Pytester) -> None: + pytester.makepyfile( test_one=""" import pytest pytestmark = pytest.mark.skip("Folding") @@ -848,13 +858,13 @@ def test_skipped_folding(testdir): pass """ ) - result = testdir.runpytest("-rs") + result = pytester.runpytest("-rs") result.stdout.fnmatch_lines(["*SKIP*2*test_one.py: Folding"]) assert result.ret == 0 -def test_reportchars(testdir): - testdir.makepyfile( +def test_reportchars(pytester: Pytester) -> None: + pytester.makepyfile( """ import pytest def test_1(): @@ -869,14 +879,14 @@ def test_reportchars(testdir): pytest.skip("four") """ ) - result = testdir.runpytest("-rfxXs") + result = pytester.runpytest("-rfxXs") result.stdout.fnmatch_lines( ["FAIL*test_1*", "XFAIL*test_2*", "XPASS*test_3*", "SKIP*four*"] ) -def test_reportchars_error(testdir): - testdir.makepyfile( +def test_reportchars_error(pytester: Pytester) -> None: + pytester.makepyfile( conftest=""" def pytest_runtest_teardown(): assert 0 @@ -886,12 +896,12 @@ def test_reportchars_error(testdir): pass """, ) - result = testdir.runpytest("-rE") + result = pytester.runpytest("-rE") result.stdout.fnmatch_lines(["ERROR*test_foo*"]) -def test_reportchars_all(testdir): - testdir.makepyfile( +def test_reportchars_all(pytester: Pytester) -> None: + pytester.makepyfile( """ import pytest def test_1(): @@ -911,7 +921,7 @@ def test_reportchars_all(testdir): pass """ ) - result = testdir.runpytest("-ra") + result = pytester.runpytest("-ra") result.stdout.fnmatch_lines( [ "SKIP*four*", @@ -923,8 +933,8 @@ def test_reportchars_all(testdir): ) -def test_reportchars_all_error(testdir): - testdir.makepyfile( +def test_reportchars_all_error(pytester: Pytester) -> None: + pytester.makepyfile( conftest=""" def pytest_runtest_teardown(): assert 0 @@ -934,12 +944,12 @@ def test_reportchars_all_error(testdir): pass """, ) - result = testdir.runpytest("-ra") + result = pytester.runpytest("-ra") result.stdout.fnmatch_lines(["ERROR*test_foo*"]) -def test_errors_in_xfail_skip_expressions(testdir) -> None: - testdir.makepyfile( +def 
test_errors_in_xfail_skip_expressions(pytester: Pytester) -> None: + pytester.makepyfile( """ import pytest @pytest.mark.skipif("asd") @@ -953,7 +963,7 @@ def test_errors_in_xfail_skip_expressions(testdir) -> None: pass """ ) - result = testdir.runpytest() + result = pytester.runpytest() markline = " ^" pypy_version_info = getattr(sys, "pypy_version_info", None) if pypy_version_info is not None and pypy_version_info < (6,): @@ -975,8 +985,8 @@ def test_errors_in_xfail_skip_expressions(testdir) -> None: ) -def test_xfail_skipif_with_globals(testdir): - testdir.makepyfile( +def test_xfail_skipif_with_globals(pytester: Pytester) -> None: + pytester.makepyfile( """ import pytest x = 3 @@ -988,12 +998,12 @@ def test_xfail_skipif_with_globals(testdir): assert 0 """ ) - result = testdir.runpytest("-rsx") + result = pytester.runpytest("-rsx") result.stdout.fnmatch_lines(["*SKIP*x == 3*", "*XFAIL*test_boolean*", "*x == 3*"]) -def test_default_markers(testdir): - result = testdir.runpytest("--markers") +def test_default_markers(pytester: Pytester) -> None: + result = pytester.runpytest("--markers") result.stdout.fnmatch_lines( [ "*skipif(condition, ..., [*], reason=...)*skip*", @@ -1002,14 +1012,14 @@ def test_default_markers(testdir): ) -def test_xfail_test_setup_exception(testdir): - testdir.makeconftest( +def test_xfail_test_setup_exception(pytester: Pytester) -> None: + pytester.makeconftest( """ def pytest_runtest_setup(): 0 / 0 """ ) - p = testdir.makepyfile( + p = pytester.makepyfile( """ import pytest @pytest.mark.xfail @@ -1017,14 +1027,14 @@ def test_xfail_test_setup_exception(testdir): assert 0 """ ) - result = testdir.runpytest(p) + result = pytester.runpytest(p) assert result.ret == 0 assert "xfailed" in result.stdout.str() result.stdout.no_fnmatch_line("*xpassed*") -def test_imperativeskip_on_xfail_test(testdir): - testdir.makepyfile( +def test_imperativeskip_on_xfail_test(pytester: Pytester) -> None: + pytester.makepyfile( """ import pytest @pytest.mark.xfail @@ -1036,14 +1046,14 @@ def test_imperativeskip_on_xfail_test(testdir): pass """ ) - testdir.makeconftest( + pytester.makeconftest( """ import pytest def pytest_runtest_setup(item): pytest.skip("abc") """ ) - result = testdir.runpytest("-rsxX") + result = pytester.runpytest("-rsxX") result.stdout.fnmatch_lines_random( """ *SKIP*abc* @@ -1054,8 +1064,8 @@ def test_imperativeskip_on_xfail_test(testdir): class TestBooleanCondition: - def test_skipif(self, testdir): - testdir.makepyfile( + def test_skipif(self, pytester: Pytester) -> None: + pytester.makepyfile( """ import pytest @pytest.mark.skipif(True, reason="True123") @@ -1066,15 +1076,15 @@ class TestBooleanCondition: pass """ ) - result = testdir.runpytest() + result = pytester.runpytest() result.stdout.fnmatch_lines( """ *1 passed*1 skipped* """ ) - def test_skipif_noreason(self, testdir): - testdir.makepyfile( + def test_skipif_noreason(self, pytester: Pytester) -> None: + pytester.makepyfile( """ import pytest @pytest.mark.skipif(True) @@ -1082,15 +1092,15 @@ class TestBooleanCondition: pass """ ) - result = testdir.runpytest("-rs") + result = pytester.runpytest("-rs") result.stdout.fnmatch_lines( """ *1 error* """ ) - def test_xfail(self, testdir): - testdir.makepyfile( + def test_xfail(self, pytester: Pytester) -> None: + pytester.makepyfile( """ import pytest @pytest.mark.xfail(True, reason="True123") @@ -1098,7 +1108,7 @@ class TestBooleanCondition: assert 0 """ ) - result = testdir.runpytest("-rxs") + result = pytester.runpytest("-rxs") result.stdout.fnmatch_lines( """ 
*XFAIL* @@ -1108,9 +1118,9 @@ class TestBooleanCondition: ) -def test_xfail_item(testdir): +def test_xfail_item(pytester: Pytester) -> None: # Ensure pytest.xfail works with non-Python Item - testdir.makeconftest( + pytester.makeconftest( """ import pytest @@ -1123,16 +1133,16 @@ def test_xfail_item(testdir): return MyItem.from_parent(name="foo", parent=parent) """ ) - result = testdir.inline_run() + result = pytester.inline_run() passed, skipped, failed = result.listoutcomes() assert not failed xfailed = [r for r in skipped if hasattr(r, "wasxfail")] assert xfailed -def test_module_level_skip_error(testdir): +def test_module_level_skip_error(pytester: Pytester) -> None: """Verify that using pytest.skip at module level causes a collection error.""" - testdir.makepyfile( + pytester.makepyfile( """ import pytest pytest.skip("skip_module_level") @@ -1141,15 +1151,15 @@ def test_module_level_skip_error(testdir): assert True """ ) - result = testdir.runpytest() + result = pytester.runpytest() result.stdout.fnmatch_lines( ["*Using pytest.skip outside of a test is not allowed*"] ) -def test_module_level_skip_with_allow_module_level(testdir): +def test_module_level_skip_with_allow_module_level(pytester: Pytester) -> None: """Verify that using pytest.skip(allow_module_level=True) is allowed.""" - testdir.makepyfile( + pytester.makepyfile( """ import pytest pytest.skip("skip_module_level", allow_module_level=True) @@ -1158,13 +1168,13 @@ def test_module_level_skip_with_allow_module_level(testdir): assert 0 """ ) - result = testdir.runpytest("-rxs") + result = pytester.runpytest("-rxs") result.stdout.fnmatch_lines(["*SKIP*skip_module_level"]) -def test_invalid_skip_keyword_parameter(testdir): +def test_invalid_skip_keyword_parameter(pytester: Pytester) -> None: """Verify that using pytest.skip() with unknown parameter raises an error.""" - testdir.makepyfile( + pytester.makepyfile( """ import pytest pytest.skip("skip_module_level", unknown=1) @@ -1173,13 +1183,13 @@ def test_invalid_skip_keyword_parameter(testdir): assert 0 """ ) - result = testdir.runpytest() + result = pytester.runpytest() result.stdout.fnmatch_lines(["*TypeError:*['unknown']*"]) -def test_mark_xfail_item(testdir): +def test_mark_xfail_item(pytester: Pytester) -> None: # Ensure pytest.mark.xfail works with non-Python Item - testdir.makeconftest( + pytester.makeconftest( """ import pytest @@ -1197,23 +1207,23 @@ def test_mark_xfail_item(testdir): return MyItem.from_parent(name="foo", parent=parent) """ ) - result = testdir.inline_run() + result = pytester.inline_run() passed, skipped, failed = result.listoutcomes() assert not failed xfailed = [r for r in skipped if hasattr(r, "wasxfail")] assert xfailed -def test_summary_list_after_errors(testdir): +def test_summary_list_after_errors(pytester: Pytester) -> None: """Ensure the list of errors/fails/xfails/skips appears after tracebacks in terminal reporting.""" - testdir.makepyfile( + pytester.makepyfile( """ import pytest def test_fail(): assert 0 """ ) - result = testdir.runpytest("-ra") + result = pytester.runpytest("-ra") result.stdout.fnmatch_lines( [ "=* FAILURES *=", @@ -1223,7 +1233,7 @@ def test_summary_list_after_errors(testdir): ) -def test_importorskip(): +def test_importorskip() -> None: with pytest.raises( pytest.skip.Exception, match="^could not import 'doesnotexist': No module named .*", @@ -1231,8 +1241,8 @@ def test_importorskip(): pytest.importorskip("doesnotexist") -def test_relpath_rootdir(testdir): - testdir.makepyfile( +def test_relpath_rootdir(pytester: 
Pytester) -> None: + pytester.makepyfile( **{ "tests/test_1.py": """ import pytest @@ -1242,7 +1252,7 @@ def test_relpath_rootdir(testdir): """, } ) - result = testdir.runpytest("-rs", "tests/test_1.py", "--rootdir=tests") + result = pytester.runpytest("-rs", "tests/test_1.py", "--rootdir=tests") result.stdout.fnmatch_lines( ["SKIPPED [[]1[]] tests/test_1.py:2: unconditional skip"] )
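
Note (not part of the patch): the change above applies one mechanical pattern throughout the file, so a minimal sketch of that pattern may help when reviewing the hunks. The sketch below is illustrative only; the test names (test_example_old, test_example_new) are hypothetical and do not appear in the diff. It relies on behaviour the patch itself exercises: the legacy testdir fixture exposing a py.path-based API (testdir.tmpdir.join(...).isfile()), and the pytester fixture (pytest 6.2+) exposing a pathlib-based API (pytester.path.joinpath(...).exists()) plus the Pytester type used for annotations.

    from _pytest.pytester import Pytester


    # Before: the classic testdir fixture, untyped, py.path.local based.
    def test_example_old(testdir):
        testdir.makepyfile("def test_foo(): pass")
        result = testdir.runpytest("-rs")
        # makepyfile names the file after the requesting test by default.
        assert testdir.tmpdir.join("test_example_old.py").isfile()
        result.assert_outcomes(passed=1)


    # After: the pytester fixture, annotated, pathlib.Path based.
    def test_example_new(pytester: Pytester) -> None:
        pytester.makepyfile("def test_foo(): pass")
        result = pytester.runpytest("-rs")
        assert pytester.path.joinpath("test_example_new.py").exists()
        result.assert_outcomes(passed=1)

The patch follows exactly this shape: swap the fixture name, add the Pytester annotation and -> None return type, and translate the few py.path calls (tmpdir.join/isfile) to their pathlib equivalents (path.joinpath/exists); the test logic itself is unchanged.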