Add a raises argument to mark.xfail so that unexpected exceptions show up as test failures.
--HG-- branch : xfail-cause
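
In user terms: previously any exception satisfied an xfail marker, while with raises= the marker only absorbs the exception types you declare. A minimal usage sketch (the module name and tests are illustrative, not part of this commit):

```python
# demo_xfail_raises.py -- illustrative only, not part of this commit
import pytest

@pytest.mark.xfail(raises=IndexError)
def test_expected_exception():
    # Raises the declared type: reported as xfailed.
    raise IndexError("known bug")

@pytest.mark.xfail(raises=IndexError)
def test_unexpected_exception():
    # Raises some other type: now reported as a true failure
    # instead of being silently absorbed by the marker.
    raise TypeError("a different bug crept in")
```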
parent d98521b0d9
commit 7b273b8577
--- a/_pytest/skipping.py
+++ b/_pytest/skipping.py
@@ -26,11 +26,13 @@ def pytest_configure(config):
         "http://pytest.org/latest/skipping.html"
     )
     config.addinivalue_line("markers",
-        "xfail(condition, reason=None, run=True): mark the the test function "
+        "xfail(condition, reason=None, run=True, raises=None): mark the the test function "
         "as an expected failure if eval(condition) has a True value. "
         "Optionally specify a reason for better reporting and run=False if "
-        "you don't even want to execute the test function. See "
-        "http://pytest.org/latest/skipping.html"
+        "you don't even want to execute the test function. If only specific "
+        "exception(s) are expected, you can list them in raises, and if the test fails "
+        "in other ways, it will be reported as a true failure. "
+        "See http://pytest.org/latest/skipping.html"
     )
 
 def pytest_namespace():
@@ -60,6 +62,15 @@ class MarkEvaluator:
     def wasvalid(self):
         return not hasattr(self, 'exc')
 
+    def invalidraise(self, exctype):
+        raises = self.get('raises')
+        if not raises:
+            return
+        if isinstance(raises, tuple):
+            return exctype not in raises
+        else:
+            return raises != exctype
+
     def istrue(self):
         try:
             return self._istrue()
@@ -171,6 +182,10 @@ def pytest_runtest_makereport(__multicall__, item, call):
         if not item.config.option.runxfail:
             if evalxfail.wasvalid() and evalxfail.istrue():
                 if call.excinfo:
-                    rep.outcome = "skipped"
+                    if evalxfail.invalidraise(call.excinfo.type):
+                        rep.outcome = "failed"
+                        return rep
+                    else:
+                        rep.outcome = "skipped"
                 elif call.when == "call":
                     rep.outcome = "failed"
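
A note on the matching rule in invalidraise above: the tuple membership test and the inequality compare exception classes directly, so as written a subclass of an expected exception does not count as a match. A standalone sketch of that logic (illustrative only; `exctype` stands in for `call.excinfo.type`, the class of the raised exception):

```python
# Mirrors the logic of MarkEvaluator.invalidraise from the diff above.
def invalidraise(raises, exctype):
    if not raises:
        return None  # no raises= given: nothing to validate
    if isinstance(raises, tuple):
        return exctype not in raises
    return raises != exctype

class MyTypeError(TypeError):
    pass

assert invalidraise(TypeError, TypeError) is False                   # exact match
assert invalidraise((AttributeError, TypeError), TypeError) is False  # tuple match
# The comparison is by class equality, not issubclass(), so a subclass
# of an expected exception is treated as a mismatch (reported as failed).
assert invalidraise(TypeError, MyTypeError) is True
```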
--- a/testing/test_skipping.py
+++ b/testing/test_skipping.py
@@ -330,6 +330,53 @@ class TestXFail:
             "*1 xfailed*",
         ])
 
+    def test_xfail_raises_match(self, testdir):
+        p = testdir.makepyfile("""
+            import pytest
+            @pytest.mark.xfail(raises=TypeError)
+            def test_raises():
+                raise TypeError()
+        """)
+        result = testdir.runpytest(p)
+        result.stdout.fnmatch_lines([
+            "*1 xfailed*",
+        ])
+
+    def test_xfail_raises_mismatch(self, testdir):
+        p = testdir.makepyfile("""
+            import pytest
+            @pytest.mark.xfail(raises=IndexError)
+            def test_raises():
+                raise TypeError()
+        """)
+        result = testdir.runpytest(p)
+        result.stdout.fnmatch_lines([
+            "*1 failed*",
+        ])
+    def test_xfail_raises_tuple_match(self, testdir):
+        p = testdir.makepyfile("""
+            import pytest
+            @pytest.mark.xfail(raises=(AttributeError, TypeError))
+            def test_raises():
+                raise TypeError()
+        """)
+        result = testdir.runpytest(p)
+        result.stdout.fnmatch_lines([
+            "*1 xfailed*",
+        ])
+
+    def test_xfail_raises_tuple_mismatch(self, testdir):
+        p = testdir.makepyfile("""
+            import pytest
+            @pytest.mark.xfail(raises=(AttributeError, IndexError))
+            def test_raises():
+                raise TypeError()
+        """)
+        result = testdir.runpytest(p)
+        result.stdout.fnmatch_lines([
+            "*1 failed*",
+        ])
+
 class TestXFailwithSetupTeardown:
     def test_failing_setup_issue9(self, testdir):
         testdir.makepyfile("""
@@ -575,7 +622,7 @@ def test_default_markers(testdir):
     result = testdir.runpytest("--markers")
     result.stdout.fnmatch_lines([
         "*skipif(*condition)*skip*",
-        "*xfail(*condition, reason=None, run=True)*expected failure*",
+        "*xfail(*condition, reason=None, run=True, raises=None)*expected failure*",
     ])
 
 def test_xfail_test_setup_exception(testdir):
@@ -617,7 +664,6 @@ def test_imperativeskip_on_xfail_test(testdir):
         *2 skipped*
     """)
 
-
 class TestBooleanCondition:
     def test_skipif(self, testdir):
         testdir.makepyfile("""
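
One consequence of the makereport hunk worth noting: the raises check sits inside the `if not item.config.option.runxfail:` guard, so running with --runxfail bypasses xfail handling entirely, raises check included, and the test reports as an ordinary pass or failure. A hypothetical test in the same style as those above (not part of this commit):

```python
def test_xfail_raises_ignored_with_runxfail(testdir):
    # Hypothetical: with --runxfail the marker (including raises=) is
    # bypassed, so even a matching exception reports as a plain failure.
    p = testdir.makepyfile("""
        import pytest
        @pytest.mark.xfail(raises=TypeError)
        def test_raises():
            raise TypeError()
    """)
    result = testdir.runpytest(p, "--runxfail")
    result.stdout.fnmatch_lines([
        "*1 failed*",
    ])
```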