add `assert_outcomes(warnings=)` functionality to `RunResult`

* expose `warnings=` to pytester `assert_outcomes()`
* fix test fallout from adding `warnings=` to `assert_outcomes()`
* closes #8593 - improve the test and add a changelog entry for the change
parent c27db3bd8e
commit ef5d81ad5c
@@ -0,0 +1,2 @@
+:class:`RunResult <_pytest.pytester.RunResult>` method :meth:`assert_outcomes <_pytest.pytester.RunResult.assert_outcomes>` now accepts a
+``warnings`` argument to assert the total number of warnings captured.
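A minimal usage sketch of the new argument, for a test suite that enables the `pytester` fixture (file and test names here are hypothetical; the `filterwarnings` mark mirrors the test added at the end of this commit, so the warning is not suppressed):

```python
import pytest
from _pytest.pytester import Pytester

# Requires the pytester plugin, e.g. pytest_plugins = ["pytester"] in conftest.py.


@pytest.mark.filterwarnings("default")
def test_reports_one_warning(pytester: Pytester) -> None:
    pytester.makepyfile(
        """
        import warnings

        def test_one():
            warnings.warn(UserWarning("deprecated option"))
        """
    )
    result = pytester.runpytest()
    # warnings= asserts the total warning count from the run's
    # terminal summary ("1 passed, 1 warning in ...").
    result.assert_outcomes(passed=1, warnings=1)
```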
@@ -588,6 +588,7 @@ class RunResult:
         errors: int = 0,
         xpassed: int = 0,
         xfailed: int = 0,
+        warnings: int = 0,
     ) -> None:
         """Assert that the specified outcomes appear with the respective
         numbers (0 means it didn't occur) in the text output from a test run."""
@@ -603,6 +604,7 @@ class RunResult:
             errors=errors,
             xpassed=xpassed,
             xfailed=xfailed,
+            warnings=warnings,
         )
 
 
@@ -42,6 +42,7 @@ def assert_outcomes(
     errors: int = 0,
     xpassed: int = 0,
     xfailed: int = 0,
+    warnings: int = 0,
 ) -> None:
     """Assert that the specified outcomes appear with the respective
     numbers (0 means it didn't occur) in the text output from a test run."""
@@ -54,6 +55,7 @@ def assert_outcomes(
         "errors": outcomes.get("errors", 0),
         "xpassed": outcomes.get("xpassed", 0),
         "xfailed": outcomes.get("xfailed", 0),
+        "warnings": outcomes.get("warnings", 0),
     }
     expected = {
         "passed": passed,
@@ -62,5 +64,6 @@ def assert_outcomes(
         "errors": errors,
         "xpassed": xpassed,
         "xfailed": xfailed,
+        "warnings": warnings,
     }
     assert obtained == expected
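A sketch of what this helper now checks (assuming it lives in `_pytest.pytester_assertions`, as in pytest's source tree): `warnings` defaults to 0 and is always part of the expected dict, so a run that emits warnings fails a plain `assert_outcomes()` call unless the count is passed, which is the test fallout fixed in the next hunk.

```python
from _pytest.pytester_assertions import assert_outcomes

# Illustrative parsed outcomes for a run with one passing test that
# emitted one warning (as RunResult.assert_outcomes would supply them).
outcomes = {"passed": 1, "warnings": 1}

assert_outcomes(outcomes, passed=1, warnings=1)  # obtained == expected, passes

try:
    assert_outcomes(outcomes, passed=1)  # expected warnings=0, obtained 1
except AssertionError:
    print("warning counts must now be asserted explicitly")
```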
@@ -335,7 +335,7 @@ def test_SkipTest_during_collection(pytester: Pytester) -> None:
         """
     )
     result = pytester.runpytest(p)
-    result.assert_outcomes(skipped=1)
+    result.assert_outcomes(skipped=1, warnings=1)
 
 
 def test_SkipTest_in_test(pytester: Pytester) -> None:
@@ -847,3 +847,17 @@ def test_testdir_makefile_ext_empty_string_makes_file(testdir) -> None:
     """For backwards compat #8192"""
     p1 = testdir.makefile("", "")
     assert "test_testdir_makefile" in str(p1)
+
+
+@pytest.mark.filterwarnings("default")
+def test_pytester_assert_outcomes_warnings(pytester: Pytester) -> None:
+    pytester.makepyfile(
+        """
+        import warnings
+
+        def test_with_warning():
+            warnings.warn(UserWarning("some custom warning"))
+        """
+    )
+    result = pytester.runpytest()
+    result.assert_outcomes(passed=1, warnings=1)