Merge pull request #5333 from blueyed/fix-lf
Fix regression with --lf and non-selected failures
commit f0a4a13e48

@@ -0,0 +1 @@
+Fix regression with ``--lf`` not re-running all tests with known failures from non-selected tests.
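
As a rough illustration of the scenario behind this entry (paths and commands are illustrative; the full regression test is in the last hunk below): a failure is recorded for a file that is later removed or renamed, and a subsequent ``--lf`` run should still collect and run the remaining selected tests instead of pruning them away at collection time.

# Hypothetical reproduction sketch, not part of this commit.
# A failure in pkg1/test_1.py is recorded in the lastfailed cache, the file
# is then deleted, and the next --lf run must still run pkg1/test_2.py.
import pathlib
import subprocess

pathlib.Path("pkg1").mkdir(exist_ok=True)
pathlib.Path("pkg1/test_1.py").write_text("def test_1(): assert 0\n")
pathlib.Path("pkg1/test_2.py").write_text("def test_2(): pass\n")
subprocess.run(["pytest"])               # records pkg1/test_1.py::test_1 as failed
pathlib.Path("pkg1/test_1.py").unlink()  # the known failure can no longer be selected
subprocess.run(["pytest", "--lf"])       # should still collect and run pkg1/test_2.py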

@@ -164,28 +164,27 @@ class LFPlugin(object):
     def last_failed_paths(self):
         """Returns a set with all Paths()s of the previously failed nodeids (cached).
         """
-        result = getattr(self, "_last_failed_paths", None)
-        if result is None:
+        try:
+            return self._last_failed_paths
+        except AttributeError:
             rootpath = Path(self.config.rootdir)
             result = {rootpath / nodeid.split("::")[0] for nodeid in self.lastfailed}
+            result = {x for x in result if x.exists()}
             self._last_failed_paths = result
-        return result
+            return result

     def pytest_ignore_collect(self, path):
         """
         Ignore this file path if we are in --lf mode and it is not in the list of
         previously failed files.
         """
-        if (
-            self.active
-            and self.config.getoption("lf")
-            and path.isfile()
-            and self.lastfailed
-        ):
-            skip_it = Path(path) not in self.last_failed_paths()
-            if skip_it:
-                self._skipped_files += 1
-            return skip_it
+        if self.active and self.config.getoption("lf") and path.isfile():
+            last_failed_paths = self.last_failed_paths()
+            if last_failed_paths:
+                skip_it = Path(path) not in self.last_failed_paths()
+                if skip_it:
+                    self._skipped_files += 1
+                return skip_it

     def pytest_report_collectionfinish(self):
         if self.active and self.config.getoption("verbose") >= 0:
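
The substance of the fix is in this hunk: ``last_failed_paths()`` now drops cached paths that no longer exist, and ``pytest_ignore_collect`` only prunes collection when that filtered set is non-empty. Below is a standalone sketch of the same caching pattern, using a hypothetical class with no pytest dependency:

from pathlib import Path


class LastFailedCache(object):
    # Illustrative stand-in for the plugin state, not pytest's actual class.
    def __init__(self, rootdir, lastfailed):
        self.rootdir = rootdir        # project root as a string
        self.lastfailed = lastfailed  # failed nodeids, e.g. {"pkg1/test_1.py::test_1": True}

    def last_failed_paths(self):
        # Memoize via AttributeError, mirroring the try/except pattern above,
        # and keep only files that still exist on disk.
        try:
            return self._last_failed_paths
        except AttributeError:
            rootpath = Path(self.rootdir)
            result = {rootpath / nodeid.split("::")[0] for nodeid in self.lastfailed}
            result = {x for x in result if x.exists()}
            self._last_failed_paths = result
            return result


cache = LastFailedCache(".", {"pkg1/test_1.py::test_1": True})
print(cache.last_failed_paths())  # empty set if pkg1/test_1.py was deleted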

@@ -234,19 +233,15 @@ class LFPlugin(object):
                     items[:] = previously_failed + previously_passed

                 noun = "failure" if self._previously_failed_count == 1 else "failures"
-                if self._skipped_files > 0:
-                    files_noun = "file" if self._skipped_files == 1 else "files"
-                    skipped_files_msg = " (skipped {files} {files_noun})".format(
-                        files=self._skipped_files, files_noun=files_noun
-                    )
-                else:
-                    skipped_files_msg = ""
                 suffix = " first" if self.config.getoption("failedfirst") else ""
-                self._report_status = "rerun previous {count} {noun}{suffix}{skipped_files}".format(
-                    count=self._previously_failed_count,
-                    suffix=suffix,
-                    noun=noun,
-                    skipped_files=skipped_files_msg,
-                )
+                self._report_status = "rerun previous {count} {noun}{suffix}".format(
+                    count=self._previously_failed_count, suffix=suffix, noun=noun
+                )
+
+                if self._skipped_files > 0:
+                    files_noun = "file" if self._skipped_files == 1 else "files"
+                    self._report_status += " (skipped {files} {files_noun})".format(
+                        files=self._skipped_files, files_noun=files_noun
+                    )
         else:
             self._report_status = "no previously failed tests, "
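
This hunk only restructures how the status line is built: the base "rerun previous ..." message is formatted first and the "(skipped N file(s))" part is appended afterwards. A minimal standalone sketch of the resulting strings (function name is illustrative):

def build_report_status(failed_count, skipped_files, failedfirst=False):
    # Mirrors the message layout after this hunk: base message first,
    # then an optional "(skipped N file(s))" suffix.
    noun = "failure" if failed_count == 1 else "failures"
    suffix = " first" if failedfirst else ""
    status = "rerun previous {count} {noun}{suffix}".format(
        count=failed_count, suffix=suffix, noun=noun
    )
    if skipped_files > 0:
        files_noun = "file" if skipped_files == 1 else "files"
        status += " (skipped {files} {files_noun})".format(
            files=skipped_files, files_noun=files_noun
        )
    return status


print(build_report_status(1, 1))  # "rerun previous 1 failure (skipped 1 file)"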

@@ -832,6 +832,48 @@ class TestLastFailed(object):
             ]
         )

+    def test_lastfailed_with_known_failures_not_being_selected(self, testdir):
+        testdir.makepyfile(
+            **{
+                "pkg1/test_1.py": """def test_1(): assert 0""",
+                "pkg1/test_2.py": """def test_2(): pass""",
+            }
+        )
+        result = testdir.runpytest()
+        result.stdout.fnmatch_lines(["collected 2 items", "* 1 failed, 1 passed in *"])
+
+        py.path.local("pkg1/test_1.py").remove()
+        result = testdir.runpytest("--lf")
+        result.stdout.fnmatch_lines(
+            [
+                "collected 1 item",
+                "run-last-failure: 1 known failures not in selected tests",
+                "* 1 passed in *",
+            ]
+        )
+
+        # Recreate file with known failure.
+        testdir.makepyfile(**{"pkg1/test_1.py": """def test_1(): assert 0"""})
+        result = testdir.runpytest("--lf")
+        result.stdout.fnmatch_lines(
+            [
+                "collected 1 item",
+                "run-last-failure: rerun previous 1 failure (skipped 1 file)",
+                "* 1 failed in *",
+            ]
+        )
+
+        # Remove/rename test.
+        testdir.makepyfile(**{"pkg1/test_1.py": """def test_renamed(): assert 0"""})
+        result = testdir.runpytest("--lf")
+        result.stdout.fnmatch_lines(
+            [
+                "collected 1 item",
+                "run-last-failure: 1 known failures not in selected tests (skipped 1 file)",
+                "* 1 failed in *",
+            ]
+        )
+

 class TestNewFirst(object):
     def test_newfirst_usecase(self, testdir):