diff --git a/_pytest/cacheprovider.py b/_pytest/cacheprovider.py
index 27dadb328..04dacf837 100755
--- a/_pytest/cacheprovider.py
+++ b/_pytest/cacheprovider.py
@@ -6,6 +6,8 @@ ignores the external pytest-cache
 """
 from __future__ import absolute_import, division, print_function
 import py
+from _pytest.python import Function
+
 import pytest
 import json
 import os
@@ -168,6 +170,41 @@ class LFPlugin(object):
         config.cache.set("cache/lastfailed", self.lastfailed)
 
 
+class NFPlugin(object):
+    """ Plugin which implements the --nf (run new-first) option """
+
+    def __init__(self, config):
+        self.config = config
+        self.active = config.option.newfirst
+        self.all_items = config.cache.get("cache/allitems", {})
+
+    def pytest_collection_modifyitems(self, session, config, items):
+        if self.active:
+            new_items = []
+            other_items = []
+            for item in items:
+                mod_timestamp = os.path.getmtime(str(item.fspath))
+                if self.all_items and item.nodeid not in self.all_items:
+                    new_items.append((item, mod_timestamp))
+                else:
+                    other_items.append((item, mod_timestamp))
+
+            items[:] = self._get_increasing_order(new_items) + \
+                self._get_increasing_order(other_items)
+        self.all_items = items
+
+    def _get_increasing_order(self, test_list):
+        test_list = sorted(test_list, key=lambda x: x[1], reverse=True)
+        return [test[0] for test in test_list]
+
+    def pytest_sessionfinish(self, session):
+        config = self.config
+        if config.getoption("cacheshow") or hasattr(config, "slaveinput"):
+            return
+        config.cache.set("cache/allitems",
+                         [item.nodeid for item in self.all_items if isinstance(item, Function)])
+
+
 def pytest_addoption(parser):
     group = parser.getgroup("general")
     group.addoption(
@@ -179,6 +216,10 @@ def pytest_addoption(parser):
         help="run all tests but run the last failures first. "
              "This may re-order tests and thus lead to "
             "repeated fixture setup/teardown")
+    group.addoption(
+        '--nf', '--new-first', action='store_true', dest="newfirst",
+        help="run all tests but run new tests first, then the rest of "
+             "the tests sorted by file modification time (newest first)")
     group.addoption(
         '--cache-show', action='store_true', dest="cacheshow",
         help="show cache contents, don't perform collection or tests")
@@ -200,6 +241,7 @@ def pytest_cmdline_main(config):
 def pytest_configure(config):
     config.cache = Cache(config)
     config.pluginmanager.register(LFPlugin(config), "lfplugin")
+    config.pluginmanager.register(NFPlugin(config), "nfplugin")
 
 
 @pytest.fixture
diff --git a/changelog/3034.feature b/changelog/3034.feature
new file mode 100644
index 000000000..62c7ba78d
--- /dev/null
+++ b/changelog/3034.feature
@@ -0,0 +1 @@
+Added new ``--nf``/``--new-first`` option: run new tests first, then the rest of the tests, both groups ordered by the modification time of the test file, newest first (tests keep their original order within a file).
diff --git a/doc/en/cache.rst b/doc/en/cache.rst
index e3423e95b..138ff6dfb 100644
--- a/doc/en/cache.rst
+++ b/doc/en/cache.rst
@@ -266,3 +266,13 @@ dumps/loads API of the json stdlib module
 .. automethod:: Cache.get
 .. automethod:: Cache.set
 .. automethod:: Cache.makedir
+
+
+New tests first
+---------------
+
+The plugin also provides a command line option to change the test execution order:
+
+* ``--nf``, ``--new-first`` - run new tests first, then the rest of the tests,
+  both groups ordered by the modification time of the test file, newest first
+  (tests keep their original order within a file).
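Review note, not part of the patch: a minimal standalone sketch of the ordering rule ``NFPlugin`` applies, assuming each collected item is reduced to a ``(nodeid, path)`` pair and that the nodeids stored under ``"cache/allitems"`` by a previous run are available as a set. The names ``new_first_order`` and ``seen_nodeids`` are illustrative only; as in the plugin, a run with an empty cache treats every test as already seen.

```python
import os


def new_first_order(items, seen_nodeids):
    """Sketch of NFPlugin's ordering: unseen (new) tests first, then the
    rest; each group is sorted by the modification time of its file,
    newest first.  sorted() is stable, so tests from the same file keep
    their collected order."""
    def mtime(pair):
        return os.path.getmtime(pair[1])

    if not seen_nodeids:
        # First run with an empty cache: nothing counts as "new".
        return sorted(items, key=mtime, reverse=True)
    new = [p for p in items if p[0] not in seen_nodeids]
    other = [p for p in items if p[0] in seen_nodeids]
    return sorted(new, key=mtime, reverse=True) + \
        sorted(other, key=mtime, reverse=True)
```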
diff --git a/testing/test_cacheprovider.py b/testing/test_cacheprovider.py
index 038fd229e..b03b02d34 100644
--- a/testing/test_cacheprovider.py
+++ b/testing/test_cacheprovider.py
@@ -56,7 +56,7 @@ class TestNewAPI(object):
         assert result.ret == 1
         result.stdout.fnmatch_lines([
             "*could not create cache path*",
-            "*1 warnings*",
+            "*2 warnings*",
         ])
 
     def test_config_cache(self, testdir):
@@ -495,15 +495,15 @@ class TestLastFailed(object):
         # Issue #1342
         testdir.makepyfile(test_empty='')
         testdir.runpytest('-q', '--lf')
-        assert not os.path.exists('.pytest_cache')
+        assert not os.path.exists('.pytest_cache/v/cache/lastfailed')
 
         testdir.makepyfile(test_successful='def test_success():\n    assert True')
         testdir.runpytest('-q', '--lf')
-        assert not os.path.exists('.pytest_cache')
+        assert not os.path.exists('.pytest_cache/v/cache/lastfailed')
 
         testdir.makepyfile(test_errored='def test_error():\n    assert False')
         testdir.runpytest('-q', '--lf')
-        assert os.path.exists('.pytest_cache')
+        assert os.path.exists('.pytest_cache/v/cache/lastfailed')
 
     def test_xfail_not_considered_failure(self, testdir):
         testdir.makepyfile('''
@@ -603,3 +603,116 @@ class TestLastFailed(object):
         result = testdir.runpytest('--last-failed')
         result.stdout.fnmatch_lines('*4 passed*')
         assert self.get_cached_last_failed(testdir) == []
+
+
+class TestNewFirst(object):
+    def test_newfirst_usecase(self, testdir):
+        t1 = testdir.mkdir("test_1")
+        t2 = testdir.mkdir("test_2")
+
+        t1.join("test_1.py").write(
+            "def test_1(): assert 1\n"
+            "def test_2(): assert 1\n"
+            "def test_3(): assert 1\n"
+        )
+        t2.join("test_2.py").write(
+            "def test_1(): assert 1\n"
+            "def test_2(): assert 1\n"
+            "def test_3(): assert 1\n"
+        )
+
+        path_to_test_1 = str('{}/test_1/test_1.py'.format(testdir.tmpdir))
+        os.utime(path_to_test_1, (1, 1))
+
+        result = testdir.runpytest("-v")
+        result.stdout.fnmatch_lines([
+            "*test_1/test_1.py::test_1 PASSED*",
+            "*test_1/test_1.py::test_2 PASSED*",
+            "*test_1/test_1.py::test_3 PASSED*",
+            "*test_2/test_2.py::test_1 PASSED*",
+            "*test_2/test_2.py::test_2 PASSED*",
+            "*test_2/test_2.py::test_3 PASSED*",
+        ])
+
+        result = testdir.runpytest("-v", "--nf")
+
+        result.stdout.fnmatch_lines([
+            "*test_2/test_2.py::test_1 PASSED*",
+            "*test_2/test_2.py::test_2 PASSED*",
+            "*test_2/test_2.py::test_3 PASSED*",
+            "*test_1/test_1.py::test_1 PASSED*",
+            "*test_1/test_1.py::test_2 PASSED*",
+            "*test_1/test_1.py::test_3 PASSED*",
+        ])
+
+        t1.join("test_1.py").write(
+            "def test_1(): assert 1\n"
+            "def test_2(): assert 1\n"
+            "def test_3(): assert 1\n"
+            "def test_4(): assert 1\n"
+        )
+        os.utime(path_to_test_1, (1, 1))
+
+        result = testdir.runpytest("-v", "--nf")
+
+        result.stdout.fnmatch_lines([
+            "*test_1/test_1.py::test_4 PASSED*",
+            "*test_2/test_2.py::test_1 PASSED*",
+            "*test_2/test_2.py::test_2 PASSED*",
+            "*test_2/test_2.py::test_3 PASSED*",
+            "*test_1/test_1.py::test_1 PASSED*",
+            "*test_1/test_1.py::test_2 PASSED*",
+            "*test_1/test_1.py::test_3 PASSED*",
+        ])
+
+    def test_newfirst_parametrize(self, testdir):
+        t1 = testdir.mkdir("test_1")
+        t2 = testdir.mkdir("test_2")
+
+        t1.join("test_1.py").write(
+            "import pytest\n"
+            "@pytest.mark.parametrize('num', [1, 2])\n"
+            "def test_1(num): assert num\n"
+        )
+        t2.join("test_2.py").write(
+            "import pytest\n"
+            "@pytest.mark.parametrize('num', [1, 2])\n"
+            "def test_1(num): assert num\n"
+        )
+
+        path_to_test_1 = str('{}/test_1/test_1.py'.format(testdir.tmpdir))
+        os.utime(path_to_test_1, (1, 1))
+
+        result = testdir.runpytest("-v")
+        result.stdout.fnmatch_lines([
"*test_1/test_1.py::test_1[1*", + "*test_1/test_1.py::test_1[2*", + "*test_2/test_2.py::test_1[1*", + "*test_2/test_2.py::test_1[2*" + ]) + + result = testdir.runpytest("-v", "--nf") + + result.stdout.fnmatch_lines([ + "*test_2/test_2.py::test_1[1*", + "*test_2/test_2.py::test_1[2*", + "*test_1/test_1.py::test_1[1*", + "*test_1/test_1.py::test_1[2*", + ]) + + t1.join("test_1.py").write( + "import pytest\n" + "@pytest.mark.parametrize('num', [1, 2, 3])\n" + "def test_1(num): assert num\n" + ) + os.utime(path_to_test_1, (1, 1)) + + result = testdir.runpytest("-v", "--nf") + + result.stdout.fnmatch_lines([ + "*test_1/test_1.py::test_1[3*", + "*test_2/test_2.py::test_1[1*", + "*test_2/test_2.py::test_1[2*", + "*test_1/test_1.py::test_1[1*", + "*test_1/test_1.py::test_1[2*", + ])