Report correct reason when using multiple skip/xfail markers
parent 23aaa8a62c
commit 953916df49
@@ -16,6 +16,10 @@
   fixtures declared on the first one.
   Thanks Florian Bruhin for reporting and Bruno Oliveira for the PR.
 
+- fix issue863: skipped tests now report the correct reason when a skip/xfail
+  condition is met when using multiple markers.
+  Thanks Raphael Pierzina for reporting and Bruno Oliveira for the PR.
+
 - optimized tmpdir fixture initialization, which should make test sessions
   faster (specially when using pytest-xdist). The only visible effect
   is that now pytest uses a subdirectory in the $TEMP directory for all
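An aside for context (not part of the commit): the failure mode behind issue863 can be reproduced with two stacked markers, mirroring the new test added at the bottom of this diff. The module layout here is illustrative.

    import pytest

    @pytest.mark.skipif(False, reason='first_condition')
    @pytest.mark.skipif(True, reason='second_condition')
    def test_foobar():
        assert 1

Before this change, running with -rs reported the skip as 'first_condition' even though the second marker's condition is the one that fires; with the fix, the summary shows 'second_condition'.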
@@ -98,24 +98,36 @@ class MarkEvaluator:
         return d
 
     def _istrue(self):
+        if hasattr(self, 'result'):
+            return self.result
         if self.holder:
             d = self._getglobals()
             if self.holder.args:
                 self.result = False
-                for expr in self.holder.args:
-                    self.expr = expr
-                    if isinstance(expr, py.builtin._basestring):
-                        result = cached_eval(self.item.config, expr, d)
-                    else:
-                        if self.get("reason") is None:
-                            # XXX better be checked at collection time
-                            pytest.fail("you need to specify reason=STRING "
-                                        "when using booleans as conditions.")
-                        result = bool(expr)
-                    if result:
-                        self.result = True
-                        self.expr = expr
-                        break
+                # "holder" might be a MarkInfo or a MarkDecorator; only
+                # MarkInfo keeps track of all parameters it received in an
+                # _arglist attribute
+                if hasattr(self.holder, '_arglist'):
+                    arglist = self.holder._arglist
+                else:
+                    arglist = [(self.holder.args, self.holder.kwargs)]
+                for args, kwargs in arglist:
+                    for expr in args:
+                        self.expr = expr
+                        if isinstance(expr, py.builtin._basestring):
+                            result = cached_eval(self.item.config, expr, d)
+                        else:
+                            if "reason" not in kwargs:
+                                # XXX better be checked at collection time
+                                msg = "you need to specify reason=STRING " \
+                                      "when using booleans as conditions."
+                                pytest.fail(msg)
+                            result = bool(expr)
+                        if result:
+                            self.result = True
+                            self.reason = kwargs.get('reason', None)
+                            self.expr = expr
+                            return self.result
         else:
             self.result = True
         return getattr(self, 'result', False)
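Editorial note on the double loop above: as the new comment says, `holder` may be a MarkInfo or a MarkDecorator, and only MarkInfo keeps an `_arglist` of every (args, kwargs) pair it received. The sketch below emulates that bookkeeping in plain Python to show why the reason must travel with its own condition; the merge ordering shown is an assumption based on pytest 2.x decorator behavior, not something stated in this commit.

    # Emulated MarkInfo._arglist for the stacked markers from the earlier
    # example; decorators apply bottom-up, so the True/'second_condition'
    # application is assumed to come first.
    arglist = [
        ((True,), {'reason': 'second_condition'}),
        ((False,), {'reason': 'first_condition'}),
    ]

    reason = None
    for args, kwargs in arglist:
        for expr in args:
            if bool(expr):
                # the reason is read from the kwargs paired with the
                # condition that actually triggered, not the merged kwargs
                reason = kwargs.get('reason', None)
                break
        if reason is not None:
            break

    assert reason == 'second_condition'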
@@ -124,7 +136,7 @@ class MarkEvaluator:
         return self.holder.kwargs.get(attr, default)
 
     def getexplanation(self):
-        expl = self.get('reason', None)
+        expl = getattr(self, 'reason', None) or self.get('reason', None)
         if not expl:
             if not hasattr(self, 'expr'):
                 return ""
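The single-line change above makes the reason recorded by `_istrue` win over the merged marker kwargs, which with multiple markers can belong to a different application of the mark. A toy illustration of the fallback (values are hypothetical):

    reason_from_match = 'second_condition'  # what _istrue stores in self.reason
    merged_reason = 'first_condition'       # what self.get('reason') would return
    expl = reason_from_match or merged_reason
    assert expl == 'second_condition'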
@@ -409,6 +409,26 @@ class TestSkipif:
         ])
         assert result.ret == 0
 
+    @pytest.mark.parametrize('marker, msg1, msg2', [
+        ('skipif', 'SKIP', 'skipped'),
+        ('xfail', 'XPASS', 'xpassed'),
+    ])
+    def test_skipif_reporting_multiple(self, testdir, marker, msg1, msg2):
+        testdir.makepyfile(test_foo="""
+            import pytest
+            @pytest.mark.{marker}(False, reason='first_condition')
+            @pytest.mark.{marker}(True, reason='second_condition')
+            def test_foobar():
+                assert 1
+        """.format(marker=marker))
+        result = testdir.runpytest('-s', '-rsxX')
+        result.stdout.fnmatch_lines([
+            "*{msg1}*test_foo.py*second_condition*".format(msg1=msg1),
+            "*1 {msg2}*".format(msg2=msg2),
+        ])
+        assert result.ret == 0
+
+
 def test_skip_not_report_default(testdir):
     p = testdir.makepyfile(test_one="""
         import pytest